// File: actions.js
import { createAction } from 'redux-actions';
import { APP_LAYOUT_CHANGE, APP_LAYOUT_INIT } from './constants';
// Select the active item in the menu list
const appLayoutInit = createAction(APP_LAYOUT_INIT);
export const onAppLayoutInit = (key) => {
return (dispatch) => {
dispatch(appLayoutInit(key));
};
};
const appLayoutChange = createAction(APP_LAYOUT_CHANGE);
export const onAppLayoutChange = (key) => {
return (dispatch) => {
    dispatch(appLayoutChange(key));
  };
};
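// A minimal usage sketch (assumes the store is configured with the
// redux-thunk middleware, since both action creators return thunks; the
// `store` variable below is hypothetical):
//   store.dispatch(onAppLayoutInit('menu-1'));
//   store.dispatch(onAppLayoutChange('menu-2'));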
# File: S_PARTY_MEMBER_INTERVAL_POS_UPDATE.py
from util.tipo import tipo

class S_PARTY_MEMBER_INTERVAL_POS_UPDATE(object):

    def __init__(self, tracker, time, direction, opcode, data):
        print(str(type(self)).split('.')[3] + '(' + str(len(data)) + '): ' + str(data.get_array_hex(1))[1:-1])
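# A brief reading note (inferred from the expression above, so an assumption
# about the surrounding packet framework): the print line recovers the class
# name from str(type(self)), then emits the packet length and a hex dump of
# the payload via data.get_array_hex(1), trimmed of its list brackets.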
// File: api_outbound_test.go
package testnewsfeed
import (
"time"
"testing"
"github.com/go-redis/redis"
sw "../go"
)
type MockErrorWrapper struct {
t *testing.T
}
func (lw MockErrorWrapper) LogError(err error, format string, status int) {
lw.t.Errorf(format, err)
}
var InboundCounter = 0
type MockCassandraWrapper struct {
}
func (cw MockCassandraWrapper) AddInbound(i sw.Inbound) {
InboundCounter++
}
func (cw MockCassandraWrapper) AddOutbound(o sw.Outbound) {
}
type MockRedisWrapper struct {
SetCounter int64
}
func (rw MockRedisWrapper) Get(key string) (string, error) {
return "", redis.Nil
}
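// Note: Set has a value receiver, so the increment below mutates a copy of
// the mock and is never visible to the caller; SetCounter is not asserted in
// TestAddOutboundInner, so the test is unaffected.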
func (rw MockRedisWrapper) Set(key string, value string, ttl time.Duration) {
rw.SetCounter++
}
func (rw MockRedisWrapper) Close() {
}
type MockMySqlWrapper struct {
Friends []sw.Friend
}
func (mw MockMySqlWrapper) Close() {
}
func (mw MockMySqlWrapper) FetchFriends(id string)([]sw.Friend, error) {
return mw.Friends, nil
}
func AddFriend(results []sw.Friend, id int64, from int64, to int64) []sw.Friend {
	f := sw.Friend{
		Id:   id,
		From: sw.ToLink(from),
To: sw.ToLink(to),
}
results = append(results, f)
return results
}
func TestAddOutboundInner(t *testing.T) {
ew := MockErrorWrapper{
t: t,
}
cw := MockCassandraWrapper{
}
rw := MockRedisWrapper{
SetCounter: 0,
}
ob := sw.Outbound {
From: sw.ToLink(1),
Occurred: time.Now(),
Subject: "test subject",
Story: "test story",
}
var results []sw.Friend
results = AddFriend(results, 1, 1, 2)
results = AddFriend(results, 2, 1, 3)
results = AddFriend(results, 3, 1, 4)
mw := MockMySqlWrapper{
Friends: results,
}
sw.AddOutboundInner(ob, ew, cw, rw, mw)
if InboundCounter != 3 {
t.Errorf("expected 3 inbound but got %d instead", InboundCounter)
}
}
// File: integ_test.go (service/appstream)
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
//go:build go1.16 && integration
// +build go1.16,integration
package appstream_test
import (
"context"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/awstesting/integration"
"github.com/aws/aws-sdk-go/service/appstream"
)
var _ aws.Config
var _ awserr.Error
var _ request.Request
func TestInteg_00_DescribeStacks(t *testing.T) {
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
	sess := integration.SessionWithDefaultRegion("us-west-2")
	svc := appstream.New(sess)
params := &appstream.DescribeStacksInput{}
_, err := svc.DescribeStacksWithContext(ctx, params, func(r *request.Request) {
r.Handlers.Validate.RemoveByName("core.ValidateParametersHandler")
})
if err != nil {
t.Errorf("expect no error, got %v", err)
}
}
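// A usage note (following from the build tags at the top of the file): this
// test is excluded from ordinary `go test` runs and only compiles when the
// integration tag is supplied, e.g. `go test -tags integration`.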
# File: test_image_anchor05.py
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'image_anchor05.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.image_dir = test_dir + 'images/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with image(s)."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_image(
'D7', self.image_dir + 'yellow.png',
            {'x_offset': 1, 'y_offset': 2, 'positioning': 2})

        workbook.close()

        self.assertExcelEqual()
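# A parameter note (based on XlsxWriter's documented image options, stated
# here as an assumption rather than verified against this test's fixture):
# 'positioning': 2 anchors the image to move but not resize with cells, and
# the x/y offsets are measured in pixels.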
# File: 0030_auto__add_field_invoice_date_of_issue.py
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Invoice.date_of_issue'
db.add_column('books_invoice', 'date_of_issue',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Invoice.date_of_issue'
db.delete_column('books_invoice', 'date_of_issue')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'books.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'books.client': {
'Meta': {'object_name': 'Client'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '100'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'street_adress': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
'books.expense': {
'Meta': {'object_name': 'Expense'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['books.Category']", 'null': 'True', 'blank': 'True'}),
'client': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['books.Client']", 'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'receipt': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'taxes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['books.Tax']", 'null': 'True', 'blank': 'True'}),
'vendor': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['books.Vendor']", 'null': 'True', 'blank': 'True'})
},
'books.invoice': {
'Meta': {'object_name': 'Invoice'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Client']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_of_issue': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice_number': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'paid_notes': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Dr'", 'max_length': '2', 'null': 'True'}),
'sub_description': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'sub_notes': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'terms': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'books.item': {
'Meta': {'object_name': 'Item'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Client']", 'null': 'True'}),
'cost': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'})
},
'books.project': {
'Meta': {'object_name': 'Project'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Client']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'rate_per_hour': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'})
},
'books.report': {
'Meta': {'object_name': 'Report'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'expense': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'taxes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'timesheet': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'books.task': {
'Meta': {'object_name': 'Task'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Project']"}),
'rate_per_hour': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'})
},
'books.tax': {
'Meta': {'object_name': 'Tax'},
'compound_tax': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'gouv_number': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'number': ('django.db.models.fields.PositiveIntegerField', [], {}),
'rate': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '2', 'blank': 'True'})
},
        'books.time': {
            'Meta': {'object_name': 'Time'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Invoice']", 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '1000', 'blank': 'True'}),
'rate_per_hour': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Task']", 'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2', 'blank': 'True'})
},
'books.vendor': {
'Meta': {'object_name': 'Vendor'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
    complete_apps = ['books']
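# A usage sketch (assuming a South-era Django project where this app is
# installed as "books"): the schema change above is applied with
#   python manage.py migrate books 0030
# and reversed by migrating back to the previous migration, 0029.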
// File: test_rng.rs
extern crate cryptopals;
extern crate rand;
extern crate time;
use cryptopals::crypto::rng::{MT, untemper};
use rand::{Rng, SeedableRng, thread_rng};
use time::{get_time};
#[test]
fn test_rng_deterministic() {
let mut m1: MT = SeedableRng::from_seed(314159);
let mut m2: MT = SeedableRng::from_seed(314159);
for _ in 0 .. 1024 {
assert_eq!(m1.gen::<u32>(), m2.gen::<u32>());
    }
}

#[test]
fn test_seed_recovery_from_time() {
let mut time = get_time().sec;
time += thread_rng().gen_range(40, 1000);
let mut m: MT = SeedableRng::from_seed(time as u32);
let output = m.gen::<u32>();
    // The original scan `get_time().sec + 2000 .. 0` is an empty range in
    // Rust (start > end), so the loop body never ran and the test passed
    // vacuously; iterating a valid range in reverse preserves the intended
    // newest-seed-first search.
    for seed in (0 .. get_time().sec + 2000).rev() {
let mut checker: MT = SeedableRng::from_seed(seed as u32);
if checker.gen::<u32>() == output {
assert_eq!(seed, time);
break;
}
}
}
#[test]
fn test_untemper() {
let mut m: MT = SeedableRng::from_seed(314159);
for i in 0 .. 624 {
let output = m.gen::<u32>();
assert_eq!(untemper(output), m.state[i]);
}
}
#[test]
fn test_rng_clone_from_output() {
let mut m: MT = SeedableRng::from_seed(314159);
let mut state = [0; 624];
for i in 0 .. 624 {
state[i] = untemper(m.gen::<u32>());
}
let mut cloned = MT { state: state, index: 624 };
for _ in 0 .. 1024 {
assert_eq!(cloned.gen::<u32>(), m.gen::<u32>());
}
    }
}
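// A closing note on the cloning attack above (assuming MT is a standard
// MT19937 with 624 words of u32 state): tempering is a bijection, so
// untemper() recovers one state word per observed output, and 624
// consecutive outputs reconstruct the full generator state.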
// File: ListPlayers.java
package gui;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.border.EmptyBorder;
import javax.swing.table.DefaultTableModel;
import logic.DB.MongoUserManager;
import logic.model.Statistics;
import logic.model.User;
import javax.swing.JLabel;
import java.awt.Font;
import java.awt.Toolkit;
public class ListPlayers extends JFrame {
/**
*
*/
private static final long serialVersionUID = 1L;
private JPanel contentPane;
private JScrollPane spUsers;
private JTable tabUsers;
private MongoUserManager mongo = new MongoUserManager();
private List<User> users;
private JButton btnClose;
private JLabel lbListUsers;
/**
* Launch the application.
*/
/*public static void main(String[] args) {
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
ListPlayers frame = new ListPlayers();
frame.setVisible(true);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}*/
/**
* Create the frame.
*/
public ListPlayers() {
setIconImage(Toolkit.getDefaultToolkit().getImage("C:\\Users\\Raquel\\Desktop\\ASWProject\\Trivial_i1b\\Game\\src\\main\\resources\\Images\\icono.png"));
setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
setBounds(100, 100, 532, 340);
contentPane = new JPanel();
contentPane.setBackground(new Color(0,0,139));
contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
setContentPane(contentPane);
contentPane.setLayout(null);
contentPane.add(getSpUsers());
contentPane.add(getBtnClose());
contentPane.setBackground(InitialWindow.pnFondo.getBackground());
JButton btnSeeStatistics = new JButton("See statistics");
btnSeeStatistics.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
users = mongo.getAllUsers();
StatisticsWindow statistics = new StatisticsWindow();
statistics.setVisible(true);
statistics.txPlayer.setText((String) tabUsers.getValueAt(tabUsers.getSelectedRow(), 0));
int row = tabUsers.getSelectedRow();
int newRow = 0;
for (User u : users){
if (u.getEmail().equals(tabUsers.getValueAt(row, 1))){
Statistics s = u.getStatistics();
statistics.tabStatistics.setValueAt(s.getQuestionsMatched(), newRow, 0);
						statistics.tabStatistics.setValueAt(s.getQuestionsAnswered(), newRow, 1);
						statistics.tabStatistics.setValueAt(s.getTimesPlayed(), newRow, 2);
						newRow++;
					}
				}
}
});
btnSeeStatistics.setBounds(357, 42, 123, 23);
contentPane.add(btnSeeStatistics);
contentPane.add(getLbListUsers());
}
private JScrollPane getSpUsers() {
if (spUsers == null) {
spUsers = new JScrollPane();
spUsers.setBounds(42, 103, 306, 128);
spUsers.setViewportView(getTabUsers());
spUsers.setBackground(InitialWindow.pnFondo.getBackground());
}
return spUsers;
}
private JTable getTabUsers() {
if (tabUsers == null) {
tabUsers = new JTable();
tabUsers.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
tabUsers.setModel(new DefaultTableModel(
new Object[][] {
},
new String[] {
"Username", "Email"
}
));
}
DefaultTableModel model = (DefaultTableModel)tabUsers.getModel();
listUsers(model);
return tabUsers;
}
private JButton getBtnClose() {
if (btnClose == null) {
btnClose = new JButton("Close");
btnClose.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
dispose();
}
});
btnClose.setBounds(378, 230, 76, 23);
}
return btnClose;
}
private void listUsers(DefaultTableModel model) {
users = mongo.getAllUsers();
Object[] row = new Object[2];
for (int i = 0; i < users.size(); i++) {
row[0] = users.get(i).getUsername();
row[1] = users.get(i).getEmail();
model.addRow(row);
}
}
private JLabel getLbListUsers() {
if (lbListUsers == null) {
lbListUsers = new JLabel("List of users:");
lbListUsers.setFont(new Font("Arial", Font.PLAIN, 25));
lbListUsers.setBounds(142, 32, 195, 32);
}
return lbListUsers;
}
}
// File: oauth2_test.go
// Copyright 2014 The oauth2 Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package oauth2
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"reflect"
"strconv"
"testing"
"time"
"github.com/flynn/flynn/Godeps/_workspace/src/golang.org/x/net/context"
)
type mockTransport struct {
rt func(req *http.Request) (resp *http.Response, err error)
}
func (t *mockTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) {
return t.rt(req)
}
type mockCache struct {
token *Token
readErr error
}
func (c *mockCache) ReadToken() (*Token, error) {
return c.token, c.readErr
}
func (c *mockCache) WriteToken(*Token) {
// do nothing
}
func newConf(url string) *Config {
return &Config{
ClientID: "CLIENT_ID",
ClientSecret: "CLIENT_SECRET",
RedirectURL: "REDIRECT_URL",
Scopes: []string{"scope1", "scope2"},
Endpoint: Endpoint{
AuthURL: url + "/auth",
TokenURL: url + "/token",
},
}
}
func TestAuthCodeURL(t *testing.T) {
conf := newConf("server")
url := conf.AuthCodeURL("foo", AccessTypeOffline, ApprovalForce)
if url != "server/auth?access_type=offline&approval_prompt=force&client_id=CLIENT_ID&redirect_uri=REDIRECT_URL&response_type=code&scope=scope1+scope2&state=foo" {
t.Errorf("Auth code URL doesn't match the expected, found: %v", url)
}
}
func TestAuthCodeURL_CustomParam(t *testing.T) {
conf := newConf("server")
param := SetAuthURLParam("foo", "bar")
url := conf.AuthCodeURL("baz", param)
if url != "server/auth?client_id=CLIENT_ID&foo=bar&redirect_uri=REDIRECT_URL&response_type=code&scope=scope1+scope2&state=baz" {
t.Errorf("Auth code URL doesn't match the expected, found: %v", url)
}
}
func TestAuthCodeURL_Optional(t *testing.T) {
conf := &Config{
ClientID: "CLIENT_ID",
Endpoint: Endpoint{
AuthURL: "/auth-url",
TokenURL: "/token-url",
},
}
url := conf.AuthCodeURL("")
if url != "/auth-url?client_id=CLIENT_ID&response_type=code" {
t.Fatalf("Auth code URL doesn't match the expected, found: %v", url)
}
}
func TestExchangeRequest(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.String() != "/token" {
t.Errorf("Unexpected exchange request URL, %v is found.", r.URL)
}
headerAuth := r.Header.Get("Authorization")
if headerAuth != "Basic Q0xJRU5UX0lEOkNMSUVOVF9TRUNSRVQ=" {
t.Errorf("Unexpected authorization header, %v is found.", headerAuth)
}
headerContentType := r.Header.Get("Content-Type")
if headerContentType != "application/x-www-form-urlencoded" {
t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType)
}
body, err := ioutil.ReadAll(r.Body)
if err != nil {
t.Errorf("Failed reading request body: %s.", err)
}
if string(body) != "client_id=CLIENT_ID&code=exchange-code&grant_type=authorization_code&redirect_uri=REDIRECT_URL&scope=scope1+scope2" {
t.Errorf("Unexpected exchange payload, %v is found.", string(body))
}
w.Header().Set("Content-Type", "application/x-www-form-urlencoded")
w.Write([]byte("access_token=90d64460d14870c08c81352a05dedd3465940a7c&scope=user&token_type=bearer"))
}))
defer ts.Close()
conf := newConf(ts.URL)
tok, err := conf.Exchange(NoContext, "exchange-code")
if err != nil {
t.Error(err)
}
if !tok.Valid() {
t.Fatalf("Token invalid. Got: %#v", tok)
}
if tok.AccessToken != "90d64460d14870c08c81352a05dedd3465940a7c" {
t.Errorf("Unexpected access token, %#v.", tok.AccessToken)
}
if tok.TokenType != "bearer" {
t.Errorf("Unexpected token type, %#v.", tok.TokenType)
}
scope := tok.Extra("scope")
if scope != "user" {
t.Errorf("Unexpected value for scope: %v", scope)
}
}
func TestExchangeRequest_JSONResponse(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.String() != "/token" {
t.Errorf("Unexpected exchange request URL, %v is found.", r.URL)
}
headerAuth := r.Header.Get("Authorization")
if headerAuth != "Basic Q0xJRU5UX0lEOkNMSUVOVF9TRUNSRVQ=" {
t.Errorf("Unexpected authorization header, %v is found.", headerAuth)
}
headerContentType := r.Header.Get("Content-Type")
if headerContentType != "application/x-www-form-urlencoded" {
t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType)
}
body, err := ioutil.ReadAll(r.Body)
if err != nil {
t.Errorf("Failed reading request body: %s.", err)
}
if string(body) != "client_id=CLIENT_ID&code=exchange-code&grant_type=authorization_code&redirect_uri=REDIRECT_URL&scope=scope1+scope2" {
t.Errorf("Unexpected exchange payload, %v is found.", string(body))
}
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{"access_token": "90d64460d14870c08c81352a05dedd3465940a7c", "scope": "user", "token_type": "bearer", "expires_in": 86400}`))
}))
defer ts.Close()
conf := newConf(ts.URL)
tok, err := conf.Exchange(NoContext, "exchange-code")
if err != nil {
t.Error(err)
}
if !tok.Valid() {
t.Fatalf("Token invalid. Got: %#v", tok)
}
if tok.AccessToken != "90d64460d14870c08c81352a05dedd3465940a7c" {
t.Errorf("Unexpected access token, %#v.", tok.AccessToken)
}
if tok.TokenType != "bearer" {
t.Errorf("Unexpected token type, %#v.", tok.TokenType)
}
scope := tok.Extra("scope")
if scope != "user" {
t.Errorf("Unexpected value for scope: %v", scope)
}
}
const day = 24 * time.Hour
func TestExchangeRequest_JSONResponse_Expiry(t *testing.T) {
seconds := int32(day.Seconds())
jsonNumberType := reflect.TypeOf(json.Number("0"))
for _, c := range []struct {
expires string
expect error
}{
{fmt.Sprintf(`"expires_in": %d`, seconds), nil},
{fmt.Sprintf(`"expires_in": "%d"`, seconds), nil}, // PayPal case
{fmt.Sprintf(`"expires": %d`, seconds), nil}, // Facebook case
{`"expires": false`, &json.UnmarshalTypeError{Value: "bool", Type: jsonNumberType}}, // wrong type
{`"expires": {}`, &json.UnmarshalTypeError{Value: "object", Type: jsonNumberType}}, // wrong type
{`"expires": "zzz"`, &strconv.NumError{Func: "ParseInt", Num: "zzz", Err: strconv.ErrSyntax}}, // wrong value
} {
testExchangeRequest_JSONResponse_expiry(t, c.expires, c.expect)
}
}
func testExchangeRequest_JSONResponse_expiry(t *testing.T, exp string, expect error) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(fmt.Sprintf(`{"access_token": "90d", "scope": "user", "token_type": "bearer", %s}`, exp)))
}))
defer ts.Close()
conf := newConf(ts.URL)
t1 := time.Now().Add(day)
tok, err := conf.Exchange(NoContext, "exchange-code")
t2 := time.Now().Add(day)
// Do a fmt.Sprint comparison so either side can be
// nil. fmt.Sprint just stringifies them to "<nil>", and no
// non-nil expected error ever stringifies as "<nil>", so this
// isn't terribly disgusting. We do this because Go 1.4 and
// Go 1.5 return a different deep value for
// json.UnmarshalTypeError. In Go 1.5, the
// json.UnmarshalTypeError contains a new field with a new
// non-zero value. Rather than ignore it here with reflect or
// add new files and +build tags, just look at the strings.
if fmt.Sprint(err) != fmt.Sprint(expect) {
t.Errorf("Error = %v; want %v", err, expect)
}
if err != nil {
return
}
if !tok.Valid() {
t.Fatalf("Token invalid. Got: %#v", tok)
}
expiry := tok.Expiry
if expiry.Before(t1) || expiry.After(t2) {
t.Errorf("Unexpected value for Expiry: %v (shold be between %v and %v)", expiry, t1, t2)
}
}
func TestExchangeRequest_BadResponse(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{"scope": "user", "token_type": "bearer"}`))
}))
defer ts.Close()
conf := newConf(ts.URL)
tok, err := conf.Exchange(NoContext, "code")
if err != nil {
t.Fatal(err)
}
if tok.AccessToken != "" {
t.Errorf("Unexpected access token, %#v.", tok.AccessToken)
}
}
func TestExchangeRequest_BadResponseType(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{"access_token":123, "scope": "user", "token_type": "bearer"}`))
}))
defer ts.Close()
conf := newConf(ts.URL)
_, err := conf.Exchange(NoContext, "exchange-code")
if err == nil {
t.Error("expected error from invalid access_token type")
}
}
func TestExchangeRequest_NonBasicAuth(t *testing.T) {
tr := &mockTransport{
rt: func(r *http.Request) (w *http.Response, err error) {
headerAuth := r.Header.Get("Authorization")
if headerAuth != "" {
t.Errorf("Unexpected authorization header, %v is found.", headerAuth)
}
return nil, errors.New("no response")
},
}
c := &http.Client{Transport: tr}
conf := &Config{
ClientID: "CLIENT_ID",
Endpoint: Endpoint{
AuthURL: "https://accounts.google.com/auth",
TokenURL: "https://accounts.google.com/token",
},
}
ctx := context.WithValue(context.Background(), HTTPClient, c)
conf.Exchange(ctx, "code")
}
func TestPasswordCredentialsTokenRequest(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
expected := "/token"
if r.URL.String() != expected {
t.Errorf("URL = %q; want %q", r.URL, expected)
}
headerAuth := r.Header.Get("Authorization")
expected = "Basic Q0xJRU5UX0lEOkNMSUVOVF9TRUNSRVQ="
if headerAuth != expected {
t.Errorf("Authorization header = %q; want %q", headerAuth, expected)
}
headerContentType := r.Header.Get("Content-Type")
expected = "application/x-www-form-urlencoded"
if headerContentType != expected {
t.Errorf("Content-Type header = %q; want %q", headerContentType, expected)
}
body, err := ioutil.ReadAll(r.Body)
if err != nil {
t.Errorf("Failed reading request body: %s.", err)
}
expected = "client_id=CLIENT_ID&grant_type=password&password=password1&scope=scope1+scope2&username=user1"
		if string(body) != expected {
			t.Errorf("res.Body = %q; want %q", string(body), expected)
		}
		w.Header().Set("Content-Type", "application/x-www-form-urlencoded")
		w.Write([]byte("access_token=90d64460d14870c08c81352a05dedd3465940a7c&scope=user&token_type=bearer"))
}))
defer ts.Close()
conf := newConf(ts.URL)
tok, err := conf.PasswordCredentialsToken(NoContext, "user1", "password1")
if err != nil {
t.Error(err)
}
if !tok.Valid() {
t.Fatalf("Token invalid. Got: %#v", tok)
}
expected := "90d64460d14870c08c81352a05dedd3465940a7c"
if tok.AccessToken != expected {
t.Errorf("AccessToken = %q; want %q", tok.AccessToken, expected)
}
expected = "bearer"
if tok.TokenType != expected {
t.Errorf("TokenType = %q; want %q", tok.TokenType, expected)
}
}
func TestTokenRefreshRequest(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.String() == "/somethingelse" {
return
}
if r.URL.String() != "/token" {
t.Errorf("Unexpected token refresh request URL, %v is found.", r.URL)
}
headerContentType := r.Header.Get("Content-Type")
if headerContentType != "application/x-www-form-urlencoded" {
t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType)
}
body, _ := ioutil.ReadAll(r.Body)
if string(body) != "client_id=CLIENT_ID&grant_type=refresh_token&refresh_token=REFRESH_TOKEN" {
t.Errorf("Unexpected refresh token payload, %v is found.", string(body))
}
}))
defer ts.Close()
conf := newConf(ts.URL)
c := conf.Client(NoContext, &Token{RefreshToken: "REFRESH_TOKEN"})
c.Get(ts.URL + "/somethingelse")
}
func TestFetchWithNoRefreshToken(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.String() == "/somethingelse" {
return
}
if r.URL.String() != "/token" {
t.Errorf("Unexpected token refresh request URL, %v is found.", r.URL)
}
headerContentType := r.Header.Get("Content-Type")
if headerContentType != "application/x-www-form-urlencoded" {
t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType)
}
body, _ := ioutil.ReadAll(r.Body)
if string(body) != "client_id=CLIENT_ID&grant_type=refresh_token&refresh_token=REFRESH_TOKEN" {
t.Errorf("Unexpected refresh token payload, %v is found.", string(body))
}
}))
defer ts.Close()
conf := newConf(ts.URL)
c := conf.Client(NoContext, nil)
_, err := c.Get(ts.URL + "/somethingelse")
if err == nil {
t.Errorf("Fetch should return an error if no refresh token is set")
}
}
func TestRefreshToken_RefreshTokenReplacement(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{"access_token":"ACCESS TOKEN", "scope": "user", "token_type": "bearer", "refresh_token": "NEW REFRESH TOKEN"}`))
return
}))
defer ts.Close()
conf := newConf(ts.URL)
tkr := tokenRefresher{
conf: conf,
ctx: NoContext,
refreshToken: "OLD REFRESH TOKEN",
}
tk, err := tkr.Token()
if err != nil {
t.Errorf("Unexpected refreshToken error returned: %v", err)
return
}
if tk.RefreshToken != tkr.refreshToken {
t.Errorf("tokenRefresher.refresh_token = %s; want %s", tkr.refreshToken, tk.RefreshToken)
}
}
func TestConfigClientWithToken(t *testing.T) {
tok := &Token{
AccessToken: "abc123",
}
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if got, want := r.Header.Get("Authorization"), fmt.Sprintf("Bearer %s", tok.AccessToken); got != want {
t.Errorf("Authorization header = %q; want %q", got, want)
}
return
}))
defer ts.Close()
conf := newConf(ts.URL)
c := conf.Client(NoContext, tok)
req, err := http.NewRequest("GET", ts.URL, nil)
if err != nil {
t.Error(err)
}
_, err = c.Do(req)
if err != nil {
t.Error(err)
}
}
# File: Ventilation_Mining_Python_Toolbox.py
import math
"""
The Purpose of this Program is to automate the task of Ventilation Surveying in Underground Mining Engineering.
This program can accept input of tabulated values (in csv file) for Leapfrogging and Roving-Base Altimeter
Indirect Method Surveys and provide data analysis and calculation output to a csv file.
Also provides analysis tools (psychrometric properties of air, head loss around a circuit,
specific weight/humidity calculations) for mine ventilation engineers.
pg 206, Mine Ventilation and Air Conditioning
Measured Values:
-Air Velocities (Vane Anemometer: v)
-Absolute and Differential Pressures or Heads (Manometer/Barometer/Altimeter: pb)
-Dry- and Wet-Bulb Temperatures (Thermometers: Tw, Td)
-Airway Dimensions (A)
Determine:
-Air Quantities
-Pressure Losses
-Air Specific Weights/Humidities
-Airway Resistance
Copyright Joaquin Roibal, August 2016, Latest Revision: 10/3/2016
All Rights Reserved
"""
def ManometerDirectMethod(TopList, BottomList, Manometer_Reading):
"""
A Manometer is used to directly measure pressure difference (Direct Method).
This function will perform the data analysis for Manometer Surveying given measured input values.
Hl12 = (Hs1-Hs2)+(Hz1-Hz2) #Head Loss (1->2) components
Equations and Values from Example 6.3, page 208, Mine Ventilation and Air Conditioning Ramani and Wang
"""
p3 = (BottomList[1] + (Manometer_Reading/27.69)) #Page 210, Pressure 3 = p2 + manometer reading (converted to psi)
Ws = (TopList[-1]+BottomList[-1])/2 #Mean Specific Weight of Air in Hose
Wh = (TopList[-1]+0.0705)/2 #Mean Specific Weight of Air in Shaft
#Hl13 = (144*(Top[1]-p3)/5.2)+(Wh*(Top[0]-Bottom[0])/5.2) #Page 209, Head Loss 1->3
#Hl12 = (144*(Top[1]-Bottom[1])/5.2)+(Ws*(Top[0]-Bottom[0])/5.2) #Page 209, Head Loss 1 -> 2
Hl12 = round((144/5.2)*(p3-BottomList[1])+(1/5.2)*(TopList[0]-BottomList[0])*(Ws-Wh), 3)
return "Manometer Survey Results:\nWs: %s, Wh: %s, Head Loss in. Water: %s" %(Ws, Wh, Hl12)
def CalculateFrictionFactor(head_loss_f, length, diameter, quantity, spec_weight_air=0.075):
"""
The Purpose of this function is to calculate the friction factor of an airway/ducting given parameters
Utilizes Darcy-Weisbach equation and Atkinson Equation, 5.20 page 153 Mine Ventilation and Air Conditioning 3rd Edition
"""
duct_perimeter = 2 * 3.14159 * (diameter / 2)
area_opening = 3.14159 * (diameter / 2)**2
rubbing_surface = length * duct_perimeter
friction_factor_k = (spec_weight_air/0.075) * (head_loss_f*5.2*area_opening**3) / (duct_perimeter * length * quantity**2)
return friction_factor_k
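# A worked check (Example 5.6 values, exercised again in main() below): a
# 3,000 ft duct of 4 ft diameter passing 48,000 cfm with 21.04 in. water of
# friction head gives K of roughly 2.5e-9 (25 x 10^-10) lb*min^2/ft^4, a
# typical magnitude for Atkinson friction factors.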
def NaturalVentilation(ShaftATop, ShaftABottom, ShaftBTop, ShaftBBottom):
"""
The purpose of this function is to calculate the Natural Ventilation Head in Inches Water Gage.
Inputs required: Lists in the following format: [DryBulbTemp, WetBulbTemp, Elevation, Pressure (in Hg)
Method 2, page 297 in Ramani "Mine Ventilation And Air Conditioning" is used in commented example
Equation used is from ME 440: Mine Ventilation with Dr. Bhattacharyya, ignoring vapor pressure
:param ShaftATop:
:param ShaftABottom:
:param ShaftBTop:
:param ShaftBBottom:
:return:
"""
"""
This Section is Commented Out Because NOT WORKING: Alternative Method Below
spec_weight_air_shaft_a_top = psychrometricPropAir(ShaftATop[0], ShaftATop[1], ShaftATop[3])
spec_weight_air_shaft_a_bottom = psychrometricPropAir(ShaftABottom[0], ShaftABottom[1], ShaftABottom[3])
spec_weight_air_avg_upcast = (spec_weight_air_shaft_a_top[8] + spec_weight_air_shaft_a_bottom[8])/2
spec_weight_air_shaft_b_top = psychrometricPropAir(ShaftBTop[0], ShaftBTop[1], ShaftBTop[3])
spec_weight_air_shaft_b_bottom = psychrometricPropAir(ShaftBBottom[0], ShaftBBottom[1], ShaftBBottom[3])
spec_weight_air_avg_downcast = (spec_weight_air_shaft_b_top[8] + spec_weight_air_shaft_b_bottom[8])/2
L = ShaftBTop[2]-ShaftATop[2]
print(L)
print("Specific Weight Air Top A: ", spec_weight_air_shaft_a_top[9])
print("Specific Weight Air Bottom A: ", spec_weight_air_shaft_a_bottom[9])
print("Avg Spec Weight Upcast: ", spec_weight_air_avg_upcast)
print("Avg Spec Weight Downcast: ", spec_weight_air_avg_downcast)
inches_water_gage = (L/5.2)*(spec_weight_air_avg_downcast-spec_weight_air_avg_upcast)
return inches_water_gage
"""
#The Following Method Utilizes the equation from ME 440: Mine Ventilation by Dr. Bhattacharyya
#NOTE: IGNORES VAPOR PRESSURE
density_air_shaft_a_top = round((1.327/(460+ShaftATop[0]))*ShaftATop[-1], 6)
print("Density Air Shaft A Top: ", density_air_shaft_a_top)
density_air_shaft_a_bottom = round((1.327/(460+ShaftABottom[0])*ShaftABottom[-1]), 6)
print("Density Air Shaft A Bottom: ", density_air_shaft_a_bottom)
density_air_shaft_b_top = round((1.327/(460+ShaftBTop[0])*ShaftBTop[-1]), 6)
print("Density Air Shaft B Top: ", density_air_shaft_b_top)
density_air_shaft_b_bottom = round((1.327/(460+ShaftBBottom[0])*ShaftBBottom[-1]), 6)
print("Density Air Shaft B Bottom: ", density_air_shaft_b_bottom)
density_avg_shaft_a = (density_air_shaft_a_bottom + density_air_shaft_a_top)/2
density_avg_shaft_b = (density_air_shaft_b_bottom + density_air_shaft_b_top)/2
pressure_diff = round(abs((density_avg_shaft_a - density_avg_shaft_b)), 6)
elevation_diff = (ShaftBTop[-2]-ShaftABottom[-2])
print("Pressure Difference: ", pressure_diff)
print("Elevation Difference: ", elevation_diff)
inches_water_gage = round((pressure_diff*elevation_diff)/5.2, 4)
return inches_water_gage
def psychrometricPropAir(td, tw, pb):
"""
The purpose of this function is to accept input of measured values (wet-bulb, dry-bulb temp, barometric pressure)
to calculate the Psychrometric properties of Air (Spec Weight)and return a list of values calculated:
Ps, Ps Prime, Pv, Phi, W, Ws, Mu, Pa, V, w, h . Will be used in other functions to calculate head loss, etc.
Example Values and Equations from Page 13, Mine Ventilation And Air Conditioning Textbook by Ramani and Wang
:param td: Dry Bulb Temperature
:param tw: Wet Bulb Temperature
:param pb: Pressure (in Hg)
:return:
"""
Td = (td + 459.67) #Convert Temperature from Fahrenheit to Kelvin
val_list = [td, Td, tw, pb] #List of Values to be returned by function,
#Final Format for List: [td, Td, tw, pb, ps, ps_prime, pv, phi, W, Ws, mu, pa, v, w, h]
m = 28.97 #Molecular Weight
s = 1 #Specific Gravity
R = 53.35 #ft*lb/lb mass*Degree Rankine, Gas Constant
w = 0.0750 #lb/ft^3, Specific Weight at Standard Conditions
standard_pb = 29.92 #in. Hg, Standard Barometric Pressure at Sea Level
cp = 0.2403 #Btu/lb*degreeF, Specific Heat at Constant Pressure
cv = 0.1714 #Btu/lb*degreeF, Specific Heat at Constant Volume
gamma = 1.402 #Ratio of Spec heats at constant pressure and volume for diatomic gas
#Calculate Saturation Vapor Pressures: (Page 15, Mine Ventilation and Air Conditioning)
ps = 0.18079*math.exp((17.27*td-552.64)/(td+395.14)) #in. Hg, Saturation Vapor Pressure, Dry Bulb Temp, eq 2.2
val_list.append(ps)
ps_prime = 0.18079*math.exp((17.27*tw-552.64)/(tw+395.14)) #in. Hg, Saturation Vapor Pressure, Wet Bulb Temp
val_list.append(ps_prime)
pv = ps_prime - ((pb-ps_prime)*(td-tw))/(2800-1.3*tw) #in. Hg, Partial Pressure of Water Vapor in Air, eq. 2.3
val_list.append(pv)
phi = pv/ps*100 #Relative Humidity, eq. 2.4
val_list.append(phi)
W = 0.622*pv/(pb-pv) #lb/lb dry air, Specific Humidity, Eq. 2.5
val_list.append(W)
W_grain = W*7000 #grains/lb dry air
Ws = 0.622*ps/(pb-ps) #lb/lb wet air, Specific Humidity, Eq. 2.5 (Wet Bulb Temp)
val_list.append(Ws)
Ws_grain = Ws*7000 #grains/lb wet air
mu = W/Ws*100 #Degree of Saturation, eq 2.6
val_list.append(mu)
pa = pb-pv #in Hg, Pressure of Dry Air
val_list.append(pa)
v = (R*Td)/(pa*0.491*144) #ft**3/lb dry air, Specific Volume (volume per unit weight of dry air), eq. 2.7
val_list.append(v)
w = (1/v)*(W+1) #lb/ft**3, Specific Weight of Moist air or Mixture, eq. 2.8
val_list.append(w)
#w1 = (1.325/Td)*(pb-0.378*pv_prime) #Alt Method for Calculating Spec. Weight. pv_prime unknown (?), eq. 2.9
#h =ha+hv = cp*td+W*(hfg+hf) #Enthalpy, total heat content of Air
h = cp*td+W*(1060+0.45*td) #Btu/lb dry air, Enthalpy, eq. 2.10
val_list.append(h)
return val_list
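# A minimal usage sketch (Example 2.1 values, also exercised in main() below).
# The function returns a positional list, so callers read results by index:
#   props = psychrometricPropAir(td=70, tw=50, pb=29.921)
#   phi = props[7]       # relative humidity, percent
#   w_moist = props[13]  # specific weight of the moist air, lb/ft^3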
def PressureSurveyCalc(pa2, pa1, pb2, pb1, pb, td, pv_prime, Z2, Z1, V2, V1):
"""
The Pressure Survey Calc function will perform the calculations required for Indirect Method of
Ventilation Survey (Leapfrogging Method, Roving-Base Altimeter), including:
-Head Loss
:return:
"""
w1, w2 = 0.0750, 0.0750 #Assumed Values for specific weight, Page 217
CF = 69 #Conversion Factor Assumed to be 69 ft of air column = 1 inch Water (Example 6.5)
DR = 1 #((1.325/(460+50))*pb) / ((1.325/(460+td))*(pb-0.378*pv_prime)) #Density Ratio, Eq 6.13 page 216
#HL21 = (H2 - H1) + (Ha2-Ha1) + (Hv2-Hv1) + (Hz2-Hz1) Head Loss Equation, Eq 6.11
HL21 = -((pa2-pa1)-(pb2-pb1)-(Z2-Z1)/DR)/CF + (V2**2-V1**2)/(4009**2) #Calculate Head Loss Based on Altimeter
#Units, Elevation and Temperature, Equation 6.12 Page 216
Hv21 = ((V2**2-V1**2)/(4009**2))*(((w1+w2)/2)/0.0750) #Velocity Head, Eq 6.14
return [HL21, Hv21, DR, CF]
def RovingBaseAltimeter(measurement_list):
"""
Roving Base Altimeter Function will accept inputted list of measured values and output a formatted table of
calculated results. Formatting Based on Example 6.6 page 222-223 in Mine Ventilation and Air Conditioning.
Input Format: Stat - Location - I/R - Elev (ft) - Time - RAR, ft - WetBulb T - DryBulb T - Velocity (fpm) - BAR, ft
Output Format: Stat - Phi - Hv in Water - Diff Elev - DR - Alt Diff - Base Corr. - Elev. Corr. - Head ft Air - (cont)
- Avg Alt Reading - Feet of Air per in Water - Delta Hs - Delta Hv - Delta Ht - Ht
:param measurement_list:
:return:
"""
Altimeter_Vent_Survey_Table = []
for measurement in measurement_list:
results_table = [] #Create Empty List which will be used to append calculated values in table format
        # Per the docstring, the row order is ... WetBulb T (col 6) - DryBulb T (col 7)
        # ... BAR (col 9); the original call passed the wet bulb as the dry-bulb
        # argument, so the two are reordered to match psychrometricPropAir(td, tw, pb).
        air_prop_list = psychrometricPropAir(measurement[7], measurement[6], measurement[9]) #Calculate psychrometric properties
        results_table.append(measurement[0]) #Append Station Number
        results_table.append(air_prop_list[7]) #Append Relative Humidity % (Phi) from psychrometric prop list
#[Hl, Hv, DR, CF] = PressureSurveyCalc() #Retrieve Velocity Head Values from PressureSurveyCalc
#results_table.append(Hv) #Append Velocity Head Values to Results Table
#results_table.append(Elev_Diff) #Append Elevation Difference to Results Table
#results_table.append(DR) #Append DR from Pressure Survey Calc function
#Altimeter_Diff = measurement[5]-Prev_Altimeter #Calculate Altimeter Difference from Previous Altimeter Value
#results_table.append(Altimeter_Diff) #Append Calculated Altimeter Difference Value to Results Table
#results_table.append(Base_Correct) #Append Base Correction
#results_table.append(Elev_Correct) #Append Elevation Correction
#results_table.append(HeadFtOfAir) #Append Head Feet of Air
#results_table.append(AvgAltReading)
#results_table.append(CF)
#results_table.append(DeltaHs) #All Head in in H20
#results_table.append(DeltaHv)
#results_table.append(DeltaHt)
#results_table.append(Ht)
Altimeter_Vent_Survey_Table.append(results_table) #Append Results Table as One Line in Altimeter Vent Survey Table
return Altimeter_Vent_Survey_Table
def LeapfroggingAltimeter(User_List="Table63_AltimeterLeapfrogging.csv"):
    """
Leap Frog Altimeter is a Function To Determine Values for a Leapfrogging Altimeter Ventilation Survey.
Accepts Input in csv format and returns a list of calculated values in format:
- Hl (Head Loss) - Hv (Head Loss due to Velocity) - DR (Density Ratio) - CF (Conversion Factor, ft air per in water)
Uses Example 6.5 page 220 as example to verify process
:param User_List:
:return:
"""
Vent_list_leapfrog = LoadVentDataLeapfrog(User_List, [])
print(Vent_list_leapfrog)
Results_List = [] #Create Empty List to return Results Table of Calculated Values for Leapfrog Altimeter Surv
for vent in Vent_list_leapfrog:
line_list = [] #Create Empty List for each vent point
line_list.append(str(vent[0]) + "-" + str(vent[1])) #Display Stations
line_list.append(int(vent[4])-int(vent[5])) #Calculate Altimeter Difference
line_list.append(int(vent[2])-int(vent[3])) #Calculate and Append Elevation Difference
[Hl, Hv, DR, CF] = PressureSurveyCalc(int(vent[4]), int(vent[5]), 0, 0, 0, 0, 0, int(vent[2]),
int(vent[3]), int(vent[-2]), int(vent[-1]))
line_list.append(Hl) #Calculate Head Loss
air_flow = ((int(vent[-1])+int(vent[-2]))/2)*((float(vent[-4])+float(vent[-3]))/2) #Calculate
line_list.append(air_flow)
Results_List.append(line_list)
print(Results_List)
def LoadVentDataLeapfrog(vent_data_csv, vent_data_list):
#This Function Will Load Vent Data from a CSV file and send to AddVentData Function to create a list of dicts
with open(vent_data_csv, 'r') as vent_file:
i = 0
for line in vent_file:
new_line = line.split(',')
if i<3: #Skip first two lines of CSV file due to headings
i += 1
pass
else:
vent_data_list.append([new_line[0], new_line[1], new_line[2], new_line[3], new_line[4], new_line[5],
new_line[6], new_line[7], new_line[8], new_line[9], new_line[10], new_line[11],
new_line[12], new_line[13].strip("\n")]) #Create List of Formatted CSV Values
return vent_data_list
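# A layout note (inferred from the indexing in LeapfroggingAltimeter above,
# so treat the column meanings as assumptions): each parsed row holds the
# station pair (cols 0-1), elevations (2-3), altimeter readings (4-5), then
# ends with two airway areas (cols -4, -3) and two velocities (cols -2, -1).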
def HeadLossCircuit(List_Head):
"""
Head Loss Circuit is a function which calculates the head loss around a closed ventilation circuit.
Accepts input of a list (Junctions From-To) and Head Losses, in Water
A closed-circuit head loss is calculate and returned as a percentage (%)
Returns a Tuple of (Head Loss Error, Error Percentage)
"""
HeadLossVal = 0 #Set Initial Head Loss to 0
TotalHeadLoss = min(List_Head) #Total Head Loss Determined by Lowest Press. Measurement, Error Percentage (%)
for HeadLoss in List_Head:
HeadLossVal += HeadLoss #All Values are summed to determine closure error of circuit
#print(TotalHeadLoss)
percentage_error = round(abs(HeadLossVal)/abs(TotalHeadLoss)*100, 2)
print("Error Percentage of Head Loss Circuit:", percentage_error)
return (round(HeadLossVal, 3), percentage_error)
def main():
"""
Examples and Solutions based on Mine Ventilation and Air Conditioning Textbook to
demonstrate the proper usage of functions and verify process and Data Analysis.
:return:
"""
#An example of direct method of pressure measurement with Manometer
#Key = [Elevation (ft), Pressure (psi), Temp (Dry Bulb F), Temp (Wet Bulb F), Spec Humid, Spec Weight]
Top = [-1748.7, 12.594, 59.4, 50, 0.0072, 0.0655]
    Bottom = [-4368.70, 13.773, 67.3, 57.0, 0.0082, 0.0702]
Manometer_Reading = 1.51 #Inches of Water
print(ManometerDirectMethod(Top, Bottom, Manometer_Reading)) #Page 209/210 Example Mine Ventilation Textbook
print(psychrometricPropAir(70, 50, 29.921)) #Example 2.1 from Mine Ventilation and Air Conditioning, Page 17
list_of_head = [.445, 1.075, -8.6, 0.245, 2.8, 0.19, 0.084, 0.455, 1.50, 1.71] #Example 6.4 pg 211 Mine Ventilation
print("Head Loss in in H20: ", HeadLossCurcuit(list_of_head))
LeapfroggingAltimeter()
#An Example of Natural Ventilation Head in Inches Water, Example from Dr. Bhattacharyya ME 440 HW #1
ShaftATop = [63, 63, 1000, 28.95]
ShaftABottom = [65, 65, 300, 29.80]
ShaftBTop = [67, 59, 1200, 28.75]
ShaftBBottom = [59, 53, 500, 29.60]
NaturalVent = NaturalVentilation(ShaftATop, ShaftABottom, ShaftBTop, ShaftBBottom)
print(NaturalVent)
#An Example 5.6, page 159 Ramani, Wang, Mutmansky and Hartman to calculate friction factor
frict_factor_k = CalculateFrictionFactor(21.04, 3000, 4, 48000, 0.075)
print("Example 5.6, Friction Factor K: ", frict_factor_k)
if __name__ == "__main__":
    main()
// File: unwindtest.cc
/* Area: ffi_closure, unwind info
   Purpose: Check if the unwind information is passed correctly.
   Limitations: none.
   PR: none.
   Originator: Jeff Sturm <jsturm@one-point.com> */

/* { dg-do run } */

#include "ffitestcxx.h"
#if defined HAVE_STDINT_H
#include <stdint.h>
#endif
#if defined HAVE_INTTYPES_H
#include <inttypes.h>
#endif
void
closure_test_fn(ffi_cif* cif __UNUSED__, void* resp __UNUSED__,
void** args __UNUSED__, void* userdata __UNUSED__)
{
throw 9;
}
typedef void (*closure_test_type)();
void closure_test_fn1(ffi_cif* cif __UNUSED__, void* resp,
void** args, void* userdata __UNUSED__)
{
*(ffi_arg*)resp =
(int)*(float *)args[0] +(int)(*(float *)args[1]) +
(int)(*(float *)args[2]) + (int)*(float *)args[3] +
(int)(*(signed short *)args[4]) + (int)(*(float *)args[5]) +
(int)*(float *)args[6] + (int)(*(int *)args[7]) +
(int)(*(double*)args[8]) + (int)*(int *)args[9] +
(int)(*(int *)args[10]) + (int)(*(float *)args[11]) +
(int)*(int *)args[12] + (int)(*(int *)args[13]) +
(int)(*(int *)args[14]) + *(int *)args[15] + (int)(intptr_t)userdata;
printf("%d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d: %d\n",
(int)*(float *)args[0], (int)(*(float *)args[1]),
(int)(*(float *)args[2]), (int)*(float *)args[3],
(int)(*(signed short *)args[4]), (int)(*(float *)args[5]),
(int)*(float *)args[6], (int)(*(int *)args[7]),
(int)(*(double *)args[8]), (int)*(int *)args[9],
(int)(*(int *)args[10]), (int)(*(float *)args[11]),
(int)*(int *)args[12], (int)(*(int *)args[13]),
(int)(*(int *)args[14]), *(int *)args[15],
(int)(intptr_t)userdata, (int)*(ffi_arg*)resp);
throw (int)*(ffi_arg*)resp;
}
typedef int (*closure_test_type1)(float, float, float, float, signed short,
float, float, int, double, int, int, float,
int, int, int, int);
int main (void)
{
ffi_cif cif;
void *code;
ffi_closure *pcl = (ffi_closure *)ffi_closure_alloc(sizeof(ffi_closure), &code);
ffi_type * cl_arg_types[17];
{
cl_arg_types[1] = NULL;
CHECK(ffi_prep_cif(&cif, FFI_DEFAULT_ABI, 0,
&ffi_type_void, cl_arg_types) == FFI_OK);
CHECK(ffi_prep_closure_loc(pcl, &cif, closure_test_fn, NULL, code) == FFI_OK);
try
{
(*((closure_test_type)(code)))();
} catch (int exception_code)
{
CHECK(exception_code == 9);
}
printf("part one OK\n");
/* { dg-output "part one OK" } */
}
{
cl_arg_types[0] = &ffi_type_float;
cl_arg_types[1] = &ffi_type_float;
cl_arg_types[2] = &ffi_type_float;
cl_arg_types[3] = &ffi_type_float;
cl_arg_types[4] = &ffi_type_sshort;
cl_arg_types[5] = &ffi_type_float;
cl_arg_types[6] = &ffi_type_float;
cl_arg_types[7] = &ffi_type_uint;
cl_arg_types[8] = &ffi_type_double;
cl_arg_types[9] = &ffi_type_uint;
cl_arg_types[10] = &ffi_type_uint;
cl_arg_types[11] = &ffi_type_float;
cl_arg_types[12] = &ffi_type_uint;
cl_arg_types[13] = &ffi_type_uint;
cl_arg_types[14] = &ffi_type_uint;
cl_arg_types[15] = &ffi_type_uint;
cl_arg_types[16] = NULL;
/* Initialize the cif */
CHECK(ffi_prep_cif(&cif, FFI_DEFAULT_ABI, 16,
&ffi_type_sint, cl_arg_types) == FFI_OK);
CHECK(ffi_prep_closure_loc(pcl, &cif, closure_test_fn1,
(void *) 3 /* userdata */, code) == FFI_OK);
try
{
(*((closure_test_type1)code))
(1.1, 2.2, 3.3, 4.4, 127, 5.5, 6.6, 8, 9, 10, 11, 12.0, 13,
19, 21, 1);
/* { dg-output "\n1 2 3 4 127 5 6 8 9 10 11 12 13 19 21 1 3: 255" } */
} catch (int exception_code)
{
CHECK(exception_code == 255);
}
printf("part two OK\n");
/* { dg-output "\npart two OK" } */
}
exit(0);
}
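// A closing note on intent (inferred from the assertions above): both
// closures throw C++ exceptions from inside libffi-generated stubs, so the
// CHECK(exception_code == ...) lines only pass if those stubs carry unwind
// information the C++ runtime can propagate exceptions through.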
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os
import re
import threading
import datetime
import traceback
import sickbeard
from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, Quality, SEASON_RESULT, MULTI_EP_RESULT
from sickbeard import logger, db, show_name_helpers, exceptions, helpers
from sickbeard import sab
from sickbeard import nzbget
from sickbeard import clients
from sickbeard import history
from sickbeard import notifiers
from sickbeard import nzbSplitter
from sickbeard import ui
from sickbeard import encodingKludge as ek
from sickbeard import failed_history
from sickbeard.exceptions import ex
from sickbeard.providers.generic import GenericProvider
from sickbeard.blackandwhitelist import BlackAndWhiteList
from sickbeard import common
def _downloadResult(result):
"""
Downloads a result to the appropriate black hole folder.
Returns a bool representing success.
result: SearchResult instance to download.
"""
resProvider = result.provider
if resProvider == None:
logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR)
return False
# nzbs with an URL can just be downloaded from the provider
if result.resultType == "nzb":
newResult = resProvider.downloadResult(result)
# if it's an nzb data result
elif result.resultType == "nzbdata":
# get the final file path to the nzb
fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
logger.log(u"Saving NZB to " + fileName)
newResult = True
# save the data to disk
try:
with ek.ek(open, fileName, 'w') as fileOut:
fileOut.write(result.extraInfo[0])
helpers.chmodAsParent(fileName)
except EnvironmentError, e:
logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
newResult = False
elif resProvider.providerType == "torrent":
newResult = resProvider.downloadResult(result)
else:
logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
newResult = False
return newResult
def snatchEpisode(result, endStatus=SNATCHED):
"""
Contains the internal logic necessary to actually "snatch" a result that
has been found.
Returns a bool representing success.
result: SearchResult instance to be snatched.
endStatus: the episode status that should be used for the episode object once it's snatched.
"""
if result is None:
return False
result.priority = 0 # -1 = low, 0 = normal, 1 = high
if sickbeard.ALLOW_HIGH_PRIORITY:
# if it aired recently make it high priority
for curEp in result.episodes:
if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
result.priority = 1
if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
endStatus = SNATCHED_PROPER
# NZBs can be sent straight to SAB or saved to disk
if result.resultType in ("nzb", "nzbdata"):
if sickbeard.NZB_METHOD == "blackhole":
dlResult = _downloadResult(result)
elif sickbeard.NZB_METHOD == "sabnzbd":
dlResult = sab.sendNZB(result)
elif sickbeard.NZB_METHOD == "nzbget":
is_proper = True if endStatus == SNATCHED_PROPER else False
dlResult = nzbget.sendNZB(result, is_proper)
else:
logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
dlResult = False
# TORRENTs can be sent to clients or saved to disk
elif result.resultType == "torrent":
# torrents are saved to disk when blackhole mode
if sickbeard.TORRENT_METHOD == "blackhole":
dlResult = _downloadResult(result)
else:
# make sure we have the torrent file content
if not result.content:
if not result.url.startswith('magnet'):
result.content = result.provider.getURL(result.url)
if not result.content:
logger.log(
u"Torrent content failed to download from " + result.url, logger.ERROR
)
# Snatches torrent with client
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
dlResult = client.sendTORRENT(result)
else:
logger.log(u"Unknown result type, unable to download it", logger.ERROR)
dlResult = False
if not dlResult:
return False
if sickbeard.USE_FAILED_DOWNLOADS:
failed_history.logSnatch(result)
ui.notifications.message('Episode snatched', result.name)
history.logSnatch(result)
# don't notify when we re-download an episode
sql_l = []
for curEpObj in result.episodes:
with curEpObj.lock:
if isFirstBestMatch(result):
curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
else:
curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
sql_l.append(curEpObj.get_sql())
if curEpObj.status not in Quality.DOWNLOADED:
notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
if len(sql_l) > 0:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
return True
def filter_release_name(name, filter_words):
"""
Filters out results based on filter_words
name: name to check
filter_words : Words to filter on, separated by comma
Returns: False if the release name is OK, True if it contains one of the filter_words
"""
if filter_words:
filters = [re.compile('.*%s.*' % word.strip(), re.I) for word in filter_words.split(',')]
for regfilter in filters:
if regfilter.search(name):
logger.log(u"" + name + " contains pattern: " + regfilter.pattern, logger.DEBUG)
return True
return False
def pickBestResult(results, show, quality_list=None):
logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)
# build the black And white list
bwl = None
if show:
if show.is_anime:
bwl = BlackAndWhiteList(show.indexerid)
else:
logger.log("Could not create black and white list no show was given", logger.DEBUG)
# find the best result for the current episode
bestResult = None
for cur_result in results:
logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])
if bwl:
if not bwl.is_valid(cur_result):
logger.log(cur_result.name + " does not match the blacklist or the whitelist, rejecting it. Result: " + bwl.get_last_result_msg(), logger.MESSAGE)
continue
if quality_list and cur_result.quality not in quality_list:
logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
continue
if show.rls_ignore_words and filter_release_name(cur_result.name, show.rls_ignore_words):
logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
logger.MESSAGE)
continue
if show.rls_require_words and not filter_release_name(cur_result.name, show.rls_require_words):
logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
logger.MESSAGE)
continue
if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
cur_result.provider.name):
logger.log(cur_result.name + u" has previously failed, rejecting it")
continue
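# tie-break order below: higher quality wins outright; at equal quality prefer
# proper/repack, then non-internal releases, then x264 over xvid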
if not bestResult or bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN:
bestResult = cur_result
elif bestResult.quality == cur_result.quality:
if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
bestResult = cur_result
elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
bestResult = cur_result
elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
bestResult = cur_result
if bestResult:
logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
else:
logger.log(u"No result picked.", logger.DEBUG)
return bestResult
def isFinalResult(result):
"""
Checks if the given result is good enough quality that we can stop searching for other ones.
If the result is the highest quality in both the any/best quality lists then this function
returns True, if not then it's False
"""
logger.log(u"Checking if we should keep searching after we've found " + result.name, logger.DEBUG)
show_obj = result.episodes[0].show
bwl = None
if show_obj.is_anime:
bwl = BlackAndWhiteList(show_obj.indexerid)
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
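# e.g. with any_qualities = [HDTV] and best_qualities = [1080p], an HDTV result
# returns False (keep looking for the 1080p redownload); with no best list it returns True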
# if there is a redownload that's higher than this then we definitely need to keep looking
if best_qualities and result.quality < max(best_qualities):
return False
# if it does not match the shows black and white list its no good
elif bwl and not bwl.is_valid(result):
return False
# if there's no redownload that's higher (above) and this is the highest initial download then we're good
elif any_qualities and result.quality in any_qualities:
return True
elif best_qualities and result.quality == max(best_qualities):
# if this is the best redownload but we have a higher initial download then keep looking
if any_qualities and result.quality < max(any_qualities):
return False
# if this is the best redownload and we don't have a higher initial download then we're done
else:
return True
# if we got here than it's either not on the lists, they're empty, or it's lower than the highest required
else:
return False
def isFirstBestMatch(result):
"""
Checks if the given result is a best quality match and if we want to archive the episode on first match.
"""
logger.log(u"Checking if we should archive our first best quality match for for episode " + result.name,
logger.DEBUG)
show_obj = result.episodes[0].show
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
# if there is a redownload that's a match to one of our best qualities and we want to archive the episode then we are done
if best_qualities and show_obj.archive_firstmatch and result.quality in best_qualities:
return True
return False
def wantedEpisodes(show, fromDate):
anyQualities, bestQualities = common.Quality.splitQuality(show.quality) # @UnusedVariable
allQualities = list(set(anyQualities + bestQualities))
logger.log(u"Seeing if we need anything from " + show.name)
myDB = db.DBConnection()
if show.air_by_date:
sqlResults = myDB.select(
"SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
[fromDate.toordinal(), show.indexerid])
else:
sqlResults = myDB.select(
"SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
[show.indexerid, fromDate.toordinal()])
# check through the list of statuses to see if we want any
wanted = []
for result in sqlResults:
curCompositeStatus = int(result["status"])
curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
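# composite status packs both values into one int (status + 100 * quality in this
# codebase's Quality helpers), so it must be split before comparing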
if bestQualities:
highestBestQuality = max(allQualities)
else:
highestBestQuality = 0
# if we need a better one then say yes
if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER,
common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED:
epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
epObj.wantedQuality = [i for i in allQualities if (i > curQuality and i != common.Quality.UNKNOWN)]
wanted.append(epObj)
return wanted
def searchForNeededEpisodes():
foundResults = {}
didSearch = False
origThreadName = threading.currentThread().name
threads = []
show_list = sickbeard.showList
fromDate = datetime.date.fromordinal(1)
episodes = []
for curShow in show_list:
if curShow.paused:
continue
episodes.extend(wantedEpisodes(curShow, fromDate))
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
for curProvider in providers:
# spawn separate threads for each provider so we don't need to wait for providers with slow network operation
threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
" :: [" + curProvider.name + "]"))
# start the thread we just created
threads[-1].start()
# wait for all threads to finish
for t in threads:
t.join()
for curProvider in providers:
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
curFoundResults = curProvider.searchRSS(episodes)
didSearch = True
# pick a single result for each episode, respecting existing results
for curEp in curFoundResults:
if curEp.show.paused:
logger.log(
u"Show " + curEp.show.name + " is paused, ignoring all RSS items for " + curEp.prettyName(),
logger.DEBUG)
continue
# find the best result for the current episode
bestResult = pickBestResult(curFoundResults[curEp], curEp.show)
# if all results were rejected move on to the next episode
if not bestResult:
logger.log(u"All found results for " + curEp.prettyName() + " were rejected.", logger.DEBUG)
continue
# if it's already in the list (from another provider) and the newly found quality is no better then skip it
if curEp in foundResults and bestResult.quality <= foundResults[curEp].quality:
continue
# filter out possible bad torrents from providers such as ezrss
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
bestResult.content = None
if not bestResult.url.startswith('magnet'):
bestResult.content = bestResult.provider.getURL(bestResult.url)
if not bestResult.content:
continue
foundResults[curEp] = bestResult
threading.currentThread().name = origThreadName
if not didSearch:
logger.log(
u"No NZB/Torrent providers found or enabled in the SickGear config for daily searches. Please check your settings.",
logger.ERROR)
return foundResults.values()
def searchProviders(show, episodes, manualSearch=False):
foundResults = {}
finalResults = []
didSearch = False
# build name cache for show
sickbeard.name_cache.buildNameCache(show)
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_backlog]
for providerNum, curProvider in enumerate(providers):
if curProvider.anime_only and not show.is_anime:
logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)
continue
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
foundResults[curProvider.name] = {}
searchCount = 0
search_mode = curProvider.search_mode
while True:
searchCount += 1
if search_mode == 'eponly':
logger.log(u"Performing episode search for " + show.name)
else:
logger.log(u"Performing season pack search for " + show.name)
try:
curProvider.cache.updateCache()
searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
break
except Exception, e:
logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
break
finally:
threading.currentThread().name = origThreadName
didSearch = True
if len(searchResults):
# make a list of all the results for this provider
for curEp in searchResults:
# skip non-tv crap
searchResults[curEp] = filter(
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, searchResults[curEp])
if curEp in foundResults:
foundResults[curProvider.name][curEp] += searchResults[curEp]
else:
foundResults[curProvider.name][curEp] = searchResults[curEp]
break
elif not curProvider.search_fallback or searchCount == 2:
break
if search_mode == 'sponly':
logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...")
search_mode = 'eponly'
else:
logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...")
search_mode = 'sponly'
# skip to next provider if we have no results to process
if not len(foundResults[curProvider.name]):
continue
anyQualities, bestQualities = Quality.splitQuality(show.quality)
# pick the best season NZB
bestSeasonResult = None
if SEASON_RESULT in foundResults[curProvider.name]:
bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
anyQualities + bestQualities)
highest_quality_overall = 0
for cur_episode in foundResults[curProvider.name]:
for cur_result in foundResults[curProvider.name][cur_episode]:
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
highest_quality_overall = cur_result.quality
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall],
logger.DEBUG)
# see if every episode is wanted
if bestSeasonResult:
searchedSeasons = [str(x.season) for x in episodes]
# get the quality of the season nzb
seasonQual = bestSeasonResult.quality
logger.log(
u"The quality of the season " + bestSeasonResult.provider.providerType + " is " + Quality.qualityStrings[
seasonQual], logger.DEBUG)
myDB = db.DBConnection()
allEps = [int(x["episode"])
for x in myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND ( season IN ( " + ','.join(searchedSeasons) + " ) )",
[show.indexerid])]
logger.log(u"Executed query: [SELECT episode FROM tv_episodes WHERE showid = %s AND season in %s]" % (show.indexerid, ','.join(searchedSeasons)))
logger.log(u"Episode list: " + str(allEps), logger.DEBUG)
allWanted = True
anyWanted = False
for curEpNum in allEps:
for season in set([x.season for x in episodes]):
if not show.wantEpisode(season, curEpNum, seasonQual):
allWanted = False
else:
anyWanted = True
# if we need every ep in the season and there's nothing better, then just download this and be done with it (unless single episodes are preferred)
if allWanted and bestSeasonResult.quality == highest_quality_overall:
logger.log(
u"Every ep in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonResult.episodes = epObjs
return [bestSeasonResult]
elif not anyWanted:
logger.log(
u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonResult.name,
logger.DEBUG)
else:
if bestSeasonResult.provider.providerType == GenericProvider.NZB:
logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)
# if not, break it apart and add them as the lowest priority results
individualResults = nzbSplitter.splitResult(bestSeasonResult)
individualResults = filter(
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, individualResults)
for curResult in individualResults:
if len(curResult.episodes) == 1:
epNum = curResult.episodes[0].episode
elif len(curResult.episodes) > 1:
epNum = MULTI_EP_RESULT
if epNum in foundResults[curProvider.name]:
foundResults[curProvider.name][epNum].append(curResult)
else:
foundResults[curProvider.name][epNum] = [curResult]
# If this is a torrent, all we can do is leech the entire torrent; the user will have to select which eps not to download in their torrent client
else:
# Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
logger.log(
u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonResult.episodes = epObjs
epNum = MULTI_EP_RESULT
if epNum in foundResults[curProvider.name]:
foundResults[curProvider.name][epNum].append(bestSeasonResult)<|fim▁hole|>
# go through multi-ep results and see if we really want them or not, get rid of the rest
multiResults = {}
if MULTI_EP_RESULT in foundResults[curProvider.name]:
for multiResult in foundResults[curProvider.name][MULTI_EP_RESULT]:
logger.log(u"Seeing if we want to bother with multi-episode result " + multiResult.name, logger.DEBUG)
if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size,
multiResult.provider.name):
logger.log(multiResult.name + u" has previously failed, rejecting this multi-ep result")
continue
# see how many of the eps that this result covers aren't covered by single results
neededEps = []
notNeededEps = []
for epObj in multiResult.episodes:
epNum = epObj.episode
# if we have results for the episode
if epNum in foundResults[curProvider.name] and len(foundResults[curProvider.name][epNum]) > 0:
neededEps.append(epNum)
else:
notNeededEps.append(epNum)
logger.log(
u"Single-ep check result is neededEps: " + str(neededEps) + ", notNeededEps: " + str(notNeededEps),
logger.DEBUG)
if not notNeededEps:
logger.log(u"All of these episodes were covered by single episode results, ignoring this multi-episode result", logger.DEBUG)
continue
# check if these eps are already covered by another multi-result
multiNeededEps = []
multiNotNeededEps = []
for epObj in multiResult.episodes:
epNum = epObj.episode
if epNum in multiResults:
multiNotNeededEps.append(epNum)
else:
multiNeededEps.append(epNum)
logger.log(
u"Multi-ep check result is multiNeededEps: " + str(multiNeededEps) + ", multiNotNeededEps: " + str(
multiNotNeededEps), logger.DEBUG)
if not multiNeededEps:
logger.log(
u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result",
logger.DEBUG)
continue
# if we're keeping this multi-result then remember it
for epObj in multiResult.episodes:
multiResults[epObj.episode] = multiResult
# don't bother with the single result if we're going to get it with a multi result
for epObj in multiResult.episodes:
epNum = epObj.episode
if epNum in foundResults[curProvider.name]:
logger.log(
u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
epNum) + ", removing the single-episode results from the list", logger.DEBUG)
del foundResults[curProvider.name][epNum]
# of all the single ep results narrow it down to the best one for each episode
finalResults += set(multiResults.values())
for curEp in foundResults[curProvider.name]:
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
continue
if len(foundResults[curProvider.name][curEp]) == 0:
continue
bestResult = pickBestResult(foundResults[curProvider.name][curEp], show)
# if all results were rejected move on to the next episode
if not bestResult:
continue
# filter out possible bad torrents from providers such as ezrss
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
bestResult.content = None
if not bestResult.url.startswith('magnet'):
bestResult.content = bestResult.provider.getURL(bestResult.url)
if not bestResult.content:
continue
# add the result if it's not a duplicate; when episodes overlap, keep the higher-quality result
found = False
for i, result in enumerate(finalResults):
for bestResultEp in bestResult.episodes:
if bestResultEp in result.episodes:
if result.quality < bestResult.quality:
finalResults.pop(i)
else:
found = True
if not found:
finalResults += [bestResult]
# check that we got all the episodes we wanted first before doing a match and snatch
wantedEpCount = 0
for wantedEp in episodes:
for result in finalResults:
if wantedEp in result.episodes and isFinalResult(result):
wantedEpCount += 1
# make sure we search every provider for results unless we found everything we wanted
if wantedEpCount == len(episodes):
break
if not didSearch:
logger.log(u"No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.",
logger.ERROR)
return finalResults<|fim▁end|> | else:
foundResults[curProvider.name][epNum] = [bestSeasonResult] |
<|file_name|>xmlparser.cc<|end_file_name|><|fim▁begin|>/** @file io/xmlparser.cc xml parsing class used in xmlreader.cc */
///////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2004-2004 California Institute of Technology
// Copyright (c) 2004-2007 University of Southern California
// Rob Peters <https://github.com/rjpcal/>
//
// created: Tue May 25 10:29:42 2004
//
// --------------------------------------------------------------------
//
// This file is part of GroovX.
// [https://github.com/rjpcal/groovx]
//
// GroovX is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// GroovX is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with GroovX; if not, write to the Free Software Foundation,
// Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
//
///////////////////////////////////////////////////////////////////////
#include "io/xmlparser.h"
#include "rutz/error.h"
#include "rutz/fstring.h"
#include "rutz/sfmt.h"
#include <cstdio> // for EOF
#include <istream>
#include <limits>
#include "rutz/trace.h"
#include "rutz/debug.h"
GVX_DBG_REGISTER
#ifndef XML_STATUS_OK
#define XML_STATUS_OK 1
#define XML_STATUS_ERROR 0
#endif
io::xml_parser::xml_parser(std::istream& is, int bufsize) :
m_parser(XML_ParserCreate(/*encoding*/0)),
m_stream(is),
m_buf_size(bufsize)
{
GVX_TRACE("io::xml_parser::xml_parser");
if (m_parser == nullptr)
{
throw rutz::error("couldn't allocate memory for XML_Parser",
SRC_POS);
}
XML_SetUserData(m_parser, this);
XML_SetElementHandler(m_parser, &c_element_start, &c_element_end);
XML_SetCharacterDataHandler(m_parser, &c_character_data);
}
io::xml_parser::~xml_parser()
{
GVX_TRACE("io::xml_parser::~xml_parser");
XML_ParserFree(m_parser);
}
void io::xml_parser::character_data(const char* /*text*/, size_t /*length*/)
{
GVX_TRACE("io::xml_parser::character_data");
}
void io::xml_parser::c_element_start(void* data, const char* el, const char** attr)
{
GVX_TRACE("io::xml_parser::c_element_start");<|fim▁hole|> p->element_start(el, attr);
}
void io::xml_parser::c_element_end(void* data, const char* el)
{
GVX_TRACE("io::xml_parser::c_element_end");
io::xml_parser* p = static_cast<io::xml_parser*>(data);
GVX_ASSERT(p != nullptr);
p->element_end(el);
}
void io::xml_parser::c_character_data(void* data, const char* text, int length)
{
GVX_TRACE("io::xml_parser::c_character_data");
io::xml_parser* p = static_cast<io::xml_parser*>(data);
GVX_ASSERT(p != nullptr);
GVX_ASSERT(length >= 0);
p->character_data(text, size_t(length));
}
void io::xml_parser::parse()
{
GVX_TRACE("io::xml_parser::parse");
while (1)
{
void* const buf = XML_GetBuffer(m_parser, m_buf_size);
if (buf == nullptr)
{
throw rutz::error("couldn't get buffer in io::xml_parser::parse()",
SRC_POS);
}
// very strangely I wasn't able to get things to work using a
// readsome() approach here...
m_stream.read(static_cast<char*>(buf), m_buf_size);
const ssize_t len = m_stream.gcount();
if (!m_stream.eof() && m_stream.fail())
{
throw rutz::error("read error in io::xml_parser::parse()",
SRC_POS);
}
const int peek = m_stream.peek();
const int done = (peek == EOF);
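// peeking ahead forces the stream to notice EOF once the last chunk has been
// read, so `done` is accurate before the final ParseBuffer call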
if (GVX_DBG_LEVEL() >= 3)
{
dbg_eval(3, buf);
dbg_eval(3, m_buf_size);
dbg_eval(3, len);
dbg_eval(3, peek);
dbg_eval_nl(3, done);
}
GVX_ASSERT(len < std::numeric_limits<int>::max());
// alternate: use XML_Parse(m_parser, m_buf, len, done) if we have
// our own memory buffer
if (XML_ParseBuffer(m_parser, int(len), done) != XML_STATUS_OK)
{
throw rutz::error
(rutz::sfmt("xml parse error at input line %d:\n%s",
int(XML_GetCurrentLineNumber(m_parser)),
XML_ErrorString(XML_GetErrorCode(m_parser))),
SRC_POS);
}
if (done)
return;
}
}
#if 0
// here's a simple subclass of io::xml_parser that prints an outline
// of an XML file just to show that everything is getting parsed
// properly
class Outliner : public io::xml_parser
{
public:
Outliner(std::istream& is) :
io::xml_parser(is),
m_depth(0) {}
virtual ~Outliner() {}
protected:
virtual void element_start(const char* el, const char** attr) override;
virtual void element_end(const char* el) override;
private:
int m_depth;
};
void Outliner::element_start(const char* el, const char** attr)
{
for (int i = 0; i < m_depth; i++)
printf(" ");
printf("%s", el);
for (int i = 0; attr[i]; i += 2)
{
printf(" %s='%s'", attr[i], attr[i + 1]);
}
printf("\n");
++m_depth;
}
void Outliner::element_end(const char* el)
{
--m_depth;
}
#endif<|fim▁end|> | io::xml_parser* p = static_cast<io::xml_parser*>(data);
GVX_ASSERT(p != nullptr); |
<|file_name|>fundrawtransaction.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
def get_unspent(listunspent, amount):
for utx in listunspent:
if utx['amount'] == amount:
return utx
raise AssertionError('Could not find unspent with amount={}'.format(amount))
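# usage: utx = get_unspent(node.listunspent(), 5) returns the first UTXO whose amount is exactly 5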
class RawTransactionsTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 4
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
print("Mining blocks...")
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
# if the fee's positive delta is higher than this value the tests will fail;
# a negative delta always fails the tests.
# The size of the signature of every input may be at most 2 bytes larger
# than a minimum sized signature.
# = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
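# worked example: with min_relay_tx_fee = Decimal('0.00001'), feeTolerance = 2 * 0.00001 / 1000 = Decimal('0.00000002')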
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
watchonly_amount = Decimal(200)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test that we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.2 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
# test a fundrawtransaction that will not get a change output       #
#####################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
####################################################
# test a fundrawtransaction with an invalid option #
####################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
try:
self.nodes[2].fundrawtransaction(rawtx, {'foo': 'bar'})
raise AssertionError("Accepted invalid option foo")
except JSONRPCException as e:
assert("Unexpected key foo" in e.error['message'])
############################################################
# test a fundrawtransaction with an invalid change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
try:
self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': 'foobar'})
raise AssertionError("Accepted invalid elysium address")
except JSONRPCException as e:
assert("changeAddress must be a valid elysium address" in e.error['message'])
############################################################
# test a fundrawtransaction with a provided change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
change = self.nodes[2].getnewaddress()
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 2})
except JSONRPCException as e:
assert('changePosition out of bounds' == e.error['message'])
else:
assert(False)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
out = dec_tx['vout'][0]
assert_equal(change, out['scriptPubKey']['addresses'][0])
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
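# offsets in hex chars: version 8 + vin count 2 + txid 64 + vout index 8 = 82, so
# [82:84] is the scriptSig length byte; "0100" turns it into a 1-byte script 0x00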
rawtx = rawtx[:82] + "0100" + rawtx[84:]
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
<|fim▁hole|> ##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 1.0}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
raise AssertionError("Spent more than available")
except JSONRPCException as e:
assert("Insufficient" in e.error['message'])
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 1.2 BTC to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
stop_nodes(self.nodes)
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
# drain the keypool
self.nodes[1].getnewaddress()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
try:
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('Keypool ran out' in e.error['message'])
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].walletlock()
try:
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.2)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('walletpassphrase' in e.error['message'])
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 100)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True })
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
# Backward compatibility test (2nd param is includeWatching)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransaction(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
self.sync_all()
#######################
# Test feeRate option #
#######################
# Make sure there is exactly one input so coin selection can't skew the result
assert_equal(len(self.nodes[3].listunspent(1)), 1)
inputs = []
outputs = {self.nodes[2].getnewaddress() : 1}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee)
result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee})
result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee})
result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
if __name__ == '__main__':
RawTransactionsTest().main()<|fim▁end|> | |
<|file_name|>saved_action_approval.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Tanium Inc
#
# Generated from console.wsdl version 0.0.1
#
#<|fim▁hole|>
from .base import BaseType
class SavedActionApproval(BaseType):
_soap_tag = 'saved_action_approval'
def __init__(self):
BaseType.__init__(
self,
simple_properties={'id': int,
'name': str,
'approved_flag': int},
complex_properties={'metadata': MetadataList},
list_properties={},
)
self.id = None
self.name = None
self.approved_flag = None
self.metadata = None
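# usage sketch (hypothetical values): sa = SavedActionApproval(); sa.id = 123; sa.approved_flag = 1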
from metadata_list import MetadataList<|fim▁end|> | |
<|file_name|>LeveledUp.tsx<|end_file_name|><|fim▁begin|>/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
import React from 'react';
import { styled } from '@csegames/linaria/react';
const LeveledUpContainer = styled.div`
position: relative;
display: flex;
align-items: center;
justify-content: center;
padding: 5px;
&.announcement {<|fim▁hole|> padding-bottom: 5px;
padding-right: 20px;
padding-left: 0;
}
&:before {
content: '';
position: absolute;
top: 0;
height: 1px;
width: 100%;
background: linear-gradient(to right, transparent 1%, white, transparent 99%);
}
&:after {
content: '';
position: absolute;
bottom: 0;
height: 1px;
width: 100%;
background: linear-gradient(to right, transparent 1%, white, transparent 99%);
}
`;
const LeveledUpText = styled.div`
font-family: RobotoCondensed;
font-weight: bold;
color: #ffdf5d;
text-shadow: 3px 3px 0 #884b02;
font-size: 25px;
text-transform: uppercase;
`;
const KeybindBox = styled.div`
display: flex;
align-items: center;
justify-content: center;
font-family: RobotoCondensed;
font-style: italic;
font-weight: bold;
font-size: 16px;
width: 25px;
height: 25px;
background-color: rgba(0, 0, 0, 0.5);
margin-left: 10px;
color: white;
`;
export interface Props {
isAnnouncement?: boolean;
}
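// usage sketch: <LeveledUp isAnnouncement /> renders the right-aligned announcement
// variant; a bare <LeveledUp /> renders the centered default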
export function LeveledUp(props: Props) {
const backgroundClass = props.isAnnouncement ? 'announcement' : '';
return (
<LeveledUpContainer className={backgroundClass}>
<LeveledUpText>Leveled Up!</LeveledUpText>
<KeybindBox>G</KeybindBox>
</LeveledUpContainer>
);
}<|fim▁end|> | justify-content: flex-end;
background: linear-gradient(to right, transparent, rgba(255, 255, 255, 0.7));
padding-top: 5px; |
<|file_name|>webpack.prod.config.js<|end_file_name|><|fim▁begin|>const path = require('path')
const merge = require('webpack-merge')
const webpack = require('webpack')
const baseWebpackConfig = require('./webpack.base.config')
const ExtractTextPlugin = require("extract-text-webpack-plugin")
const OptimizeCSSPlugin = require('optimize-css-assets-webpack-plugin')
function resolve (dir) {
return path.join(__dirname, '..', dir)
}
module.exports = merge(baseWebpackConfig, {
output: {
path: resolve('dist'),
filename: '[name].[hash].js'
},
plugins: [
// optimize css for production<|fim▁hole|> }),
new ExtractTextPlugin('style.[hash].css'),
// split vendor js into separate file
new webpack.optimize.CommonsChunkPlugin({
name: 'vendor',
minChunks: function (module) {
// this assumes your vendor imports exist in the node_modules directory
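// module.context is the directory containing the module file, so anything
// resolved from node_modules lands in the vendor chunk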
return module.context && module.context.indexOf("node_modules") !== -1;
}
}),
// extract webpack runtime and module manifest to its own file in order to
// prevent vendor hash from being updated whenever app bundle is updated
new webpack.optimize.CommonsChunkPlugin({
name: 'manifest',
chunks: ['vendor']
}),
]
})<|fim▁end|> | new OptimizeCSSPlugin({
cssProcessorOptions: {
safe: true
} |
<|file_name|>customFormlayout.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
formlayout
==========
Module creating Qt form dialogs/layouts to edit various type of parameters
formlayout License Agreement (MIT License)
------------------------------------------
Copyright (c) 2009 Pierre Raybaut
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# History:
# 1.0.10: added float validator (disable "Ok" and "Apply" button when not valid)
# 1.0.7: added support for "Apply" button
# 1.0.6: code cleaning
from __future__ import (absolute_import, division, print_function,
unicode_literals)
__version__ = '1.0.10'
__license__ = __doc__
DEBUG = False
import copy
import datetime
import warnings
import six
from matplotlib import colors as mcolors
from matplotlib.backends.qt_compat import QtGui, QtWidgets, QtCore
BLACKLIST = set(["title", "label"])
class ColorButton(QtWidgets.QPushButton):
"""
Color choosing push button
"""
colorChanged = QtCore.Signal(QtGui.QColor)
def __init__(self, parent=None):
QtWidgets.QPushButton.__init__(self, parent)
self.setFixedSize(20, 20)
self.setIconSize(QtCore.QSize(12, 12))
self.clicked.connect(self.choose_color)
self._color = QtGui.QColor()
def choose_color(self):
color = QtWidgets.QColorDialog.getColor(
self._color, self.parentWidget(), "",
QtWidgets.QColorDialog.ShowAlphaChannel)
if color.isValid():
self.set_color(color)
def get_color(self):
return self._color
@QtCore.Slot(QtGui.QColor)
def set_color(self, color):
if color != self._color:
self._color = color
self.colorChanged.emit(self._color)
pixmap = QtGui.QPixmap(self.iconSize())
pixmap.fill(color)
self.setIcon(QtGui.QIcon(pixmap))
color = QtCore.Property(QtGui.QColor, get_color, set_color)
def to_qcolor(color):
"""Create a QColor from a matplotlib color"""
qcolor = QtGui.QColor()
try:
rgba = mcolors.to_rgba(color)
except ValueError:
warnings.warn('Ignoring invalid color %r' % color)
return qcolor # return invalid QColor<|fim▁hole|> return qcolor
class ColorLayout(QtWidgets.QHBoxLayout):
"""Color-specialized QLineEdit layout"""
def __init__(self, color, parent=None):
QtWidgets.QHBoxLayout.__init__(self)
assert isinstance(color, QtGui.QColor)
self.lineedit = QtWidgets.QLineEdit(
mcolors.to_hex(color.getRgbF(), keep_alpha=True), parent)
self.lineedit.editingFinished.connect(self.update_color)
self.addWidget(self.lineedit)
self.colorbtn = ColorButton(parent)
self.colorbtn.color = color
self.colorbtn.colorChanged.connect(self.update_text)
self.addWidget(self.colorbtn)
def update_color(self):
color = self.text()
qcolor = to_qcolor(color)
self.colorbtn.color = qcolor # defaults to black if not qcolor.isValid()
def update_text(self, color):
self.lineedit.setText(mcolors.to_hex(color.getRgbF(), keep_alpha=True))
def text(self):
return self.lineedit.text()
def font_is_installed(font):
"""Check if font is installed"""
return [fam for fam in QtGui.QFontDatabase().families()
if six.text_type(fam) == font]
def tuple_to_qfont(tup):
"""
Create a QFont from tuple:
(family [string], size [int], italic [bool], bold [bool])
"""
if not (isinstance(tup, tuple) and len(tup) == 4
and font_is_installed(tup[0])
and isinstance(tup[1], int)
and isinstance(tup[2], bool)
and isinstance(tup[3], bool)):
return None
font = QtGui.QFont()
family, size, italic, bold = tup
font.setFamily(family)
font.setPointSize(size)
font.setItalic(italic)
font.setBold(bold)
return font
def qfont_to_tuple(font):
return (six.text_type(font.family()), int(font.pointSize()),
font.italic(), font.bold())
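# Round-trip sanity check (illustrative addition; assumes the 'Arial' family
# is installed, otherwise tuple_to_qfont returns None):
#     font = tuple_to_qfont(('Arial', 10, False, True))
#     font is None or qfont_to_tuple(font) == ('Arial', 10, False, True)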
class FontLayout(QtWidgets.QGridLayout):
"""Font selection"""
def __init__(self, value, parent=None):
QtWidgets.QGridLayout.__init__(self)
font = tuple_to_qfont(value)
assert font is not None
# Font family
self.family = QtWidgets.QFontComboBox(parent)
self.family.setCurrentFont(font)
self.addWidget(self.family, 0, 0, 1, -1)
# Font size
self.size = QtWidgets.QComboBox(parent)
self.size.setEditable(True)
sizelist = list(range(6, 12)) + list(range(12, 30, 2)) + [36, 48, 72]
size = font.pointSize()
if size not in sizelist:
sizelist.append(size)
sizelist.sort()
self.size.addItems([str(s) for s in sizelist])
self.size.setCurrentIndex(sizelist.index(size))
self.addWidget(self.size, 1, 0)
# Italic or not
self.italic = QtWidgets.QCheckBox(self.tr("Italic"), parent)
self.italic.setChecked(font.italic())
self.addWidget(self.italic, 1, 1)
# Bold or not
self.bold = QtWidgets.QCheckBox(self.tr("Bold"), parent)
self.bold.setChecked(font.bold())
self.addWidget(self.bold, 1, 2)
def get_font(self):
font = self.family.currentFont()
font.setItalic(self.italic.isChecked())
font.setBold(self.bold.isChecked())
font.setPointSize(int(self.size.currentText()))
return qfont_to_tuple(font)
def is_edit_valid(edit):
text = edit.text()
state = edit.validator().validate(text, 0)[0]
return state == QtGui.QDoubleValidator.Acceptable
class FormWidget(QtWidgets.QWidget):
update_buttons = QtCore.Signal()
def __init__(self, data, comment="", parent=None):
QtWidgets.QWidget.__init__(self, parent)
self.data = copy.deepcopy(data)
self.widgets = []
self.formlayout = QtWidgets.QFormLayout(self)
if comment:
self.formlayout.addRow(QtWidgets.QLabel(comment))
self.formlayout.addRow(QtWidgets.QLabel(" "))
if DEBUG:
print("\n"+("*"*80))
print("DATA:", self.data)
print("*"*80)
print("COMMENT:", comment)
print("*"*80)
def get_dialog(self):
"""Return FormDialog instance"""
dialog = self.parent()
while not isinstance(dialog, QtWidgets.QDialog):
dialog = dialog.parent()
return dialog
def setup(self):
# self.formlayout.setFieldGrowthPolicy(1)
for label, value in self.data:
if DEBUG:
print("value:", value)
if label is None and value is None:
# Separator: (None, None)
self.formlayout.addRow(QtWidgets.QLabel(" "), QtWidgets.QLabel(" "))
self.widgets.append(None)
continue
elif label is None:
# Comment
self.formlayout.addRow(QtWidgets.QLabel(value))
self.widgets.append(None)
continue
elif tuple_to_qfont(value) is not None:
field = FontLayout(value, self)
elif (label.lower() not in BLACKLIST
and mcolors.is_color_like(value)):
field = ColorLayout(to_qcolor(value), self)
elif isinstance(value, six.string_types):
field = QtWidgets.QLineEdit(value, self)
field.setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum))
elif isinstance(value, (list, tuple)):
if isinstance(value, tuple):
value = list(value)
selindex = value.pop(0)
field = QtWidgets.QComboBox(self)
if isinstance(value[0], (list, tuple)):
keys = [key for key, _val in value]
value = [val for _key, val in value]
else:
keys = value
field.addItems(value)
if selindex in value:
selindex = value.index(selindex)
elif selindex in keys:
selindex = keys.index(selindex)
elif not isinstance(selindex, int):
warnings.warn(
"index '%s' is invalid (label: %s, value: %s)" %
(selindex, label, value))
selindex = 0
field.setCurrentIndex(selindex)
elif isinstance(value, bool):
field = QtWidgets.QCheckBox(self)
if value:
field.setCheckState(QtCore.Qt.Checked)
else:
field.setCheckState(QtCore.Qt.Unchecked)
elif isinstance(value, float):
field = QtWidgets.QLineEdit(repr(value), self)
field.setCursorPosition(0)
field.setValidator(QtGui.QDoubleValidator(field))
field.validator().setLocale(QtCore.QLocale("C"))
dialog = self.get_dialog()
dialog.register_float_field(field)
field.textChanged.connect(lambda text: dialog.update_buttons())
elif isinstance(value, int):
field = QtWidgets.QSpinBox(self)
field.setRange(-10**9, 10**9)  # integer bounds: QSpinBox.setRange expects ints
field.setValue(value)
elif isinstance(value, datetime.datetime):
field = QtWidgets.QDateTimeEdit(self)
field.setDateTime(value)
elif isinstance(value, datetime.date):
field = QtWidgets.QDateEdit(self)
field.setDate(value)
else:
field = QtWidgets.QLineEdit(repr(value), self)
self.formlayout.addRow(label, field)
# print(self.formlayout.fieldGrowthPolicy())
self.widgets.append(field)
def get(self):
valuelist = []
for index, (label, value) in enumerate(self.data):
field = self.widgets[index]
if label is None:
# Separator / Comment
continue
elif tuple_to_qfont(value) is not None:
value = field.get_font()
elif (isinstance(value, six.string_types)
or mcolors.is_color_like(value)):
value = six.text_type(field.text())
elif isinstance(value, (list, tuple)):
index = int(field.currentIndex())
if isinstance(value[0], (list, tuple)):
value = value[index][0]
else:
value = value[index]
elif isinstance(value, bool):
value = field.checkState() == QtCore.Qt.Checked
elif isinstance(value, float):
value = float(str(field.text()))
elif isinstance(value, int):
value = int(field.value())
elif isinstance(value, datetime.datetime):
value = field.dateTime().toPyDateTime()
elif isinstance(value, datetime.date):
value = field.date().toPyDate()
else:
value = eval(str(field.text()))
valuelist.append(value)
return valuelist
class FormComboWidget(QtWidgets.QWidget):
update_buttons = QtCore.Signal()
def __init__(self, datalist, comment="", parent=None):
QtWidgets.QWidget.__init__(self, parent)
layout = QtWidgets.QVBoxLayout()
self.setLayout(layout)
self.combobox = QtWidgets.QComboBox()
layout.addWidget(self.combobox)
self.stackwidget = QtWidgets.QStackedWidget(self)
layout.addWidget(self.stackwidget)
self.combobox.currentIndexChanged.connect(self.stackwidget.setCurrentIndex)
self.widgetlist = []
for data, title, comment in datalist:
self.combobox.addItem(title)
widget = FormWidget(data, comment=comment, parent=self)
self.stackwidget.addWidget(widget)
self.widgetlist.append(widget)
def setup(self):
for widget in self.widgetlist:
widget.setup()
def get(self):
return [widget.get() for widget in self.widgetlist]
class FormTabWidget(QtWidgets.QWidget):
update_buttons = QtCore.Signal()
def __init__(self, datalist, comment="", parent=None):
QtWidgets.QWidget.__init__(self, parent)
layout = QtWidgets.QVBoxLayout()
self.tabwidget = QtWidgets.QTabWidget()
layout.addWidget(self.tabwidget)
self.setLayout(layout)
self.widgetlist = []
for data, title, comment in datalist:
if len(data[0]) == 3:
widget = FormComboWidget(data, comment=comment, parent=self)
else:
widget = FormWidget(data, comment=comment, parent=self)
index = self.tabwidget.addTab(widget, title)
self.tabwidget.setTabToolTip(index, comment)
self.widgetlist.append(widget)
def setup(self):
for widget in self.widgetlist:
widget.setup()
def get(self):
return [widget.get() for widget in self.widgetlist]
class FormDialog(QtWidgets.QDialog):
"""Form Dialog"""
def __init__(self, data, title="", comment="", icon=None, parent=None, apply=None):
QtWidgets.QDialog.__init__(self, parent)
self.apply_callback = apply
# Form
if isinstance(data[0][0], (list, tuple)):
self.formwidget = FormTabWidget(data, comment=comment, parent=self)
elif len(data[0]) == 3:
self.formwidget = FormComboWidget(data, comment=comment, parent=self)
else:
self.formwidget = FormWidget(data, comment=comment, parent=self)
layout = QtWidgets.QVBoxLayout()
layout.addWidget(self.formwidget)
self.float_fields = []
self.formwidget.setup()
# Button box
self.bbox = bbox = QtWidgets.QDialogButtonBox(
QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel)
self.formwidget.update_buttons.connect(self.update_buttons)
if self.apply_callback is not None:
apply_btn = bbox.addButton(QtWidgets.QDialogButtonBox.Apply)
apply_btn.clicked.connect(self.apply)
bbox.accepted.connect(self.accept)
bbox.rejected.connect(self.reject)
layout.addWidget(bbox)
self.setLayout(layout)
self.setWindowTitle(title)
if not isinstance(icon, QtGui.QIcon):
icon = QtWidgets.QWidget().style().standardIcon(QtWidgets.QStyle.SP_MessageBoxQuestion)
self.setWindowIcon(icon)
def register_float_field(self, field):
self.float_fields.append(field)
def update_buttons(self):
valid = True
for field in self.float_fields:
if not is_edit_valid(field):
valid = False
for btn_type in (QtWidgets.QDialogButtonBox.Ok,
QtWidgets.QDialogButtonBox.Apply):
btn = self.bbox.button(btn_type)
if btn is not None:
btn.setEnabled(valid)
def accept(self):
self.data = self.formwidget.get()
QtWidgets.QDialog.accept(self)
def reject(self):
self.data = None
QtWidgets.QDialog.reject(self)
def apply(self):
self.apply_callback(self.formwidget.get())
def get(self):
"""Return form result"""
return self.data
def fedit(data, title="", comment="", icon=None, parent=None, apply=None):
"""
Create form dialog and return result
(if Cancel button is pressed, return None)
data: datalist, datagroup
title: string
comment: string
icon: QIcon instance
parent: parent QWidget
apply: apply callback (function)
datalist: list/tuple of (field_name, field_value)
datagroup: list/tuple of (datalist *or* datagroup, title, comment)
-> one field for each member of a datalist
-> one tab for each member of a top-level datagroup
-> one page (of a multipage widget, each page can be selected with a combo
box) for each member of a datagroup inside a datagroup
Supported types for field_value:
- int, float, str, unicode, bool
- colors: in Qt-compatible text form, i.e. in hex format or name (red,...)
(automatically detected from a string)
- list/tuple:
* the first element will be the selected index (or value)
* the other elements can be couples (key, value) or only values
"""
# Create a QApplication instance if no instance currently exists
# (e.g., if the module is used directly from the interpreter)
if QtWidgets.QApplication.startingUp():
_app = QtWidgets.QApplication([])
dialog = FormDialog(data, title, comment, icon, parent, apply)
if dialog.exec_():
return dialog.get()
if __name__ == "__main__":
# def create_datalist_example():
# return [('str', 'this is a string'),
# ('list', [0, '1', '3', '4']),
# ('list2', ['--', ('none', 'None'), ('--', 'Dashed'),
# ('-.', 'DashDot'), ('-', 'Solid'),
# ('steps', 'Steps'), (':', 'Dotted')]),
# ('float', 1.2),
# (None, 'Other:'),
# ('int', 12),
# ('font', ('Arial', 10, False, True)),
# ('color', '#123409'),
# ('bool', True),
# ('date', datetime.date(2010, 10, 10)),
# ('datetime', datetime.datetime(2010, 10, 10)),
# ]
#
# def create_datagroup_example():
# datalist = create_datalist_example()
# return ((datalist, "Category 1", "Category 1 comment"),
# (datalist, "Category 2", "Category 2 comment"),
# (datalist, "Category 3", "Category 3 comment"))
#
# #--------- datalist example
# datalist = create_datalist_example()
#
# def apply_test(data):
# print("data:", data)
# print("result:", fedit(datalist, title="Example",
# comment="This is just an <b>example</b>.",
# apply=apply_test))
# --------- datagroup example
# datagroup = create_datagroup_example()
# print("result:", fedit(datagroup, "Global title"))
#--------- datagroup inside a datagroup example
# datalist = create_datalist_example()
# datagroup = create_datagroup_example()
# print("result:", fedit(((datagroup, "Title 1", "Tab 1 comment"),
# (datalist, "Title 2", "Tab 2 comment"),
# (datalist, "Title 3", "Tab 3 comment")),
# "Global title"))
# MY TEST
data = [('str', 'this is a string'),
('str', 'this is a string'),
('str', 'this is a string'),
('list', [0, '1', '3', '4']),
('list', [2, '1', '3', '4']),
('list2', ['--', ('none', 'None'), ('--', 'Dashed'),
('-.', 'DashDot'), ('-', 'Solid'),
('steps', 'Steps'), (':', 'Dotted')]),
('float', 1.2),
(None, 'Other:'),
('int', 12),
('font', ('Arial', 10, False, True)),
('color', '#123409'),
('bool', True),
('date', datetime.date(2010, 10, 10)),
('datetime', datetime.datetime(2010, 10, 10)),
]
def apply_test(a):
print(a)
fedit(data, title='henlo', comment='haahha', apply=apply_test)<|fim▁end|> | qcolor.setRgbF(*rgba) |
<|file_name|>slice_dump.py<|end_file_name|><|fim▁begin|>"""
>>> sd = SliceDump()
<|fim▁hole|> >>> sd[1]
1
>>> sd[2:5]
slice(2, 5, None)
>>> sd[:2]
slice(None, 2, None)
>>> sd[7:]
slice(7, None, None)
>>> sd[:]
slice(None, None, None)
>>> sd[1:9:3]
slice(1, 9, 3)
>>> sd[1:9:3, 2:3]
(slice(1, 9, 3), slice(2, 3, None))
>>> s = sd[1:9:3]
>>> s.indices(20)
(1, 9, 3)
>>> s.indices(5)
(1, 5, 3)
>>> s.indices(1)
(1, 1, 3)
>>> s.indices(0)
(0, 0, 3)
"""
class SliceDump:
def __getitem__(self, pos):
return pos<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from .base import WhiteNoise
__version__ = '2.0.3'
<|fim▁hole|><|fim▁end|> | __all__ = ['WhiteNoise'] |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"
"log"
"github.com/jackytck/projecteuler/tools"
)
func match(p, s string) bool {
m := make(map[rune]bool)
for _, r := range s {
m[r] = true
}
var e string<|fim▁hole|> if m[r] {
e += string(r)
}
}
return e == s
}
func check(p string, log []string) bool {
for _, s := range log {
if !match(p, s) {
return false
}
}
return true
}
func solve(path string) string {
// read
logs, err := tools.ReadFile(path)
if err != nil {
log.Fatal(err)
}
// make unique set
exist := make(map[string]bool)
var unique []string
for _, v := range logs {
if !exist[v] {
unique = append(unique, v)
exist[v] = true
}
}
// assume no repeated digits and that digits 4 and 5 never appear
for p := range tools.Perms([]int{0, 1, 2, 3, 6, 7, 8, 9}) {
psw := tools.JoinIntsString(p...)
if check(psw, unique) {
return psw
}
}
return ""
}
func main() {
fmt.Println(solve("./p079_keylog.txt"))
}
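// Illustration of match() on hypothetical keylog entries (not taken from
// p079_keylog.txt): for a candidate passcode p = "3186290",
//   match(p, "319") keeps the runes of p that occur in "319" -> "319": pass;
//   match(p, "680") keeps the runes of p that occur in "680" -> "860": fail.
// check() simply requires every logged attempt to pass this filter test.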
// Given that the three characters are always asked for in order, analyse the
// file so as to determine the shortest possible secret passcode of unknown
// length.
// Note:
// Assume the passcode has no repeated character.<|fim▁end|> | for _, r := range p { |
<|file_name|>tasks.ts<|end_file_name|><|fim▁begin|>import request from './request'
export type UUID = string
export type DateTime = string
// for options see class names here: https://github.com/GothenburgBitFactory/taskwarrior/blob/01696a307b6785be973e3e6428e6ade2a3872c1e/src/columns/ColUDA.h#L36
export type TaskwarriorDataType = 'string' | 'numeric' | 'date' | 'duration'
export interface Task {
id: UUID
uuid: UUID // Same as 'id'
short_id: number
// status: for options see https://github.com/GothenburgBitFactory/taskwarrior/blob/6727d08da05b1090e0eda2270bc35d09a4528e87/src/Task.h#L71
status: 'pending' | 'completed' | 'deleted' | 'recurring' | 'waiting'
urgency: number
description: string
project?: string
due?: DateTime
entry: DateTime
modified: DateTime
start?: DateTime
end?: DateTime
wait?: DateTime
until?: DateTime
scheduled?: DateTime
depends?: UUID[]
blocks?: UUID[]
annotations?: string[]
tags?: string[]
imask?: string
udas: {
[key: string]: any | undefined
}
}
export const TaskFieldTypes: {[key: string]: TaskwarriorDataType} = {
id: 'string',
uuid: 'string',
short_id: 'string',
status: 'string',<|fim▁hole|> entry: 'date',
modified: 'date',
start: 'date',
end: 'date',
wait: 'date',
until: 'date',
scheduled: 'date',
}
export const TaskArrayFieldTypes: {[key: string]: TaskwarriorDataType} = {
depends: 'string',
blocks: 'string',
annotations: 'string',
tags: 'string',
imask: 'string',
}
export interface TaskUpdate extends Partial<Task> {}
export async function getTasks(): Promise<Task[]> {
return request<Task[]>('GET', 'tasks', {})
}
export async function getTask(uuid: UUID): Promise<Task> {
return request<Task>('GET', `tasks/${uuid}`, {})
}
export async function createTask(task: TaskUpdate): Promise<Task> {
return request<Task>('POST', `tasks`, {
data: task,
})
}
export async function updateTask(task: Task): Promise<void> {
return request<void>('PUT', `tasks/${task.uuid}`, {
data: task,
})
}
export async function completeTask(uuid: UUID): Promise<void> {
return request<void>('DELETE', `tasks/${uuid}`, {})
}
export async function deleteTask(uuid: UUID): Promise<void> {
return request<void>('POST', `tasks/${uuid}/delete`, {})
}
export async function startTask(uuid: UUID): Promise<void> {
return request<void>('POST', `tasks/${uuid}/start`, {})
}
export async function stopTask(uuid: UUID): Promise<void> {
return request<void>('POST', `tasks/${uuid}/stop`, {})
}<|fim▁end|> | urgency: 'numeric',
description: 'string',
project: 'string',
due: 'date', |
<|file_name|>strength_and_resolution.py<|end_file_name|><|fim▁begin|>############################################
# [config.py]
# CONFIGURATION SETTINGS FOR A PARTICULAR METER
#
#
# Set the long-form name of this meter
name = "*PEAK only"
#
# [Do not remove or uncomment the following line]
Cs={}
############################################
############################################
# STRUCTURE PARAMETERS
#
# Parameters subject to conscious control by the poet. Kiparsky & Hanson (1996)
# call these "formally independent of phonological structure." By contrast,
# "realization parameters"--e.g., the size of a metrical position, which positions
# are regulated, and other constraints--"determine the way the structure is
# linguistically manifested, and are dependent on the prosodic givens of languge."
#
#
####
# [Number of feet in a line]
#
#Cs['number_feet!=2'] = 1 # require dimeter
#Cs['number_feet!=3'] = 1 # require trimeter
#Cs['number_feet!=4'] = 1 # require tetrameter
#Cs['number_feet!=5'] = 1 # require pentameter
#Cs['number_feet!=6'] = 1 # require hexameter
#Cs['number_feet!=7'] = 1 # require heptameter
#
#
####
# [Headedness of the line]
#
#Cs['headedness!=falling'] = 1 # require a falling rhythm (e.g. trochaic, dactylic)
#Cs['headedness!=rising'] = 1 # require a rising rhythm (e.g., iambic, anapestic)
#
############################################
############################################
# REALIZATION PARAMETERS
#
# All subsequent constraints can be seen as "realization parameters."
# See note to "structure parameters" above for more information.
#
#############################################
# METRICAL PARSING: POSITION SIZE
#
# Select how many syllables are at least *possible* in strong or weak positions
# cf. Kiparsky & Hanson's "position size" parameter ("Parametric Theory" 1996)
#
#
######
# [Maximum position size]
#
# The maximum number of syllables allowed in strong metrical positions (i.e. "s")
maxS=2
#
# The maximum number of syllables allowed in weak metrical positions (i.e. "w")
maxW=2
#
#
######
# [Minimum position size]
#
# (Recommended) Positions are at minimum one syllable in size
splitheavies=0
#
# (Unrecommended) Allow positions to be as small as a single mora
# i.e. (a split heavy syllable can straddle two metrical positions)
#splitheavies=1
############################################
############################################
# METRICAL PARSING: METRICAL CONSTRAINTS
#
# Here you can configure the constraints used by the metrical parser.
# Each constraint is expressed in the form:
# Cs['(constraint name)']=(constraint weight)
# Constraint weights do not affect harmonic bounding (i.e. which parses
# survive as possibilities), but they do affect how those possibilities
# are sorted to select the "best" parse.
#
#
######
# [Constraints regulating the 'STRENGTH' of a syllable]
#
# A syllable is strong if it is a peak in a polysyllabic word:
# the syllables in 'liberty', stressed-unstressed-unstressed,
# are, in terms of *strength*, strong-weak-neutral, because
# the first syllable is more stressed than its neighbor;
# the second syllable less stressed; and the third equally stressed.
#
###
# [Stricter versions:]
#
# A strong metrical position should not contain any weak syllables ("troughs"):
#Cs['strength.s=>-u']=1
#
# A weak metrical position may not contain any strong syllables ("peaks"):
# [Kiparsky and Hanson believe this is Shakespeare's meter]
Cs['strength.w=>-p']=1
#
###
# [Laxer versions:]
#
# A strong metrical position should contain at least one strong syllable:
#Cs['strength.s=>p']=3
#
# A weak metrical position should contain at least one weak syllable:
#Cs['strength.w=>u']=3
#
#
#
######
# [Constraints regulating the STRESS of a syllable]
#
###
# [Stricter versions:]
#
# A strong metrical position should not contain any unstressed syllables:
# [Kiparsky and Hanson believe this is Hopkins' meter]
#Cs['stress.s=>-u']=1<|fim▁hole|>###
# [Laxer versions:]
#
# A strong metrical position should contain at least one stressed syllable:
#Cs['stress.s=>p']=2
#
# A weak metrical position must contain at least one unstressed syllable;
#Cs['stress.w=>u']=2
#
#
#
######
# [Constraints regulating the WEIGHT of a syllable]
#
# The weight of a syllable is its "quantity": short or long.
# These constraints are designed for "quantitative verse",
# as for example in classical Latin and Greek poetry.
#
###
# [Stricter versions:]
#
# A strong metrical position should not contain any light syllables:
#Cs['weight.s=>-u']=2
#
# A weak metrical position should not contain any heavy syllables:
#Cs['weight.w=>-p']=2
#
###
# [Laxer versions:]
#
# A strong metrical position should contain at least one heavy syllable:
#Cs['weight.s=>p']=2
#
# A weak metrical position must contain at least one light syllable;
#Cs['weight.w=>u']=2
#
#
#
######
# [Constraints regulating what's permissible as a DISYLLABIC metrical position]
# [(with thanks to Sam Bowman, who programmed many of these constraints)]
#
###
# [Based on weight:]
#
# A disyllabic metrical position should not contain more than a minimal foot:
# i.e. W-resolution requires first syllable to be light and unstressed.
Cs['footmin-w-resolution']=1
#
#
# A disyllabic metrical position should not contain more than a minimal foot:
# (i.e. allowed positions are syllables weighted light-light or light-heavy)
#Cs['footmin-noHX']=1000
#
#
# A disyllabic STRONG metrical position should not contain more than a minimal foot:
# (i.e. allowed positions are syllables weighted light-light or light-heavy)
#Cs['footmin-s-noHX']=1
#
# A disyllabic metrical position should be syllables weighted light-light:
#Cs['footmin-noLH-noHX']=1
#
###
# [Categorical:]
#
# A metrical position should not contain more than one syllable:
# [use to discourage disyllabic positions]
#Cs['footmin-none']=1
#
# A strong metrical position should not contain more than one syllable:
#Cs['footmin-no-s']=1
#
# A weak metrical position should not contain more than one syllable:
#Cs['footmin-no-w']=1
#
# A metrical position should not contain more than one syllable,
# *unless* that metrical position is the *first* or *second* in the line:
# [use to discourage disyllabic positions, but not trochaic inversions,
# or an initial "extrametrical" syllable]
#Cs['footmin-none-unless-in-first-two-positions']=1
#
# A metrical position should not contain more than one syllable,
# *unless* that metrical position is the *second* in the line:
# [use to discourage disyllabic positions, but not trochaic inversions]
#Cs['footmin-none-unless-in-second-position']=1
#
# A strong metrical position should not contain more than one syllable,
# *unless* it is preceded by a disyllabic *weak* metrical position:
# [use to implement the metrical pattern described by Derek Attridge,
# in The Rhythms of English Poetry (1982), and commented on by Bruce Hayes
# in his review of the book in Language 60.1 (1984).
# e.g. Shakespeare's "when.your|SWEET.IS|ue.your|SWEET.FORM|should|BEAR"
# [this implementation is different in that it only takes into account
# double-weak beats *preceding* -- due to the way in which the parser
# throws away bounded parses as it goes, it might not be possible for now
# to write a constraint referencing future positions]
#Cs['footmin-no-s-unless-preceded-by-ww']=10
# [The version that does reference future positions; but appears to be unstable]:
#Cs['attridge-ss-not-by-ww']=10
#
###
# [For disyllabic positions crossing a word boundary...
# (i.e. having two syllables, each from a different word)...
#
# ...allow only F-resolutions:
# (both words must be function words and be in a weak metrical position)
Cs['footmin-f-resolution']=1
#
# ...it should never cross a word boundary to begin with:
#Cs['footmin-wordbound']=1000
#
# ...both words should be function words:
#Cs['footmin-wordbound-bothnotfw']=1
#
# ...at least one word should be a function word:
#Cs['footmin-wordbound-neitherfw']=1
#
# ...the left-hand syllable should be a function-word:
#Cs['footmin-wordbound-leftfw']=1
#
# ...the right-hand syllable should be a function word:
#Cs['footmin-wordbound-rightfw']=1
#
# ...neither word should be a monosyllable:
#Cs['footmin-wordbound-nomono']=1
#
# ...neither word should be a LEXICAL monosyllable
# (i.e. function words and polysyllabic words ok)
#Cs['footmin-wordbound-lexmono']=1
###
# [Miscellaneous constraints relating to disyllabic positions]
#
# A disyllabic metrical position may contain a strong syllable
# of a lexical word only if the syllable is (i) light and
# (ii) followed within the same position by an unstressed
# syllable normally belonging to the same word.
# [written by Sam Bowman]
#Cs['footmin-strongconstraint']=1
#
# The final metrical position of the line should not be 'ww'
# [use to encourage "...LI|ber|TY" rather than "...LI|ber.ty"]
#Cs['posthoc-no-final-ww']=2
#
# The final metrical position of the line should not be 'w' or 'ww'
#Cs['posthoc-no-final-w']=2
#
# A line should have all 'ww' or all 'w':
# It works by:
# Nw = Number of weak positions in the line
# Mw = Maximum number of occurrences of 'w' metrical position
# Mww = Maximum number of occurrences of 'ww' metrical position
# M = Whichever is bigger, Mw or Mww
# V = Nw - M
# Violation Score = V * [Weight]
# [use to encourage consistency of meter across line]
# [feel free to make this a decimal number, like 0.25]
#Cs['posthoc-standardize-weakpos']=1
#
#
#
######
# [MISCELLANEOUS constraints]
#
# A function word can fall only in a weak position:
#Cs['functiontow']=2
#
# An initial syllable must be in a weak position:
#Cs['initialstrong']=2
#
# The first metrical position will not be evaluated
# for any of the strength/stress/weight correspondence constraints:
# [set to 1 to be true]
#Cs['extrametrical-first-pos']=1
#
# The first two metrical positions will not be evaluated
# for any of the strength/stress/weight correspondence constraints:
# [set to 1 to be true]
Cs['skip_initial_foot']=1
#
# A word should not be an elision [use to discourage elisions]:
#Cs['word-elision']=1
#
# A weak metrical position should not contain any syllables
# that are stressed and heavy: [Meter of Finnish "Kalevala"]
#Cs['kalevala.w=>-p']=1
#
# A strong metrical position should not contain any syllables
# that are stressed and light: [Meter of Finnish "Kalevala"]
#Cs['kalevala.s=>-u']=1
############################################<|fim▁end|> | #
# A weak metrical position should not contain any stressed syllables:
#Cs['stress.w=>-p']=1
# |
<|file_name|>test_requesocks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# requires:
# - pysocks for socks5 proxy socket, required by requesocks
# - certifi for HTTPS certificate validation, also used in depths of requesocks
# - requesocks
import requesocks
import certifi
#SOCKS5_PROXY = '127.0.0.1:9050'
SOCKS5_PROXY = '192.168.20.1:9050'
USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' \
'Chrome/45.0.2454.85 Safari/537.36'
def main():
session = requesocks.session()
session.proxies = {
'http': 'socks5://{0}'.format(SOCKS5_PROXY),
'https': 'socks5://{0}'.format(SOCKS5_PROXY)<|fim▁hole|> # url = 'http://yandex.ru/internet'
# url = 'https://www.whatismyip.com/my-ip-information/'
url = 'http://httpbin.org/ip'
print('Using proxy: {0}'.format(SOCKS5_PROXY))
print('Requesting URL: {0}'.format(url))
r = session.get(url)
if r.status_code == 200:
text = r.text
if text is None:
if type(r.content) == bytes:
text = r.content.decode('UTF-8')
if text:
print(text)
with open('res.html', 'wt', encoding=r.encoding) as f:
f.write(text)
else:
print('status code: {0}'.format(r.status_code))
if __name__ == '__main__':
main()<|fim▁end|> | }
session.headers.update({'user-agent': USER_AGENT}) |
<|file_name|>tenantaccessgit.go<|end_file_name|><|fim▁begin|>package apimanagement
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// TenantAccessGitClient is the apiManagement Client
type TenantAccessGitClient struct {
BaseClient
}
// NewTenantAccessGitClient creates an instance of the TenantAccessGitClient client.
func NewTenantAccessGitClient(subscriptionID string) TenantAccessGitClient {
return NewTenantAccessGitClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewTenantAccessGitClientWithBaseURI creates an instance of the TenantAccessGitClient client.
func NewTenantAccessGitClientWithBaseURI(baseURI string, subscriptionID string) TenantAccessGitClient {
return TenantAccessGitClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// Get gets the Git access configuration for the tenant.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
func (client TenantAccessGitClient) Get(ctx context.Context, resourceGroupName string, serviceName string) (result AccessInformationContract, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TenantAccessGitClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.TenantAccessGitClient", "Get", err.Error())
}
req, err := client.GetPreparer(ctx, resourceGroupName, serviceName)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client TenantAccessGitClient) GetPreparer(ctx context.Context, resourceGroupName string, serviceName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"accessName": autorest.Encode("path", "access"),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/tenant/{accessName}/git", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client TenantAccessGitClient) GetSender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client TenantAccessGitClient) GetResponder(resp *http.Response) (result AccessInformationContract, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// RegeneratePrimaryKey regenerate primary access key for GIT.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
func (client TenantAccessGitClient) RegeneratePrimaryKey(ctx context.Context, resourceGroupName string, serviceName string) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TenantAccessGitClient.RegeneratePrimaryKey")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.TenantAccessGitClient", "RegeneratePrimaryKey", err.Error())
}
req, err := client.RegeneratePrimaryKeyPreparer(ctx, resourceGroupName, serviceName)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "RegeneratePrimaryKey", nil, "Failure preparing request")
return
}
resp, err := client.RegeneratePrimaryKeySender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "RegeneratePrimaryKey", resp, "Failure sending request")
return
}
result, err = client.RegeneratePrimaryKeyResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "RegeneratePrimaryKey", resp, "Failure responding to request")
}
return
}
// RegeneratePrimaryKeyPreparer prepares the RegeneratePrimaryKey request.
func (client TenantAccessGitClient) RegeneratePrimaryKeyPreparer(ctx context.Context, resourceGroupName string, serviceName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"accessName": autorest.Encode("path", "access"),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/tenant/{accessName}/git/regeneratePrimaryKey", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// RegeneratePrimaryKeySender sends the RegeneratePrimaryKey request. The method will close the
// http.Response Body if it receives an error.
func (client TenantAccessGitClient) RegeneratePrimaryKeySender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
return autorest.SendWithSender(client, req, sd...)
}
// RegeneratePrimaryKeyResponder handles the response to the RegeneratePrimaryKey request. The method always
// closes the http.Response Body.
func (client TenantAccessGitClient) RegeneratePrimaryKeyResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// RegenerateSecondaryKey regenerate secondary access key for GIT.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
func (client TenantAccessGitClient) RegenerateSecondaryKey(ctx context.Context, resourceGroupName string, serviceName string) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TenantAccessGitClient.RegenerateSecondaryKey")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.TenantAccessGitClient", "RegenerateSecondaryKey", err.Error())
}
req, err := client.RegenerateSecondaryKeyPreparer(ctx, resourceGroupName, serviceName)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "RegenerateSecondaryKey", nil, "Failure preparing request")
return
}
resp, err := client.RegenerateSecondaryKeySender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "RegenerateSecondaryKey", resp, "Failure sending request")
return
}
result, err = client.RegenerateSecondaryKeyResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TenantAccessGitClient", "RegenerateSecondaryKey", resp, "Failure responding to request")
}
return
}
// RegenerateSecondaryKeyPreparer prepares the RegenerateSecondaryKey request.
func (client TenantAccessGitClient) RegenerateSecondaryKeyPreparer(ctx context.Context, resourceGroupName string, serviceName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"accessName": autorest.Encode("path", "access"),<|fim▁hole|> "serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/tenant/{accessName}/git/regenerateSecondaryKey", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// RegenerateSecondaryKeySender sends the RegenerateSecondaryKey request. The method will close the
// http.Response Body if it receives an error.
func (client TenantAccessGitClient) RegenerateSecondaryKeySender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
return autorest.SendWithSender(client, req, sd...)
}
// RegenerateSecondaryKeyResponder handles the response to the RegenerateSecondaryKey request. The method always
// closes the http.Response Body.
func (client TenantAccessGitClient) RegenerateSecondaryKeyResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}<|fim▁end|> | "resourceGroupName": autorest.Encode("path", resourceGroupName), |
<|file_name|>discover.js<|end_file_name|><|fim▁begin|>import Ember from 'ember';
import moment from 'moment';
import dateFormat from '../utils/date-format';
export default Ember.Controller.extend({
loadingMeta: false,
notify: Ember.inject.service(),
aggController: Ember.inject.controller('discover.aggregate'),
queryParams: ['center', 'obs_date__le', 'obs_date__ge', 'agg', 'location_geom__within'],
obs_date__le: dateFormat(moment()),
obs_date__ge: dateFormat(moment().subtract(90, 'days')),
agg: 'week',
center: 'default',
location_geom__within: null,
_resetParams() {
this.set('obs_date__le', dateFormat(moment()));
this.set('obs_date__ge', dateFormat(moment().subtract(90, 'days')));
this.set('agg', 'week');
this.set('center', 'default');
this.set('location_geom__within', null);
},
queryParamsHash: Ember.computed('obs_date__le', 'obs_date__ge',
'agg', 'center', 'location_geom__within', function () {
return this.getProperties(this.get('queryParams'));
}),
queryParamsClone() {
return Ember.copy(this.get('queryParamsHash'));
},
// Central location to define all acceptable values for aggregate-query-maker
// IDs for cities, their display names, and bounds (usually city limits)
// City bounding boxes determined via https://www.mapdevelopers.com/geocode_bounding_box.php
cities: {
default: {
// "Cities" named "default" are not shown to the user
// This is a copy of Chicago
bounds: [
[42.023131, -87.940267], // NW corner
[41.644335, -87.523661], // SE corner
],
location: [41.795509, -87.581916],
zoom: 10,
},
chicago: {
label: 'Chicago, IL',
bounds: [
[42.023131, -87.940267], // NW corner
[41.644335, -87.523661], // SE corner
],
location: [41.795509, -87.581916],
zoom: 10,
},
newyork: {
label: 'New York, NY',
bounds: [
[40.917577, -74.259090], // NW corner
[40.477399, -73.700272], // SE corner
],
location: [40.7268362, -74.0017699],
zoom: 10,
},
seattle: {
label: 'Seattle, WA',
bounds: [
[47.734140, -122.459696],
[47.491912, -122.224433],
],
location: [47.6076397, -122.3258644],
zoom: 10,
},
sanfrancisco: {
label: 'San Francisco, CA',
bounds: [
[37.929820, -123.173825], // NW corner (yes, the city limits DO include those tiny islands)
[37.639830, -122.281780], // SE corner
],
location: [37.7618864, -122.4406926],
zoom: 12,
},
austin: {
label: 'Austin, TX',
bounds: [
[30.516863, -97.938383], // NW corner
[30.098659, -97.568420], // SE corner
],
location: [30.3075693, -97.7399898],
zoom: 10,
},
denver: {<|fim▁hole|> ],
location: [39.7534338, -104.890141],
zoom: 11,
},
bristol: {
label: 'Bristol, England, UK',
bounds: [
[51.544433, -2.730516], // NW corner
[51.392545, -2.450902], // SE corner
],
location: [51.4590572, -2.5909956],
zoom: 11,
},
atlanta: {
label: 'Atlanta, GA',
bounds: [
[33.647808, -84.551819],
[33.887618, -84.2891076],
],
location: [33.748998, -84.388113],
zoom: 10,
},
},
aggOptions: ([
{ id: 'day', label: 'day' },
{ id: 'week', label: 'week' },
{ id: 'month', label: 'month' },
{ id: 'quarter', label: 'quarter' },
{ id: 'year', label: 'year' },
]),
resOptions: ([
{ id: '100', label: '100 meters' },
{ id: '200', label: '200 meters' },
{ id: '300', label: '300 meters' },
{ id: '400', label: '400 meters' },
{ id: '500', label: '500 meters' },
{ id: '1000', label: '1 kilometer' },
]),
// ------------- end of central aggregate-query-maker values ---------------//
// _zoomIn() {
// this.set('zoom', true);
// const self = this;
// Ember.run.next(() => {
// self.set('zoom', false);
// });
// },
_resetDatePickers() {
this.set('override', true);
Ember.run.next(() => {
this.set('override', false);
});
},
_inIndex() {
// Thanks: https://gist.github.com/eliotsykes/8954cf64fcd0df16f519
return Ember.getOwner(this).lookup('controller:application').currentPath === 'discover.index';
},
actions: {
submit() {
if (this.get('submitCooldown')) {
this.get('notify').info('Cooldown active. Please wait a few seconds between query submissions.');
return;
}
// Implement a cooldown on the submit button to
// prevent double-clicks from reloading the query
// before a new one begins (resulting in undefined behavior)
this.set('submitCooldown', true);
Ember.run.later(this, function () {
this.set('submitCooldown', false);
}, 500);
// Reflect to find if we need to transition,
// or just reload current model.
if (this._inIndex()) {
this.transitionToRoute('discover.aggregate');
} else {
this.get('aggController').send('submit');
}
// Refocus map on user-drawn shape.
// if (this.get('location_geom__within')) {
// this._zoomIn();
// }
},
reset() {
if (!this._inIndex()) {
this.transitionToRoute('index');
}
this._resetParams();
this._resetDatePickers();
},
},
});<|fim▁end|> | label: 'Denver, CO',
bounds: [
[39.914247, -105.109927], // NW corner
[39.614430, -104.600296], // SE corner |
<|file_name|>status.go<|end_file_name|><|fim▁begin|>// Copyright 2017 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ==============================================================================
package commands
import (
"fmt"
"log"
"strings"
"sync"
"time"<|fim▁hole|>
"context"
"flag"
"github.com/fatih/color"
"github.com/google/subcommands"
"github.com/tensorflow/tpu/tools/ctpu/config"
"github.com/tensorflow/tpu/tools/ctpu/ctrl"
)
// StatusTPUCP encapsulates the control plane interfaces required to execute the Status command.
type StatusTPUCP interface {
// OptionallyRetrieveInstance retrieves the instance, but can optionally not enable the TPU API.
OptionallyRetrieveInstance(bool) (*ctrl.TPUInstance, bool, error)
}
// StatusGCECP encapsulates the control plane interfaces required to execute the Status command.
type StatusGCECP interface {
// OptionallyRetrieveInstance retrieves the instance, but can optionally not enable the TPU API.
OptionallyRetrieveInstance(bool) (*ctrl.GCEInstance, bool, error)
}
type statusCmd struct {
cfg *config.Config
tpu StatusTPUCP
gce StatusGCECP
details bool
noColor bool
}
// StatusCommand creates the status command.
func StatusCommand(cfg *config.Config, tpu StatusTPUCP, gce StatusGCECP) subcommands.Command {
return &statusCmd{
cfg: cfg,
tpu: tpu,
gce: gce,
}
}
func (statusCmd) Name() string {
return "status"
}
func (s *statusCmd) SetFlags(f *flag.FlagSet) {
s.cfg.SetFlags(f) // Allow users to specify cfg flags either before or after the subcommand name.
f.BoolVar(&s.details, "details", false,
"Prints out more details about the state of the Compute Engine VM and Cloud TPU.")
f.BoolVar(&s.noColor, "no-color", false, "Disable color in the output.")
}
func (statusCmd) Synopsis() string {
return "queries the control planes for the current Compute Engine & TPU status."
}
func (statusCmd) Usage() string {
return `ctpu status [--no-color]
`
}
type statusCmdAlias struct {
statusCmd
}
// StatusCommandAlias creates an alias for the status command with a shorter name.
func StatusCommandAlias(cfg *config.Config, tpu StatusTPUCP, gce StatusGCECP) subcommands.Command {
return &statusCmdAlias{statusCmd{cfg: cfg, tpu: tpu, gce: gce}}
}
func (statusCmdAlias) Name() string { return "st" }
func (statusCmdAlias) Synopsis() string {
return "alias to ctpu status (retrieves info on current instances)"
}
func (statusCmdAlias) Usage() string { return "ctpu st\n" }
func (s *statusCmd) runnableStatus(exists, isRunning bool, status string) string {
if !exists {
return color.YellowString("--")
}
if isRunning {
return color.GreenString("RUNNING")
}
return color.RedString(status)
}
func (s *statusCmd) vmStatus(vm *ctrl.GCEInstance) string {
var status string
if vm != nil {
status = vm.Status
}
exists := vm != nil
isRunning := vm != nil && vm.IsRunning()
return s.runnableStatus(exists, isRunning, status)
}
func (s *statusCmd) timeDelta(t time.Time) string {
delta := time.Since(t).Round(time.Minute)
if delta < 0 {
return "--"
}
if delta.Minutes() < 1 {
return "< 1 minute"
}
minutes := (delta / time.Minute) % 60
hours := delta / time.Hour
days := delta / (time.Hour * 24)
if days > 3 {
return fmt.Sprintf("%dd %dh", days, hours%24)
}
if hours == 0 {
return fmt.Sprintf("%dm", minutes)
}
return fmt.Sprintf("%dh %dm", hours, minutes)
}
func (s *statusCmd) flockStatus(vm *ctrl.GCEInstance, tpu *ctrl.TPUInstance) string {
if vm == nil && tpu == nil {
return color.BlueString("No instances currently exist.")
}
if vm != nil && vm.IsRunning() && tpu != nil && tpu.IsRunning() {
return color.GreenString("Your cluster is running!")
}
if vm != nil && !vm.IsRunning() && tpu == nil {
return color.YellowString("Your cluster is paused.")
}
return color.RedString("Your cluster is in an unhealthy state.")
}
func (s *statusCmd) tpuStatus(tpu *ctrl.TPUInstance) string {
var status string
if tpu != nil {
status = tpu.State
}
exists := tpu != nil
isRunning := tpu != nil && tpu.IsRunning()
return s.runnableStatus(exists, isRunning, status)
}
func (s *statusCmd) Execute(ctx context.Context, flags *flag.FlagSet, args ...interface{}) subcommands.ExitStatus {
err := s.cfg.Validate()
if err != nil {
log.Print(err)
return subcommands.ExitFailure
}
if s.noColor {
color.NoColor = true
}
var vm *ctrl.GCEInstance
var tpu *ctrl.TPUInstance
var gceEnabled, tpuEnabled bool
var exitTPU, exitVM subcommands.ExitStatus
var wg sync.WaitGroup
wg.Add(2)
go func() {
var err error
vm, gceEnabled, err = s.gce.OptionallyRetrieveInstance(false)
if err != nil {
log.Print(err)
exitVM = subcommands.ExitFailure
}
wg.Done()
}()
go func() {
var err error
tpu, tpuEnabled, err = s.tpu.OptionallyRetrieveInstance(false)
if err != nil {
log.Print(err)
exitTPU = subcommands.ExitFailure
}
wg.Done()
}()
wg.Wait()
if exitTPU != subcommands.ExitSuccess {
return exitTPU
}
if exitVM != subcommands.ExitSuccess {
return exitVM
}
if !gceEnabled || !tpuEnabled {
if !gceEnabled && !tpuEnabled {
fmt.Println("Neither the Compute Engine nor the Cloud TPU services have been enabled.")
} else if !gceEnabled {
fmt.Println("The Compute Engine service has not been enabled.")
} else {
fmt.Println("The Cloud TPU service has not been enabled.")
}
return subcommands.ExitFailure
}
fmt.Printf(`%s
Compute Engine VM: %s
Cloud TPU: %s
`, s.flockStatus(vm, tpu), s.vmStatus(vm), s.tpuStatus(tpu))
vmIP, vmCreated, vmCreateDelta, machineType := "--", "--", "--", "--"
if vm != nil {
if len(vm.NetworkInterfaces) > 0 {
vmIP = vm.NetworkInterfaces[0].NetworkIP
}
vmCreated = vm.CreationTimestamp
if createTime, err := time.Parse(time.RFC3339, vmCreated); err == nil {
vmCreateDelta = s.timeDelta(createTime)
}
machineTypeParts := strings.Split(vm.MachineType, "/")
machineType = machineTypeParts[len(machineTypeParts)-1]
}
tpuType, tpuIP, tpuVer, tpuSA, tpuCreated, tpuCreateDelta, tpuState, tpuHealth, tpuPreemptible := "--", "--", "--", "--", "--", "--", "--", "--", "--"
if tpu != nil {
tpuType = tpu.AcceleratorType
if len(tpu.NetworkEndpoints) > 0 {
tpuIP = tpu.NetworkEndpoints[0].IpAddress
}
tpuVer = tpu.TensorflowVersion
tpuSA = tpu.ServiceAccount
tpuCreated = tpu.CreateTime
if createTime, err := time.Parse(time.RFC3339Nano, tpuCreated); err == nil {
tpuCreateDelta = s.timeDelta(createTime)
}
tpuState = tpu.State
tpuHealth = tpu.Health
tpuPreemptible = fmt.Sprintf("%v", tpu.IsPreemptible())
}
if s.details {
fmt.Printf(`
Compute Engine IP Address: %s
Compute Engine Created: %s ago (@: %s)
Compute Engine Machine Type: %s
TPU Accelerator Type: %s
TPU IP Address: %s
TPU TF Version: %s
TPU Service Acct: %s
TPU Created: %s ago (@: %s)
TPU State: %s
TPU Health: %s
TPU Preemptible: %s
`, vmIP, vmCreateDelta, vmCreated, machineType, tpuType, tpuIP, tpuVer, tpuSA, tpuCreateDelta, tpuCreated, tpuState, tpuHealth, tpuPreemptible)
}
return subcommands.ExitSuccess
}<|fim▁end|> | |
<|file_name|>shapefile.py<|end_file_name|><|fim▁begin|>"""
hapefile.py
Provides read and write support for ESRI Shapefiles.
author: jlawhead<at>geospatialpython.com
date: 20110927
version: 1.1.4
Compatible with Python versions 2.4-3.x
"""
from struct import pack, unpack, calcsize, error
import os
import sys
import time
import array
#
# Constants for shape types
NULL = 0
POINT = 1
POLYLINE = 3
POLYGON = 5
MULTIPOINT = 8
POINTZ = 11
POLYLINEZ = 13
POLYGONZ = 15
MULTIPOINTZ = 18
POINTM = 21
POLYLINEM = 23
POLYGONM = 25
MULTIPOINTM = 28
MULTIPATCH = 31
PYTHON3 = sys.version_info[0] == 3
def b(v):
if PYTHON3:<|fim▁hole|> return v.encode('utf-8')
elif isinstance(v, bytes):
# Already bytes.
return v
else:
# Error.
raise Exception('Unknown input type')
else:
# For python 2 assume str passed in and return str.
return v
def u(v):
if PYTHON3:
if isinstance(v, bytes):
# For python 3 decode bytes to str.
return v.decode('utf-8')
elif isinstance(v, str):
# Already str.
return v
else:
# Error.
raise Exception('Unknown input type')
else:
# For python 2 assume str passed in and return str.
return v
def is_string(v):
if PYTHON3:
return isinstance(v, str)
else:
return isinstance(v, basestring)
class _Array(array.array):
"""Converts python tuples to lits of the appropritate type.
Used to unpack different shapefile header parts."""
def __repr__(self):
return str(self.tolist())
class _Shape:
def __init__(self, shapeType=None):
"""Stores the geometry of the different shape types
specified in the Shapefile spec. Shape types are
usually point, polyline, or polygons. Every shape type
except the "Null" type contains points at some level for
example verticies in a polygon. If a shape type has
multiple shapes containing points within a single
geometry record then those shapes are called parts. Parts
are designated by their starting index in geometry record's
list of shapes."""
self.shapeType = shapeType
self.points = []
class _ShapeRecord:
"""A shape object of any type."""
def __init__(self, shape=None, record=None):
self.shape = shape
self.record = record
class ShapefileException(Exception):
"""An exception to handle shapefile specific problems."""
pass
class Reader:
"""Reads the three files of a shapefile as a unit or
separately. If one of the three files (.shp, .shx,
.dbf) is missing no exception is thrown until you try
to call a method that depends on that particular file.
The .shx index file is used if available for efficiency
but is not required to read the geometry from the .shp
file. The "shapefile" argument in the constructor is the
name of the file you want to open.
You can instantiate a Reader without specifying a shapefile
and then specify one later with the load() method.
Only the shapefile headers are read upon loading. Content
within each file is only accessed when required and as
efficiently as possible. Shapefiles are usually not large
but they can be.
"""
def __init__(self, *args, **kwargs):
self.shp = None
self.shx = None
self.dbf = None
self.shapeName = "Not specified"
self._offsets = []
self.shpLength = None
self.numRecords = None
self.fields = []
self.__dbfHdrLength = 0
# See if a shapefile name was passed as an argument
if len(args) > 0:
if type(args[0]) is type("stringTest"):
self.load(args[0])
return
if "shp" in kwargs.keys():
if hasattr(kwargs["shp"], "read"):
self.shp = kwargs["shp"]
if hasattr(self.shp, "seek"):
self.shp.seek(0)
if "shx" in kwargs.keys():
if hasattr(kwargs["shx"], "read"):
self.shx = kwargs["shx"]
if hasattr(self.shx, "seek"):
self.shx.seek(0)
if "dbf" in kwargs.keys():
if hasattr(kwargs["dbf"], "read"):
self.dbf = kwargs["dbf"]
if hasattr(self.dbf, "seek"):
self.dbf.seek(0)
if self.shp or self.dbf:
self.load()
else:
raise ShapefileException("Shapefile Reader requires a shapefile or file-like object.")
def load(self, shapefile=None):
"""Opens a shapefile from a filename or file-like
object. Normally this method would be called by the
constructor with the file object or file name as an
argument."""
if shapefile:
(shapeName, ext) = os.path.splitext(shapefile)
self.shapeName = shapeName
try:
self.shp = open("%s.shp" % shapeName, "rb")
except IOError:
raise ShapefileException("Unable to open %s.shp" % shapeName)
try:
self.shx = open("%s.shx" % shapeName, "rb")
except IOError:
raise ShapefileException("Unable to open %s.shx" % shapeName)
try:
self.dbf = open("%s.dbf" % shapeName, "rb")
except IOError:
raise ShapefileException("Unable to open %s.dbf" % shapeName)
if self.shp:
self.__shpHeader()
if self.dbf:
self.__dbfHeader()
def __getFileObj(self, f):
"""Checks to see if the requested shapefile file object is
available. If not a ShapefileException is raised."""
if not f:
raise ShapefileException("Shapefile Reader requires a shapefile or file-like object.")
if self.shp and self.shpLength is None:
self.load()
if self.dbf and len(self.fields) == 0:
self.load()
return f
def __restrictIndex(self, i):
"""Provides list-like handling of a record index with a clearer
error message if the index is out of bounds."""
if self.numRecords:
rmax = self.numRecords - 1
if abs(i) > rmax:
raise IndexError("Shape or Record index out of range.")
if i < 0: i = range(self.numRecords)[i]
return i
def __shpHeader(self):
"""Reads the header information from a .shp or .shx file."""
if not self.shp:
raise ShapefileException("Shapefile Reader requires a shapefile or file-like object. (no shp file found")
shp = self.shp
# File length (16-bit word * 2 = bytes)
shp.seek(24)
self.shpLength = unpack(">i", shp.read(4))[0] * 2
# Shape type
shp.seek(32)
self.shapeType = unpack("<i", shp.read(4))[0]
# The shapefile's bounding box (lower left, upper right)
self.bbox = _Array('d', unpack("<4d", shp.read(32)))
# Elevation
self.elevation = _Array('d', unpack("<2d", shp.read(16)))
# Measure
self.measure = _Array('d', unpack("<2d", shp.read(16)))
def __shape(self):
"""Returns the header info and geometry for a single shape."""
f = self.__getFileObj(self.shp)
record = _Shape()
nParts = nPoints = zmin = zmax = mmin = mmax = None
(recNum, recLength) = unpack(">2i", f.read(8))
shapeType = unpack("<i", f.read(4))[0]
record.shapeType = shapeType
# For Null shapes create an empty points list for consistency
if shapeType == 0:
record.points = []
# All shape types capable of having a bounding box
elif shapeType in (3,5,8,13,15,18,23,25,28,31):
record.bbox = _Array('d', unpack("<4d", f.read(32)))
# Shape types with parts
if shapeType in (3,5,13,15,23,25,31):
nParts = unpack("<i", f.read(4))[0]
# Shape types with points
if shapeType in (3,5,8,13,15,23,25,31):
nPoints = unpack("<i", f.read(4))[0]
# Read parts
if nParts:
record.parts = _Array('i', unpack("<%si" % nParts, f.read(nParts * 4)))
# Read part types for Multipatch - 31
if shapeType == 31:
record.partTypes = _Array('i', unpack("<%si" % nParts, f.read(nParts * 4)))
# Read points - produces a list of [x,y] values
if nPoints:
record.points = [_Array('d', unpack("<2d", f.read(16))) for p in range(nPoints)]
# Read z extremes and values
if shapeType in (13,15,18,31):
(zmin, zmax) = unpack("<2d", f.read(16))
record.z = _Array('d', unpack("<%sd" % nPoints, f.read(nPoints * 8)))
# Read m extremes and values
if shapeType in (13,15,18,23,25,28,31):
(mmin, mmax) = unpack("<2d", f.read(16))
# Measure values less than -10e38 are nodata values according to the spec
record.m = []
for m in _Array('d', unpack("%sd" % nPoints, f.read(nPoints * 8))):
if m > -10e38:
record.m.append(m)
else:
record.m.append(None)
# Read a single point
if shapeType in (1,11,21):
record.points = [_Array('d', unpack("<2d", f.read(16)))]
# Read a single Z value
if shapeType == 11:
record.z = unpack("<d", f.read(8))
# Read a single M value
if shapeType in (11,21):
record.m = unpack("<d", f.read(8))
return record
def __shapeIndex(self, i=None):
"""Returns the offset in a .shp file for a shape based on information
in the .shx index file."""
shx = self.shx
if not shx:
return None
if not self._offsets:
# File length (16-bit word * 2 = bytes) - header length
shx.seek(24)
shxRecordLength = (unpack(">i", shx.read(4))[0] * 2) - 100
numRecords = shxRecordLength // 8
# Jump to the first record.
shx.seek(100)
for r in range(numRecords):
# Offsets are 16-bit words just like the file length
self._offsets.append(unpack(">i", shx.read(4))[0] * 2)
shx.seek(shx.tell() + 4)
if i is not None:
return self._offsets[i]
def shape(self, i=0):
"""Returns a shape object for a shape in the the geometry
record file."""
shp = self.__getFileObj(self.shp)
i = self.__restrictIndex(i)
offset = self.__shapeIndex(i)
if not offset:
# Shx index not available so use the full list.
shapes = self.shapes()
return shapes[i]
shp.seek(offset)
return self.__shape()
def shapes(self):
"""Returns all shapes in a shapefile."""
shp = self.__getFileObj(self.shp)
shp.seek(100)
shapes = []
while shp.tell() < self.shpLength:
shapes.append(self.__shape())
return shapes
def __dbfHeaderLength(self):
"""Retrieves the header length of a dbf file header."""
if not self.__dbfHdrLength:
if not self.dbf:
raise ShapefileException("Shapefile Reader requires a shapefile or file-like object. (no dbf file found)")
dbf = self.dbf
(self.numRecords, self.__dbfHdrLength) = \
unpack("<xxxxLH22x", dbf.read(32))
return self.__dbfHdrLength
def __dbfHeader(self):
"""Reads a dbf header. Xbase-related code borrows heavily from ActiveState Python Cookbook Recipe 362715 by Raymond Hettinger"""
if not self.dbf:
raise ShapefileException("Shapefile Reader requires a shapefile or file-like object. (no dbf file found)")
dbf = self.dbf
headerLength = self.__dbfHeaderLength()
numFields = (headerLength - 33) // 32
for field in range(numFields):
fieldDesc = list(unpack("<11sc4xBB14x", dbf.read(32)))
name = 0
idx = 0
if b("\x00") in fieldDesc[name]:
idx = fieldDesc[name].index(b("\x00"))
else:
idx = len(fieldDesc[name]) - 1
fieldDesc[name] = fieldDesc[name][:idx]
fieldDesc[name] = u(fieldDesc[name])
fieldDesc[name] = fieldDesc[name].lstrip()
fieldDesc[1] = u(fieldDesc[1])
self.fields.append(fieldDesc)
terminator = dbf.read(1)
assert terminator == b("\r")
self.fields.insert(0, ('DeletionFlag', 'C', 1, 0))
def __recordFmt(self):
"""Calculates the size of a .shp geometry record."""
if not self.numRecords:
self.__dbfHeader()
fmt = ''.join(['%ds' % fieldinfo[2] for fieldinfo in self.fields])
fmtSize = calcsize(fmt)
return (fmt, fmtSize)
def __record(self):
"""Reads and returns a dbf record row as a list of values."""
f = self.__getFileObj(self.dbf)
recFmt = self.__recordFmt()
recordContents = unpack(recFmt[0], f.read(recFmt[1]))
if recordContents[0] != b(' '):
# deleted record
return None
record = []
for (name, typ, size, deci), value in zip(self.fields,
recordContents):
if name == 'DeletionFlag':
continue
elif not value.strip():
record.append(value)
continue
elif typ == "N":
value = value.replace(b('\0'), b('')).strip()
if value == b(''):
value = 0
elif deci:
value = float(value)
else:
value = int(value)
elif typ == b('D'):
try:
y, m, d = int(value[:4]), int(value[4:6]), int(value[6:8])
value = [y, m, d]
except:
value = value.strip()
elif typ == b('L'):
value = (value in b('YyTt') and b('T')) or \
(value in b('NnFf') and b('F')) or b('?')
else:
value = u(value)
value = value.strip()
record.append(value)
return record
def record(self, i=0):
"""Returns a specific dbf record based on the supplied index."""
f = self.__getFileObj(self.dbf)
if not self.numRecords:
self.__dbfHeader()
i = self.__restrictIndex(i)
recSize = self.__recordFmt()[1]
f.seek(0)
f.seek(self.__dbfHeaderLength() + (i * recSize))
return self.__record()
def records(self):
"""Returns all records in a dbf file."""
if not self.numRecords:
self.__dbfHeader()
records = []
f = self.__getFileObj(self.dbf)
f.seek(self.__dbfHeaderLength())
for i in range(self.numRecords):
r = self.__record()
if r:
records.append(r)
return records
def shapeRecord(self, i=0):
"""Returns a combination geometry and attribute record for the
supplied record index."""
i = self.__restrictIndex(i)
return _ShapeRecord(shape=self.shape(i),
record=self.record(i))
def shapeRecords(self):
"""Returns a list of combination geometry/attribute records for
all records in a shapefile."""
return [_ShapeRecord(shape=rec[0], record=rec[1]) \
for rec in zip(self.shapes(), self.records())]
class Writer:
"""Provides write support for ESRI Shapefiles."""
def __init__(self, shapeType=None):
self._shapes = []
self.fields = []
self.records = []
self.shapeType = shapeType
self.shp = None
self.shx = None
self.dbf = None
# Geometry record offsets and lengths for writing shx file.
self._offsets = []
self._lengths = []
# Use deletion flags in dbf? Default is false (0).
self.deletionFlag = 0
def __getFileObj(self, f):
"""Safety handler to verify file-like objects"""
if not f:
raise ShapefileException("No file-like object available.")
elif hasattr(f, "write"):
return f
else:
pth = os.path.split(f)[0]
if pth and not os.path.exists(pth):
os.makedirs(pth)
return open(f, "wb")
def __shpFileLength(self):
"""Calculates the file length of the shp file."""
# Start with header length
size = 100
# Calculate size of all shapes
for s in self._shapes:
# Add in record header and shape type fields
size += 12
# nParts and nPoints do not apply to all shapes
#if self.shapeType not in (0,1):
# nParts = len(s.parts)
# nPoints = len(s.points)
if hasattr(s,'parts'):
nParts = len(s.parts)
if hasattr(s,'points'):
nPoints = len(s.points)
# All shape types capable of having a bounding box
if self.shapeType in (3,5,8,13,15,18,23,25,28,31):
size += 32
# Shape types with parts
if self.shapeType in (3,5,13,15,23,25,31):
# Parts count
size += 4
# Parts index array
size += nParts * 4
# Shape types with points
if self.shapeType in (3,5,8,13,15,23,25,31):
# Points count
size += 4
# Points array
size += 16 * nPoints
# Calc size of part types for Multipatch (31)
if self.shapeType == 31:
size += nParts * 4
# Calc z extremes and values
if self.shapeType in (13,15,18,31):
# z extremes
size += 16
# z array
size += 8 * nPoints
# Calc m extremes and values
if self.shapeType in (23,25,31):
# m extremes
size += 16
# m array
size += 8 * nPoints
# Calc a single point
if self.shapeType in (1,11,21):
size += 16
# Calc a single Z value
if self.shapeType == 11:
size += 8
# Calc a single M value
if self.shapeType in (11,21):
size += 8
# Calculate size as 16-bit words
size //= 2
return size
def __bbox(self, shapes, shapeTypes=[]):
x = []
y = []
for s in shapes:
shapeType = self.shapeType
if shapeTypes:
shapeType = shapeTypes[shapes.index(s)]
px, py = list(zip(*s.points))[:2]
x.extend(px)
y.extend(py)
return [min(x), min(y), max(x), max(y)]
def __zbox(self, shapes, shapeTypes=[]):
z = []
for s in shapes:
try:
for p in s.points:
z.append(p[2])
except IndexError:
pass
if not z: z.append(0)
return [min(z), max(z)]
def __mbox(self, shapes, shapeTypes=[]):
m = [0]
for s in shapes:
try:
for p in s.points:
m.append(p[3])
except IndexError:
pass
return [min(m), max(m)]
def bbox(self):
"""Returns the current bounding box for the shapefile which is
the lower-left and upper-right corners. It does not contain the
elevation or measure extremes."""
return self.__bbox(self._shapes)
def zbox(self):
"""Returns the current z extremes for the shapefile."""
return self.__zbox(self._shapes)
def mbox(self):
"""Returns the current m extremes for the shapefile."""
return self.__mbox(self._shapes)
def __shapefileHeader(self, fileObj, headerType='shp'):
"""Writes the specified header type to the specified file-like object.
Several of the shapefile formats are so similar that a single generic
method to read or write them is warranted."""
f = self.__getFileObj(fileObj)
f.seek(0)
# File code, Unused bytes
f.write(pack(">6i", 9994,0,0,0,0,0))
# File length (Bytes / 2 = 16-bit words)
if headerType == 'shp':
f.write(pack(">i", self.__shpFileLength()))
elif headerType == 'shx':
f.write(pack('>i', ((100 + (len(self._shapes) * 8)) // 2)))
# Version, Shape type
f.write(pack("<2i", 1000, self.shapeType))
# The shapefile's bounding box (lower left, upper right)
if self.shapeType != 0:
try:
f.write(pack("<4d", *self.bbox()))
except error:
raise ShapefileException("Failed to write shapefile bounding box. Floats required.")
else:
f.write(pack("<4d", 0,0,0,0))
# Elevation
z = self.zbox()
# Measure
m = self.mbox()
try:
f.write(pack("<4d", z[0], z[1], m[0], m[1]))
except error:
raise ShapefileException("Failed to write shapefile elevation and measure values. Floats required.")
def __dbfHeader(self):
"""Writes the dbf header and field descriptors."""
f = self.__getFileObj(self.dbf)
f.seek(0)
version = 3
year, month, day = time.localtime()[:3]
year -= 1900
# Remove deletion flag placeholder from fields
for field in self.fields:
if field[0].startswith("Deletion"):
self.fields.remove(field)
numRecs = len(self.records)
numFields = len(self.fields)
headerLength = numFields * 32 + 33
recordLength = sum([int(field[2]) for field in self.fields]) + 1
header = pack('<BBBBLHH20x', version, year, month, day, numRecs,
headerLength, recordLength)
f.write(header)
# Field descriptors
for field in self.fields:
name, fieldType, size, decimal = field
name = b(name)
name = name.replace(b(' '), b('_'))
name = name.ljust(11).replace(b(' '), b('\x00'))
fieldType = b(fieldType)
size = int(size)
fld = pack('<11sc4xBB14x', name, fieldType, size, decimal)
f.write(fld)
# Terminator
f.write(b('\r'))
def __shpRecords(self):
"""Write the shp records"""
f = self.__getFileObj(self.shp)
f.seek(100)
recNum = 1
for s in self._shapes:
self._offsets.append(f.tell())
# Record number, Content length place holder
f.write(pack(">2i", recNum, 0))
recNum += 1
start = f.tell()
# Shape Type
f.write(pack("<i", s.shapeType))
# All shape types capable of having a bounding box
if s.shapeType in (3,5,8,13,15,18,23,25,28,31):
try:
f.write(pack("<4d", *self.__bbox([s])))
except error:
raise ShapefileException("Falied to write bounding box for record %s. Expected floats." % recNum)
# Shape types with parts
if s.shapeType in (3,5,13,15,23,25,31):
# Number of parts
f.write(pack("<i", len(s.parts)))
# Shape types with multiple points per record
if s.shapeType in (3,5,8,13,15,23,25,31):
# Number of points
f.write(pack("<i", len(s.points)))
# Write part indexes
if s.shapeType in (3,5,13,15,23,25,31):
for p in s.parts:
f.write(pack("<i", p))
# Part types for Multipatch (31)
if s.shapeType == 31:
for pt in s.partTypes:
f.write(pack("<i", pt))
# Write points for multiple-point records
if s.shapeType in (3,5,8,13,15,23,25,31):
try:
[f.write(pack("<2d", *p[:2])) for p in s.points]
except error:
raise ShapefileException("Failed to write points for record %s. Expected floats." % recNum)
# Write z extremes and values
if s.shapeType in (13,15,18,31):
try:
f.write(pack("<2d", *self.__zbox([s])))
except error:
raise ShapefileException("Failed to write elevation extremes for record %s. Expected floats." % recNum)
try:
[f.write(pack("<d", p[2])) for p in s.points]
except error:
raise ShapefileException("Failed to write elevation values for record %s. Expected floats." % recNum)
# Write m extremes and values
if s.shapeType in (23,25,31):
try:
f.write(pack("<2d", *self.__mbox([s])))
except error:
raise ShapefileException("Failed to write measure extremes for record %s. Expected floats" % recNum)
try:
[f.write(pack("<d", p[3])) for p in s.points]
except error:
raise ShapefileException("Failed to write measure values for record %s. Expected floats" % recNum)
# Write a single point
if s.shapeType in (1,11,21):
try:
f.write(pack("<2d", s.points[0][0], s.points[0][1]))
except error:
raise ShapefileException("Failed to write point for record %s. Expected floats." % recNum)
# Write a single Z value
if s.shapeType == 11:
try:
f.write(pack("<1d", s.points[0][2]))
except error:
raise ShapefileException("Failed to write elevation value for record %s. Expected floats." % recNum)
# Write a single M value
if s.shapeType in (11,21):
try:
f.write(pack("<1d", s.points[0][3]))
except error:
raise ShapefileException("Failed to write measure value for record %s. Expected floats." % recNum)
# Finalize record length as 16-bit words
finish = f.tell()
length = (finish - start) // 2
self._lengths.append(length)
# start - 4 bytes is the content length field
f.seek(start-4)
f.write(pack(">i", length))
f.seek(finish)
def __shxRecords(self):
"""Writes the shx records."""
f = self.__getFileObj(self.shx)
f.seek(100)
for i in range(len(self._shapes)):
f.write(pack(">i", self._offsets[i] // 2))
f.write(pack(">i", self._lengths[i]))
def __dbfRecords(self):
"""Writes the dbf records."""
f = self.__getFileObj(self.dbf)
for record in self.records:
if not self.fields[0][0].startswith("Deletion"):
f.write(b(' ')) # deletion flag
for (fieldName, fieldType, size, dec), value in zip(self.fields, record):
fieldType = fieldType.upper()
size = int(size)
if fieldType.upper() == "N":
value = str(value).rjust(size)
elif fieldType == 'L':
value = str(value)[0].upper()
else:
value = str(value)[:size].ljust(size)
assert len(value) == size
value = b(value)
f.write(value)
def null(self):
"""Creates a null shape."""
self._shapes.append(_Shape(NULL))
def point(self, x, y, z=0, m=0):
"""Creates a point shape."""
pointShape = _Shape(self.shapeType)
pointShape.points.append([x, y, z, m])
self._shapes.append(pointShape)
def line(self, parts=[], shapeType=POLYLINE):
"""Creates a line shape. This method is just a convienience method
which wraps 'poly()'.
"""
self.poly(parts, shapeType, [])
def poly(self, parts=[], shapeType=POLYGON, partTypes=[]):
"""Creates a shape that has multiple collections of points (parts)
including lines, polygons, and even multipoint shapes. If no shape type
is specified it defaults to 'polygon'. If no part types are specified
(which they normally won't be) then all parts default to the shape type.
"""
polyShape = _Shape(shapeType)
polyShape.parts = []
polyShape.points = []
for part in parts:
polyShape.parts.append(len(polyShape.points))
for point in part:
# Ensure point is list
if not isinstance(point, list):
point = list(point)
# Make sure point has z and m values
while len(point) < 4:
point.append(0)
polyShape.points.append(point)
if polyShape.shapeType == 31:
if not partTypes:
for part in parts:
partTypes.append(polyShape.shapeType)
polyShape.partTypes = partTypes
self._shapes.append(polyShape)
def field(self, name, fieldType="C", size="50", decimal=0):
"""Adds a dbf field descriptor to the shapefile."""
self.fields.append((name, fieldType, size, decimal))
def record(self, *recordList, **recordDict):
"""Creates a dbf attribute record. You can submit either a sequence of
field values or keyword arguments of field names and values. Before
adding records you must add fields for the record values using the
fields() method. If the record values exceed the number of fields the
extra ones won't be added. In the case of using keyword arguments to specify
field/value pairs only fields matching the already registered fields
will be added."""
record = []
fieldCount = len(self.fields)
# Compensate for deletion flag
if self.fields[0][0].startswith("Deletion"): fieldCount -= 1
if recordList:
[record.append(recordList[i]) for i in range(fieldCount)]
elif recordDict:
for field in self.fields:
if field[0] in recordDict:
val = recordDict[field[0]]
if val:
record.append(val)
else:
record.append("")
if record:
self.records.append(record)
def shape(self, i):
return self._shapes[i]
def shapes(self):
"""Return the current list of shapes."""
return self._shapes
def saveShp(self, target):
"""Save an shp file."""
if not hasattr(target, "write"):
target = os.path.splitext(target)[0] + '.shp'
if not self.shapeType:
self.shapeType = self._shapes[0].shapeType
self.shp = self.__getFileObj(target)
self.__shapefileHeader(self.shp, headerType='shp')
self.__shpRecords()
def saveShx(self, target):
"""Save an shx file."""
if not hasattr(target, "write"):
target = os.path.splitext(target)[0] + '.shx'
if not self.shapeType:
self.shapeType = self._shapes[0].shapeType
self.shx = self.__getFileObj(target)
self.__shapefileHeader(self.shx, headerType='shx')
self.__shxRecords()
def saveDbf(self, target):
"""Save a dbf file."""
if not hasattr(target, "write"):
target = os.path.splitext(target)[0] + '.dbf'
self.dbf = self.__getFileObj(target)
self.__dbfHeader()
self.__dbfRecords()
def save(self, target=None, shp=None, shx=None, dbf=None):
"""Save the shapefile data to three files or
three file-like objects. SHP and DBF files can also
be written exclusively using saveShp, saveShx, and saveDbf respectively."""
# TODO: Create a unique filename for target if None.
if shp:
self.saveShp(shp)
if shx:
self.saveShx(shx)
if dbf:
self.saveDbf(dbf)
elif target:
self.saveShp(target)
self.shp.close()
self.saveShx(target)
self.shx.close()
self.saveDbf(target)
self.dbf.close()
class Editor(Writer):
def __init__(self, shapefile=None, shapeType=POINT, autoBalance=1):
self.autoBalance = autoBalance
if not shapefile:
Writer.__init__(self, shapeType)
elif is_string(shapefile):
base = os.path.splitext(shapefile)[0]
if os.path.isfile("%s.shp" % base):
r = Reader(base)
Writer.__init__(self, r.shapeType)
self._shapes = r.shapes()
self.fields = r.fields
self.records = r.records()
def select(self, expr):
"""Select one or more shapes (to be implemented)"""
# TODO: Implement expressions to select shapes.
pass
def delete(self, shape=None, part=None, point=None):
"""Deletes the specified part of any shape by specifying a shape
number, part number, or point number."""
# shape, part, point
if shape and part and point:
del self._shapes[shape][part][point]
# shape, part
elif shape and part and not point:
del self._shapes[shape][part]
# shape
elif shape and not part and not point:
del self._shapes[shape]
# point
elif not shape and not part and point:
for s in self._shapes:
if s.shapeType == 1:
del self._shapes[point]
else:
for part in s.parts:
del s[part][point]
# part, point
elif not shape and part and point:
for s in self._shapes:
del s[part][point]
# part
elif not shape and part and not point:
for s in self._shapes:
del s[part]
def point(self, x=None, y=None, z=None, m=None, shape=None, part=None, point=None, addr=None):
"""Creates/updates a point shape. The arguments allows
you to update a specific point by shape, part, point of any
shape type."""
# shape, part, point
if shape and part and point:
try: self._shapes[shape]
except IndexError: self._shapes.append([])
try: self._shapes[shape][part]
except IndexError: self._shapes[shape].append([])
try: self._shapes[shape][part][point]
except IndexError: self._shapes[shape][part].append([])
p = self._shapes[shape][part][point]
if x: p[0] = x
if y: p[1] = y
if z: p[2] = z
if m: p[3] = m
self._shapes[shape][part][point] = p
# shape, part
elif shape and part and not point:
try: self._shapes[shape]
except IndexError: self._shapes.append([])
try: self._shapes[shape][part]
except IndexError: self._shapes[shape].append([])
points = self._shapes[shape][part]
for i in range(len(points)):
p = points[i]
if x: p[0] = x
if y: p[1] = y
if z: p[2] = z
if m: p[3] = m
self._shapes[shape][part][i] = p
# shape
elif shape and not part and not point:
try: self._shapes[shape]
except IndexError: self._shapes.append([])
# point
# part
if addr:
shape, part, point = addr
self._shapes[shape][part][point] = [x, y, z, m]
else:
Writer.point(self, x, y, z, m)
if self.autoBalance:
self.balance()
def validate(self):
"""An optional method to try and validate the shapefile
as much as possible before writing it (not implemented)."""
#TODO: Implement validation method
pass
def balance(self):
"""Adds a corresponding empty attribute or null geometry record depending
on which type of record was created to make sure all three files
are in sync."""
if len(self.records) > len(self._shapes):
self.null()
elif len(self.records) < len(self._shapes):
self.record()
def __fieldNorm(self, fieldName):
"""Normalizes a dbf field name to fit within the spec and the
expectations of certain ESRI software."""
if len(fieldName) > 11: fieldName = fieldName[:11]
fieldName = fieldName.upper()
fieldName = fieldName.replace(' ', '_')
return fieldName
# Begin Testing
def test():
import doctest
doctest.NORMALIZE_WHITESPACE = 1
doctest.testfile("README.txt", verbose=1)
if __name__ == "__main__":
"""
Doctests are contained in the module 'pyshp_usage.py'. This library was developed
using Python 2.3. Python 2.4 and above have some excellent improvements in the built-in
testing libraries but for now unit testing is done using what's available in
2.3.
"""
test()<|fim▁end|> | if isinstance(v, str):
# For python 3 encode str to bytes. |
<|file_name|>Preferences.cpp<|end_file_name|><|fim▁begin|>//
// Preferences.cpp
//
// $Id: //poco/1.7/OSP/samples/Preferences/src/Preferences.cpp#1 $
//
// Copyright (c) 2007-2016, Applied Informatics Software Engineering GmbH.
// All rights reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
#include "Poco/OSP/BundleActivator.h"
#include "Poco/OSP/BundleContext.h"
#include "Poco/OSP/Bundle.h"
#include "Poco/OSP/ServiceRegistry.h"
#include "Poco/OSP/PreferencesService.h"
#include "Poco/OSP/Preferences.h"
#include "Poco/OSP/ServiceRef.h"
#include "Poco/DateTime.h"
#include "Poco/DateTimeFormatter.h"
#include "Poco/DateTimeFormat.h"
#include "Poco/AutoPtr.h"
#include "Poco/ClassLibrary.h"
using Poco::OSP::BundleActivator;
using Poco::OSP::BundleContext;
using Poco::OSP::Bundle;
using Poco::OSP::PreferencesService;
using Poco::OSP::Preferences;
using Poco::OSP::ServiceRef;
using Poco::DateTime;
using Poco::DateTimeFormatter;
using Poco::DateTimeFormat;
using Poco::AutoPtr;
class PreferencesBundleActivator: public BundleActivator
/// A very simple bundle that shows the usage<|fim▁hole|>public:
PreferencesBundleActivator()
{
}
~PreferencesBundleActivator()
{
}
void start(BundleContext::Ptr pContext)
{
// find PreferencesService using the Service Registry
ServiceRef::Ptr pPrefsSvcRef = pContext->registry().findByName("osp.core.preferences");
if (pPrefsSvcRef)
{
// PreferencesService is available
AutoPtr<PreferencesService> pPrefsSvc = pPrefsSvcRef->castedInstance<PreferencesService>();
// Get the preferences for our bundle
_pPrefs = pPrefsSvc->preferences(pContext->thisBundle()->symbolicName());
// Do something with the preferences
std::string lastStartup = _pPrefs->getString("lastStartup", "never");
std::string lastShutdown = _pPrefs->getString("lastShutdown", "never");
pContext->logger().information(std::string("Last startup at: ") + lastStartup);
pContext->logger().information(std::string("Last shutdown at: ") + lastShutdown);
DateTime now;
std::string dateStr = DateTimeFormatter::format(now, DateTimeFormat::SORTABLE_FORMAT);
_pPrefs->setString("lastStartup", dateStr);
}
else
{
// The service is not available
pContext->logger().error("The PreferencesService is not available.");
}
}
void stop(BundleContext::Ptr pContext)
{
if (_pPrefs)
{
DateTime now;
std::string dateStr = DateTimeFormatter::format(now, DateTimeFormat::SORTABLE_FORMAT);
_pPrefs->setString("lastShutdown", dateStr);
}
}
private:
AutoPtr<Preferences> _pPrefs;
};
POCO_BEGIN_MANIFEST(BundleActivator)
POCO_EXPORT_CLASS(PreferencesBundleActivator)
POCO_END_MANIFEST<|fim▁end|> | /// of the PreferencesService.
{ |
<|file_name|>options.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import logging
import six
from datetime import timedelta
from django.utils import timezone
from sentry.models import Option
from sentry.options import default_manager
from sentry.options.manager import UnknownOption
from sentry.tasks.base import instrumented_task<|fim▁hole|>
@instrumented_task(name="sentry.tasks.options.sync_options", queue="options")
def sync_options(cutoff=ONE_HOUR):
"""
Ensures all options that have been updated (within the database) since
``cutoff`` have their correct values stored in the cache.
This **does not** guarantee that the correct value is written into the cache
though it will correct itself in the next update window.
"""
cutoff_dt = timezone.now() - timedelta(seconds=cutoff)
# TODO(dcramer): this doesn't handle deleted options (which shouldn't be allowed)
for option in Option.objects.filter(last_updated__gte=cutoff_dt).iterator():
try:
opt = default_manager.lookup_key(option.key)
default_manager.store.set_cache(opt, option.value)
except UnknownOption as e:
logger.exception(six.text_type(e))<|fim▁end|> |
ONE_HOUR = 60 * 60
logger = logging.getLogger("sentry")
|
<|file_name|>_kubernetes.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright: (c) 2015, Google Inc. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: kubernetes
version_added: "2.1"
deprecated:
removed_in: "2.9"
why: This module used the oc command line tool, whereas M(k8s_raw) goes over the REST API.
alternative: Use M(k8s_raw) instead.
short_description: Manage Kubernetes resources
description:
- This module can manage Kubernetes resources on an existing cluster using
the Kubernetes server API. Users can specify in-line API data, or
specify an existing Kubernetes YAML file.
- Currently, this module
(1) Only supports HTTP Basic Auth
(2) Only supports 'strategic merge' for update, http://goo.gl/fCPYxT
SSL certs are not working, use C(validate_certs=off) to disable.
options:
api_endpoint:
description:
- The IPv4 API endpoint of the Kubernetes cluster.
required: true
aliases: [ endpoint ]
inline_data:
description:
- The Kubernetes YAML data to send to the API I(endpoint). This option is
mutually exclusive with C('file_reference').
required: true
file_reference:
description:
- Specify full path to a Kubernetes YAML file to send to API I(endpoint).
This option is mutually exclusive with C('inline_data').
patch_operation:
description:
- Specify patch operation for Kubernetes resource update.
- For details, see the description of PATCH operations at
U(https://github.com/kubernetes/kubernetes/blob/release-1.5/docs/devel/api-conventions.md#patch-operations).
default: Strategic Merge Patch
choices: [ JSON Patch, Merge Patch, Strategic Merge Patch ]
aliases: [ patch_strategy ]
version_added: 2.4<|fim▁hole|> - Certificate Authority data for Kubernetes server. Should be in either
standard PEM format or base64 encoded PEM data. Note that certificate
verification is broken until ansible supports a version of
'match_hostname' that can match the IP address against the CA data.
state:
description:
- The desired action to take on the Kubernetes data.
required: true
choices: [ absent, present, replace, update ]
default: present
url_password:
description:
- The HTTP Basic Auth password for the API I(endpoint). This should be set
unless using the C('insecure') option.
aliases: [ password ]
url_username:
description:
- The HTTP Basic Auth username for the API I(endpoint). This should be set
unless using the C('insecure') option.
default: admin
aliases: [ username ]
insecure:
description:
- Reverts the connection to using HTTP instead of HTTPS. This option should
only be used when executing the M('kubernetes') module local to the Kubernetes
cluster using the insecure local port (localhost:8080 by default).
validate_certs:
description:
- Enable/disable certificate validation. Note that this is set to
C(false) until Ansible can support IP address based certificate
hostname matching (exists in >= python3.5.0).
type: bool
default: 'no'
author:
- Eric Johnson (@erjohnso) <[email protected]>
'''
EXAMPLES = '''
# Create a new namespace with in-line YAML.
- name: Create a kubernetes namespace
kubernetes:
api_endpoint: 123.45.67.89
url_username: admin
url_password: redacted
inline_data:
kind: Namespace
apiVersion: v1
metadata:
name: ansible-test
labels:
label_env: production
label_ver: latest
annotations:
a1: value1
a2: value2
state: present
# Create a new namespace from a YAML file.
- name: Create a kubernetes namespace
kubernetes:
api_endpoint: 123.45.67.89
url_username: admin
url_password: redacted
file_reference: /path/to/create_namespace.yaml
state: present
# Do the same thing, but using the insecure localhost port
- name: Create a kubernetes namespace
kubernetes:
api_endpoint: 123.45.67.89
insecure: true
file_reference: /path/to/create_namespace.yaml
state: present
'''
RETURN = '''
# Example response from creating a Kubernetes Namespace.
api_response:
description: Raw response from Kubernetes API, content varies with API.
returned: success
type: complex
contains:
apiVersion: "v1"
kind: "Namespace"
metadata:
creationTimestamp: "2016-01-04T21:16:32Z"
name: "test-namespace"
resourceVersion: "509635"
selfLink: "/api/v1/namespaces/test-namespace"
uid: "6dbd394e-b328-11e5-9a02-42010af0013a"
spec:
finalizers:
- kubernetes
status:
phase: "Active"
'''
import base64
import json
try:
import yaml
HAS_LIB_YAML = True
except ImportError:
HAS_LIB_YAML = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
############################################################################
############################################################################
# For API coverage, this Ansible module provides capability to operate on
# all Kubernetes objects that support a "create" call (except for 'Events').
# In order to obtain a valid list of Kubernetes objects, the v1 spec file
# was referenced and the below python script was used to parse the JSON
# spec file, extract only the objects with a description starting with
# 'create a'. The script then iterates over all of these base objects
# to get the endpoint URL and was used to generate the KIND_URL map.
#
# import json
# from urllib2 import urlopen
#
# r = urlopen("https://raw.githubusercontent.com/kubernetes"
# "/kubernetes/master/api/swagger-spec/v1.json")
# v1 = json.load(r)
#
# apis = {}
# for a in v1['apis']:
# p = a['path']
# for o in a['operations']:
# if o["summary"].startswith("create a") and o["type"] != "v1.Event":
# apis[o["type"]] = p
#
# def print_kind_url_map():
# results = []
# for a in apis.keys():
# results.append('"%s": "%s"' % (a[3:].lower(), apis[a]))
# results.sort()
# print("KIND_URL = {")
# print(",\n".join(results))
# print("}")
#
# if __name__ == '__main__':
# print_kind_url_map()
############################################################################
############################################################################
KIND_URL = {
"binding": "/api/v1/namespaces/{namespace}/bindings",
"configmap": "/api/v1/namespaces/{namespace}/configmaps",
"endpoints": "/api/v1/namespaces/{namespace}/endpoints",
"limitrange": "/api/v1/namespaces/{namespace}/limitranges",
"namespace": "/api/v1/namespaces",
"node": "/api/v1/nodes",
"persistentvolume": "/api/v1/persistentvolumes",
"persistentvolumeclaim": "/api/v1/namespaces/{namespace}/persistentvolumeclaims", # NOQA
"pod": "/api/v1/namespaces/{namespace}/pods",
"podtemplate": "/api/v1/namespaces/{namespace}/podtemplates",
"replicationcontroller": "/api/v1/namespaces/{namespace}/replicationcontrollers", # NOQA
"resourcequota": "/api/v1/namespaces/{namespace}/resourcequotas",
"secret": "/api/v1/namespaces/{namespace}/secrets",
"service": "/api/v1/namespaces/{namespace}/services",
"serviceaccount": "/api/v1/namespaces/{namespace}/serviceaccounts",
"daemonset": "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets",
"deployment": "/apis/extensions/v1beta1/namespaces/{namespace}/deployments",
"horizontalpodautoscaler": "/apis/extensions/v1beta1/namespaces/{namespace}/horizontalpodautoscalers", # NOQA
"ingress": "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses",
"job": "/apis/extensions/v1beta1/namespaces/{namespace}/jobs",
}
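# For example (hypothetical manifest), kind "Pod" with metadata.namespace "web"
# resolves to <target_endpoint>/api/v1/namespaces/web/pods.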
USER_AGENT = "ansible-k8s-module/0.0.1"
# TODO(erjohnso): SSL Certificate validation is currently unsupported.
# It can be made to work when the following are true:
# - Ansible consistently uses a "match_hostname" that supports IP Address
# matching. This is now true in >= python3.5.0. Currently, this feature
# is not yet available in backports.ssl_match_hostname (still 3.4).
# - Ansible allows passing in the self-signed CA cert that is created with
# a kubernetes master. The lib/ansible/module_utils/urls.py method,
# SSLValidationHandler.get_ca_certs() needs a way for the Kubernetes
# CA cert to be passed in and included in the generated bundle file.
# When this is fixed, the following changes can be made to this module,
# - Remove the 'return' statement in line 254 below
# - Set 'required=true' for certificate_authority_data and ensure that
# ansible's SSLValidationHandler.get_ca_certs() can pick up this CA cert
# - Set 'required=true' for the validate_certs param.
def decode_cert_data(module):
return
# pylint: disable=unreachable
d = module.params.get("certificate_authority_data")
if d and not d.startswith("-----BEGIN"):
module.params["certificate_authority_data"] = base64.b64decode(d)
def api_request(module, url, method="GET", headers=None, data=None):
body = None
if data:
data = json.dumps(data)
response, info = fetch_url(module, url, method=method, headers=headers, data=data)
if int(info['status']) == -1:
module.fail_json(msg="Failed to execute the API request: %s" % info['msg'], url=url, method=method, headers=headers)
if response is not None:
body = json.loads(response.read())
return info, body
def k8s_create_resource(module, url, data):
info, body = api_request(module, url, method="POST", data=data, headers={"Content-Type": "application/json"})
if info['status'] == 409:
name = data["metadata"].get("name", None)
info, body = api_request(module, url + "/" + name)
return False, body
elif info['status'] >= 400:
module.fail_json(msg="failed to create the resource: %s" % info['msg'], url=url)
return True, body
def k8s_delete_resource(module, url, data):
name = data.get('metadata', {}).get('name')
if name is None:
module.fail_json(msg="Missing a named resource in object metadata when trying to remove a resource")
url = url + '/' + name
info, body = api_request(module, url, method="DELETE")
if info['status'] == 404:
return False, "Resource name '%s' already absent" % name
elif info['status'] >= 400:
module.fail_json(msg="failed to delete the resource '%s': %s" % (name, info['msg']), url=url)
return True, "Successfully deleted resource name '%s'" % name
def k8s_replace_resource(module, url, data):
name = data.get('metadata', {}).get('name')
if name is None:
module.fail_json(msg="Missing a named resource in object metadata when trying to replace a resource")
headers = {"Content-Type": "application/json"}
url = url + '/' + name
info, body = api_request(module, url, method="PUT", data=data, headers=headers)
if info['status'] == 409:
name = data["metadata"].get("name", None)
info, body = api_request(module, url + "/" + name)
return False, body
elif info['status'] >= 400:
module.fail_json(msg="failed to replace the resource '%s': %s" % (name, info['msg']), url=url)
return True, body
def k8s_update_resource(module, url, data, patch_operation):
# PATCH operations are explained in details at:
# https://github.com/kubernetes/kubernetes/blob/release-1.5/docs/devel/api-conventions.md#patch-operations
PATCH_OPERATIONS_MAP = {
'JSON Patch': 'application/json-patch+json',
'Merge Patch': 'application/merge-patch+json',
'Strategic Merge Patch': 'application/strategic-merge-patch+json',
}
name = data.get('metadata', {}).get('name')
if name is None:
module.fail_json(msg="Missing a named resource in object metadata when trying to update a resource")
headers = {"Content-Type": PATCH_OPERATIONS_MAP[patch_operation]}
url = url + '/' + name
info, body = api_request(module, url, method="PATCH", data=data, headers=headers)
if info['status'] == 409:
name = data["metadata"].get("name", None)
info, body = api_request(module, url + "/" + name)
return False, body
elif info['status'] >= 400:
module.fail_json(msg="failed to update the resource '%s': %s" % (name, info['msg']), url=url)
return True, body
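# A hypothetical strategic-merge update sent as inline_data with state=update:
#   kind: Deployment
#   metadata: {name: web, namespace: default}
#   spec: {replicas: 3}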
def main():
module = AnsibleModule(
argument_spec=dict(
http_agent=dict(type='str', default=USER_AGENT),
url_username=dict(type='str', default='admin', aliases=['username']),
url_password=dict(type='str', default='', no_log=True, aliases=['password']),
force_basic_auth=dict(type='bool', default=True),
validate_certs=dict(type='bool', default=False),
certificate_authority_data=dict(type='str'),
insecure=dict(type='bool', default=False),
api_endpoint=dict(type='str', required=True),
patch_operation=dict(type='str', default='Strategic Merge Patch', aliases=['patch_strategy'],
choices=['JSON Patch', 'Merge Patch', 'Strategic Merge Patch']),
file_reference=dict(type='str'),
inline_data=dict(type='str'),
state=dict(type='str', default='present', choices=['absent', 'present', 'replace', 'update'])
),
mutually_exclusive=(('file_reference', 'inline_data'),
('url_username', 'insecure'),
('url_password', 'insecure')),
required_one_of=[['file_reference', 'inline_data']],
)
if not HAS_LIB_YAML:
module.fail_json(msg="missing python library: yaml")
decode_cert_data(module)
api_endpoint = module.params.get('api_endpoint')
state = module.params.get('state')
insecure = module.params.get('insecure')
inline_data = module.params.get('inline_data')
file_reference = module.params.get('file_reference')
patch_operation = module.params.get('patch_operation')
if inline_data:
if not isinstance(inline_data, dict) and not isinstance(inline_data, list):
data = yaml.safe_load(inline_data)
else:
data = inline_data
else:
try:
f = open(file_reference, "r")
data = [x for x in yaml.safe_load_all(f)]
f.close()
if not data:
module.fail_json(msg="No valid data could be found.")
except Exception:
module.fail_json(msg="The file '%s' was not found or contained invalid YAML/JSON data" % file_reference)
# set the transport type and build the target endpoint url
transport = 'https'
if insecure:
transport = 'http'
target_endpoint = "%s://%s" % (transport, api_endpoint)
body = []
changed = False
# make sure the data is a list
if not isinstance(data, list):
data = [data]
for item in data:
namespace = "default"
if item and 'metadata' in item:
namespace = item.get('metadata', {}).get('namespace', "default")
kind = item.get('kind', '').lower()
try:
url = target_endpoint + KIND_URL[kind]
except KeyError:
module.fail_json(msg="invalid resource kind specified in the data: '%s'" % kind)
url = url.replace("{namespace}", namespace)
else:
url = target_endpoint
if state == 'present':
item_changed, item_body = k8s_create_resource(module, url, item)
elif state == 'absent':
item_changed, item_body = k8s_delete_resource(module, url, item)
elif state == 'replace':
item_changed, item_body = k8s_replace_resource(module, url, item)
elif state == 'update':
item_changed, item_body = k8s_update_resource(module, url, item, patch_operation)
changed |= item_changed
body.append(item_body)
module.exit_json(changed=changed, api_response=body)
if __name__ == '__main__':
main()<|fim▁end|> | certificate_authority_data:
description: |
<|file_name|>jobpoller.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import boto3
import shutil
import zipfile
from zipfile import ZipFile, ZipInfo
import os
import tempfile
import time
import threading
from os.path import join
from botocore.client import Config
class JobPoller:
def __init__(self, action_type_id, builder):
self._action_type_id = action_type_id
self._codepipeline = boto3.client('codepipeline')
self._builder = builder
def poll(self):
jobs = []
print("Polling for jobs %s" % self._action_type_id)
while not jobs:
time.sleep(2)
response = self._codepipeline.poll_for_jobs(actionTypeId=self._action_type_id, maxBatchSize=1)
jobs = response['jobs']
job = jobs[0]
job_id = job['id']
print("Job with id %s found" % job_id)
nonce = job['nonce']
self._codepipeline.acknowledge_job(jobId=job_id, nonce=nonce)
threading.Thread(target=self._build, args=(job,)).start()
self.poll()
def _build(self, job):
job_id = job['id']
try:
artifactCredentials = job['data']['artifactCredentials']
s3session = boto3.Session(
aws_access_key_id=artifactCredentials['accessKeyId'],
aws_secret_access_key=artifactCredentials['secretAccessKey'],
aws_session_token=artifactCredentials['sessionToken'])<|fim▁hole|> s3 = s3session.client('s3', config=Config(signature_version='s3v4'))
bucketName = job['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
objectKey = job['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
tempdir = tempfile.mkdtemp()
print('tempdir for job %s is %s' % (job_id, tempdir))
input_src = join(tempdir, 'input')
os.mkdir(input_src)
target = join(tempdir, 'output')
os.mkdir(target)
print('Downloading artifact %s from bucket %s' % (objectKey, bucketName))
s3.download_file(bucketName, objectKey, join(tempdir, 'input.zip'))
with ZipFileWithPermissions(join(tempdir, 'input.zip'), 'r') as zip:
zip.extractall(input_src)
configuration = job['data']['actionConfiguration']['configuration']
print('Using configuration %s' % configuration)
print("Building job %s" % job_id)
# Run the build via the supplied builder
rc = self._builder.run(configuration=configuration, input_src=input_src, target_dir=target)
shutil.make_archive(join(tempdir, 'output'), 'zip', target)
uploadBucket = job['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
uploadKey = job['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
print('Uploading artifact %s to bucket %s' % (uploadKey, uploadBucket))
s3.upload_file(join(tempdir, 'output.zip'), uploadBucket, uploadKey)
if rc != 0:
print('job %s failed with return code %d' % (job_id, rc))
self._codepipeline.put_job_failure_result(jobId=job_id, failureDetails={'type': 'JobFailed', 'message': 'Failed'})
else:
self._codepipeline.put_job_success_result(jobId=job_id, executionDetails={'summary': 'It worked'})
print('job %s succeeded' % job_id)
shutil.rmtree(tempdir)
print("Done with " + job_id)
except:
self._codepipeline.put_job_failure_result(jobId=job_id, failureDetails={'type': 'JobFailed', 'message': 'Failed'})
raise
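# A minimal usage sketch (hypothetical action type and builder object):
#   poller = JobPoller({'category': 'Build', 'owner': 'Custom',
#                       'provider': 'MyBuilder', 'version': '1'}, my_builder)
#   poller.poll()   # blocks, polling CodePipeline for custom-action jobs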
# ZipFile should keep permissions
class ZipFileWithPermissions(ZipFile):
def extract(self, member, path=None, pwd=None):
if not isinstance(member, ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
ret_val = self._extract_member(member, path, pwd)
attr = member.external_attr >> 16
os.chmod(ret_val, attr)
return ret_val<|fim▁end|> | |
<|file_name|>Solution.java<|end_file_name|><|fim▁begin|>package asdf.test;
public class Solution {
/**
* (Reverse the words in a string) Given an input string, reverse the string word by word.
*
* For example, Given s = "the sky is blue", return "blue is sky the".
*
* Clarification:
*
* What constitutes a word?
*
* A sequence of non-space characters constitutes a word.
*
* Could the input string contain leading or trailing spaces?
*
* Yes. However, your reversed string should not contain leading or trailing
* spaces.
*
* How about multiple spaces between two words?
*
* Reduce them to a single space in the reversed string.
*/
// handle leading/trailing spaces
// handle runs of interior spaces
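// Approach: trim, split on single spaces, append the tokens in reverse order,
// skipping the empty tokens produced by consecutive spaces. Runs in O(n) time.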
public String reverseWords(String s) {
String[] strs = s.trim().split(" ");
StringBuffer sb = new StringBuffer();<|fim▁hole|> for (int i = strs.length - 1; i > 0; i--) {
if (strs[i].length() > 0 && strs[i].charAt(0) != ' ') { // skip empty tokens from repeated spaces
sb.append(strs[i]);
sb.append(' ');
}
}
if (strs.length > 0) {
sb.append(strs[0]);
}
return sb.toString();
}
public static void main(String[] args) {
Solution solution = new Solution();
System.out.println(solution.reverseWords(""));
System.out.println(solution.reverseWords(" "));
System.out.println(solution.reverseWords("the sky is blue"));
System.out.println(solution.reverseWords(" the sky is blue "));
System.out.println(solution.reverseWords(" 1"));
}
}<|fim▁end|> | |
<|file_name|>OperatorNodes.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.exprtree;
/**
* Container of nodes representing operators.
*
* <p> Important: Do not use outside of Soy code (treat as superpackage-private).
*
* @author Kai Huang
*/
public class OperatorNodes {
private OperatorNodes() {}
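// Usage sketch (assumed parent-class behavior): parsing "not $a and $b" yields
// an AndOpNode whose two children are a NotOpNode over $a and the node for $b.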
/**
* Node representing the unary '-' (negative) operator.
*/
public static class NegativeOpNode extends AbstractOperatorNode {
public NegativeOpNode() { super(Operator.NEGATIVE); }
protected NegativeOpNode(NegativeOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.NEGATIVE_OP_NODE; }
@Override public NegativeOpNode clone() { return new NegativeOpNode(this); }
}
/**
* Node representing the 'not' operator.
*/
public static class NotOpNode extends AbstractOperatorNode {
public NotOpNode() { super(Operator.NOT); }
protected NotOpNode(NotOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.NOT_OP_NODE; }
@Override public NotOpNode clone() { return new NotOpNode(this); }
}
/**
* Node representing the '*' (times) operator.
*/
public static class TimesOpNode extends AbstractOperatorNode {
public TimesOpNode() { super(Operator.TIMES); }
protected TimesOpNode(TimesOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.TIMES_OP_NODE; }
@Override public TimesOpNode clone() { return new TimesOpNode(this); }
}
/**
 * Node representing the '/' (divide by) operator.
*/
public static class DivideByOpNode extends AbstractOperatorNode {
public DivideByOpNode() { super(Operator.DIVIDE_BY); }
protected DivideByOpNode(DivideByOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.DIVIDE_BY_OP_NODE; }
@Override public DivideByOpNode clone() { return new DivideByOpNode(this); }
}
/**
* Node representing the '%' (mod) operator.
*/
public static class ModOpNode extends AbstractOperatorNode {
public ModOpNode() { super(Operator.MOD); }
protected ModOpNode(ModOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.MOD_OP_NODE; }
@Override public ModOpNode clone() { return new ModOpNode(this); }
}
/**
* Node representing the '+' (plus) operator.
*/
public static class PlusOpNode extends AbstractOperatorNode {
public PlusOpNode() { super(Operator.PLUS); }
protected PlusOpNode(PlusOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.PLUS_OP_NODE; }
@Override public PlusOpNode clone() { return new PlusOpNode(this); }
}
/**
* Node representing the binary '-' (minus) operator.
*/
public static class MinusOpNode extends AbstractOperatorNode {
public MinusOpNode() { super(Operator.MINUS); }
protected MinusOpNode(MinusOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.MINUS_OP_NODE; }
@Override public MinusOpNode clone() { return new MinusOpNode(this); }
}
/**
* Node representing the '<' (less than) operator.
*/
public static class LessThanOpNode extends AbstractOperatorNode {
public LessThanOpNode() { super(Operator.LESS_THAN); }
protected LessThanOpNode(LessThanOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.LESS_THAN_OP_NODE; }
@Override public LessThanOpNode clone() { return new LessThanOpNode(this); }
}
/**
* Node representing the '>' (greater than) operator.
*/
public static class GreaterThanOpNode extends AbstractOperatorNode {
public GreaterThanOpNode() { super(Operator.GREATER_THAN); }
protected GreaterThanOpNode(GreaterThanOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.GREATER_THAN_OP_NODE; }
@Override public GreaterThanOpNode clone() { return new GreaterThanOpNode(this); }
}
/**
* Node representing the '<=' (less than or equal) operator.
*/
public static class LessThanOrEqualOpNode extends AbstractOperatorNode {
public LessThanOrEqualOpNode() { super(Operator.LESS_THAN_OR_EQUAL); }
protected LessThanOrEqualOpNode(LessThanOrEqualOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.LESS_THAN_OR_EQUAL_OP_NODE; }
@Override public LessThanOrEqualOpNode clone() { return new LessThanOrEqualOpNode(this); }
}
/**
* Node representing the '>=' (greater than or equal) operator.
*/
public static class GreaterThanOrEqualOpNode extends AbstractOperatorNode {
public GreaterThanOrEqualOpNode() { super(Operator.GREATER_THAN_OR_EQUAL); }
protected GreaterThanOrEqualOpNode(GreaterThanOrEqualOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.GREATER_THAN_OR_EQUAL_OP_NODE; }
@Override public GreaterThanOrEqualOpNode clone() { return new GreaterThanOrEqualOpNode(this); }
}
/**
* Node representing the '==' (equal) operator.
*/
public static class EqualOpNode extends AbstractOperatorNode {
public EqualOpNode() { super(Operator.EQUAL); }
protected EqualOpNode(EqualOpNode orig) { super(orig); }
<|fim▁hole|>
/**
* Node representing the '!=' (not equal) operator.
*/
public static class NotEqualOpNode extends AbstractOperatorNode {
public NotEqualOpNode() { super(Operator.NOT_EQUAL); }
protected NotEqualOpNode(NotEqualOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.NOT_EQUAL_OP_NODE; }
@Override public NotEqualOpNode clone() { return new NotEqualOpNode(this); }
}
/**
* Node representing the 'and' operator.
*/
public static class AndOpNode extends AbstractOperatorNode {
public AndOpNode() { super(Operator.AND); }
protected AndOpNode(AndOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.AND_OP_NODE; }
@Override public AndOpNode clone() { return new AndOpNode(this); }
}
/**
* Node representing the 'or' operator.
*/
public static class OrOpNode extends AbstractOperatorNode {
public OrOpNode() { super(Operator.OR); }
protected OrOpNode(OrOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.OR_OP_NODE; }
@Override public OrOpNode clone() { return new OrOpNode(this); }
}
/**
* Node representing the ternary '? :' (conditional) operator.
*/
public static class ConditionalOpNode extends AbstractOperatorNode {
public ConditionalOpNode() { super(Operator.CONDITIONAL); }
protected ConditionalOpNode(ConditionalOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.CONDITIONAL_OP_NODE; }
@Override public ConditionalOpNode clone() { return new ConditionalOpNode(this); }
}
}<|fim▁end|> | @Override public Kind getKind() { return Kind.EQUAL_OP_NODE; }
@Override public EqualOpNode clone() { return new EqualOpNode(this); }
} |
<|file_name|>UI_openPathTool.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'openPathTool.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(457, 95)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.formLayout = QtGui.QFormLayout(self.centralwidget)
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.pathInLineEdit = QtGui.QLineEdit(self.centralwidget)
self.pathInLineEdit.setObjectName(_fromUtf8("pathInLineEdit"))
self.formLayout.setWidget(0, QtGui.QFormLayout.SpanningRole, self.pathInLineEdit)
self.pathOutLineEdit = QtGui.QLineEdit(self.centralwidget)
self.pathOutLineEdit.setReadOnly(True)
self.pathOutLineEdit.setObjectName(_fromUtf8("pathOutLineEdit"))
self.formLayout.setWidget(1, QtGui.QFormLayout.SpanningRole, self.pathOutLineEdit)
self.buttonLayout = QtGui.QHBoxLayout()
self.buttonLayout.setObjectName(_fromUtf8("buttonLayout"))
self.explorerButton = QtGui.QPushButton(self.centralwidget)
self.explorerButton.setObjectName(_fromUtf8("explorerButton"))
self.buttonLayout.addWidget(self.explorerButton)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.buttonLayout.addItem(spacerItem)
self.convertButton = QtGui.QPushButton(self.centralwidget)
self.convertButton.setObjectName(_fromUtf8("convertButton"))
self.buttonLayout.addWidget(self.convertButton)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.buttonLayout.addItem(spacerItem1)
self.closeButton = QtGui.QPushButton(self.centralwidget)
self.closeButton.setObjectName(_fromUtf8("closeButton"))
self.buttonLayout.addWidget(self.closeButton)
self.formLayout.setLayout(2, QtGui.QFormLayout.SpanningRole, self.buttonLayout)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
self.pathInLineEdit.setPlaceholderText(_translate("MainWindow", "Input Path", None))
self.pathOutLineEdit.setPlaceholderText(_translate("MainWindow", "Output Path", None))
self.explorerButton.setText(_translate("MainWindow", "Open In Explorer", None))
self.convertButton.setText(_translate("MainWindow", "Convert", None))
self.closeButton.setText(_translate("MainWindow", "Close", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
MainWindow = QtGui.QMainWindow()<|fim▁hole|> ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())<|fim▁end|> | ui = Ui_MainWindow() |
<|file_name|>bndcg.py<|end_file_name|><|fim▁begin|># Copyright 2021 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import tensorflow as tf
from .retrieval_metric import RetrievalMetric
from tensorflow_similarity.types import FloatTensor, IntTensor, BoolTensor
class BNDCG(RetrievalMetric):
"""Binary normalized discounted cumulative gain.
This is normalized discounted cumulative gain where the relevancy weights
are binary, i.e., either a correct match or an incorrect match.
The NDCG is a score in [0, 1] representing the rank-weighted results.
The DCG is the sum of the correct matches, each discounted by the log2 of
its rank, and is normalized by the 'ideal DCG' (IDCG). The IDCG is
computed from the match_mask sorted descending, discounted by the log2 of
the post-sort rank order. The metric therefore accounts for both the
correctness and the position of each match.
The normalized DCG is computed as:
$$
nDCG_{p} = \frac{DCG_{p}}{IDCG_{p}}
$$
The DCG is computed for each query using the match_mask as:
$$
DCG_{p} = \sum_{i=1}^{p} \frac{\text{match\_mask}_{i}}{\log_{2}(i+1)}
$$
The IDCG uses the same equation but sorts the match_mask descending
along axis=-1.
Additionally, all positive matches with a distance above the threshold are
set to 0, and the closest K matches are taken.
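As a small worked example (illustrative numbers only, assuming k = 3):
for a match_mask row of [1, 0, 1], the rank discounts are log2(2) = 1,
log2(3) ~= 1.585 and log2(4) = 2, giving DCG = 1/1 + 0/1.585 + 1/2 = 1.5.
The ideal ordering [1, 1, 0] gives IDCG = 1/1 + 1/1.585 ~= 1.63, so the
binary NDCG is 1.5 / 1.63 ~= 0.92.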
Args:
name: Name associated with the metric object, e.g., precision@5
canonical_name: The canonical name associated with metric,
e.g., precision@K
k: The number of nearest neighbors over which the metric is computed.
distance_threshold: The max distance below which a nearest neighbor is
considered a valid match.
average: {'micro', 'macro'} Determines the type of averaging performed
on the data.
* 'micro': Calculates metrics globally over all data.
* 'macro': Calculates metrics for each label and takes the unweighted
mean.
"""
def __init__(
self,
name: str = "ndcg",
k: int = 5,
distance_threshold: float = math.inf,
**kwargs,
) -> None:
if "canonical_name" not in kwargs:
kwargs["canonical_name"] = "ndcg@k"
super().__init__(
name=name, k=k, distance_threshold=distance_threshold, **kwargs
)
def compute(
self,
*, # keyword only arguments see PEP-570
query_labels: IntTensor,
lookup_distances: FloatTensor,
match_mask: BoolTensor,
**kwargs,
) -> FloatTensor:
"""Compute the metric
Computes the binary NDCG. The query labels are only used when the
averaging is set to "macro".
Args:
query_labels: A 1D array of the labels associated with the
embedding queries.
lookup_distances: A 2D array where the jth row is the distances
between the jth query and the set of k neighbors.
match_mask: A 2D mask where a 1 indicates a match between the
jth query and the kth neighbor and a 0 indicates a mismatch.
Returns:
A rank 0 tensor containing the metric.
"""<|fim▁hole|>
if tf.shape(lookup_distances)[0] != tf.shape(query_labels)[0]:
raise ValueError(
"The number of lookup distance rows must equal the number "
"of query labels. Number of lookup distance rows is "
f"{tf.shape(lookup_distances)[0]} but the number of query "
f"labels is {tf.shape(query_labels)[0]}."
)
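# Zero out neighbors whose distance exceeds the threshold, then keep only
# the closest k columns of the resulting binary match matrix.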
dist_mask = tf.math.less_equal(
lookup_distances, self.distance_threshold
)
k_slice = tf.math.multiply(
tf.cast(match_mask, dtype="float"),
tf.cast(dist_mask, dtype="float"),
)[:, : self.k]
rank = tf.range(1, self.k + 1, dtype="float")
rank_weights = tf.math.divide(tf.math.log1p(rank), tf.math.log(2.0))
# The numerator is simpler here because the relevance weights are binary.
dcg = tf.math.reduce_sum(k_slice / rank_weights, axis=1)
# generate the "ideal ordering".
ideal_ordering = tf.sort(k_slice, direction="DESCENDING", axis=1)
idcg = tf.math.reduce_sum(ideal_ordering / rank_weights, axis=1)
per_example_ndcg = tf.math.divide_no_nan(dcg, idcg)
if self.average == "micro":
ndcg = tf.math.reduce_mean(per_example_ndcg)
elif self.average == "macro":
per_class_metrics = 0
class_labels = tf.unique(query_labels)[0]
for label in class_labels:
idxs = tf.where(query_labels == label)
c_slice = tf.gather(per_example_ndcg, indices=idxs)
per_class_metrics += tf.math.reduce_mean(c_slice)
ndcg = tf.math.divide(per_class_metrics, len(class_labels))
else:
raise ValueError(
f"{self.average} is not a supported average " "option"
)
result: FloatTensor = ndcg
return result<|fim▁end|> | self._check_shape(query_labels, match_mask) |
<|file_name|>feedbackRoutes.js<|end_file_name|><|fim▁begin|>var express = require('express');
var routes = function (Page) {
var feedbackController = require('../controllers/feedbackController')(Page);
var feedbackRouter = express.Router();
feedbackRouter.route('/')
.get(feedbackController.findByPage);<|fim▁hole|>
module.exports = routes;<|fim▁end|> |
return feedbackRouter;
}; |
<|file_name|>branches.go<|end_file_name|><|fim▁begin|>//
// Copyright 2015, Sander van Harmelen
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>
package gitlab
import (
"fmt"
"net/url"
)
// BranchesService handles communication with the branch related methods
// of the GitLab API.
//
// GitLab API docs: http://doc.gitlab.com/ce/api/branches.html
type BranchesService struct {
client *Client
}
// Branch represents a GitLab branch.
//
// GitLab API docs: http://doc.gitlab.com/ce/api/branches.html
type Branch struct {
Commit *Commit `json:"commit"`
Name string `json:"name"`
Protected bool `json:"protected"`
}
func (b Branch) String() string {
return Stringify(b)
}
// ListBranches gets a list of repository branches from a project, sorted by
// name alphabetically.
//
// GitLab API docs:
// http://doc.gitlab.com/ce/api/branches.html#list-repository-branches
func (s *BranchesService) ListBranches(pid interface{}) ([]*Branch, *Response, error) {
project, err := parseID(pid)
if err != nil {
return nil, nil, err
}
u := fmt.Sprintf("projects/%s/repository/branches", url.QueryEscape(project))
req, err := s.client.NewRequest("GET", u, nil)
if err != nil {
return nil, nil, err
}
var b []*Branch
resp, err := s.client.Do(req, &b)
if err != nil {
return nil, resp, err
}
return b, resp, err
}
// GetBranch gets a single project repository branch.
//
// GitLab API docs:
// http://doc.gitlab.com/ce/api/branches.html#get-single-repository-branch
func (s *BranchesService) GetBranch(pid interface{}, branch string) (*Branch, *Response, error) {
project, err := parseID(pid)
if err != nil {
return nil, nil, err
}
u := fmt.Sprintf("projects/%s/repository/branches/%s", url.QueryEscape(project), branch)
req, err := s.client.NewRequest("GET", u, nil)
if err != nil {
return nil, nil, err
}
b := new(Branch)
resp, err := s.client.Do(req, b)
if err != nil {
return nil, resp, err
}
return b, resp, err
}
// ProtectBranch protects a single project repository branch. This is an
// idempotent function, protecting an already protected repository branch
// still returns a 200 OK status code.
//
// GitLab API docs:
// http://doc.gitlab.com/ce/api/branches.html#protect-repository-branch
func (s *BranchesService) ProtectBranch(pid interface{}, branch string) (*Branch, *Response, error) {
project, err := parseID(pid)
if err != nil {
return nil, nil, err
}
u := fmt.Sprintf("projects/%s/repository/branches/%s/protect", url.QueryEscape(project), branch)
req, err := s.client.NewRequest("PUT", u, nil)
if err != nil {
return nil, nil, err
}
b := new(Branch)
resp, err := s.client.Do(req, b)
if err != nil {
return nil, resp, err
}
return b, resp, err
}
// UnprotectBranch unprotects a single project repository branch. This is an
// idempotent function, unprotecting an already unprotected repository branch
// still returns a 200 OK status code.
//
// GitLab API docs:
// http://doc.gitlab.com/ce/api/branches.html#unprotect-repository-branch
func (s *BranchesService) UnprotectBranch(
pid interface{},
branch string) (*Branch, *Response, error) {
project, err := parseID(pid)
if err != nil {
return nil, nil, err
}
u := fmt.Sprintf("projects/%s/repository/branches/%s/unprotect", url.QueryEscape(project), branch)
req, err := s.client.NewRequest("PUT", u, nil)
if err != nil {
return nil, nil, err
}
b := new(Branch)
resp, err := s.client.Do(req, b)
if err != nil {
return nil, resp, err
}
return b, resp, err
}
// CreateBranchOptions represents the available CreateBranch() options.
//
// GitLab API docs:
// http://doc.gitlab.com/ce/api/branches.html#create-repository-branch
type CreateBranchOptions struct {
BranchName string `url:"branch_name,omitempty"`
Ref string `url:"ref,omitempty"`
}
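// A minimal usage sketch (illustrative only; it assumes the API client
// exposes this service as client.Branches and uses a made-up project path):
//
//	opt := &CreateBranchOptions{BranchName: "feature-x", Ref: "master"}
//	branch, _, err := client.Branches.CreateBranch("owner/project", opt)
//	if err != nil {
//		// handle the error
//	}
//	fmt.Println(branch.Name)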
// CreateBranch creates branch from commit SHA or existing branch.
//
// GitLab API docs:
// http://doc.gitlab.com/ce/api/branches.html#create-repository-branch
func (s *BranchesService) CreateBranch(
pid interface{},
opt *CreateBranchOptions) (*Branch, *Response, error) {
project, err := parseID(pid)
if err != nil {
return nil, nil, err
}
u := fmt.Sprintf("projects/%s/repository/branches", url.QueryEscape(project))
req, err := s.client.NewRequest("POST", u, opt)
if err != nil {
return nil, nil, err
}
b := new(Branch)
resp, err := s.client.Do(req, b)
if err != nil {
return nil, resp, err
}
return b, resp, err
}
// DeleteBranch deletes an existing branch.
//
// GitLab API docs:
// http://doc.gitlab.com/ce/api/branches.html#delete-repository-branch
func (s *BranchesService) DeleteBranch(pid interface{}, branch string) (*Response, error) {
project, err := parseID(pid)
if err != nil {
return nil, err
}
u := fmt.Sprintf("projects/%s/repository/branches/%s", url.QueryEscape(project), branch)
req, err := s.client.NewRequest("DELETE", u, nil)
if err != nil {
return nil, err
}
resp, err := s.client.Do(req, nil)
if err != nil {
return resp, err
}
return resp, err
}<|fim▁end|> | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// |
<|file_name|>txpool.spec.ts<|end_file_name|><|fim▁begin|>import tape from 'tape'
import Common, { Chain, Hardfork } from '@ethereumjs/common'
import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx'
import { Block } from '@ethereumjs/block'
import { PeerPool } from '../../lib/net/peerpool'
import { TxPool } from '../../lib/sync/txpool'
import { Config } from '../../lib/config'
tape('[TxPool]', async (t) => {
const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London })
const config = new Config({ transports: [] })
const A = {
address: Buffer.from('0b90087d864e82a284dca15923f3776de6bb016f', 'hex'),
privateKey: Buffer.from(
'64bf9cc30328b0e42387b3c82c614e6386259136235e20c1357bd11cdee86993',
'hex'
),
}
const B = {
address: Buffer.from('6f62d8382bf2587361db73ceca28be91b2acb6df', 'hex'),
privateKey: Buffer.from(
'2a6e9ad5a6a8e4f17149b8bc7128bf090566a11dbd63c30e5a0ee9f161309cd6',
'hex'
),
}
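// Helper that builds and signs a London (EIP-1559) transaction between the
// two test accounts; defaults to A -> B with nonce 0 and value 1.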
const createTx = (from = A, to = B, nonce = 0, value = 1) => {
const txData = {
nonce,
maxFeePerGas: 1000000000,
maxInclusionFeePerGas: 100000000,
gasLimit: 100000,
to: to.address,
value,
}
const tx = FeeMarketEIP1559Transaction.fromTxData(txData, { common })
const signedTx = tx.sign(from.privateKey)
return signedTx
}
const txA01 = createTx() // A -> B, nonce: 0, value: 1
const txA02 = createTx(A, B, 0, 2) // A -> B, nonce: 0, value: 2 (different hash)
const txB01 = createTx(B, A) // B -> A, nonce: 0, value: 1
const txB02 = createTx(B, A, 1, 5) // B -> A, nonce: 1, value: 5
t.test('should initialize correctly', (t) => {
const config = new Config({ transports: [] })
const pool = new TxPool({ config })
t.equal(pool.pool.size, 0, 'pool empty')
t.notOk((pool as any).opened, 'pool not opened yet')
pool.open()
t.ok((pool as any).opened, 'pool opened')
pool.start()
t.ok((pool as any).running, 'pool running')
pool.stop()
t.notOk((pool as any).running, 'pool not running anymore')
pool.close()
t.notOk((pool as any).opened, 'pool not opened anymore')
t.end()
})
t.test('should open/close', async (t) => {
t.plan(3)
const config = new Config({ transports: [] })
const pool = new TxPool({ config })
pool.open()
pool.start()
t.ok((pool as any).opened, 'pool opened')
t.equals(pool.open(), false, 'already opened')
pool.stop()
pool.close()
t.notOk((pool as any).opened, 'closed')
})
t.test('announcedTxHashes() -> add single tx / knownByPeer / getByHash()', async (t) => {
// Safeguard that send() method from peer2 gets called
t.plan(12)
const pool = new TxPool({ config })
pool.open()
pool.start()
const peer: any = {
id: '1',
eth: {
getPooledTransactions: () => {
return [null, [txA01]]
},
send: () => {
t.fail('should not send to announcing peer')
},
},
}
let sentToPeer2 = 0
const peer2: any = {
id: '2',
eth: {
send: () => {
sentToPeer2++
t.equal(sentToPeer2, 1, 'should send once to non-announcing peer')
},
},
}
const peerPool = new PeerPool({ config })
peerPool.add(peer)
peerPool.add(peer2)
await pool.handleAnnouncedTxHashes([txA01.hash()], peer, peerPool)
t.equal(pool.pool.size, 1, 'pool size 1')
t.equal((pool as any).pending.length, 0, 'cleared pending txs')
t.equal((pool as any).handled.size, 1, 'added to handled txs')
t.equal((pool as any).knownByPeer.size, 2, 'known tx hashes size 2 (entries for both peers)')
t.equal((pool as any).knownByPeer.get(peer.id).length, 1, 'one tx added for peer 1')
t.equal(
(pool as any).knownByPeer.get(peer.id)[0].hash,
txA01.hash().toString('hex'),
'new known tx hashes entry for announcing peer'
)
const txs = pool.getByHash([txA01.hash()])
t.equal(txs.length, 1, 'should get correct number of txs by hash')
t.equal(
txs[0].serialize().toString('hex'),
txA01.serialize().toString('hex'),
'should get correct tx by hash'
)
pool.pool.clear()
await pool.handleAnnouncedTxHashes([txA01.hash()], peer, peerPool)
t.equal(pool.pool.size, 0, 'should not add a once handled tx')
t.equal(
(pool as any).knownByPeer.get(peer.id).length,
1,
'should add tx only once to known tx hashes'
)
t.equal((pool as any).knownByPeer.size, 2, 'known tx hashes size 2 (entries for both peers)')
pool.stop()
pool.close()
})
t.test('announcedTxHashes() -> TX_RETRIEVAL_LIMIT', async (t) => {
const pool = new TxPool({ config })
const TX_RETRIEVAL_LIMIT: number = (pool as any).TX_RETRIEVAL_LIMIT
pool.open()
pool.start()
const peer = {
eth: {
getPooledTransactions: (res: any) => {
t.equal(res['hashes'].length, TX_RETRIEVAL_LIMIT, 'should limit to TX_RETRIEVAL_LIMIT')
return [null, []]
},
},
}
const peerPool = new PeerPool({ config })
const hashes = []
for (let i = 1; i <= TX_RETRIEVAL_LIMIT + 1; i++) {
// One more than TX_RETRIEVAL_LIMIT
hashes.push(Buffer.from(i.toString().padStart(64, '0'), 'hex')) // '0000000000000000000000000000000000000000000000000000000000000001',...
}
await pool.handleAnnouncedTxHashes(hashes, peer as any, peerPool)
pool.stop()
pool.close()
})
t.test('announcedTxHashes() -> add two txs (different sender)', async (t) => {
const pool = new TxPool({ config })
pool.open()
pool.start()
const peer: any = {
eth: {
getPooledTransactions: () => {
return [null, [txA01, txB01]]
},
},
}
const peerPool = new PeerPool({ config })
await pool.handleAnnouncedTxHashes([txA01.hash(), txB01.hash()], peer, peerPool)
t.equal(pool.pool.size, 2, 'pool size 2')
pool.stop()
pool.close()
})
t.test('announcedTxHashes() -> add two txs (same sender and nonce)', async (t) => {
const config = new Config({ transports: [] })
const pool = new TxPool({ config })
pool.open()
pool.start()
const peer: any = {
eth: {
getPooledTransactions: () => {
return [null, [txA01, txA02]]
},
},
}
const peerPool = new PeerPool({ config })
await pool.handleAnnouncedTxHashes([txA01.hash(), txA02.hash()], peer, peerPool)
t.equal(pool.pool.size, 1, 'pool size 1')
const address = A.address.toString('hex')
const poolContent = pool.pool.get(address)!
t.equal(poolContent.length, 1, 'only one tx')
t.deepEqual(poolContent[0].tx.hash(), txA02.hash(), 'only later-added tx')
pool.stop()
pool.close()
})
t.test('announcedTxs()', async (t) => {
const config = new Config({ transports: [] })
const pool = new TxPool({ config })
pool.open()
pool.start()
const peer: any = {
eth: {
send: () => {},
},
}
const peerPool = new PeerPool({ config })
await pool.handleAnnouncedTxs([txA01], peer, peerPool)
t.equal(pool.pool.size, 1, 'pool size 1')
const address = A.address.toString('hex')
const poolContent = pool.pool.get(address)!
t.equal(poolContent.length, 1, 'one tx')
t.deepEqual(poolContent[0].tx.hash(), txA01.hash(), 'correct tx')
pool.stop()
pool.close()
})
t.test('newBlocks() -> should remove included txs', async (t) => {
const config = new Config({ transports: [] })
const pool = new TxPool({ config })
pool.open()
pool.start()
let peer: any = {
eth: {
getPooledTransactions: () => {
return [null, [txA01]]
},
},
}
const peerPool = new PeerPool({ config })
await pool.handleAnnouncedTxHashes([txA01.hash()], peer, peerPool)
t.equal(pool.pool.size, 1, 'pool size 1')
// Craft block with tx not in pool
let block = Block.fromBlockData({ transactions: [txA02] }, { common })
pool.removeNewBlockTxs([block])
t.equal(pool.pool.size, 1, 'pool size 1')
// Craft block with tx in pool
block = Block.fromBlockData({ transactions: [txA01] }, { common })
pool.removeNewBlockTxs([block])
t.equal(pool.pool.size, 0, 'pool should be empty')
peer = {
eth: {
getPooledTransactions: () => {
return [null, [txB01, txB02]]
},
},
}
await pool.handleAnnouncedTxHashes([txB01.hash(), txB02.hash()], peer, peerPool)
t.equal(pool.pool.size, 1, 'pool size 1')
const address = B.address.toString('hex')
let poolContent = pool.pool.get(address)!
t.equal(poolContent.length, 2, 'two txs')
// Craft block with tx not in pool
block = Block.fromBlockData({ transactions: [txA02] }, { common })
pool.removeNewBlockTxs([block])
t.equal(pool.pool.size, 1, 'pool size 1')
poolContent = pool.pool.get(address)!
t.equal(poolContent.length, 2, 'two txs')
// Craft block with tx in pool
block = Block.fromBlockData({ transactions: [txB01] }, { common })
pool.removeNewBlockTxs([block])
poolContent = pool.pool.get(address)!
t.equal(poolContent.length, 1, 'only one tx')
// Craft block with tx in pool
block = Block.fromBlockData({ transactions: [txB02] }, { common })
pool.removeNewBlockTxs([block])
t.equal(pool.pool.size, 0, 'pool size 0')
pool.stop()
pool.close()
})
t.test('cleanup()', async (t) => {
const pool = new TxPool({ config })
pool.open()
pool.start()
const peer: any = {
eth: {
getPooledTransactions: () => {
return [null, [txA01, txB01]]
},
},
send: () => {},
}
const peerPool = new PeerPool({ config })
peerPool.add(peer)
await pool.handleAnnouncedTxHashes([txA01.hash(), txB01.hash()], peer, peerPool)
t.equal(pool.pool.size, 2, 'pool size 2')
t.equal((pool as any).handled.size, 2, 'handled size 2')
t.equal((pool as any).knownByPeer.size, 1, 'known by peer size 1')
t.equal((pool as any).knownByPeer.get(peer.id).length, 2, '2 known txs')
pool.cleanup()
t.equal(
pool.pool.size,
2,
'should not remove txs from pool (POOLED_STORAGE_TIME_LIMIT within range)'
)
t.equal(
(pool as any).knownByPeer.size,
1,
'should not remove txs from known by peer map (POOLED_STORAGE_TIME_LIMIT within range)'
)
t.equal(
(pool as any).handled.size,
2,
'should not remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT within range)'
)
const address = txB01.getSenderAddress().toString().slice(2)
const poolObj = pool.pool.get(address)![0]
poolObj.added = Date.now() - pool.POOLED_STORAGE_TIME_LIMIT * 60 - 1
pool.pool.set(address, [poolObj])
const knownByPeerObj1 = (pool as any).knownByPeer.get(peer.id)[0]
const knownByPeerObj2 = (pool as any).knownByPeer.get(peer.id)[1]
knownByPeerObj1.added = Date.now() - pool.POOLED_STORAGE_TIME_LIMIT * 60 - 1
;(pool as any).knownByPeer.set(peer.id, [knownByPeerObj1, knownByPeerObj2])
const hash = txB01.hash().toString('hex')
const handledObj = (pool as any).handled.get(hash)
handledObj.added = Date.now() - pool.HANDLED_CLEANUP_TIME_LIMIT * 60 - 1
;(pool as any).handled.set(hash, handledObj)
pool.cleanup()
t.equal(
pool.pool.size,
1,
'should remove txs from pool (POOLED_STORAGE_TIME_LIMIT before range)'
)
t.equal(
(pool as any).knownByPeer.get(peer.id).length,
1,
'should remove one tx from known by peer map (POOLED_STORAGE_TIME_LIMIT before range)'
)
t.equal(
(pool as any).handled.size,
1,
'should remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT before range)'
)
<|fim▁hole|> })
})<|fim▁end|> | pool.stop()
pool.close() |
<|file_name|>query_task.js<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2014 The Lovefield Project Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
goog.provide('lf.proc.QueryTask');
goog.require('goog.Promise');
goog.require('lf.TransactionType');
goog.require('lf.cache.Journal');
goog.require('lf.proc.PhysicalQueryPlan');
goog.require('lf.proc.Task');
goog.require('lf.query.SelectContext');
goog.require('lf.service');
/**
* A QueryTask represents a collection of queries that should be executed as
* part of a single transaction.
* @implements {lf.proc.Task}
* @constructor
* @struct
*
* @param {!lf.Global} global
* @param {!Array<!lf.proc.TaskItem>} items
*/
lf.proc.QueryTask = function(global, items) {
/** @protected {!lf.Global} */
this.global = global;
/** @private {!lf.BackStore} */
this.backStore_ = global.getService(lf.service.BACK_STORE);
/** @protected {!Array<!lf.query.Context>} */
this.queries = items.map(function(item) {
return item.context;
});
/** @private {!Array<!lf.proc.PhysicalQueryPlan>} */
this.plans_ = items.map(function(item) {
return item.plan;
});
/** @private {!lf.structs.Set<!lf.schema.Table>} */
this.combinedScope_ = lf.proc.PhysicalQueryPlan.getCombinedScope(this.plans_);
/** @private {!lf.TransactionType} */
this.txType_ = this.detectType_();
/** @private {!goog.promise.Resolver.<!Array<!lf.proc.Relation>>} */
this.resolver_ = goog.Promise.withResolver();
};
/**
* @return {!lf.TransactionType}
* @private
*/
lf.proc.QueryTask.prototype.detectType_ = function() {
var txType = this.queries.some(
function(query) {
return !(query instanceof lf.query.SelectContext);
}) ? lf.TransactionType.READ_WRITE : lf.TransactionType.READ_ONLY;
return txType;
};
/** @override */
lf.proc.QueryTask.prototype.exec = function() {
var journal = new lf.cache.Journal(this.global, this.combinedScope_);
var results = [];
var remainingPlans = this.plans_.slice();
var queries = this.queries;
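// Runs the remaining plans one at a time by chaining promises recursively,
// so each query observes the journal state left by the previous one; the
// first relation of every executed plan is collected into |results|.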
/** @return {!IThenable} */
var sequentiallyExec = function() {
var plan = remainingPlans.shift();
if (plan) {
var queryContext = queries[results.length];
return plan.getRoot().exec(journal, queryContext).then(
function(relations) {
results.push(relations[0]);
return sequentiallyExec();
});
}
return goog.Promise.resolve();
};
return sequentiallyExec().then(function() {
var tx = this.backStore_.createTx(this.txType_, journal);
return tx.commit();
}.bind(this)).then(function() {
this.onSuccess(results);
return results;
}.bind(this), function(e) {
journal.rollback();
throw e;
});
};
/** @override */
lf.proc.QueryTask.prototype.getType = function() {
return this.txType_;
};
/** @override */
lf.proc.QueryTask.prototype.getScope = function() {
return this.combinedScope_;
};
/** @override */
lf.proc.QueryTask.prototype.getResolver = function() {
return this.resolver_;
};
/** @override */
lf.proc.QueryTask.prototype.getId = function() {
return goog.getUid(this);
};
/** @override */
lf.proc.QueryTask.prototype.getPriority = goog.abstractMethod;
/**
* Executes after all queries have finished successfully. Default implementation
* is a no-op. Subclasses should override this method as necessary.
* @param {!Array<!lf.proc.Relation>} results The results of all queries run by
* this task.
* @protected
*/
lf.proc.QueryTask.prototype.onSuccess = function(results) {<|fim▁hole|><|fim▁end|> | // Default implementation is a no-op.
}; |
<|file_name|>bitcoin_pt_BR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="pt_BR" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Molecule</source>
<translation>Sobre o Molecule</translation>
</message>
<message>
<location line="+39"/>
<source><b>Molecule</b> version</source>
<translation>Versão do <b>Molecule</b></translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Este é um software experimental.

Distribuído sob a licença de software MIT/X11, veja o arquivo anexo COPYING ou http://www.opensource.org/licenses/mit-license.php.

Este produto inclui software desenvolvido pelo Projeto OpenSSL para uso no OpenSSL Toolkit (http://www.openssl.org/), software de criptografia escrito por Eric Young ([email protected]) e software UPnP escrito por Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>Copyright</translation>
</message>
<message>
<location line="+0"/>
<source>The Molecule developers</source>
<translation>Desenvolvedores do Molecule</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Catálogo de endereços</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Clique duas vezes para editar o endereço ou a etiqueta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Criar um novo endereço</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copie o endereço selecionado para a área de transferência do sistema</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Novo endereço</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Molecule addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Estes são os seus endereços Molecule para receber pagamentos. Você pode querer enviar um endereço diferente para cada remetente, para acompanhar quem está pagando.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Copiar Endereço</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Mostrar &QR Code</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Molecule address</source>
<translation>Assine uma mensagem para provar que você é dono de um endereço Molecule</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>&Assinar Mensagem</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Excluir o endereço selecionado da lista</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar os dados na aba atual para um arquivo</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Molecule address</source>
<translation>Verificar mensagem para se assegurar que ela foi assinada pelo dono de um endereço Molecule específico.</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Verificar Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Excluir</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Molecule addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Estes são os seus endereços Molecule para enviar pagamentos. Sempre confira a quantia e o endereço de recebimento antes de enviar moedas.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Copiar &Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Editar</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>Enviar &moedas</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Exportar Catálogo de Endereços</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Arquivo separado por vírgulas (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Erro ao exportar</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Não foi possível gravar no arquivo %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Rótulo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(Sem rótulo)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Janela da Frase de Segurança</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Digite a frase de segurança</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nova frase de segurança</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Repita a nova frase de segurança</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Digite a nova frase de segurança da sua carteira. <br/> Por favor, use uma frase de <b>10 ou mais caracteres aleatórios,</b> ou <b>oito ou mais palavras.</b></translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Criptografar carteira</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Esta operação precisa de sua frase de segurança para desbloquear a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Desbloquear carteira</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operação precisa de sua frase de segurança para descriptografar a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Descriptografar carteira</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Alterar frase de segurança</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Digite a frase de segurança antiga e nova para a carteira.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Confirmar criptografia da carteira</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR MOLECULECOINS</b>!</source>
<translation>Aviso: Se você criptografar sua carteira e perder sua senha, você vai <b>perder todos os seus MOLECULECOINS!</b></translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Tem certeza de que deseja criptografar sua carteira?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANTE: Qualquer backup prévio que você tenha feito do seu arquivo wallet deve ser substituído pelo novo e encriptado arquivo wallet gerado. Por razões de segurança, qualquer backup do arquivo wallet não criptografado se tornará inútil assim que você começar a usar uma nova carteira criptografada.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Cuidado: A tecla Caps Lock está ligada!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Carteira criptografada</translation>
</message>
<message>
<location line="-56"/>
<source>Molecule will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your molecules from being stolen by malware infecting your computer.</source>
<translation>O Molecule irá fechar agora para finalizar o processo de encriptação. Lembre-se de que encriptar sua carteira não protege totalmente suas molecules de serem roubadas por malwares que tenham infectado o seu computador.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>A criptografia da carteira falhou</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>A criptografia da carteira falhou devido a um erro interno. Sua carteira não foi criptografada.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>As frases de segurança fornecidas não conferem.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>A abertura da carteira falhou</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>A frase de segurança digitada para a descriptografia da carteira estava incorreta.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>A descriptografia da carteira falhou</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>A frase de segurança da carteira foi alterada com êxito.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>&Assinar Mensagem...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Sincronizando com a rede...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Visão geral</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Mostrar visão geral da carteira</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Transações</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Navegar pelo histórico de transações</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Editar a lista de endereços e rótulos</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Mostrar a lista de endereços para receber pagamentos</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>S&air</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Sair da aplicação</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Molecule</source>
<translation>Mostrar informação sobre Molecule</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Sobre &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Mostrar informações sobre o Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opções...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Criptografar Carteira...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Backup Carteira...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Mudar frase de segurança...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>Importando blocos do disco...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Reindexando blocos no disco...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Molecule address</source>
<translation>Enviar moedas para um endereço Molecule</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Molecule</source>
<translation>Modificar opções de configuração para o Molecule</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Fazer cópia de segurança da carteira para uma outra localização</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Mudar a frase de segurança utilizada na criptografia da carteira</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>Janela de &Depuração</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Abrir console de depuração e diagnóstico</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Verificar mensagem...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Molecule</source>
<translation>Molecule</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Carteira</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>&Enviar</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>&Receber</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>&Endereços</translation>
</message>
<message>
<location line="+22"/>
<source>&About Molecule</source>
<translation>&Sobre o Molecule</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Exibir/Ocultar</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Mostrar ou esconder a Janela Principal.</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Criptografar as chaves privadas que pertencem à sua carteira</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Molecule addresses to prove you own them</source>
<translation>Assine mensagens com seus endereços Molecule para provar que você é dono deles</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Molecule addresses</source>
<translation>Verificar mensagens para se assegurar que elas foram assinadas pelo dono de Endereços Molecule específicos</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Arquivo</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Configurações</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Ajuda</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Barra de ferramentas</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>Molecule client</source>
<translation>Cliente Molecule</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Molecule network</source>
<translation><numerusform>%n conexão ativa na rede Molecule</numerusform><numerusform>%n conexões ativas na rede Molecule</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>Processados %1 de %2 blocos (estimado) do histórico de transações.</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>Processados %1 blocos do histórico de transações.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n hora</numerusform><numerusform>%n horas</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n dia</numerusform><numerusform>%n dias</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n semana</numerusform><numerusform>%n semanas</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>%1 atrás</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>Último bloco recebido foi gerado %1 atrás.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Transações após isso ainda não estão visíveis.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>Erro</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>Informação</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>A transação está acima do limite de tamanho. Você ainda pode enviá-la por uma taxa de %1, que vai para os nós que processam sua transação e ajuda a manter a rede. Você deseja pagar a taxa?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Atualizado</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Recuperando o atraso ...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Confirmar taxa de transação</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Transação enviada</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Transação recebida</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Data: %1
Quantidade: %2
Tipo: %3
Endereço: %4</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>Manipulação de URI</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Molecule address or malformed URI parameters.</source>
<translation>URI não pode ser decodificado! Isso pode ter sido causado por um endereço Molecule inválido ou por parâmetros URI malformados.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Carteira está <b>criptografada</b> e atualmente <b>desbloqueada</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Carteira está <b>criptografada</b> e atualmente <b>bloqueada</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Molecule can no longer continue safely and will quit.</source>
<translation>Um erro fatal ocorreu. Molecule não pode continuar em segurança e irá fechar.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Alerta da Rede</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editar Endereço</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>A etiqueta associada a esse endereço do catálogo</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Endereço</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>O endereço associado à essa entrada do seu catálogo de endereços. Isso só pode ser modificado para endereço de envio.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Novo endereço de recebimento</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Novo endereço de envio</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editar endereço de recebimento</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editar endereço de envio</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>O endereço digitado "%1" já se encontra no catálogo de endereços.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Molecule address.</source>
<translation>O endereço digitado "%1" não é um endereço Molecule válido.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Não foi possível destravar a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>A geração de nova chave falhou.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Molecule-Qt</source>
<translation>Molecule-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versão</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>opções da linha de comando</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>opções da UI</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Escolher língua, por exemplo "de_DE" (padrão: localização do sistema)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Inicializar minimizado</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Mostrar tela inicial ao ligar (padrão: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opções</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>Principal</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Pagar taxa de &transação</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Molecule after logging in to the system.</source>
<translation>Iniciar Molecule automaticamente após se logar no sistema.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Molecule on system login</source>
<translation>Iniciar Molecule no login do sistema</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>Redefinir todas as opções do cliente para opções padrão.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>&Redefinir opções</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>Rede</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Molecule client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Abrir as portas do cliente Molecule automaticamente no roteador. Isto só funcionará se seu roteador suportar UPnP e esta função estiver habilitada.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Mapear porta usando &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Molecule network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Conectar à rede Molecule através de um proxy SOCKS (ex. quando estiver usando através do Tor)</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&Conectar através de um proxy SOCKS:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP do proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>Endereço IP do proxy (ex. 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Porta:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Porta do serviço de proxy (ex. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>&Versão do SOCKS:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Versão do proxy SOCKS (ex. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Janela</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Mostrar apenas um ícone na bandeja ao minimizar a janela.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimizar para a bandeja em vez da barra de tarefas.</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimizar em vez de sair do aplicativo quando a janela for fechada. Quando esta opção é escolhida, o aplicativo só será fechado selecionando Sair no menu Arquivo.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimizar ao fechar</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Mostrar</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Língua da interface do usuário:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Molecule.</source>
<translation>A língua da interface do usuário pode ser escolhida aqui. Esta configuração só surtirá efeito após reiniciar o Molecule.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unidade usada para mostrar quantidades:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Escolha a unidade padrão de subdivisão a ser mostrada na interface e ao enviar molecules.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Molecule addresses in the transaction list or not.</source>
<translation>Mostrar ou não endereços Molecule na lista de transações.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>Mostrar en&dereços na lista de transações</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Cancelar</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Aplicar</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>padrão</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>Confirmar redefinição de opções</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>Algumas configurações podem requerer a reinicialização do cliente para surtirem efeito.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>Você quer continuar?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Molecule.</source>
<translation>Esta configuração surtirá efeito após reinicializar o aplicativo Molecule.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>O endereço proxy fornecido é inválido.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulário</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Molecule network after a connection is established, but this process has not completed yet.</source>
<translation>A informação mostrada pode estar desatualizada. Sua carteira sincroniza automaticamente com a rede Molecule depois que a conexão é estabelecida, mas este processo ainda não foi concluído.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Não confirmadas:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Carteira</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>Imaturo:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Saldo minerado que ainda não maturou</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transações recentes</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Seu saldo atual</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Total de transações ainda não confirmadas, e que ainda não contam no saldo atual</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>fora de sincronia</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start molecule: click-to-pay handler</source>
<translation>Não foi possível iniciar molecule: manipulador clique-para-pagar</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>Janela do código QR</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Requisitar Pagamento</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Quantia:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etiqueta:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Mensagem:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Salvar como...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>Erro ao codificar o URI em código QR.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>A quantidade digitada é inválida, favor verificar.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>URI resultante muito longa. Tente reduzir o texto do rótulo ou da mensagem.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Salvar código QR</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>Imagens PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Nome do cliente</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Versão do cliente</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informação</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Usando OpenSSL versão</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Horário de inicialização</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Rede</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Número de conexões</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>Na rede de teste</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Corrente de blocos</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Quantidade atual de blocos</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Total estimado de blocos</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Horário do último bloco</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Abrir</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>Opções da linha de comando</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Molecule-Qt help message to get a list with possible Molecule command-line options.</source>
<translation>Mostrar mensagem de ajuda do Molecule-Qt para obter uma lista com possíveis opções da linha de comando do Molecule.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&Mostrar</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Console</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Data do 'build'</translation>
</message>
<message>
<location line="-104"/>
<source>Molecule - Debug window</source>
<translation>Molecule - Janela de Depuração</translation>
</message>
<message>
<location line="+25"/>
<source>Molecule Core</source>
<translation>Núcleo Molecule</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Arquivo de log de Depuração</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Molecule debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Abrir o arquivo de log de depuração do Molecule do diretório atual de dados. Isso pode levar alguns segundos para arquivos de log grandes.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Limpar console</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Molecule RPC console.</source>
<translation>Bem-vindo ao console Molecule RPC.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Use as setas para cima e para baixo para navegar pelo histórico, e <b>Ctrl-L</b> para limpar a tela.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Digite <b>help</b> para uma visão geral dos comandos disponíveis.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Enviar dinheiro</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Enviar para vários destinatários de uma só vez</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Adicionar destinatário</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Remover todos os campos da transação</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Limpar Tudo</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Confirmar o envio</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>Enviar</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> para %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Confirmar envio de dinheiro</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Você tem certeza que deseja enviar %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> e </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>O endereço do destinatário não é válido, favor verificar.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>A quantidade a ser paga precisa ser maior que 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>A quantidade excede seu saldo.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>O total excede seu saldo quando uma taxa de transação de %1 é incluída.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Endereço duplicado: pode-se enviar para cada endereço apenas uma vez por transação.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>Erro: Criação da transação falhou!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Erro: A transação foi rejeitada. Isso pode acontecer se alguns dos molecules de sua carteira já haviam sido gastos, por exemplo se você usou uma cópia do arquivo wallet.dat e alguns molecules foram gastos na cópia mas não foram marcados como gastos aqui.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Formulário</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Q&uantidade:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Pagar &Para:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</source>
<translation>O endereço para onde enviar o pagamento (ex. Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Digite uma etiqueta para este endereço para adicioná-lo ao catálogo de endereços</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Escolha um endereço do seu catálogo</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Colar o endereço da área de transferência</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Remover este destinatário</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Molecule address (e.g. Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</source>
<translation>Digite um endereço Molecule (exemplo: Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Assinaturas - Assinar / Verificar uma mensagem</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Assinar Mensagem</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Você pode assinar mensagens com seus endereços para provar que você é o dono deles. Seja cuidadoso para não assinar algo vago, pois ataques de phishing podem tentar te enganar para dar sua assinatura de identidade para eles. Apenas assine afirmações completamente detalhadas com as quais você concorda.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</source>
<translation>Endereço a ser usado para assinar a mensagem (ex. Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Escolha um endereço do catálogo</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Colar o endereço da área de transferência</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Digite aqui a mensagem que você quer assinar</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>Assinatura</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Copiar a assinatura para a área de transferência do sistema</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Molecule address</source>
<translation>Assinar mensagem para provar que você é dono deste endereço Molecule</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Assinar &Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Limpar todos os campos de assinatura da mensagem</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Limpar Tudo</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&Verificar Mensagem</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Forneça o endereço da assinatura, a mensagem (se assegure que você copiou quebras de linha, espaços, tabs, etc. exatamente) e a assinatura abaixo para verificar a mensagem. Cuidado para não ler mais na assinatura do que está escrito na mensagem propriamente, para evitar ser vítima de um ataque do tipo "man-in-the-middle".</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</source>
<translation>O endereço usado para assinar a mensagem (ex. Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Molecule address</source>
<translation>Verificar a mensagem para assegurar que ela foi assinada com o endereço Molecule especificado</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>Verificar &Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>Limpar todos os campos de verificação de mensagem</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Molecule address (e.g. Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</source>
<translation>Digite um endereço Molecule (exemplo: Qi1NooNjQySQLDJ643HWfZZ7UN2EmLEvix)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Clique em "Assinar Mensagem" para gerar a assinatura</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Molecule signature</source>
<translation>Entre com a assinatura Molecule</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>O endereço fornecido é inválido.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Por favor, verifique o endereço e tente novamente.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>O endereço fornecido não se refere a uma chave.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Destravamento da Carteira foi cancelado.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>A chave privada para o endereço fornecido não está disponível.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Assinatura da mensagem falhou.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Mensagem assinada.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>A assinatura não pôde ser decodificada.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Por favor, verifique a assinatura e tente novamente.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>A assinatura não corresponde ao "resumo da mensagem".</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Verificação da mensagem falhou.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Mensagem verificada.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Molecule developers</source>
<translation>Desenvolvedores do Molecule</translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Aberto até %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/não confirmadas</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 confirmações</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, difundida através de %n nó</numerusform><numerusform>, difundida através de %n nós</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Fonte</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Gerados</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>De</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Para</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>seu próprio endereço</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etiqueta</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Crédito</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>matura em mais %n bloco</numerusform><numerusform>matura em mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>não aceito</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Débito</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Taxa de transação</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Valor líquido</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Mensagem</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Comentário</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID da transação</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 240 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Molecules gerados precisam maturar por 240 blocos antes de serem gastos. Quando você gerou este bloco, ele foi difundido na rede para ser adicionado ao blockchain. Se ele falhar ao ser acrescentado no blockchain, seu estado mudará para "não aceito" e não poderá ser gasto. Isso pode ocasionalmente acontecer se outro nó gerar um bloco poucos segundos antes do seu.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Informação de depuração</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transação</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>Entradas</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>verdadeiro</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>falso</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ainda não foi propagada na rede com sucesso</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Aberto por mais %n bloco</numerusform><numerusform>Aberto por mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>desconhecido</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detalhes da transação</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Este painel mostra uma descrição detalhada da transação</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Aberto por mais %n bloco</numerusform><numerusform>Aberto por mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Aberto até %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline (%1 confirmações)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Não confirmado (%1 of %2 confirmações)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmado (%1 confirmações)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>Saldo minerado vai estar disponível quando ele maturar em mais %n bloco</numerusform><numerusform>Saldo minerado vai estar disponível quando ele maturar em mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Este bloco não foi recebido por nenhum outro participante da rede e provavelmente não será aceito!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Gerado mas não aceito</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Recebido por</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Recebido de</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Enviado para</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pagamento para você mesmo</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minerado</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status da transação. Passe o mouse sobre este campo para mostrar o número de confirmações.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Data e hora em que a transação foi recebida.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipo de transação.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Endereço de destino da transação.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Quantidade debitada ou creditada ao saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Todos</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Hoje</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Esta semana</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Este mês</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Mês passado</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Este ano</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Intervalo...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Recebido por</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Enviado para</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Para você mesmo</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minerado</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Outro</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Digite um endereço ou etiqueta para buscar</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Quantidade mínima</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Copiar endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copiar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiar quantia</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Copiar ID da transação</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Editar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Mostrar detalhes da transação</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Exportar Dados das Transações</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Arquivo separado por vírgulas (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Erro ao exportar</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Não foi possível gravar no arquivo %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Intervalo: </translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>até</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Enviar dinheiro</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar os dados na aba atual para um arquivo</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>Fazer cópia de segurança da Carteira</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>Dados da Carteira (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Cópia de segurança Falhou</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Houve um erro ao tentar salvar os dados da carteira para a nova localização.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>Backup feito com sucesso</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>Os dados da carteira foram salvos com sucesso na nova localização.</translation>
</message>
</context>
<context>
<name>molecule-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Molecule version</source>
<translation>Versão do Molecule</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or moleculed</source>
<translation>Enviar comando para -server ou moleculed</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Lista de comandos</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Obtenha ajuda sobre um comando</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Opções:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: molecule.conf)</source>
<translation>Especifique um arquivo de configurações (padrão: molecule.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: moleculed.pid)</source>
<translation>Especifique um arquivo de pid (padrão: moleculed.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Especificar diretório de dados</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Definir o tamanho do cache do banco de dados em megabytes (padrão: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 8333 or testnet: 18333)</source>
<translation>Escutar conexões em <port> (padrão: 8333 ou testnet: 18333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Manter no máximo <n> conexões aos peers (padrão: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Conectar a um nó para receber endereços de participantes, e desconectar.</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>Especificar seu próprio endereço público</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Limite para desconectar peers mal comportados (padrão: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Número de segundos para impedir que peers mal comportados reconectem (padrão: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>Um erro ocorreu ao configurar a porta RPC %u para escuta em IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 8332 or testnet: 18332)</source>
<translation>Escutar conexões JSON-RPC em <port> (padrão: 8332 ou testnet: 18332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aceitar linha de comando e comandos JSON-RPC</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Rodar em segundo plano como serviço e aceitar comandos</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Usar rede de teste</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Aceitar conexões externas (padrão: 1 se opções -proxy ou -connect não estiverem presentes)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=moleculerpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Molecule Alert" [email protected]
</source>
<translation>%s, você deve especificar uma senha rpcpassword no arquivo de configuração:
%s
É recomendado que você use a seguinte senha aleatória:
rpcuser=moleculerpc
rpcpassword=%s
(você não precisa lembrar esta senha)
O nome de usuário e a senha NÃO PODEM ser os mesmos.
Se o arquivo não existir, crie um com permissão de leitura apenas para o dono.
É recomendado também definir um alertnotify para que você seja notificado de problemas;
por exemplo: alertnotify=echo %%s | mail -s "Molecule Alert" [email protected]
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>Um erro ocorreu ao configurar a porta RPC %u para escuta em IPv6, voltando ao IPv4: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Vincular ao endereço fornecido e sempre escutar nele. Use a notação [host]:port para IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Molecule is probably already running.</source>
<translation>Não foi possível obter acesso exclusivo ao diretório de dados %s. O Molecule provavelmente já está rodando.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Erro: A transação foi rejeitada. Isso pode acontecer se alguns dos molecules de sua carteira já haviam sido gastos, por exemplo se você usou uma cópia do arquivo wallet.dat e alguns molecules foram gastos na cópia mas não foram marcados como gastos aqui.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>Erro: Esta transação requer uma taxa de transação de pelo menos %s, por causa de sua quantidade, complexidade ou uso de dinheiro recebido recentemente!</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>Executar comando quando um alerta relevante for recebido (%s no comando será substituído pela mensagem)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Executar comando quando uma transação da carteira mudar (%s no comando será substituído por TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Determinar tamanho máximo de transações de alta-prioridade/baixa-taxa em bytes (padrão: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Este é um build de teste pré-lançamento - use por sua conta e risco - não use para mineração ou aplicações de comércio.</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Cuidado: valor de -paytxfee escolhido é muito alto! Este é o valor da taxa de transação que você irá pagar se enviar a transação.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Cuidado: Transações mostradas podem não estar corretas! Você pode precisar atualizar, ou outros nós podem precisar atualizar o cliente.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Molecule will not work properly.</source>
<translation>Cuidado: Por favor, verifique que a data e hora do seu computador estão corretas! Se o seu relógio estiver errado, o Molecule não irá funcionar corretamente.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Cuidado: erro ao ler o arquivo wallet.dat! Todas as chaves foram lidas corretamente, mas dados de transações ou do catálogo de endereços podem estar faltando ou incorretos.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Aviso: wallet.dat corrompido, dados recuperados! Arquivo wallet.dat original salvo como wallet.{timestamp}.bak em %s; se seu saldo ou transações estiverem incorretos, você deve restaurar a partir de um backup.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Tentar recuperar chaves privadas de um arquivo wallet.dat corrompido</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>Opções de criação de blocos:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Conectar apenas a nó(s) específico(s)</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>Detectado Banco de dados de blocos corrompido</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Descobrir o próprio endereço IP (padrão: 1 quando no modo listening e opção -externalip não estiver presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Você quer reconstruir o banco de dados de blocos agora?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>Erro ao inicializar banco de dados de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>Erro ao inicializar ambiente de banco de dados de carteira %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>Erro ao carregar banco de dados de blocos</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>Erro ao abrir banco de dados de blocos</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>Erro: Espaço em disco insuficiente!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Erro: Carteira travada, impossível criar transação!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>Erro: erro de sistema: </translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Falha ao escutar em qualquer porta. Use -listen=0 se você quiser isso.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>Falha ao ler informação de bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>Falha ao ler bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>Falha ao sincronizar índice de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>Falha ao escrever índice de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>Falha ao escrever informações de bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>Falha ao escrever bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>Falha ao escrever informações de arquivo</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>Falha ao escrever banco de dados de moedas</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>Falha ao escrever índice de transações</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>Falha ao escrever dados para desfazer ações</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>Procurar pares usando consulta de DNS (padrão: 1 a menos que a opção -connect esteja presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation>Gerar moedas (padrão: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>Quantos blocos checar ao inicializar (padrão: 288, 0 = todos)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>Quão minuciosa é a verificação dos blocos (0-4, padrão: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation>Não há descritores de arquivo suficientes disponíveis.</translation>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Reconstruir índice de blockchain a partir dos arquivos atuais blk000??.dat</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>Defina o número de threads para atender chamadas RPC (padrão: 4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>Verificando blocos...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>Verificando carteira...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importar blocos de um arquivo externo blk000??.dat</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation>Definir o número de threads de verificação de script (até 16, 0 = automático, <0 = deixar essa quantidade de núcleos livres, padrão: 0)</translation>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>Informação</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Endereço -tor inválido: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Quantidade inválida para -minrelaytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Quantidade inválida para -mintxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>Manter índice completo de transações (padrão: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Buffer máximo de recebimento por conexão, <n>*1000 bytes (padrão: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Buffer máximo de envio por conexão, <n>*1000 bytes (padrão: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>Apenas aceitar cadeia de blocos correspondente a marcas de verificação internas (padrão: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Apenas conectar em nós na rede <net> (IPv4, IPv6, ou Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Mostrar informações extras de depuração. Implica em outras opções -debug*</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Mostrar informações extras de depuração da rede</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Prefixar a saída de depuração com data e hora</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Molecule Wiki for SSL setup instructions)</source>
<translation>Opções SSL: (veja a Wiki do Molecule para instruções de configuração SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Escolher versão do proxy socks a ser usada (4-5, padrão: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Mandar informação de trace/debug para o console em vez de para o arquivo debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Mandar informação de trace/debug para o debugger</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Determinar tamanho máximo de bloco em bytes (padrão: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Determinar tamanho mínimo de bloco em bytes (padrão: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Encolher arquivo debug.log ao iniciar o cliente (padrão 1 se opção -debug não estiver presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Especifique o tempo limite (timeout) da conexão em milissegundos (padrão: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>Erro de sistema: </translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Usar UPnP para mapear porta de escuta (padrão: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Usar UPnP para mapear porta de escuta (padrão: 1 quando estiver escutando)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Usar proxy para alcançar os serviços ocultos do Tor (padrão: mesmo que -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Nome de usuário para conexões JSON-RPC</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Cuidado: Esta versão está obsoleta, atualização exigida!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>Você precisa reconstruir os bancos de dados usando -reindex para mudar -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat corrompido, recuperação falhou</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Senha para conexões JSON-RPC</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Permitir conexões JSON-RPC de endereços IP específicos</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Enviar comando para nó rodando em &lt;ip&gt; (padrão: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Executar comando quando o melhor bloco mudar (%s no comando será substituído pelo hash do bloco)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Atualizar carteira para o formato mais recente</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Determinar tamanho do pool de endereços para <n> (padrão: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Re-escanear blocos procurando por transações perdidas da carteira</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Usar OpenSSL (https) para conexões JSON-RPC</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Arquivo de certificado do servidor (padrão: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Chave privada do servidor (padrão: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Algoritmos de criptografia aceitos (padrão: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Esta mensagem de ajuda</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Impossível vincular a %s neste computador (bind retornou erro %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Conectar através de um proxy socks</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permitir consultas DNS para -addnode, -seednode e -connect</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Carregando endereços...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Erro ao carregar wallet.dat: Carteira corrompida</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Molecule</source>
<translation>Erro ao carregar wallet.dat: Carteira requer uma versão mais nova do Molecule</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Molecule to complete</source>
<translation>A Carteira precisou ser reescrita: reinicie o Molecule para completar</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Erro ao carregar wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Endereço -proxy inválido: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Rede desconhecida especificada em -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Versão desconhecida do proxy -socks requisitada: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Impossível encontrar o endereço -bind: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Impossível encontrar endereço -externalip: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Quantidade inválida para -paytxfee=<quantidade>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Quantidade inválida</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Saldo insuficiente</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Carregando índice de blocos...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Adicionar um nó com o qual se conectar e tentar manter a conexão ativa</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Molecule is probably already running.</source>
<translation>Impossível vincular a %s neste computador. O Molecule provavelmente já está rodando.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Taxa por KB a ser acrescida nas transações que você enviar</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Carregando carteira...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>Não é possível fazer downgrade da carteira</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>Não foi possível escrever no endereço padrão</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Re-escaneando...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Carregamento terminado</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>Para usar a opção %s</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Erro</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Você precisa especificar rpcpassword=<senha> no arquivo de configurações:⏎
%s⏎
Se o arquivo não existir, crie um com permissão de leitura apenas pelo dono</translation>
</message>
</context>
</TS><|fim▁end|> | </message>
<message>
<location line="-78"/>
<source>Wallet</source>
|
<|file_name|>binding_files.rs<|end_file_name|><|fim▁begin|>use super::write_file;
use anyhow::{Context, Result};
use std::path::{Path, PathBuf};
use std::{fs, str};
const BINDING_CC_TEMPLATE: &'static str = include_str!("./templates/binding.cc");
const BINDING_GYP_TEMPLATE: &'static str = include_str!("./templates/binding.gyp");
const INDEX_JS_TEMPLATE: &'static str = include_str!("./templates/index.js");
const LIB_RS_TEMPLATE: &'static str = include_str!("./templates/lib.rs");
const BUILD_RS_TEMPLATE: &'static str = include_str!("./templates/build.rs");
const CARGO_TOML_TEMPLATE: &'static str = include_str!("./templates/cargo.toml");
const PACKAGE_JSON_TEMPLATE: &'static str = include_str!("./templates/package.json");
const PARSER_NAME_PLACEHOLDER: &'static str = "PARSER_NAME";
const CLI_VERSION_PLACEHOLDER: &'static str = "CLI_VERSION";
const CLI_VERSION: &'static str = env!("CARGO_PKG_VERSION");
const RUST_BINDING_VERSION: &'static str = env!("RUST_BINDING_VERSION");
const RUST_BINDING_VERSION_PLACEHOLDER: &'static str = "RUST_BINDING_VERSION";
pub fn generate_binding_files(repo_path: &Path, language_name: &str) -> Result<()> {
let bindings_dir = repo_path.join("bindings");
let dashed_language_name = language_name.replace("_", "-");
let dashed_language_name = dashed_language_name.as_str();
// Generate rust bindings if needed.
let rust_binding_dir = bindings_dir.join("rust");
create_path(&rust_binding_dir, |path| create_dir(path))?;
create_path(&rust_binding_dir.join("lib.rs").to_owned(), |path| {
generate_file(path, LIB_RS_TEMPLATE, language_name)
})?;
create_path(&rust_binding_dir.join("build.rs").to_owned(), |path| {
generate_file(path, BUILD_RS_TEMPLATE, language_name)
})?;
create_path(&repo_path.join("Cargo.toml").to_owned(), |path| {
generate_file(path, CARGO_TOML_TEMPLATE, dashed_language_name)
})?;
// Generate node bindings
let node_binding_dir = bindings_dir.join("node");
create_path(&node_binding_dir, |path| create_dir(path))?;
create_path(&node_binding_dir.join("index.js").to_owned(), |path| {
generate_file(path, INDEX_JS_TEMPLATE, language_name)
})?;
create_path(&node_binding_dir.join("binding.cc").to_owned(), |path| {
generate_file(path, BINDING_CC_TEMPLATE, language_name)
})?;
// Create binding.gyp, or update it with new binding path.
let binding_gyp_path = repo_path.join("binding.gyp");
create_path_else(
&binding_gyp_path,
|path| generate_file(path, BINDING_GYP_TEMPLATE, language_name),
|path| {
let binding_gyp =
fs::read_to_string(path).with_context(|| "Failed to read binding.gyp")?;
let old_path = "\"src/binding.cc\"";
if binding_gyp.contains(old_path) {
eprintln!("Updating binding.gyp with new binding path");
let binding_gyp = binding_gyp.replace(old_path, "\"bindings/node/binding.cc\"");
write_file(path, binding_gyp)?;
}
Ok(())
},
)?;
// Create package.json, or update it with new binding path.
let package_json_path = repo_path.join("package.json");
create_path_else(
&package_json_path,
|path| generate_file(path, PACKAGE_JSON_TEMPLATE, dashed_language_name),
|path| {
let package_json_str =
fs::read_to_string(path).with_context(|| "Failed to read package.json")?;
let mut package_json =
serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(
&package_json_str,
)
.with_context(|| "Failed to parse package.json")?;
let package_json_main = package_json.get("main");<|fim▁hole|> let package_json_needs_update = package_json_main.map_or(true, |v| {
let main_string = v.as_str();
main_string == Some("index.js") || main_string == Some("./index.js")
});
if package_json_needs_update {
eprintln!("Updating package.json with new binding path");
package_json.insert(
"main".to_string(),
serde_json::Value::String("bindings/node".to_string()),
);
let mut package_json_str = serde_json::to_string_pretty(&package_json)?;
package_json_str.push('\n');
write_file(path, package_json_str)?;
}
Ok(())
},
)?;
// Remove files from old node binding paths.
let old_index_js_path = repo_path.join("index.js");
let old_binding_cc_path = repo_path.join("src").join("binding.cc");
if old_index_js_path.exists() {
fs::remove_file(old_index_js_path).ok();
}
if old_binding_cc_path.exists() {
fs::remove_file(old_binding_cc_path).ok();
}
Ok(())
}
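// Illustrative usage sketch (an assumption, not the CLI's actual call site):
// regenerating bindings for a hypothetical grammar checkout.
//
// fn regenerate_example() -> Result<()> {
//     generate_binding_files(Path::new("tree-sitter-mylang"), "mylang")
// }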
fn generate_file(path: &Path, template: &str, language_name: &str) -> Result<()> {
write_file(
path,
template
.replace(PARSER_NAME_PLACEHOLDER, language_name)
.replace(CLI_VERSION_PLACEHOLDER, CLI_VERSION)
.replace(RUST_BINDING_VERSION_PLACEHOLDER, RUST_BINDING_VERSION),
)
}
fn create_dir(path: &Path) -> Result<()> {
fs::create_dir_all(&path)
.with_context(|| format!("Failed to create {:?}", path.to_string_lossy()))
}
fn create_path<F>(path: &PathBuf, action: F) -> Result<bool>
where
F: Fn(&PathBuf) -> Result<()>,
{
if !path.exists() {
action(path)?;
return Ok(true);
}
Ok(false)
}
fn create_path_else<T, F>(path: &PathBuf, action: T, else_action: F) -> Result<bool>
where
T: Fn(&PathBuf) -> Result<()>,
F: Fn(&PathBuf) -> Result<()>,
{
if !path.exists() {
action(path)?;
return Ok(true);
} else {
else_action(path)?;
}
Ok(false)
}<|fim▁end|> | |
<|file_name|>_deprecated_nilsimsa.py<|end_file_name|><|fim▁begin|># Copyright (C) MetaCarta, Incorporated.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
# Port of nilsimsa-20050414.rb from Ruby to Python
#
# Ported by Michael Itz at MetaCarta
#
# Original comments from Ruby version:
# ---------------------------------------------------------
# Nilsimsa hash (build 20050414)
# Ruby port (C) 2005 Martin Pirker
# released under GNU GPL V2 license
#
# inspired by Digest::Nilsimsa-0.06 from Perl CPAN and
# the original C nilsimsa-0.2.4 implementation by cmeclax
# http://ixazon.dynip.com/~cmeclax/nilsimsa.html
# ---------------------------------------------------------
"""
Computes and compares nilsimsa codes.
A nilsimsa code is something like a hash, but unlike hashes, a small
change in the message results in a small change in the nilsimsa
code. Such a function is called a locality-sensitive hash.
Python port of ruby version that was inspired by a perl version:
http://ixazon.dynip.com/~cmeclax/nilsimsa.html
"""
import sys
if sys.version_info[0] >= 3:
PY3 = True
text_type = str
else:
PY3 = False
text_type = unicode
def is_iterable_non_string(obj):
return hasattr(obj, '__iter__') and not isinstance(obj, (bytes, text_type))
# $ Id: $
# table used in computing trigram statistics
# TRAN[x] is the accumulator that should be incremented when x
# is the value observed from hashing a triplet of recently
# seen characters (done in Nilsimsa.tran3(a, b, c, n))
TRAN = [ord(x) for x in
"\x02\xD6\x9E\x6F\xF9\x1D\x04\xAB\xD0\x22\x16\x1F\xD8\x73\xA1\xAC"\
"\x3B\x70\x62\x96\x1E\x6E\x8F\x39\x9D\x05\x14\x4A\xA6\xBE\xAE\x0E"\
"\xCF\xB9\x9C\x9A\xC7\x68\x13\xE1\x2D\xA4\xEB\x51\x8D\x64\x6B\x50"\
"\x23\x80\x03\x41\xEC\xBB\x71\xCC\x7A\x86\x7F\x98\xF2\x36\x5E\xEE"\
"\x8E\xCE\x4F\xB8\x32\xB6\x5F\x59\xDC\x1B\x31\x4C\x7B\xF0\x63\x01"\
"\x6C\xBA\x07\xE8\x12\x77\x49\x3C\xDA\x46\xFE\x2F\x79\x1C\x9B\x30"\
"\xE3\x00\x06\x7E\x2E\x0F\x38\x33\x21\xAD\xA5\x54\xCA\xA7\x29\xFC"\
"\x5A\x47\x69\x7D\xC5\x95\xB5\xF4\x0B\x90\xA3\x81\x6D\x25\x55\x35"\
"\xF5\x75\x74\x0A\x26\xBF\x19\x5C\x1A\xC6\xFF\x99\x5D\x84\xAA\x66"\
"\x3E\xAF\x78\xB3\x20\x43\xC1\xED\x24\xEA\xE6\x3F\x18\xF3\xA0\x42"\
"\x57\x08\x53\x60\xC3\xC0\x83\x40\x82\xD7\x09\xBD\x44\x2A\x67\xA8"\
"\x93\xE0\xC2\x56\x9F\xD9\xDD\x85\x15\xB4\x8A\x27\x28\x92\x76\xDE"\
"\xEF\xF8\xB2\xB7\xC9\x3D\x45\x94\x4B\x11\x0D\x65\xD5\x34\x8B\x91"\
"\x0C\xFA\x87\xE9\x7C\x5B\xB1\x4D\xE5\xD4\xCB\x10\xA2\x17\x89\xBC"\
"\xDB\xB0\xE2\x97\x88\x52\xF7\x48\xD3\x61\x2C\x3A\x2B\xD1\x8C\xFB"\
"\xF1\xCD\xE4\x6A\xE7\xA9\xFD\xC4\x37\xC8\xD2\xF6\xDF\x58\x72\x4E"]
# table used in comparing bit differences between digests
# POPC[x] = <number of 1 bits in x>
# so...
# POPC[a^b] = <number of bits different between a and b>
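# e.g. POPC[0x0F ^ 0x0B] == POPC[0x04] == 1, i.e. the inputs differ in one bit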
POPC = [ord(x) for x in
"\x00\x01\x01\x02\x01\x02\x02\x03\x01\x02\x02\x03\x02\x03\x03\x04"\
"\x01\x02\x02\x03\x02\x03\x03\x04\x02\x03\x03\x04\x03\x04\x04\x05"\
"\x01\x02\x02\x03\x02\x03\x03\x04\x02\x03\x03\x04\x03\x04\x04\x05"\
"\x02\x03\x03\x04\x03\x04\x04\x05\x03\x04\x04\x05\x04\x05\x05\x06"\
"\x01\x02\x02\x03\x02\x03\x03\x04\x02\x03\x03\x04\x03\x04\x04\x05"\
"\x02\x03\x03\x04\x03\x04\x04\x05\x03\x04\x04\x05\x04\x05\x05\x06"\
"\x02\x03\x03\x04\x03\x04\x04\x05\x03\x04\x04\x05\x04\x05\x05\x06"\
"\x03\x04\x04\x05\x04\x05\x05\x06\x04\x05\x05\x06\x05\x06\x06\x07"\
"\x01\x02\x02\x03\x02\x03\x03\x04\x02\x03\x03\x04\x03\x04\x04\x05"\
"\x02\x03\x03\x04\x03\x04\x04\x05\x03\x04\x04\x05\x04\x05\x05\x06"\
"\x02\x03\x03\x04\x03\x04\x04\x05\x03\x04\x04\x05\x04\x05\x05\x06"\
"\x03\x04\x04\x05\x04\x05\x05\x06\x04\x05\x05\x06\x05\x06\x06\x07"\
"\x02\x03\x03\x04\x03\x04\x04\x05\x03\x04\x04\x05\x04\x05\x05\x06"\
"\x03\x04\x04\x05\x04\x05\x05\x06\x04\x05\x05\x06\x05\x06\x06\x07"\
"\x03\x04\x04\x05\x04\x05\x05\x06\x04\x05\x05\x06\x05\x06\x06\x07"\
"\x04\x05\x05\x06\x05\x06\x06\x07\x05\x06\x06\x07\x06\x07\x07\x08"]
class Nilsimsa(object):
"""Nilsimsa code calculator."""
def __init__(self, data=None):
"""Nilsimsa calculator, w/optional list of initial data chunks."""
self.count = 0 # num characters seen
self.acc = [0]*256 # accumulators for computing digest
self.lastch = [-1]*4 # last four seen characters (-1 until set)
if data:
if is_iterable_non_string(data):
for chunk in data:
self.update(chunk)
elif isinstance(data, (bytes, text_type)):
self.update(data)
else:
raise TypeError("Excpected string, iterable or None, got {}"
.format(type(data)))
def tran3(self, a, b, c, n):
"""Get accumulator for a transition n between chars a, b, c."""
return (((TRAN[(a+n)&255]^TRAN[b]*(n+n+1))+TRAN[(c)^TRAN[n]])&255)
def update(self, data):
"""Add data to running digest, increasing the accumulators for 0-8
triplets formed by this char and the previous 0-3 chars."""
for character in data:
if PY3:
ch = character
else:
ch = ord(character)
self.count += 1
# incr accumulators for triplets
if self.lastch[1] > -1:
self.acc[self.tran3(ch, self.lastch[0], self.lastch[1], 0)] +=1
if self.lastch[2] > -1:
self.acc[self.tran3(ch, self.lastch[0], self.lastch[2], 1)] +=1
self.acc[self.tran3(ch, self.lastch[1], self.lastch[2], 2)] +=1
if self.lastch[3] > -1:
self.acc[self.tran3(ch, self.lastch[0], self.lastch[3], 3)] +=1
self.acc[self.tran3(ch, self.lastch[1], self.lastch[3], 4)] +=1
self.acc[self.tran3(ch, self.lastch[2], self.lastch[3], 5)] +=1
self.acc[self.tran3(self.lastch[3], self.lastch[0], ch, 6)] +=1
self.acc[self.tran3(self.lastch[3], self.lastch[2], ch, 7)] +=1
# adjust last seen chars
self.lastch = [ch] + self.lastch[:3]
def digest(self):
"""Get digest of data seen thus far as a list of bytes."""
total = 0 # number of triplets seen
if self.count == 3: # 3 chars = 1 triplet
total = 1
elif self.count == 4: # 4 chars = 4 triplets
total = 4
elif self.count > 4: # otherwise 8 triplets/char less
total = 8 * self.count - 28 # 28 'missed' during 'ramp-up'
threshold = total / 256 # threshold for accumulators, using the mean
code = [0]*32 # start with all zero bits
for i in range(256): # for all 256 accumulators
if self.acc[i] > threshold: # if it meets the threshold
code[i >> 3] += 1 << (i&7) # set bit i%8 of byte i//8 (i >> 3 == i // 8; 1 << (i & 7) == 2 ** (i % 8))
return code[::-1] # reverse the byte order in result
def hexdigest(self):
"""Get digest of data seen this far as a 64-char hex string."""
return ("%02x" * 32) % tuple(self.digest())
def __str__(self):
"""Show digest for convenience."""
return self.hexdigest()
def from_file(self, filename):
"""Update running digest with content of named file."""
f = open(filename, 'rb')
while True:
data = f.read(10480)
if not data:
break
self.update(data)
f.close()
def compare(self, otherdigest, ishex=False):
"""Compute difference in bits between own digest and another.
returns -127 to 128; 128 is the same, -127 is different"""
bits = 0
myd = self.digest()
if ishex:
# convert the hex digest to a 32-tuple of unsigned one-byte ints
otherdigest = tuple([int(otherdigest[i:i+2],16) for i in range(0,63,2)])
for i in range(32):
bits += POPC[255 & myd[i] ^ otherdigest[i]]
return 128 - bits
<|fim▁hole|> """Compute difference in bits between digest1 and digest2
returns -127 to 128; 128 is the same, -127 is different"""
# convert each hex digest to a 32-tuple of unsigned one-byte ints
digest1 = tuple([int(digest1[i:i+2],16) for i in range(0,63,2)])
digest2 = tuple([int(digest2[i:i+2],16) for i in range(0,63,2)])
bits = 0
for i in range(32):
bits += POPC[255 & digest1[i] ^ digest2[i]]
return 128 - bits
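# Minimal usage sketch (illustrative only; the input strings below are
# arbitrary assumptions, not fixtures shipped with this module):
#
#   a = Nilsimsa("The quick brown fox jumps over the lazy dog")
#   b = Nilsimsa("The quick brown fox jumps over the lazy cat")
#   a.compare(b.digest())                            # close to 128: similar
#   compare_hexdigests(a.hexdigest(), b.hexdigest()) # same result via hex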
def selftest( name=None, opt=None, value=None, parser=None ):
print("running selftest...")
n1 = Nilsimsa()
n1.update("abcdefgh")
n2 = Nilsimsa(["abcd", "efgh"])
print("abcdefgh:\t%s" % str(n1.hexdigest()==\
'14c8118000000000030800000004042004189020001308014088003280000078'))
print("abcd efgh:\t%s" % str(n2.hexdigest()==\
'14c8118000000000030800000004042004189020001308014088003280000078'))
print("digest:\t\t%s" % str(n1.digest() == n2.digest()))
n1.update("ijk")
print("update(ijk):\t%s" % str(n1.hexdigest()==\
'14c811840010000c0328200108040630041890200217582d4098103280000078'))
print("compare:\t%s" % str(n1.compare(n2.digest())==109))
print("compare:\t%s" % str(n1.compare(n2.hexdigest(), ishex=True)==109))<|fim▁end|> | def compare_hexdigests( digest1, digest2 ): |
<|file_name|>app-root.d.ts<|end_file_name|><|fim▁begin|>import { ComponentFactoryResolver, ElementRef, OnInit, OpaqueToken, Renderer, ViewContainerRef } from '@angular/core';
import { App } from './app';
import { Config } from '../../config/config';
import { Ion } from '../ion';
import { OverlayPortal } from '../nav/overlay-portal';
import { Platform } from '../../platform/platform';
export declare const AppRootToken: OpaqueToken;
/**
* @private
*/
export declare class IonicApp extends Ion implements OnInit {
private _userCmp;
private _cfr;
private _platform;
private _stopScrollPlugin;
private _rafId;
_viewport: ViewContainerRef;
_modalPortal: OverlayPortal;
_overlayPortal: OverlayPortal;
_loadingPortal: OverlayPortal;
_toastPortal: OverlayPortal;
constructor(_userCmp: any, _cfr: ComponentFactoryResolver, elementRef: ElementRef, renderer: Renderer, config: Config, _platform: Platform, app: App);
ngOnInit(): void;
/**
* @private
*/
_getPortal(portal?: AppPortal): OverlayPortal;
/**
* @private
*/
_getActivePortal(): OverlayPortal;
/**<|fim▁hole|>}
export declare const enum AppPortal {
DEFAULT = 0,
MODAL = 1,
LOADING = 2,
TOAST = 3,
}<|fim▁end|> | * @private
*/
_disableScroll(shouldDisableScroll: boolean): void;
stopScroll(): Promise<boolean>; |
<|file_name|>mock.rs<|end_file_name|><|fim▁begin|>use std::{
io::{self, Cursor, Read, Write},
pin::Pin,
task::{Context, Poll},
};
use tokio::io::{AsyncRead, AsyncWrite};
/// A fake stream for testing network applications backed by buffers.
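///
/// Minimal usage sketch (illustrative; the bytes are arbitrary assumptions):
///
/// ```ignore
/// let mut stream = MockStream::new(b"server response");
/// // Drive `stream` through AsyncRead/AsyncWrite in a test, then assert on
/// // stream.written() and stream.received().
/// ```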
#[derive(Clone, Debug)]
pub struct MockStream {
written: Cursor<Vec<u8>>,
received: Cursor<Vec<u8>>,
}
impl MockStream {
/// Creates a new mock stream with nothing to read.
pub fn empty() -> MockStream {
MockStream::new(&[])
}
/// Creates a new mock stream with the specified bytes to read.
pub fn new(initial: &[u8]) -> MockStream {
MockStream {
written: Cursor::new(vec![]),
received: Cursor::new(initial.to_owned()),
}
}
/// Gets a slice of bytes representing the data that has been written.
pub fn written(&self) -> &[u8] {
self.written.get_ref()
}
<|fim▁hole|> }
}
impl AsyncRead for MockStream {
fn poll_read(
mut self: Pin<&mut Self>,
_: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
Poll::Ready(self.as_mut().received.read(buf))
}
}
impl AsyncWrite for MockStream {
fn poll_write(
mut self: Pin<&mut Self>,
_: &mut Context<'_>,
buf: &[u8],
) -> Poll<Result<usize, io::Error>> {
Poll::Ready(self.as_mut().written.write(buf))
}
fn poll_flush(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(self.as_mut().written.flush())
}
fn poll_shutdown(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(Ok(()))
}
}<|fim▁end|> | /// Gets a slice of bytes representing the data that has been received.
pub fn received(&self) -> &[u8] {
self.received.get_ref() |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from django.db import models
from modelcluster.fields import ParentalKey
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore import fields
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailadmin.edit_handlers import InlinePanel
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
<|fim▁hole|> 'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+',
)
content_panels = Page.content_panels + [
FieldPanel('content'),
ImageChooserPanel('picture'),
]
api_fields = ('content', 'picture',)<|fim▁end|> | class AboutPage(Page):
content = fields.RichTextField()
picture = models.ForeignKey( |
<|file_name|>c2.java<|end_file_name|><|fim▁begin|><|fim▁hole|> *
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package p2;
public class c2 {
int i;
public void method2() { i = 5; System.out.println("c2 method2 called"); }
}<|fim▁end|> | /*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
<|file_name|>ModalEmployer.js<|end_file_name|><|fim▁begin|>import React from "react";
import $ from "jquery";
import "bootstrap/dist/js/bootstrap.min";
import "jquery-ui/ui/widgets/datepicker";
import {
postFulltimeEmployer,
postParttimeEmployer,
putFullTimeEmployer,
putParttimeEmployer
} from "../actions/PostData";
import DropDownBtn from "../components/DropDownBtn";
import {parseBirthdayForServer} from "../utils/Utils";
import {connect} from "react-redux";
class ModalEmployer extends React.Component {
componentWillReceiveProps = (nextProps) => {
if (nextProps.data) {
this.setState(nextProps.data);
if (nextProps.data.day_salary) {
this.setState({
type: 'congNhat'
})
} else {
this.setState({
type: 'bienChe'
})
}
}
if (nextProps.data == null) {
this.setState(this.emptyObject)
}
}
emptyObject = {
"salary_level": "",
"name": "",
"month_salary": "",
"phone": "",
"birthday": "",
"allowance": "",
"department_id": this.props.departments.length > 0 ? this.props.departments[0].id : "",
day_salary: "",
}
onClickSave = () => {
let data = {
"name": this.state.name,
"phone": this.state.phone,
"birthday": parseBirthdayForServer(this.state.birthday),
"department_id": this.state.department_id,
}<|fim▁hole|> data.allowance = this.state.allowance;
if (this.props.data) {
putFullTimeEmployer(this.props.data.id, data, this.props.fetchEmployers);
}
else {
postFulltimeEmployer(data, this.props.fetchEmployers);
}
}
if (this.state.type === "congNhat") {
data.day_salary = this.state.day_salary;
if (this.props.data) {
putParttimeEmployer(this.props.data.id, data, this.props.fetchEmployers);
}
else {
postParttimeEmployer(data, this.props.fetchEmployers)
}
}
$('#create').modal('toggle');
}
constructor(props) {
super(props);
this.state = Object.assign(this.emptyObject, {type: null})
}
onChangeText = (event) => {
this.setState({
[event.target.name]: event.target.value
})
}
clickType = (type) => {
this.setState({
type
})
}
componentDidMount = () => {
$('#datepicker').datepicker({
uiLibrary: 'bootstrap4',
iconsLibrary: 'fontawesome',
onSelect: (dateText) => {
this.setState({
birthday: dateText
})
},
dateFormat: 'dd/mm/yy'
});
}
renderExtraForm = () => {
if (this.state.type) {
switch (this.state.type) {
case "bienChe":
return <div>
<label htmlFor="id">Lương tháng</label>
<div className="input-group">
<input onChange={this.onChangeText} name="month_salary" type="text" className="form-control"
id="id" aria-describedby="basic-addon3" value={this.state.month_salary}/>
</div>
<label htmlFor="id">Bậc lương</label>
<div className="input-group">
<input onChange={this.onChangeText} name="salary_level" type="text" className="form-control"
id="id" aria-describedby="basic-addon3" value={this.state.salary_level}/>
</div>
<label htmlFor="id">Phụ cấp</label>
<div className="input-group">
<input onChange={this.onChangeText} name="allowance" type="text" className="form-control"
id="id" aria-describedby="basic-addon3" value={this.state.allowance}/>
</div>
</div>
break;
case "congNhat":
return <div>
<label htmlFor="id">Lương ngày</label>
<div className="input-group">
<input name="day_salary" value={this.state.day_salary} onChange={this.onChangeText}
type="text"
className="form-control" id="id" aria-describedby="basic-addon3"/>
</div>
</div>
break;
}
}
}
onSelectDepartment = (item) => {
this.setState({
department_id: item.id
})
}
renderDropDownBtn = () => {
if (this.props.departments.length > 0) {
return (
<DropDownBtn onChange={this.onSelectDepartment} default data={this.props.departments}></DropDownBtn>
)
}
}
render() {
return (
<div className="modal fade" id="create" tabIndex="-1" role="dialog" aria-labelledby="exampleModalLabel"
aria-hidden="true">
<div className="modal-dialog" role="document">
<div className="modal-content">
<div className="modal-header">
<h5 className="modal-title" id="exampleModalLabel">Thêm nhân viên</h5>
<button type="button" className="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div className="modal-body">
<label htmlFor="name">Tên nhân viên</label>
<div className="input-group">
<input onChange={this.onChangeText} value={this.state.name} name="name" type="text"
className="form-control" id="name" aria-describedby="basic-addon3"/>
</div>
<label htmlFor="datepicker">Ngày tháng năm sinh</label>
<div className="input-group date" data-provide="datepicker">
<input onClick={this.onChangeText} onChange={this.onChangeText}
value={this.state.birthday}
name="birthday" type="text" className="form-control" id="datepicker"/>
</div>
<label htmlFor="phone">Số điện thoại</label>
<div className="input-group">
<input onChange={this.onChangeText} value={this.state.phone} name="phone" type="text"
className="form-control" id="phone" aria-describedby="basic-addon3"/>
</div>
<div className="btn-group">
<label htmlFor="name" className="margin-R10">Chọn phòng ban</label>
{this.renderDropDownBtn()}
</div>
<div className="btn-group btn-middle align-middle">
{
(() => {
if (this.props.data) {
if (this.props.data.day_salary === null) {
return (<button className="btn btn-primary"
id="bien-che">Nhân viên Biên chế
</button>)
}
else {
return (
<button className="btn btn-info"
id="cong-nhat">Nhân viên Công nhật
</button>
)
}
}
else {
let arr = [];
arr.push(<button onClick={this.clickType.bind(this, "bienChe")}
className="btn btn-primary"
id="bien-che">Nhân viên Biên chế
</button>);
arr.push(<button onClick={this.clickType.bind(this, "congNhat")}
className="btn btn-info"
id="cong-nhat">Nhân viên Công nhật
</button>);
return arr;
}
})()
}
</div>
{
this.renderExtraForm()
}
</div>
<div className="modal-footer">
<button type="button" className="btn btn-secondary" data-dismiss="modal">Hủy</button>
<button onClick={this.onClickSave} type="button" className="btn btn-primary">Lưu</button>
</div>
</div>
</div>
</div>
)
}
}
const mapStateToProps = state => {
return {
departments: state.app.departments
}
}
export default connect(mapStateToProps)(ModalEmployer)<|fim▁end|> |
if (this.state.type === "bienChe") {
data.salary_level = this.state.salary_level;
data.month_salary = this.state.month_salary; |
<|file_name|>KKSACBulkF.C<|end_file_name|><|fim▁begin|>/****************************************************************/
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* All contents are licensed under LGPL V2.1 */
/* See LICENSE for full restrictions */
/****************************************************************/
#include "KKSACBulkF.h"
template<>
InputParameters validParams<KKSACBulkF>()
{
InputParameters params = validParams<KKSACBulkBase>();
// params.addClassDescription("KKS model kernel for the Bulk Allen-Cahn. This operates on the order parameter 'eta' as the non-linear variable");
params.addRequiredParam<Real>("w", "Double well height parameter");
params.addParam<MaterialPropertyName>("g_name", "g", "Base name for the double well function g(eta)");
return params;
}
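// Note (illustrative, not enforced by this kernel): in KKS models g(eta) is
// typically the double well g = eta^2*(1-eta)^2, so this kernel contributes
// w * dg/deta to the residual on top of the switching-function term.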
KKSACBulkF::KKSACBulkF(const InputParameters & parameters) :
KKSACBulkBase(parameters),
_w(getParam<Real>("w")),
_prop_dg(getMaterialPropertyDerivative<Real>("g_name", _eta_name)),
_prop_d2g(getMaterialPropertyDerivative<Real>("g_name", _eta_name, _eta_name))
{
}
Real
KKSACBulkF::computeDFDOP(PFFunctionType type)
{
Real res = 0.0;
Real A1 = _prop_Fa[_qp] - _prop_Fb[_qp];
switch (type)
{
case Residual:
return -_prop_dh[_qp] * A1 + _w * _prop_dg[_qp];
case Jacobian:
{
res = -_prop_d2h[_qp] * A1
+ _w * _prop_d2g[_qp];<|fim▁hole|> // the -\frac{dh}{d\eta}\left(\frac{dF_a}{d\eta}-\frac{dF_b}{d\eta}\right)
// term is handled in KKSACBulkC!
return _phi[_j][_qp] * res;
}
}
mooseError("Invalid type passed in");
}
Real
KKSACBulkF::computeQpOffDiagJacobian(unsigned int jvar)
{
// get the coupled variable jvar is referring to
unsigned int cvar;
if (!mapJvarToCvar(jvar, cvar))
return 0.0;
Real res = _prop_dh[_qp] * ( (*_derivatives_Fa[cvar])[_qp]
- (*_derivatives_Fb[cvar])[_qp])
* _phi[_j][_qp];
return res * _test[_i][_qp];
}
// DEPRECATED CONSTRUCTOR
KKSACBulkF::KKSACBulkF(const std::string & deprecated_name, InputParameters parameters) :
KKSACBulkBase(deprecated_name, parameters),
_w(getParam<Real>("w")),
_prop_dg(getMaterialPropertyDerivative<Real>("g_name", _eta_name)),
_prop_d2g(getMaterialPropertyDerivative<Real>("g_name", _eta_name, _eta_name))
{
}<|fim▁end|> | |
<|file_name|>ScaleIntegerTests.cpp<|end_file_name|><|fim▁begin|>#include <catch.hpp>
#include <rapidcheck/catch.h>
using namespace rc;
TEST_CASE("scaleInteger") {
prop("for uint32_t, equal to naive way",
[] {
const auto x = *gen::arbitrary<uint32_t>();
const auto size = *gen::nonNegative<int>();
RC_ASSERT(gen::detail::scaleInteger(x, size) ==
((x * std::min<uint64_t>(kNominalSize, size) +
(kNominalSize / 2)) /
kNominalSize));
});
prop("result strictly increases with size",<|fim▁hole|> [](uint64_t x) {
const auto sizeA = *gen::nonNegative<int>();
const auto sizeB = *gen::nonNegative<int>();
const auto small = std::min(sizeA, sizeB);
const auto large = std::max(sizeA, sizeB);
RC_ASSERT(gen::detail::scaleInteger(x, small) <=
gen::detail::scaleInteger(x, large));
});
prop("result strictly increases with value",
[](uint64_t a, uint64_t b){
const auto size = *gen::nonNegative<int>();
const auto small = std::min(a, b);
const auto large = std::max(a, b);
RC_ASSERT(gen::detail::scaleInteger(small, size) <=
gen::detail::scaleInteger(large, size));
});
prop("yields input for kNominalSize",
[](uint64_t x) {
RC_ASSERT(gen::detail::scaleInteger(x, kNominalSize) == x);
});
prop("yields 0 for 0",
[](uint64_t x) { RC_ASSERT(gen::detail::scaleInteger(x, 0) == 0U); });
}<|fim▁end|> | |
<|file_name|>container.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v1
import (
"reflect"
"time"
)
type CpuSpec struct {
Limit uint64 `json:"limit"`
MaxLimit uint64 `json:"max_limit"`
Mask string `json:"mask,omitempty"`
}
type MemorySpec struct {
// The amount of memory requested. Default is unlimited (-1).
// Units: bytes.
Limit uint64 `json:"limit,omitempty"`
// The amount of guaranteed memory. Default is 0.
// Units: bytes.
Reservation uint64 `json:"reservation,omitempty"`
// The amount of swap space requested. Default is unlimited (-1).
// Units: bytes.
SwapLimit uint64 `json:"swap_limit,omitempty"`
}
type ContainerSpec struct {
// Time at which the container was created.
CreationTime time.Time `json:"creation_time,omitempty"`
HasCpu bool `json:"has_cpu"`
Cpu CpuSpec `json:"cpu,omitempty"`
HasMemory bool `json:"has_memory"`
Memory MemorySpec `json:"memory,omitempty"`
HasNetwork bool `json:"has_network"`
HasFilesystem bool `json:"has_filesystem"`
// HasDiskIo when true, indicates that DiskIo stats will be available.
HasDiskIo bool `json:"has_diskio"`
}
// Container reference contains enough information to uniquely identify a container
type ContainerReference struct {
// The absolute name of the container. This is unique on the machine.
Name string `json:"name"`
// Other names by which the container is known within a certain namespace.
// This is unique within that namespace.
Aliases []string `json:"aliases,omitempty"`
// Namespace under which the aliases of a container are unique.
// An example of a namespace is "docker" for Docker containers.
Namespace string `json:"namespace,omitempty"`
}
// Sorts by container name.
type ContainerReferenceSlice []ContainerReference
func (self ContainerReferenceSlice) Len() int { return len(self) }
func (self ContainerReferenceSlice) Swap(i, j int) { self[i], self[j] = self[j], self[i] }
func (self ContainerReferenceSlice) Less(i, j int) bool { return self[i].Name < self[j].Name }
// ContainerInfoRequest is used when users query container info through the REST API.
// It specifies how much data users want to get about a container.
type ContainerInfoRequest struct {
// Max number of stats to return. Specify -1 for all stats currently available.
// If start and end time are specified this limit is ignored.
// Default: 60
NumStats int `json:"num_stats,omitempty"`
// Start time for which to query information.
// If omitted, the beginning of time is assumed.
Start time.Time `json:"start,omitempty"`
// End time for which to query information.
// If omitted, current time is assumed.
End time.Time `json:"end,omitempty"`
}
// Returns a ContainerInfoRequest with all default values specified.
func DefaultContainerInfoRequest() ContainerInfoRequest {
return ContainerInfoRequest{
NumStats: 60,
}
}
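// Illustrative sketch (an assumption, not original code): callers typically
// start from the defaults and override individual fields.
//
// req := DefaultContainerInfoRequest()
// req.NumStats = 10 // last ten samples instead of the default sixty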
func (self *ContainerInfoRequest) Equals(other ContainerInfoRequest) bool {
return self.NumStats == other.NumStats &&
self.Start.Equal(other.Start) &&
self.End.Equal(other.End)
}
type ContainerInfo struct {
ContainerReference
// The direct subcontainers of the current container.
Subcontainers []ContainerReference `json:"subcontainers,omitempty"`
// The isolation used in the container.
Spec ContainerSpec `json:"spec,omitempty"`
// Historical statistics gathered from the container.
Stats []*ContainerStats `json:"stats,omitempty"`
}
// TODO(vmarmol): Refactor to not need this equality comparison.
// ContainerInfo may be (un)marshaled by json or other en/decoder. In that
// case, the Timestamp field in each stats/sample may not be precisely
// en/decoded. This will lead to small but acceptable differences between a
// ContainerInfo and its encode-then-decode version. Eq() is used to compare
// two ContainerInfo accepting small difference (<10ms) of Time fields.
func (self *ContainerInfo) Eq(b *ContainerInfo) bool {
// If both self and b are nil, then Eq() returns true
if self == nil {
return b == nil
}
if b == nil {
return self == nil
}
// For fields other than time.Time, we will compare them precisely.
// This would require that any slice should have same order.
if !reflect.DeepEqual(self.ContainerReference, b.ContainerReference) {
return false
}
if !reflect.DeepEqual(self.Subcontainers, b.Subcontainers) {
return false
}
if !self.Spec.Eq(&b.Spec) {
return false
}
for i, expectedStats := range b.Stats {
selfStats := self.Stats[i]
if !expectedStats.Eq(selfStats) {
return false
}
}
return true
}
func (self *ContainerSpec) Eq(b *ContainerSpec) bool {
// Creation within 1s of each other.
diff := self.CreationTime.Sub(b.CreationTime)
if (diff > time.Second) || (diff < -time.Second) {
return false
}
if self.HasCpu != b.HasCpu {
return false
}
if !reflect.DeepEqual(self.Cpu, b.Cpu) {
return false
}
if self.HasMemory != b.HasMemory {
return false
}
if !reflect.DeepEqual(self.Memory, b.Memory) {
return false
}
if self.HasNetwork != b.HasNetwork {
return false
}
if self.HasFilesystem != b.HasFilesystem {
return false
}
if self.HasDiskIo != b.HasDiskIo {
return false
}
return true
}
func (self *ContainerInfo) StatsAfter(ref time.Time) []*ContainerStats {
n := len(self.Stats) + 1
for i, s := range self.Stats {
if s.Timestamp.After(ref) {
n = i
break
}
}
if n > len(self.Stats) {
return nil
}
return self.Stats[n:]
}
func (self *ContainerInfo) StatsStartTime() time.Time {
var ret time.Time
for _, s := range self.Stats {
if s.Timestamp.Before(ret) || ret.IsZero() {
ret = s.Timestamp
}
}
return ret
}
func (self *ContainerInfo) StatsEndTime() time.Time {
var ret time.Time
for i := len(self.Stats) - 1; i >= 0; i-- {
s := self.Stats[i]
if s.Timestamp.After(ret) {
ret = s.Timestamp
}
}
return ret
}
// This mirrors kernel internal structure.
type LoadStats struct {
// Number of sleeping tasks.
NrSleeping uint64 `json:"nr_sleeping"`
// Number of running tasks.
NrRunning uint64 `json:"nr_running"`
// Number of tasks in stopped state
NrStopped uint64 `json:"nr_stopped"`
// Number of tasks in uninterruptible state
NrUninterruptible uint64 `json:"nr_uninterruptible"`
// Number of tasks waiting on IO
NrIoWait uint64 `json:"nr_io_wait"`
}
// CPU usage time statistics.
type CpuUsage struct {
// Total CPU usage.
// Units: nanoseconds
Total uint64 `json:"total"`
// Per CPU/core usage of the container.
// Unit: nanoseconds.
PerCpu []uint64 `json:"per_cpu_usage,omitempty"`
// Time spent in user space.
// Unit: nanoseconds
User uint64 `json:"user"`
// Time spent in kernel space.
// Unit: nanoseconds
System uint64 `json:"system"`
}
// All CPU usage metrics are cumulative from the creation of the container
type CpuStats struct {
Usage CpuUsage `json:"usage"`
// Smoothed average of number of runnable threads x 1000.
// We multiply by thousand to avoid using floats, but preserving precision.
// Load is smoothed over the last 10 seconds. Instantaneous value can be read
// from LoadStats.NrRunning.
LoadAverage int32 `json:"load_average"`
}
type PerDiskStats struct {
Major uint64 `json:"major"`
Minor uint64 `json:"minor"`
Stats map[string]uint64 `json:"stats"`
}
type DiskIoStats struct {
IoServiceBytes []PerDiskStats `json:"io_service_bytes,omitempty"`
IoServiced []PerDiskStats `json:"io_serviced,omitempty"`
IoQueued []PerDiskStats `json:"io_queued,omitempty"`
Sectors []PerDiskStats `json:"sectors,omitempty"`
IoServiceTime []PerDiskStats `json:"io_service_time,omitempty"`
IoWaitTime []PerDiskStats `json:"io_wait_time,omitempty"`
IoMerged []PerDiskStats `json:"io_merged,omitempty"`
IoTime []PerDiskStats `json:"io_time,omitempty"`
}
type MemoryStats struct {
// Current memory usage, this includes all memory regardless of when it was
// accessed.
// Units: Bytes.
Usage uint64 `json:"usage"`
// The amount of working set memory, this includes recently accessed memory,
// dirty memory, and kernel memory. Working set is <= "usage".
// Units: Bytes.
WorkingSet uint64 `json:"working_set"`
ContainerData MemoryStatsMemoryData `json:"container_data,omitempty"`
HierarchicalData MemoryStatsMemoryData `json:"hierarchical_data,omitempty"`
}
type MemoryStatsMemoryData struct {
Pgfault uint64 `json:"pgfault"`
Pgmajfault uint64 `json:"pgmajfault"`
}
type NetworkStats struct {
// Cumulative count of bytes received.
RxBytes uint64 `json:"rx_bytes"`
// Cumulative count of packets received.
RxPackets uint64 `json:"rx_packets"`
// Cumulative count of receive errors encountered.
RxErrors uint64 `json:"rx_errors"`
// Cumulative count of packets dropped while receiving.
RxDropped uint64 `json:"rx_dropped"`<|fim▁hole|> // Cumulative count of bytes transmitted.
TxBytes uint64 `json:"tx_bytes"`
// Cumulative count of packets transmitted.
TxPackets uint64 `json:"tx_packets"`
// Cumulative count of transmit errors encountered.
TxErrors uint64 `json:"tx_errors"`
// Cumulative count of packets dropped while transmitting.
TxDropped uint64 `json:"tx_dropped"`
}
type FsStats struct {
// The block device name associated with the filesystem.
Device string `json:"device,omitempty"`
// Number of bytes that can be consumed by the container on this filesystem.
Limit uint64 `json:"capacity"`
// Number of bytes that is consumed by the container on this filesystem.
Usage uint64 `json:"usage"`
// Number of reads completed
// This is the total number of reads completed successfully.
ReadsCompleted uint64 `json:"reads_completed"`
// Number of reads merged
// Reads and writes which are adjacent to each other may be merged for
// efficiency. Thus two 4K reads may become one 8K read before it is
// ultimately handed to the disk, and so it will be counted (and queued)
// as only one I/O. This field lets you know how often this was done.
ReadsMerged uint64 `json:"reads_merged"`
// Number of sectors read
// This is the total number of sectors read successfully.
SectorsRead uint64 `json:"sectors_read"`
// Number of milliseconds spent reading
// This is the total number of milliseconds spent by all reads (as
// measured from __make_request() to end_that_request_last()).
ReadTime uint64 `json:"read_time"`
// Number of writes completed
// This is the total number of writes completed successfully.
WritesCompleted uint64 `json:"writes_completed"`
// Number of writes merged
// See the description of reads merged.
WritesMerged uint64 `json:"writes_merged"`
// Number of sectors written
// This is the total number of sectors written successfully.
SectorsWritten uint64 `json:"sectors_written"`
// Number of milliseconds spent writing
// This is the total number of milliseconds spent by all writes (as
// measured from __make_request() to end_that_request_last()).
WriteTime uint64 `json:"write_time"`
// Number of I/Os currently in progress
// The only field that should go to zero. Incremented as requests are
// given to appropriate struct request_queue and decremented as they finish.
IoInProgress uint64 `json:"io_in_progress"`
// Number of milliseconds spent doing I/Os
// This field increases so long as field 9 is nonzero.
IoTime uint64 `json:"io_time"`
// weighted number of milliseconds spent doing I/Os
// This field is incremented at each I/O start, I/O completion, I/O
// merge, or read of these stats by the number of I/Os in progress
// (field 9) times the number of milliseconds spent doing I/O since the
// last update of this field. This can provide an easy measure of both
// I/O completion time and the backlog that may be accumulating.
WeightedIoTime uint64 `json:"weighted_io_time"`
}
type ContainerStats struct {
// The time of this stat point.
Timestamp time.Time `json:"timestamp"`
Cpu CpuStats `json:"cpu,omitempty"`
DiskIo DiskIoStats `json:"diskio,omitempty"`
Memory MemoryStats `json:"memory,omitempty"`
Network NetworkStats `json:"network,omitempty"`
// Filesystem statistics
Filesystem []FsStats `json:"filesystem,omitempty"`
// Task load stats
TaskStats LoadStats `json:"task_stats,omitempty"`
}
func timeEq(t1, t2 time.Time, tolerance time.Duration) bool {
// t1 should not be later than t2
if t1.After(t2) {
t1, t2 = t2, t1
}
diff := t2.Sub(t1)
if diff <= tolerance {
return true
}
return false
}
func durationEq(a, b time.Duration, tolerance time.Duration) bool {
if a > b {
a, b = b, a
}
diff := b - a
if diff <= tolerance {
return true
}
return false
}
const (
// 10ms, i.e. 0.01s
timePrecision time.Duration = 10 * time.Millisecond
)
// This function is useful because we do not require precise time
// representation.
func (a *ContainerStats) Eq(b *ContainerStats) bool {
if !timeEq(a.Timestamp, b.Timestamp, timePrecision) {
return false
}
return a.StatsEq(b)
}
// Checks equality of the stats values.
func (a *ContainerStats) StatsEq(b *ContainerStats) bool {
// TODO(vmarmol): Consider using this through reflection.
if !reflect.DeepEqual(a.Cpu, b.Cpu) {
return false
}
if !reflect.DeepEqual(a.Memory, b.Memory) {
return false
}
if !reflect.DeepEqual(a.DiskIo, b.DiskIo) {
return false
}
if !reflect.DeepEqual(a.Network, b.Network) {
return false
}
if !reflect.DeepEqual(a.Filesystem, b.Filesystem) {
return false
}
return true
}
// Saturate CPU usage to 0.
func calculateCpuUsage(prev, cur uint64) uint64 {
if prev > cur {
return 0
}
return cur - prev
}
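// Illustrative sketch (variable names are assumptions): deriving a single
// interval's usage from two cumulative samples.
//
// delta := calculateCpuUsage(prev.Cpu.Usage.Total, cur.Cpu.Usage.Total)
// // delta is in nanoseconds and saturates to 0 if the counter reset.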
// Event contains information general to events such as the time at which they
// occurred, their specific type, and the actual event. Event types are
// differentiated by the EventType field of Event.
type Event struct {
// the absolute container name for which the event occurred
ContainerName string `json:"container_name"`
// the time at which the event occurred
Timestamp time.Time `json:"timestamp"`
// the type of event. EventType is an enumerated type
EventType EventType `json:"event_type"`
// the original event object and all of its extraneous data, ex. an
// OomInstance
EventData EventData `json:"event_data,omitempty"`
}
// EventType is an enumerated type which lists the categories under which
// events may fall. The Event field EventType is populated by this enum.
type EventType int
const (
EventOom EventType = iota
EventContainerCreation
EventContainerDeletion
)
// Extra information about an event. Only one type will be set.
type EventData struct {
// Information about a container creation event.
Created *CreatedEventData `json:"created,omitempty"`
// Information about an OOM event.
Oom *OomEventData `json:"oom,omitempty"`
}
// Information related to a container creation event.
type CreatedEventData struct {
// Spec of the container at creation.
Spec ContainerSpec `json:"spec"`
}
// Information related to an OOM kill instance
type OomEventData struct {
// process id of the killed process
Pid int `json:"pid"`
// The name of the killed process
ProcessName string `json:"process_name"`
}<|fim▁end|> | |
<|file_name|>instance_controller.go<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package v4
import (
"encoding/json"
"github.com/apache/servicecomb-service-center/pkg/log"
"github.com/apache/servicecomb-service-center/pkg/rest"
"github.com/apache/servicecomb-service-center/pkg/util"
"github.com/apache/servicecomb-service-center/server/core"
pb "github.com/apache/servicecomb-service-center/server/core/proto"
scerr "github.com/apache/servicecomb-service-center/server/error"
"github.com/apache/servicecomb-service-center/server/rest/controller"
serviceUtil "github.com/apache/servicecomb-service-center/server/service/util"
"io/ioutil"
"net/http"
"strings"
)
type MicroServiceInstanceService struct {
//
}
func (this *MicroServiceInstanceService) URLPatterns() []rest.Route {
return []rest.Route{
{rest.HTTP_METHOD_GET, "/v4/:project/registry/instances", this.FindInstances},
{rest.HTTP_METHOD_GET, "/v4/:project/registry/microservices/:serviceId/instances", this.GetInstances},
{rest.HTTP_METHOD_GET, "/v4/:project/registry/microservices/:serviceId/instances/:instanceId", this.GetOneInstance},
{rest.HTTP_METHOD_POST, "/v4/:project/registry/microservices/:serviceId/instances", this.RegisterInstance},
{rest.HTTP_METHOD_DELETE, "/v4/:project/registry/microservices/:serviceId/instances/:instanceId", this.UnregisterInstance},
{rest.HTTP_METHOD_PUT, "/v4/:project/registry/microservices/:serviceId/instances/:instanceId/properties", this.UpdateMetadata},
{rest.HTTP_METHOD_PUT, "/v4/:project/registry/microservices/:serviceId/instances/:instanceId/status", this.UpdateStatus},
{rest.HTTP_METHOD_PUT, "/v4/:project/registry/microservices/:serviceId/instances/:instanceId/heartbeat", this.Heartbeat},
{rest.HTTP_METHOD_PUT, "/v4/:project/registry/heartbeats", this.HeartbeatSet},
}
}
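// Illustrative request shapes for the routes above (project name, IDs and
// query values are placeholder assumptions, not fixtures of this service):
//
// PUT /v4/default/registry/microservices/{serviceId}/instances/{instanceId}/heartbeat
// GET /v4/default/registry/instances?appId=myApp&serviceName=mySvc&version=latest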
func (this *MicroServiceInstanceService) RegisterInstance(w http.ResponseWriter, r *http.Request) {
message, err := ioutil.ReadAll(r.Body)
if err != nil {
log.Error("read body failed", err)
controller.WriteError(w, scerr.ErrInvalidParams, err.Error())
return
}
request := &pb.RegisterInstanceRequest{}
err = json.Unmarshal(message, request)
if err != nil {
log.Errorf(err, "Invalid json: %s", util.BytesToStringWithNoCopy(message))
controller.WriteError(w, scerr.ErrInvalidParams, "Unmarshal error")
return
}
if request.GetInstance() != nil {
request.Instance.ServiceId = r.URL.Query().Get(":serviceId")
}
resp, err := core.InstanceAPI.Register(r.Context(), request)
respInternal := resp.Response
resp.Response = nil
controller.WriteResponse(w, respInternal, resp)
}
//TODO: decide which services may update a service heartbeat. Only the service itself should be able to refresh its own heartbeat; how do we block forged heartbeat updates sent by other services?
func (this *MicroServiceInstanceService) Heartbeat(w http.ResponseWriter, r *http.Request) {
query := r.URL.Query()
request := &pb.HeartbeatRequest{
ServiceId: query.Get(":serviceId"),
InstanceId: query.Get(":instanceId"),
}
resp, _ := core.InstanceAPI.Heartbeat(r.Context(), request)
controller.WriteResponse(w, resp.Response, nil)
}
func (this *MicroServiceInstanceService) HeartbeatSet(w http.ResponseWriter, r *http.Request) {
message, err := ioutil.ReadAll(r.Body)
if err != nil {
log.Error("read body failed", err)
controller.WriteError(w, scerr.ErrInvalidParams, err.Error())
return
}
request := &pb.HeartbeatSetRequest{}
err = json.Unmarshal(message, request)
if err != nil {
log.Errorf(err, "Invalid json: %s", util.BytesToStringWithNoCopy(message))
controller.WriteError(w, scerr.ErrInvalidParams, "Unmarshal error")
return
}
resp, _ := core.InstanceAPI.HeartbeatSet(r.Context(), request)
if resp.Response.Code == pb.Response_SUCCESS {
controller.WriteResponse(w, nil, nil)
return
}
respInternal := resp.Response
resp.Response = nil
controller.WriteResponse(w, respInternal, resp)
}
func (this *MicroServiceInstanceService) UnregisterInstance(w http.ResponseWriter, r *http.Request) {
query := r.URL.Query()
request := &pb.UnregisterInstanceRequest{
ServiceId: query.Get(":serviceId"),
InstanceId: query.Get(":instanceId"),
}
resp, _ := core.InstanceAPI.Unregister(r.Context(), request)
controller.WriteResponse(w, resp.Response, nil)
}
func (this *MicroServiceInstanceService) FindInstances(w http.ResponseWriter, r *http.Request) {
var ids []string
query := r.URL.Query()
keys := query.Get("tags")
if len(keys) > 0 {
ids = strings.Split(keys, ",")
}
request := &pb.FindInstancesRequest{
ConsumerServiceId: r.Header.Get("X-ConsumerId"),
AppId: query.Get("appId"),
ServiceName: query.Get("serviceName"),
VersionRule: query.Get("version"),
Environment: query.Get("env"),
Tags: ids,<|fim▁hole|> }
ctx := util.SetTargetDomainProject(r.Context(), r.Header.Get("X-Domain-Name"), query.Get(":project"))
resp, _ := core.InstanceAPI.Find(ctx, request)
respInternal := resp.Response
resp.Response = nil
iv, _ := ctx.Value(serviceUtil.CTX_REQUEST_REVISION).(string)
ov, _ := ctx.Value(serviceUtil.CTX_RESPONSE_REVISION).(string)
w.Header().Set(serviceUtil.HEADER_REV, ov)
if len(iv) > 0 && iv == ov {
w.WriteHeader(http.StatusNotModified)
return
}
controller.WriteResponse(w, respInternal, resp)
}
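// Illustrative request for FindInstances above (shape assumed from the
// registered route and the query/header names used in the handler; values
// are placeholders):
//
//	GET /v4/myproject/registry/instances?appId=demo&serviceName=provider&version=1.0.0&tags=red,green
//	X-ConsumerId: <consumer service id>
//	X-Domain-Name: default
//
// If the revision the consumer sent along matches the server's current
// revision, the handler answers with 304 Not Modified instead of a body.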
func (this *MicroServiceInstanceService) GetOneInstance(w http.ResponseWriter, r *http.Request) {
var ids []string
query := r.URL.Query()
keys := query.Get("tags")
if len(keys) > 0 {
ids = strings.Split(keys, ",")
}
request := &pb.GetOneInstanceRequest{
ConsumerServiceId: r.Header.Get("X-ConsumerId"),
ProviderServiceId: query.Get(":serviceId"),
ProviderInstanceId: query.Get(":instanceId"),
Tags: ids,
}
resp, _ := core.InstanceAPI.GetOneInstance(r.Context(), request)
respInternal := resp.Response
resp.Response = nil
controller.WriteResponse(w, respInternal, resp)
}
func (this *MicroServiceInstanceService) GetInstances(w http.ResponseWriter, r *http.Request) {
var ids []string
query := r.URL.Query()
keys := query.Get("tags")
if len(keys) > 0 {
ids = strings.Split(keys, ",")
}
request := &pb.GetInstancesRequest{
ConsumerServiceId: r.Header.Get("X-ConsumerId"),
ProviderServiceId: query.Get(":serviceId"),
Tags: ids,
}
resp, _ := core.InstanceAPI.GetInstances(r.Context(), request)
respInternal := resp.Response
resp.Response = nil
controller.WriteResponse(w, respInternal, resp)
}
func (this *MicroServiceInstanceService) UpdateStatus(w http.ResponseWriter, r *http.Request) {
query := r.URL.Query()
status := query.Get("value")
request := &pb.UpdateInstanceStatusRequest{
ServiceId: query.Get(":serviceId"),
InstanceId: query.Get(":instanceId"),
Status: status,
}
resp, _ := core.InstanceAPI.UpdateStatus(r.Context(), request)
controller.WriteResponse(w, resp.Response, nil)
}
func (this *MicroServiceInstanceService) UpdateMetadata(w http.ResponseWriter, r *http.Request) {
query := r.URL.Query()
message, err := ioutil.ReadAll(r.Body)
if err != nil {
log.Error("read body failed", err)
controller.WriteError(w, scerr.ErrInvalidParams, err.Error())
return
}
request := &pb.UpdateInstancePropsRequest{
ServiceId: query.Get(":serviceId"),
InstanceId: query.Get(":instanceId"),
}
err = json.Unmarshal(message, request)
if err != nil {
log.Errorf(err, "Invalid json: %s", util.BytesToStringWithNoCopy(message))
controller.WriteError(w, scerr.ErrInvalidParams, "Unmarshal error")
return
}
resp, err := core.InstanceAPI.UpdateInstanceProperties(r.Context(), request)
controller.WriteResponse(w, resp.Response, nil)
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | 'use strict';
module.exports = require('../lib/server'); |
<|file_name|>dynamic_component_loader_spec.ts<|end_file_name|><|fim▁begin|>import {
AsyncTestCompleter,
beforeEach,
ddescribe,
xdescribe,
describe,
el,
dispatchEvent,
expect,
iit,
inject,
beforeEachBindings,
it,
xit,
viewRootNodes
} from 'angular2/test_lib';
import {TestBed, ViewProxy} from 'angular2/src/test_lib/test_bed';
import {Injector} from 'angular2/di';
import {Component, View} from 'angular2/annotations';
import * as viewAnn from 'angular2/src/core/annotations_impl/view';
import {DynamicComponentLoader} from 'angular2/src/core/compiler/dynamic_component_loader';
import {ElementRef} from 'angular2/src/core/compiler/element_ref';
import {NgIf} from 'angular2/src/directives/ng_if';
import {DomRenderer, DOCUMENT_TOKEN} from 'angular2/src/render/dom/dom_renderer';
import {DOM} from 'angular2/src/dom/dom_adapter';
import {AppViewManager} from 'angular2/src/core/compiler/view_manager';
export function main() {
describe('DynamicComponentLoader', function() {
describe("loading into existing location", () => {
it('should work', inject([TestBed, AsyncTestCompleter], (tb, async) => {
tb.overrideView(MyComp, new viewAnn.View({
template: '<dynamic-comp #dynamic></dynamic-comp>',
directives: [DynamicComp]
}));
tb.createView(MyComp).then((view) => {
var dynamicComponent = view.rawView.locals.get("dynamic");
expect(dynamicComponent).toBeAnInstanceOf(DynamicComp);
dynamicComponent.done.then((_) => {
view.detectChanges();
expect(view.rootNodes).toHaveText('hello');
async.done();
});
});
}));
it('should inject dependencies of the dynamically-loaded component',
inject([TestBed, AsyncTestCompleter], (tb, async) => {
tb.overrideView(MyComp, new viewAnn.View({
template: '<dynamic-comp #dynamic></dynamic-comp>',
directives: [DynamicComp]
}));
tb.createView(MyComp).then((view) => {
var dynamicComponent = view.rawView.locals.get("dynamic");
dynamicComponent.done.then((ref) => {
expect(ref.instance.dynamicallyCreatedComponentService)
.toBeAnInstanceOf(DynamicallyCreatedComponentService);
async.done();
});
});
}));
it('should allow to destroy and create them via viewcontainer directives',
inject([TestBed, AsyncTestCompleter], (tb, async) => {
tb.overrideView(MyComp, new viewAnn.View({
template:
'<div><dynamic-comp #dynamic template="ng-if: ctxBoolProp"></dynamic-comp></div>',
directives: [DynamicComp, NgIf]
}));
tb.createView(MyComp).then((view) => {
view.context.ctxBoolProp = true;
view.detectChanges();
var dynamicComponent = view.rawView.viewContainers[0].views[0].locals.get("dynamic");
var promise = dynamicComponent.done.then((_) => {
view.detectChanges();
expect(view.rootNodes).toHaveText('hello');
view.context.ctxBoolProp = false;
view.detectChanges();
expect(view.rawView.viewContainers[0].views.length).toBe(0);
expect(view.rootNodes).toHaveText('');
view.context.ctxBoolProp = true;
view.detectChanges();
var dynamicComponent = view.rawView.viewContainers[0].views[0].locals.get("dynamic");
return dynamicComponent.done;
});
promise.then((_) => {
view.detectChanges();
expect(view.rootNodes).toHaveText('hello');
async.done();
});
});
}));
});
describe("loading next to an existing location", () => {
it('should work',
inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (loader, tb, async) => {
tb.overrideView(
MyComp,
new viewAnn.View(
{template: '<div><location #loc></location></div>', directives: [Location]}));
tb.createView(MyComp).then((view) => {
var location = view.rawView.locals.get("loc");
loader.loadNextToExistingLocation(DynamicallyLoaded, location.elementRef)
.then(ref => {
expect(view.rootNodes).toHaveText("Location;DynamicallyLoaded;");
async.done();
});
});
}));
it('should return a disposable component ref',
inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (loader, tb, async) => {
tb.overrideView(
MyComp,
new viewAnn.View(
{template: '<div><location #loc></location></div>', directives: [Location]}));
tb.createView(MyComp).then((view) => {
var location = view.rawView.locals.get("loc");
loader.loadNextToExistingLocation(DynamicallyLoaded, location.elementRef)
.then(ref => {
loader.loadNextToExistingLocation(DynamicallyLoaded2, location.elementRef)
.then(ref2 => {
expect(view.rootNodes)
.toHaveText("Location;DynamicallyLoaded;DynamicallyLoaded2;")
ref2.dispose();
expect(view.rootNodes)
.toHaveText("Location;DynamicallyLoaded;")
async.done();
});
});
});
}));
it('should update host properties',
inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (loader, tb, async) => {
tb.overrideView(
MyComp,
new viewAnn.View(
{template: '<div><location #loc></location></div>', directives: [Location]}));
tb.createView(MyComp).then((view) => {
var location = view.rawView.locals.get("loc");
loader.loadNextToExistingLocation(DynamicallyLoadedWithHostProps, location.elementRef)
.then(ref => {
ref.instance.id = "new value";
view.detectChanges();
var newlyInsertedElement = DOM.childNodesAsList(view.rootNodes[0])[1];
expect(newlyInsertedElement.id)
.toEqual("new value")
async.done();
});
});
}));
});
describe('loading into a new location', () => {
it('should allow to create, update and destroy components',
inject([TestBed, AsyncTestCompleter], (tb, async) => {
tb.overrideView(MyComp, new viewAnn.View({
template: '<imp-ng-cmp #impview></imp-ng-cmp>',
directives: [ImperativeViewComponentUsingNgComponent]
}));
tb.createView(MyComp).then((view) => {
var userViewComponent = view.rawView.locals.get("impview");
userViewComponent.done.then((childComponentRef) => {
view.detectChanges();
expect(view.rootNodes).toHaveText('hello');
childComponentRef.instance.ctxProp = 'new';
view.detectChanges();
expect(view.rootNodes).toHaveText('new');
childComponentRef.dispose();
expect(view.rootNodes).toHaveText('');
async.done();
});
});
}));
});
describe('loadAsRoot', () => {
it('should allow to create, update and destroy components',
inject([TestBed, AsyncTestCompleter, DynamicComponentLoader, DOCUMENT_TOKEN, Injector],
(tb, async, dcl, doc, injector) => {
var rootEl = el('<child-cmp></child-cmp>');
DOM.appendChild(doc.body, rootEl);
dcl.loadAsRoot(ChildComp, null, injector)
.then((componentRef) => {
var view = new ViewProxy(componentRef);
expect(rootEl.parentNode).toBe(doc.body);
view.detectChanges();
expect(rootEl).toHaveText('hello');
componentRef.instance.ctxProp = 'new';
view.detectChanges();
expect(rootEl).toHaveText('new');
componentRef.dispose();
expect(rootEl).toHaveText('');
expect(rootEl.parentNode).toBe(doc.body);
async.done();
});
}));
});
});
}
@Component({selector: 'imp-ng-cmp'})
@View({template: ''})
class ImperativeViewComponentUsingNgComponent {
done;
constructor(self: ElementRef, dynamicComponentLoader: DynamicComponentLoader,
viewManager: AppViewManager, renderer: DomRenderer) {
var div = el('<div id="impHost"></div>');
var shadowViewRef = viewManager.getComponentView(self);
renderer.setComponentViewRootNodes(shadowViewRef.render, [div]);
this.done = dynamicComponentLoader.loadIntoNewLocation(ChildComp, self, null)
.then((componentRef) => {
var element = renderer.getRootNodes(componentRef.hostView.render)[0];
DOM.appendChild(div, element);
return componentRef;
});
}
}
@Component({
selector: 'child-cmp',
})
@View({template: '{{ctxProp}}'})
class ChildComp {
ctxProp: string;
constructor() { this.ctxProp = 'hello'; }
}
class DynamicallyCreatedComponentService {}<|fim▁hole|>@Component({selector: 'dynamic-comp'})
class DynamicComp {
done;
constructor(loader: DynamicComponentLoader, location: ElementRef) {
this.done = loader.loadIntoExistingLocation(DynamicallyCreatedCmp, location);
}
}
@Component({selector: 'hello-cmp', appInjector: [DynamicallyCreatedComponentService]})
@View({template: "{{greeting}}"})
class DynamicallyCreatedCmp {
greeting: string;
dynamicallyCreatedComponentService: DynamicallyCreatedComponentService;
constructor(a: DynamicallyCreatedComponentService) {
this.greeting = "hello";
this.dynamicallyCreatedComponentService = a;
}
}
@Component({selector: 'dummy'})
@View({template: "DynamicallyLoaded;"})
class DynamicallyLoaded {
}
@Component({selector: 'dummy'})
@View({template: "DynamicallyLoaded2;"})
class DynamicallyLoaded2 {
}
@Component({selector: 'dummy', hostProperties: {'id': 'id'}})
@View({template: "DynamicallyLoadedWithHostProps;"})
class DynamicallyLoadedWithHostProps {
id: string;
constructor() { this.id = "default"; }
}
@Component({selector: 'location'})
@View({template: "Location;"})
class Location {
elementRef: ElementRef;
constructor(elementRef: ElementRef) { this.elementRef = elementRef; }
}
@Component({selector: 'my-comp'})
@View({directives: []})
class MyComp {
ctxBoolProp: boolean;
constructor() { this.ctxBoolProp = false; }
}<|fim▁end|> | |
<|file_name|>failing.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Show the current failures in the repository."""
import sys
from cliff import command
import testtools
from stestr import output
from stestr.repository import util
from stestr import results
from stestr import user_config
class Failing(command.Command):
"""Show the current failures known by the repository.
Without --subunit, the process exit code will be non-zero if the
previous test run was not successful and test failures are shown. But,
with --subunit, the process exit code is non-zero only if the subunit
stream could not be generated successfully from any failures. The test
results and run status are included in the subunit stream emitted for
the failed tests, so the stream should be used for interpretting the
failing tests. If no subunit stream is emitted with --subunit and a
zero exit code then there were no failures in the most recent run in
the repository.
"""
def get_parser(self, prog_name):
parser = super().get_parser(prog_name)
parser.add_argument(
"--subunit", action="store_true",
default=False, help="Show output as a subunit stream.")
parser.add_argument(
"--list", action="store_true",
default=False, help="Show only a list of failing tests.")
return parser
def take_action(self, parsed_args):
user_conf = user_config.get_user_config(self.app_args.user_config)
args = parsed_args
if getattr(user_conf, 'failing', False):
list_opt = args.list or user_conf.failing.get('list', False)
else:
list_opt = args.list
return failing(repo_type=self.app_args.repo_type,
repo_url=self.app_args.repo_url,
list_tests=list_opt, subunit=args.subunit)
def _show_subunit(run):
stream = run.get_subunit_stream()
if getattr(sys.stdout, 'buffer', False):
sys.stdout.buffer.write(stream.read())
else:
sys.stdout.write(stream.read())
return 0
def _make_result(repo, list_tests=False, stdout=sys.stdout):
if list_tests:
list_result = testtools.StreamSummary()
return list_result, list_result
else:
def _get_id():
return repo.get_latest_run().get_id()
output_result = results.CLITestResult(_get_id,
stdout, None)
summary_result = output_result.get_summary()
return output_result, summary_result
def failing(repo_type='file', repo_url=None, list_tests=False, subunit=False,
stdout=sys.stdout):
"""Print the failing tests from the most recent run in the repository
This function will print to STDOUT whether there are any tests that failed
in the last run. It optionally will print the test_ids for the failing
tests if ``list_tests`` is true. If ``subunit`` is true a subunit stream
with just the failed tests will be printed to STDOUT.
    Note this function depends on the cwd for the repository: if `repo_type`
    is set to 'file' and `repo_url` is not specified, it will use the
    repository located at CWD/.stestr
:param str repo_type: This is the type of repository to use. Valid choices
are 'file' and 'sql'.
:param str repo_url: The url of the repository to use.
    :param bool list_tests: Show only a list of failing tests.
:param bool subunit: Show output as a subunit stream.
:param file stdout: The output file to write all output to. By default
this is sys.stdout
:return return_code: The exit code for the command. 0 for success and > 0
for failures.
:rtype: int
"""
if repo_type not in ['file', 'sql']:
        stdout.write('Repository type %s is not a valid type' % repo_type)
return 1
repo = util.get_repo_open(repo_type, repo_url)
run = repo.get_failing()
if subunit:
return _show_subunit(run)
case = run.get_test()
failed = False
result, summary = _make_result(repo, list_tests=list_tests)
result.startTestRun()
try:
case.run(result)
finally:
result.stopTestRun()
failed = not results.wasSuccessful(summary)
if failed:
result = 1
else:
result = 0
if list_tests:
failing_tests = [
test for test, _ in summary.errors + summary.failures]<|fim▁hole|> return result<|fim▁end|> | output.output_tests(failing_tests, output=stdout) |
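# Illustrative usage sketch (added): calling failing() directly from a
# script. The import path is an assumption based on this module's name and
# is not taken from the original source.
#
#     from stestr.commands.failing import failing
#
#     # Print the failing test ids from the last run in CWD/.stestr and
#     # get a non-zero code back if anything failed.
#     exit_code = failing(repo_type='file', list_tests=True)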
<|file_name|>structutils.py<|end_file_name|><|fim▁begin|>import struct
import re
from .core import NamedItemList
from copy import deepcopy
_SINGLE_MEMBER_REGEX = re.compile(r"^[@=<>!]?([0-9]*)([xcbB\?hHiIlLqQnNefdspP])$")
def __isSingleMemberFormatString(format):
return bool(_SINGLE_MEMBER_REGEX.match(format))
def formatStringForMembers(members):
formatString = ""
for member in members:
if not isinstance(member, tuple):
raise TypeError("Member list items must be specified as tuples.")
if len(member) != 2:
raise ValueError("Member tuple must have two items.")
if not isinstance(member[0], str):
raise TypeError("Member name was not specified as a string.")
if len(member[0]) < 1:
raise ValueError("Member name must not be an empty string.")
if not isinstance(member[1], str):
raise TypeError("Member format was not specified as a string.")
if not __isSingleMemberFormatString(member[1]):
raise ValueError("Member '" + member[0] + "' format string '" + member[1] + "' is not valid for a single member.")
formatString += member[1]
return formatString
def dataSizeForMembers(members):
return struct.calcsize(formatStringForMembers(members))
def dataBlobFormatString(members):
formatString = formatStringForMembers(members)
length = struct.calcsize(formatString)
return str(length) + "B"
def flattenList(data):
flatData = []
for item in data:
if isinstance(item, list):
flatData += item
elif isinstance(item, tuple):
flatData += list(item)
else:
flatData.append(item)
return flatData
class DataStruct:
"""Holds the definition for a lump of binary data."""
def __init__(self, formatStr, startZeroed=False):
if not isinstance(formatStr, str):
raise TypeError("DataStruct defString must be a string.")
self.__format = formatStr
self.__data = []
self.__requiredSize = struct.calcsize(self.__format)
if startZeroed:
self.setToZero()
<|fim▁hole|> self.parseBinaryData(b'\0' * self.binaryDataRequiredSize())
def parseBinaryData(self, binaryData, offset=0):
if not isinstance(binaryData, bytes):
raise TypeError("Binary data is not in byte format.")
data = list(struct.unpack_from(self.__format, binaryData, offset))
self.__data = self._processParsedData(data)
def exportBinaryData(self):
if len(self.__data) < 1:
raise ValueError("No data to export.")
return struct.pack(self.__format, *flattenList(self.__data))
def binaryDataRequiredSize(self):
return self.__requiredSize
def formatString(self):
return self.__format
def data(self):
return self.__data
def _processParsedData(self, data):
return data
def __repr__(self):
return repr(self.__data)
class DataStructMemberGroupInfo:
"""Analyses members of a data struct and computes which items should be
grouped together (eg. vectors, strings, etc.)."""
# Assumes that the format strings have been validated as per data struct requirements.
def __init__(self, members):
self.__members = members
# Number of items in the group for this member.
self.__groupCount = {}
# Original index in the incoming data at which this member group resides.
self.__originalIndex = {}
# Lambda to use to combine the group items. Non-existent if not applicable.
self.__combineFunc = {}
# Type of the group. Non-existent if not applicable.
self.__expectedType = {}
self.__processMembers()
# The following accessor functions use member indices, as members represented by the same format
# string may be referred to by different names.
def originalIndex(self, memberIndex):
return self.__originalIndex[memberIndex]
def groupCount(self, memberIndex):
return self.__groupCount[memberIndex]
def isGrouped(self, memberIndex):
return self.__groupCount[memberIndex] > 1
def combineFunc(self, memberIndex):
return self.__combineFunc[memberIndex]
def expectedGroupType(self, memberIndex):
return self.__expectedType[memberIndex]
def __processMembers(self):
dataIndex = 0
for memberIndex in range(0, len(self.__members)):
member = self.__members[memberIndex]
memberFormat = member[1]
self.__originalIndex[memberIndex] = dataIndex
# Set up some defaults that will get overridden if required.
self.__combineFunc[memberIndex] = lambda origItems: list(origItems)
self.__expectedType[memberIndex] = list
formatMatch = _SINGLE_MEMBER_REGEX.match(memberFormat)
groupCount = formatMatch.group(1)
groupType = formatMatch.group(2)
try:
if groupCount is None:
raise ValueError()
# This should never raise an exception, but the outer try will catch if it does.
groupCount = int(groupCount)
if groupCount < 2:
raise ValueError()
self.__groupCount[memberIndex] = groupCount
# Special case for handling strings:
if groupType == "c":
self.__combineFunc[memberIndex] = lambda origItems: b"".join(origItems)
self.__expectedType[memberIndex] = bytes
except Exception:
# If something goes wrong, this implies that the member has no group.
self.__groupCount[memberIndex] = 1
del self.__combineFunc[memberIndex]
del self.__expectedType[memberIndex]
dataIndex += self.__groupCount[memberIndex]
class NamedDataStruct(DataStruct):
"""Allows 'member names' for struct items."""
# The time taken to generate these for each instance every time one is created
# adds up. These are indexed by format string, so that we can check whether a
# group list for a set of members has already been created.
__cachedGroupInfoByFormatString = {}
# As an optimisation, the format string for the members can be passed in.
# This is not checked - it is assumed to accurately represent the list of members.
def __init__(self, members, startZeroed=False, formatString=None):
if not isinstance(members, list):
raise TypeError("Members must be specified as a list of 2-tuples.")
if formatString is None:
formatString = formatStringForMembers(members)
# This will do most of the validation, so we don't have to below.
# If we start zeroed, this must be done later after the members have been initialised.
super().__init__(formatString, False)
self.__rawMembers = members
self.__memberList = NamedItemList()
self.__memberGroupInfo = None
for member in members:
self.__memberList.append(member[0], member)
self.__generateMemberGroupInfo()
if startZeroed:
self.setToZero()
def valueByName(self, memberName):
if not isinstance(memberName, str):
raise TypeError("Member must be specified as a string.")
if not self.__memberList.hasItemWithName(memberName):
raise ValueError("Member '" + memberName + "' was not recognised.")
return self.valueByIndex(self.__memberList.nameToIndex(memberName))
def valueByIndex(self, index):
if not isinstance(index, int):
raise TypeError("Member index must be an integer.")
if index < 0 or index >= len(self):
raise ValueError("Member index " + str(index) + " is out of range.")
data = self.data()
if data is None:
raise ValueError("No member data has been set.")
return deepcopy(data[index])
def hasMemberName(self, name):
return self.__memberList.hasItemWithName(name)
def setValueByName(self, memberName, value):
if not self.hasMemberName(memberName):
raise ValueError(f"Member with name '{memberName}' does not exist.")
self.setValueByIndex(self.__memberList.nameToIndex(memberName), value)
def setValueByIndex(self, index, value):
data = self.data()
if len(data) < 1:
raise ValueError("Item is not yet initialised.")
if index < 0 or index >= len(data):
raise ValueError(f"Index {index} was out of range (expected 0-{len(data) - 1}).")
coercedValue = value
if isinstance(coercedValue, bytes):
coercedValue = [bytes([character]) for character in coercedValue]
else:
try:
# Attempt to intelligently convert to a list.
coercedValue = list(coercedValue)
            except Exception:
# Assume the value is singular and create a list instead.
coercedValue = [coercedValue]
member = self.__memberList.getItemByIndex(index)
memberFormat = member[1]
isGrouped = self.__memberGroupInfo.isGrouped(index)
if isGrouped:
groupCount = self.__memberGroupInfo.groupCount(index)
# If the type is bytes, pad the incoming data with zeroes.
if self.__memberGroupInfo.expectedGroupType(index) == bytes and \
isinstance(value, bytes) and \
len(coercedValue) < groupCount:
paddingLength = groupCount - len(coercedValue)
coercedValue += [bytes(character) for character in bytes(paddingLength)]
if len(coercedValue) != groupCount:
raise ValueError(f"Expected {groupCount} items for member format '{memberFormat}', but got {len(coercedValue)} items.")
# Try packing the data in order to validate it.
try:
struct.pack(memberFormat, *coercedValue)
        except Exception:
raise TypeError(f"Value type '{type(value)}' was incorrect. Expected member format: '{memberFormat}'.")
# If this member requires a group, use the coerced list.
# Otherwise, use the raw value (which should be singular).
data[index] = coercedValue if isGrouped else value
def __generateMemberGroupInfo(self):
# Member format strings have been validated so that they only reference one data type.
# Therefore, if the string contains a number > 1, this means it references an aggregate
# type (eg. a vector, string, etc.). We need to convert these into appropriate data types.
formatString = self.formatString()
# This function gets called every time an instance of this class is created, which is a lot.
# The time it takes to generate all of these can add up. Therefore, cache them once we have
# generated the data once.
if formatString in NamedDataStruct.__cachedGroupInfoByFormatString:
# Use cached version.
self.__memberGroupInfo = NamedDataStruct.__cachedGroupInfoByFormatString[formatString]
else:
# Create and cache.
self.__memberGroupInfo = DataStructMemberGroupInfo(self.__rawMembers)
NamedDataStruct.__cachedGroupInfoByFormatString[formatString] = self.__memberGroupInfo
def _processParsedData(self, data):
origDataList = data
newDataList = []
origIndex = 0
for memberIndex in range(0, len(self.__memberList)):
if self.__memberGroupInfo.isGrouped(memberIndex):
origIndex = self.__memberGroupInfo.originalIndex(memberIndex)
count = self.__memberGroupInfo.groupCount(memberIndex)
combineFunc = self.__memberGroupInfo.combineFunc(memberIndex)
# Generate a tuple as a group.
tupleToCopy = combineFunc(origDataList[origIndex : origIndex + count])
# Add this to the output list.
newDataList.append(tupleToCopy)
origIndex += len(tupleToCopy)
else:
                # Just copy the data over verbatim.
newDataList.append(origDataList[origIndex])
origIndex += 1
return newDataList
def __getitem__(self, key):
if isinstance(key, str):
return self.valueByName(key)
elif isinstance(key, int):
return self.valueByIndex(key)
raise TypeError("Key is of an invalid type.")
def __setitem__(self, key, value):
if isinstance(key, str):
return self.setValueByName(key, value)
elif isinstance(key, int):
return self.setValueByIndex(key, value)
raise TypeError("Key is of an invalid type.")
def __len__(self):
return len(self.__memberList)
def __repr__(self):
return repr({self.__memberList.indexToName(index): self.valueByIndex(index) for index in range(0, len(self.__memberList))})<|fim▁end|> | def setToZero(self): |
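# Illustrative usage sketch (added; the member layout below is hypothetical):
#
#     members = [("health", "I"), ("name", "8c"), ("origin", "3f")]
#     item = NamedDataStruct(members, startZeroed=True)
#     item["health"] = 100
#     item["name"] = b"Example"        # shorter bytes values are zero-padded
#     item["origin"] = [1.0, 2.0, 3.0]
#     blob = item.exportBinaryData()
#
# DataStructMemberGroupInfo groups the eight 'c' items into one bytes value
# and the three 'f' items into a list, so item["name"] reads back as the
# combined value rather than as individual characters.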
<|file_name|>momentum_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Momentum."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
class MomentumOptimizerTest(tf.test.TestCase):
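  # The assertions in these tests follow the momentum update rule applied by
  # tf.train.MomentumOptimizer:
  #
  #   accum <- momentum * accum + grad
  #   var   <- var - learning_rate * accum
  #
  # With accum starting at 0, step 1 reduces to var -= learning_rate * grad;
  # step 2 then subtracts learning_rate * (momentum * grad + grad).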
def testBasic(self):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
var0 = tf.Variable([1.0, 2.0], dtype=dtype)
var1 = tf.Variable([3.0, 4.0], dtype=dtype)
grads0 = tf.constant([0.1, 0.1], dtype=dtype)
grads1 = tf.constant([0.01, 0.01], dtype=dtype)
mom_opt = tf.train.MomentumOptimizer(learning_rate=2.0, momentum=0.9)
mom_update = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
# Check we have slots
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
self.assertEquals(slot0.get_shape(), var0.get_shape())
self.assertFalse(slot0 in tf.trainable_variables())
slot1 = mom_opt.get_slot(var1, "momentum")
self.assertEquals(slot1.get_shape(), var1.get_shape())
self.assertFalse(slot1 in tf.trainable_variables())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
        # Step 1: the momentum accumulators were 0. So we should see a normal
# update: v -= grad * learning_rate
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(np.array([0.1, 0.1]), slot0.eval())
self.assertAllCloseAccordingToType(np.array([0.01, 0.01]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(np.array([1.0 - (0.1 * 2.0),
2.0 - (0.1 * 2.0)]),
var0.eval())
self.assertAllCloseAccordingToType(np.array([3.0 - (0.01 * 2.0),
4.0 - (0.01 * 2.0)]),
var1.eval())
# Step 2: the momentum accumulators contain the previous update.
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.1 + 0.1), (0.9 * 0.1 + 0.1)]),
slot0.eval())
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]),
slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([1.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0),
2.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0)]),
var0.eval())
self.assertAllCloseAccordingToType(
np.array([2.98 - ((0.9 * 0.01 + 0.01) * 2.0),
3.98 - ((0.9 * 0.01 + 0.01) * 2.0)]),
var1.eval())
def testTensorLearningRateAndMomentum(self):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
var0 = tf.Variable([1.0, 2.0], dtype=dtype)
var1 = tf.Variable([3.0, 4.0], dtype=dtype)
grads0 = tf.constant([0.1, 0.1], dtype=dtype)
grads1 = tf.constant([0.01, 0.01], dtype=dtype)
mom_opt = tf.train.MomentumOptimizer(
learning_rate=tf.constant(2.0), momentum=tf.constant(0.9))
mom_update = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
# Check we have slots
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
self.assertEquals(slot0.get_shape(), var0.get_shape())
self.assertFalse(slot0 in tf.trainable_variables())
slot1 = mom_opt.get_slot(var1, "momentum")
self.assertEquals(slot1.get_shape(), var1.get_shape())
self.assertFalse(slot1 in tf.trainable_variables())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
        # Step 1: the momentum accumulators were 0. So we should see a normal
# update: v -= grad * learning_rate
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(np.array([0.1, 0.1]), slot0.eval())
self.assertAllCloseAccordingToType(np.array([0.01, 0.01]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(np.array([1.0 - (0.1 * 2.0),
2.0 - (0.1 * 2.0)]),
var0.eval())
self.assertAllCloseAccordingToType(np.array([3.0 - (0.01 * 2.0),
4.0 - (0.01 * 2.0)]),
var1.eval())
# Step 2: the momentum accumulators contain the previous update.
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.1 + 0.1), (0.9 * 0.1 + 0.1)]),
slot0.eval())
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]),
slot1.eval())<|fim▁hole|> # Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([1.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0),
2.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0)]),
var0.eval())
self.assertAllCloseAccordingToType(
np.array([2.98 - ((0.9 * 0.01 + 0.01) * 2.0),
3.98 - ((0.9 * 0.01 + 0.01) * 2.0)]),
var1.eval())
def _dbParamsMom01(self):
"""Return dist-belief momentum values.
    Return values have been generated from the dist-belief momentum unittest,
running with a learning rate of 0.1 and a momentum of 0.1.
These values record how a parameter vector of size 10, initialized with 0.0,
gets updated with 10 consecutive momentum steps. It uses random gradients.
Returns:
db_grad: The gradients to apply
db_out: The parameters after the momentum update.
"""
db_grad = [[]] * 10
db_out = [[]] * 10
# pylint: disable=line-too-long
db_grad[0] = [0.00096264342, 0.17914793, 0.93945462, 0.41396621, 0.53037018, 0.93197989, 0.78648776, 0.50036013, 0.55345792, 0.96722615]
db_out[0] = [-9.6264346e-05, -0.017914793, -0.093945466, -0.041396622, -0.053037018, -0.093197994, -0.078648776, -0.050036013, -0.055345792, -0.096722618]
db_grad[1] = [0.17075552, 0.88821375, 0.20873757, 0.25236958, 0.57578111, 0.15312378, 0.5513742, 0.94687688, 0.16012503, 0.22159521]
db_out[1] = [-0.017181443, -0.10852765, -0.12421377, -0.070773244, -0.11591884, -0.11783017, -0.14165108, -0.14972731, -0.076892875, -0.1285544]
db_grad[2] = [0.35077485, 0.47304362, 0.44412705, 0.44368884, 0.078527533, 0.81223965, 0.31168157, 0.43203235, 0.16792089, 0.24644311]
db_out[2] = [-0.053967446, -0.1648933, -0.1716533, -0.1180798, -0.13005978, -0.20151734, -0.17911947, -0.20289968, -0.095839672, -0.15638189]
db_grad[3] = [0.9694621, 0.75035888, 0.28171822, 0.83813518, 0.53807181, 0.3728098, 0.81454384, 0.03848977, 0.89759839, 0.93665648]
db_out[3] = [-0.15459226, -0.24556576, -0.20456907, -0.20662397, -0.18528105, -0.24716705, -0.2643207, -0.21206589, -0.18749419, -0.2528303]
db_grad[4] = [0.38578293, 0.8536852, 0.88722926, 0.66276771, 0.13678469, 0.94036359, 0.69107032, 0.81897682, 0.5433259, 0.67860287]
db_out[4] = [-0.20323303, -0.33900154, -0.29658359, -0.28175515, -0.20448165, -0.34576839, -0.34194785, -0.29488021, -0.25099224, -0.33033544]
db_grad[5] = [0.27885768, 0.76100707, 0.24625534, 0.81354135, 0.18959245, 0.48038563, 0.84163809, 0.41172323, 0.83259648, 0.44941229]
db_out[5] = [-0.23598288, -0.42444581, -0.33041057, -0.3706224, -0.22536094, -0.40366709, -0.43387437, -0.34433398, -0.34060168, -0.38302717]
db_grad[6] = [0.27233034, 0.056316052, 0.5039115, 0.24105175, 0.35697976, 0.75913221, 0.73577434, 0.16014607, 0.57500273, 0.071136251]
db_out[6] = [-0.26649091, -0.43862185, -0.38418442, -0.40361428, -0.26314685, -0.48537019, -0.51664448, -0.36529395, -0.40706289, -0.39540997]
db_grad[7] = [0.58697265, 0.2494842, 0.08106143, 0.39954534, 0.15892942, 0.12683646, 0.74053431, 0.16033, 0.66625422, 0.73515922]
db_out[7] = [-0.32823896, -0.46498787, -0.39766794, -0.446868, -0.28281838, -0.50622416, -0.59897494, -0.38342294, -0.48033443, -0.47016418]
db_grad[8] = [0.8215279, 0.41994119, 0.95172721, 0.68000203, 0.79439718, 0.43384039, 0.55561525, 0.22567581, 0.93331909, 0.29438227]
db_out[8] = [-0.41656655, -0.50961858, -0.49418902, -0.51919359, -0.36422527, -0.55169362, -0.6627695, -0.40780342, -0.58099347, -0.50707781]
db_grad[9] = [0.68297005, 0.67758518, 0.1748755, 0.13266537, 0.70697063, 0.055731893, 0.68593478, 0.50580865, 0.12602448, 0.093537711]
db_out[9] = [-0.49369633, -0.58184016, -0.52132869, -0.5396927, -0.44306302, -0.56181377, -0.73774242, -0.46082234, -0.60366184, -0.52012295]
# pylint: enable=line-too-long
return db_grad, db_out
def testLikeDistBeliefMom01(self):
with self.test_session():
db_grad, db_out = self._dbParamsMom01()
num_samples = len(db_grad)
var0 = tf.Variable([0.0] * num_samples)
grads0 = tf.constant([0.0] * num_samples)
mom_opt = tf.train.MomentumOptimizer(learning_rate=0.1, momentum=0.1)
mom_update = mom_opt.apply_gradients(zip([grads0], [var0]))
tf.initialize_all_variables().run()
for i in xrange(num_samples):
mom_update.run(feed_dict={grads0: db_grad[i]})
self.assertAllClose(np.array(db_out[i]), var0.eval())
def testSparse(self):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
var0 = tf.Variable(tf.zeros([4, 2], dtype=dtype))
var1 = tf.Variable(tf.constant(1.0, dtype, [4, 2]))
grads0 = tf.IndexedSlices(tf.constant([[.1, .1]], dtype=dtype),
tf.constant([1]),
tf.constant([4, 2]))
grads1 = tf.IndexedSlices(tf.constant([[.01, .01], [.01, .01]],
dtype=dtype),
tf.constant([2, 3]),
tf.constant([4, 2]))
mom_opt = tf.train.MomentumOptimizer(learning_rate=2.0, momentum=0.9)
mom_update = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
# Check we have slots
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
self.assertEquals(slot0.get_shape(), var0.get_shape())
slot1 = mom_opt.get_slot(var1, "momentum")
self.assertEquals(slot1.get_shape(), var1.get_shape())
# Fetch params to validate initial values
self.assertAllClose([0, 0], var0.eval()[0])
self.assertAllClose([0, 0], var0.eval()[1])
self.assertAllClose([1, 1], var1.eval()[2])
# Step 1: the momentum accumulators are 0. So we should see a normal
# update: v -= grad * learning_rate
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(
np.array([0, 0]), slot0.eval()[0])
self.assertAllCloseAccordingToType(
np.array([.1, .1]), slot0.eval()[1])
self.assertAllCloseAccordingToType(
np.array([.01, .01]), slot1.eval()[2])
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(np.array([0, 0]), var0.eval()[0])
self.assertAllCloseAccordingToType(np.array([- (0.1 * 2.0),
- (0.1 * 2.0)]),
var0.eval()[1])
self.assertAllCloseAccordingToType(np.array([1.0 - (0.01 * 2.0),
1.0 - (0.01 * 2.0)]),
var1.eval()[2])
# Step 2: the momentum accumulators contain the previous update.
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllClose(np.array([0, 0]), slot0.eval()[0])
self.assertAllCloseAccordingToType(np.array([(0.9 * 0.1 + 0.1),
(0.9 * 0.1 + 0.1)]),
slot0.eval()[1])
self.assertAllCloseAccordingToType(np.array([(0.9 * 0.01 + 0.01),
(0.9 * 0.01 + 0.01)]),
slot1.eval()[2])
# Check that the parameters have been updated.
self.assertAllClose(np.array([0, 0]), var0.eval()[0])
self.assertAllCloseAccordingToType(
np.array([- (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0),
- (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0)]),
var0.eval()[1])
self.assertAllCloseAccordingToType(
np.array([0.98 - ((0.9 * 0.01 + 0.01) * 2.0),
0.98 - ((0.9 * 0.01 + 0.01) * 2.0)]),
var1.eval()[2])
def testSharing(self):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
var0 = tf.Variable([1.0, 2.0], dtype=dtype)
var1 = tf.Variable([3.0, 4.0], dtype=dtype)
grads0 = tf.constant([0.1, 0.1], dtype=dtype)
grads1 = tf.constant([0.01, 0.01], dtype=dtype)
mom_opt = tf.train.MomentumOptimizer(learning_rate=2.0, momentum=0.9)
mom_update1 = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
mom_update2 = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
self.assertEquals(slot0.get_shape(), var0.get_shape())
slot1 = mom_opt.get_slot(var1, "momentum")
self.assertEquals(slot1.get_shape(), var1.get_shape())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
        # Step 1: the momentum accumulators were 0. So we should see a normal
# update: v -= grad * learning_rate
mom_update1.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(np.array([0.1, 0.1]), slot0.eval())
self.assertAllCloseAccordingToType(np.array([0.01, 0.01]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(np.array([1.0 - (0.1 * 2.0),
2.0 - (0.1 * 2.0)]),
var0.eval())
self.assertAllCloseAccordingToType(np.array([3.0 - (0.01 * 2.0),
4.0 - (0.01 * 2.0)]),
var1.eval())
# Step 2: the second momentum accumulators contain the previous update.
mom_update2.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.1 + 0.1), (0.9 * 0.1 + 0.1)]),
slot0.eval())
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]),
slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([1.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0),
2.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0)]),
var0.eval())
self.assertAllCloseAccordingToType(
np.array([2.98 - ((0.9 * 0.01 + 0.01) * 2.0),
3.98 - ((0.9 * 0.01 + 0.01) * 2.0)]),
var1.eval())
if __name__ == "__main__":
tf.test.main()<|fim▁end|> | |
<|file_name|>a.py<|end_file_name|><|fim▁begin|>from devassistant.command_runners import CommandRunner
from devassistant.logger import logger
class CR1(CommandRunner):
@classmethod
def matches(cls, c):
return c.comm_type == 'barbarbar'
@classmethod
def run(cls, c):
logger.info('CR1: Doing something ...')
x = c.input_res + 'bar'
return (True, x)
class CR2(CommandRunner):
@classmethod
def matches(cls, c):
return c.comm_type == 'spamspamspam'
@classmethod
def run(cls, c):
logger.info('CR2: Doing something ...')
x = c.input_res + 'spam'<|fim▁hole|><|fim▁end|> | return (True, x) |
<|file_name|>valid-triangle-number.py<|end_file_name|><|fim▁begin|>from collections import Counter
class Solution(object):
def triangleNumber(self, nums):<|fim▁hole|> """
nums = filter(None, nums)
if not nums:
return 0
c = Counter(nums)
N = max(nums)
buckets = [0] * (N + 1)
for k, cnt in c.iteritems():
buckets[k] += cnt
for i in xrange(1, N + 1):
buckets[i] += buckets[i - 1]
s = sorted(c)
ans = 0
for i, n1 in enumerate(s):
for j in xrange(i):
n2 = s[j]
n1_n2 = n1 + n2
ans += c[n1] * c[n2] * (buckets[min(n1_n2 - 1, N)] - buckets[n1])
                ans += c[n2] * (c[n1] - 1) * c[n1] / 2  # two copies of n1 plus one n2
ans += c[n1] * (c[n1] - 1) * (c[n1] - 2) / 6
ans += c[n1] * (c[n1] - 1) / 2 * (buckets[min(n1 * 2 - 1, N)] - buckets[n1])
return ans<|fim▁end|> | """
:type nums: List[int]
:rtype: int |
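# Added explanation (not in the original file): buckets[v] counts how many
# input values are <= v. For distinct values n2 < n1 the inner-loop term
# counts third sides strictly between n1 and n1 + n2, each of which closes a
# valid triangle with n2 and n1 as the two smaller sides. The remaining
# terms cover repeated values: two copies of n1 with one smaller side, three
# copies of n1, and two copies of n1 with a larger third side below 2 * n1.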
<|file_name|>all_tests_coverage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = '[email protected] (Jeff Scudder)'
import unittest
import coverage
import all_tests
import atom.core<|fim▁hole|>import atom.http_core
import atom.mock_http_core
import atom.auth
import atom.client
import gdata.gauth
import gdata.client
import gdata.data
import gdata.blogger.data
import gdata.blogger.client
import gdata.maps.data
import gdata.maps.client
import gdata.spreadsheets.data
from gdata.test_config import settings
# Ensure that coverage tests execute the live requests to the servers, but
# allow use of cached server responses to speed up repeated runs.
settings.RUN_LIVE_TESTS = True
settings.CLEAR_CACHE = False
def suite():
return unittest.TestSuite((atom_tests.core_test.suite(),))
if __name__ == '__main__':
coverage.erase()
coverage.start()
unittest.TextTestRunner().run(all_tests.suite())
coverage.stop()
coverage.report([atom.core, atom.http_core, atom.auth, atom.data,
atom.mock_http_core, atom.client, gdata.gauth, gdata.client,
gdata.core, gdata.data, gdata.blogger.data, gdata.blogger.client,
gdata.maps.data, gdata.maps.client, gdata.spreadsheets.data])<|fim▁end|> | |
<|file_name|>qgscalloutwidget.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
qgscalloutwidget.cpp
---------------------
begin : July 2019
copyright : (C) 2019 by Nyall Dawson
email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "qgscalloutwidget.h"
#include "qgsvectorlayer.h"
#include "qgsexpressioncontextutils.h"
#include "qgsunitselectionwidget.h"
#include "qgscallout.h"
#include "qgsnewauxiliaryfielddialog.h"
#include "qgsnewauxiliarylayerdialog.h"
#include "qgsauxiliarystorage.h"
QgsExpressionContext QgsCalloutWidget::createExpressionContext() const
{
if ( auto *lExpressionContext = mContext.expressionContext() )
return *lExpressionContext;
QgsExpressionContext expContext( mContext.globalProjectAtlasMapLayerScopes( vectorLayer() ) );
QgsExpressionContextScope *symbolScope = QgsExpressionContextUtils::updateSymbolScope( nullptr, new QgsExpressionContextScope() );
symbolScope->addVariable( QgsExpressionContextScope::StaticVariable( QgsExpressionContext::EXPR_SYMBOL_COLOR, QColor(), true ) );
expContext << symbolScope;
// additional scopes
const auto constAdditionalExpressionContextScopes = mContext.additionalExpressionContextScopes();
for ( const QgsExpressionContextScope &scope : constAdditionalExpressionContextScopes )
{
expContext.appendScope( new QgsExpressionContextScope( scope ) );
}
//TODO - show actual value
expContext.setOriginalValueVariable( QVariant() );
expContext.setHighlightedVariables( QStringList() << QgsExpressionContext::EXPR_ORIGINAL_VALUE << QgsExpressionContext::EXPR_SYMBOL_COLOR );
return expContext;
}
void QgsCalloutWidget::setContext( const QgsSymbolWidgetContext &context )
{
mContext = context;
const auto unitSelectionWidgets = findChildren<QgsUnitSelectionWidget *>();
for ( QgsUnitSelectionWidget *unitWidget : unitSelectionWidgets )
{
unitWidget->setMapCanvas( mContext.mapCanvas() );
}
const auto symbolButtonWidgets = findChildren<QgsSymbolButton *>();
for ( QgsSymbolButton *symbolWidget : symbolButtonWidgets )
{
symbolWidget->setMapCanvas( mContext.mapCanvas() );
symbolWidget->setMessageBar( mContext.messageBar() );
}
}
QgsSymbolWidgetContext QgsCalloutWidget::context() const
{
return mContext;
}
void QgsCalloutWidget::registerDataDefinedButton( QgsPropertyOverrideButton *button, QgsCallout::Property key )
{
button->init( key, callout()->dataDefinedProperties(), QgsCallout::propertyDefinitions(), mVectorLayer, true );
connect( button, &QgsPropertyOverrideButton::changed, this, &QgsCalloutWidget::updateDataDefinedProperty );
connect( button, &QgsPropertyOverrideButton::createAuxiliaryField, this, &QgsCalloutWidget::createAuxiliaryField );
button->registerExpressionContextGenerator( this );
}
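// Example wiring (illustrative): subclasses call this from their
// setCallout() override for every data-defined button, e.g.
//   registerDataDefinedButton( mOffsetFromAnchorDDBtn, QgsCallout::OffsetFromAnchor );
// as QgsSimpleLineCalloutWidget does further below.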
void QgsCalloutWidget::createAuxiliaryField()
{
// try to create an auxiliary layer if not yet created
if ( !mVectorLayer->auxiliaryLayer() )
{
QgsNewAuxiliaryLayerDialog dlg( mVectorLayer, this );
dlg.exec();
}
  // bail out if the auxiliary layer still does not exist
if ( !mVectorLayer->auxiliaryLayer() )
return;
QgsPropertyOverrideButton *button = qobject_cast<QgsPropertyOverrideButton *>( sender() );
QgsCallout::Property key = static_cast< QgsCallout::Property >( button->propertyKey() );
QgsPropertyDefinition def = QgsCallout::propertyDefinitions()[key];
// create property in auxiliary storage if necessary
if ( !mVectorLayer->auxiliaryLayer()->exists( def ) )
{
QgsNewAuxiliaryFieldDialog dlg( def, mVectorLayer, true, this );
if ( dlg.exec() == QDialog::Accepted )
def = dlg.propertyDefinition();
}
  // bail out if the property still does not exist
if ( !mVectorLayer->auxiliaryLayer()->exists( def ) )
return;
// update property with join field name from auxiliary storage
QgsProperty property = button->toProperty();
property.setField( QgsAuxiliaryLayer::nameFromProperty( def, true ) );
property.setActive( true );
button->updateFieldLists();
button->setToProperty( property );
callout()->dataDefinedProperties().setProperty( key, button->toProperty() );
emit changed();
}
void QgsCalloutWidget::updateDataDefinedProperty()
{
QgsPropertyOverrideButton *button = qobject_cast<QgsPropertyOverrideButton *>( sender() );
QgsCallout::Property key = static_cast< QgsCallout::Property >( button->propertyKey() );
callout()->dataDefinedProperties().setProperty( key, button->toProperty() );
emit changed();
}
/// @cond PRIVATE
//
// QgsSimpleLineCalloutWidget
//
QgsSimpleLineCalloutWidget::QgsSimpleLineCalloutWidget( QgsVectorLayer *vl, QWidget *parent )
: QgsCalloutWidget( parent, vl )
{
setupUi( this );
// Callout options - to move to custom widgets when multiple callout styles exist
mCalloutLineStyleButton->setSymbolType( QgsSymbol::Line );
mCalloutLineStyleButton->setDialogTitle( tr( "Callout Symbol" ) );
mCalloutLineStyleButton->registerExpressionContextGenerator( this );
mCalloutLineStyleButton->setLayer( vl );
mMinCalloutWidthUnitWidget->setUnits( QgsUnitTypes::RenderUnitList() << QgsUnitTypes::RenderMillimeters << QgsUnitTypes::RenderMetersInMapUnits << QgsUnitTypes::RenderMapUnits << QgsUnitTypes::RenderPixels
<< QgsUnitTypes::RenderPoints << QgsUnitTypes::RenderInches );
mOffsetFromAnchorUnitWidget->setUnits( QgsUnitTypes::RenderUnitList() << QgsUnitTypes::RenderMillimeters << QgsUnitTypes::RenderMetersInMapUnits << QgsUnitTypes::RenderMapUnits << QgsUnitTypes::RenderPixels
<< QgsUnitTypes::RenderPoints << QgsUnitTypes::RenderInches );
mOffsetFromLabelUnitWidget->setUnits( QgsUnitTypes::RenderUnitList() << QgsUnitTypes::RenderMillimeters << QgsUnitTypes::RenderMetersInMapUnits << QgsUnitTypes::RenderMapUnits << QgsUnitTypes::RenderPixels
<< QgsUnitTypes::RenderPoints << QgsUnitTypes::RenderInches );
connect( mMinCalloutWidthUnitWidget, &QgsUnitSelectionWidget::changed, this, &QgsSimpleLineCalloutWidget::minimumLengthUnitWidgetChanged );
connect( mMinCalloutLengthSpin, static_cast < void ( QDoubleSpinBox::* )( double ) > ( &QDoubleSpinBox::valueChanged ), this, &QgsSimpleLineCalloutWidget::minimumLengthChanged );
connect( mOffsetFromAnchorUnitWidget, &QgsUnitSelectionWidget::changed, this, &QgsSimpleLineCalloutWidget::offsetFromAnchorUnitWidgetChanged );
connect( mOffsetFromAnchorSpin, static_cast < void ( QDoubleSpinBox::* )( double ) > ( &QDoubleSpinBox::valueChanged ), this, &QgsSimpleLineCalloutWidget::offsetFromAnchorChanged );
connect( mOffsetFromLabelUnitWidget, &QgsUnitSelectionWidget::changed, this, &QgsSimpleLineCalloutWidget::offsetFromLabelUnitWidgetChanged );
connect( mOffsetFromLabelSpin, static_cast < void ( QDoubleSpinBox::* )( double ) > ( &QDoubleSpinBox::valueChanged ), this, &QgsSimpleLineCalloutWidget::offsetFromLabelChanged );
connect( mDrawToAllPartsCheck, &QCheckBox::toggled, this, &QgsSimpleLineCalloutWidget::drawToAllPartsToggled );
// Anchor point options
mAnchorPointComboBox->addItem( tr( "Pole of Inaccessibility" ), static_cast< int >( QgsCallout::PoleOfInaccessibility ) );
mAnchorPointComboBox->addItem( tr( "Point on Exterior" ), static_cast< int >( QgsCallout::PointOnExterior ) );
mAnchorPointComboBox->addItem( tr( "Point on Surface" ), static_cast< int >( QgsCallout::PointOnSurface ) );
mAnchorPointComboBox->addItem( tr( "Centroid" ), static_cast< int >( QgsCallout::Centroid ) );
connect( mAnchorPointComboBox, static_cast<void ( QComboBox::* )( int )>( &QComboBox::currentIndexChanged ), this, &QgsSimpleLineCalloutWidget::mAnchorPointComboBox_currentIndexChanged );
mLabelAnchorPointComboBox->addItem( tr( "Closest Point" ), static_cast< int >( QgsCallout::LabelPointOnExterior ) );
mLabelAnchorPointComboBox->addItem( tr( "Centroid" ), static_cast< int >( QgsCallout::LabelCentroid ) );
mLabelAnchorPointComboBox->addItem( tr( "Top Left" ), static_cast< int >( QgsCallout::LabelTopLeft ) );
mLabelAnchorPointComboBox->addItem( tr( "Top Center" ), static_cast< int >( QgsCallout::LabelTopMiddle ) );
mLabelAnchorPointComboBox->addItem( tr( "Top Right" ), static_cast< int >( QgsCallout::LabelTopRight ) );
mLabelAnchorPointComboBox->addItem( tr( "Left Middle" ), static_cast< int >( QgsCallout::LabelMiddleLeft ) );
mLabelAnchorPointComboBox->addItem( tr( "Right Middle" ), static_cast< int >( QgsCallout::LabelMiddleRight ) );
mLabelAnchorPointComboBox->addItem( tr( "Bottom Left" ), static_cast< int >( QgsCallout::LabelBottomLeft ) );
mLabelAnchorPointComboBox->addItem( tr( "Bottom Center" ), static_cast< int >( QgsCallout::LabelBottomMiddle ) );
mLabelAnchorPointComboBox->addItem( tr( "Bottom Right" ), static_cast< int >( QgsCallout::LabelBottomRight ) );
connect( mLabelAnchorPointComboBox, static_cast<void ( QComboBox::* )( int )>( &QComboBox::currentIndexChanged ), this, &QgsSimpleLineCalloutWidget::mLabelAnchorPointComboBox_currentIndexChanged );
connect( mCalloutLineStyleButton, &QgsSymbolButton::changed, this, &QgsSimpleLineCalloutWidget::lineSymbolChanged );
}
void QgsSimpleLineCalloutWidget::setCallout( QgsCallout *callout )
{
if ( !callout )
return;
mCallout.reset( dynamic_cast<QgsSimpleLineCallout *>( callout->clone() ) );
if ( !mCallout )
return;
mMinCalloutWidthUnitWidget->blockSignals( true );
mMinCalloutWidthUnitWidget->setUnit( mCallout->minimumLengthUnit() );
mMinCalloutWidthUnitWidget->setMapUnitScale( mCallout->minimumLengthMapUnitScale() );
mMinCalloutWidthUnitWidget->blockSignals( false );
whileBlocking( mMinCalloutLengthSpin )->setValue( mCallout->minimumLength() );
mOffsetFromAnchorUnitWidget->blockSignals( true );
mOffsetFromAnchorUnitWidget->setUnit( mCallout->offsetFromAnchorUnit() );
mOffsetFromAnchorUnitWidget->setMapUnitScale( mCallout->offsetFromAnchorMapUnitScale() );
mOffsetFromAnchorUnitWidget->blockSignals( false );
mOffsetFromLabelUnitWidget->blockSignals( true );
mOffsetFromLabelUnitWidget->setUnit( mCallout->offsetFromLabelUnit() );
mOffsetFromLabelUnitWidget->setMapUnitScale( mCallout->offsetFromLabelMapUnitScale() );
mOffsetFromLabelUnitWidget->blockSignals( false );
whileBlocking( mOffsetFromAnchorSpin )->setValue( mCallout->offsetFromAnchor() );
whileBlocking( mOffsetFromLabelSpin )->setValue( mCallout->offsetFromLabel() );
whileBlocking( mCalloutLineStyleButton )->setSymbol( mCallout->lineSymbol()->clone() );
whileBlocking( mDrawToAllPartsCheck )->setChecked( mCallout->drawCalloutToAllParts() );
whileBlocking( mAnchorPointComboBox )->setCurrentIndex( mAnchorPointComboBox->findData( static_cast< int >( callout->anchorPoint() ) ) );
whileBlocking( mLabelAnchorPointComboBox )->setCurrentIndex( mLabelAnchorPointComboBox->findData( static_cast< int >( callout->labelAnchorPoint() ) ) );
registerDataDefinedButton( mMinCalloutLengthDDBtn, QgsCallout::MinimumCalloutLength );
registerDataDefinedButton( mOffsetFromAnchorDDBtn, QgsCallout::OffsetFromAnchor );
registerDataDefinedButton( mOffsetFromLabelDDBtn, QgsCallout::OffsetFromLabel );
registerDataDefinedButton( mDrawToAllPartsDDBtn, QgsCallout::DrawCalloutToAllParts );
registerDataDefinedButton( mAnchorPointDDBtn, QgsCallout::AnchorPointPosition );
registerDataDefinedButton( mLabelAnchorPointDDBtn, QgsCallout::LabelAnchorPointPosition );
registerDataDefinedButton( mOriginXDDBtn, QgsCallout::OriginX );
registerDataDefinedButton( mOriginYDDBtn, QgsCallout::OriginY );
registerDataDefinedButton( mDestXDDBtn, QgsCallout::DestinationX );
registerDataDefinedButton( mDestYDDBtn, QgsCallout::DestinationY );
}
void QgsSimpleLineCalloutWidget::setGeometryType( QgsWkbTypes::GeometryType type )
{
bool isPolygon = type == QgsWkbTypes::PolygonGeometry;
mAnchorPointLbl->setEnabled( isPolygon );
mAnchorPointLbl->setVisible( isPolygon );
mAnchorPointComboBox->setEnabled( isPolygon );
mAnchorPointComboBox->setVisible( isPolygon );
mAnchorPointDDBtn->setEnabled( isPolygon );
mAnchorPointDDBtn->setVisible( isPolygon );
}
QgsCallout *QgsSimpleLineCalloutWidget::callout()
{
return mCallout.get();
}
void QgsSimpleLineCalloutWidget::minimumLengthChanged()
{
mCallout->setMinimumLength( mMinCalloutLengthSpin->value() );
emit changed();
}
void QgsSimpleLineCalloutWidget::minimumLengthUnitWidgetChanged()
{
mCallout->setMinimumLengthUnit( mMinCalloutWidthUnitWidget->unit() );
mCallout->setMinimumLengthMapUnitScale( mMinCalloutWidthUnitWidget->getMapUnitScale() );
emit changed();
}
void QgsSimpleLineCalloutWidget::offsetFromAnchorUnitWidgetChanged()
{
mCallout->setOffsetFromAnchorUnit( mOffsetFromAnchorUnitWidget->unit() );
mCallout->setOffsetFromAnchorMapUnitScale( mOffsetFromAnchorUnitWidget->getMapUnitScale() );
emit changed();
}
void QgsSimpleLineCalloutWidget::offsetFromAnchorChanged()
{
mCallout->setOffsetFromAnchor( mOffsetFromAnchorSpin->value() );
emit changed();
}
void QgsSimpleLineCalloutWidget::offsetFromLabelUnitWidgetChanged()
{
mCallout->setOffsetFromLabelUnit( mOffsetFromLabelUnitWidget->unit() );
mCallout->setOffsetFromLabelMapUnitScale( mOffsetFromLabelUnitWidget->getMapUnitScale() );
emit changed();
}
void QgsSimpleLineCalloutWidget::offsetFromLabelChanged()
{
mCallout->setOffsetFromLabel( mOffsetFromLabelSpin->value() );
emit changed();
}
void QgsSimpleLineCalloutWidget::lineSymbolChanged()
{
mCallout->setLineSymbol( mCalloutLineStyleButton->clonedSymbol< QgsLineSymbol >() );
emit changed();
}
void QgsSimpleLineCalloutWidget::mAnchorPointComboBox_currentIndexChanged( int index )
{
mCallout->setAnchorPoint( static_cast<QgsCallout::AnchorPoint>( mAnchorPointComboBox->itemData( index ).toInt() ) );
emit changed();
}
void QgsSimpleLineCalloutWidget::mLabelAnchorPointComboBox_currentIndexChanged( int index )
{
mCallout->setLabelAnchorPoint( static_cast<QgsCallout::LabelAnchorPoint>( mLabelAnchorPointComboBox->itemData( index ).toInt() ) );
emit changed();
}
void QgsSimpleLineCalloutWidget::drawToAllPartsToggled( bool active )
{
mCallout->setDrawCalloutToAllParts( active );
emit changed();
}
//
// QgsManhattanLineCalloutWidget
//<|fim▁hole|> : QgsSimpleLineCalloutWidget( vl, parent )
{
}
///@endcond<|fim▁end|> |
QgsManhattanLineCalloutWidget::QgsManhattanLineCalloutWidget( QgsVectorLayer *vl, QWidget *parent ) |
<|file_name|>sale_order.py<|end_file_name|><|fim▁begin|># Copyright (C) 2021 Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models
class SaleOrder(models.Model):<|fim▁hole|> def action_confirm(self):
res = super(SaleOrder, self).action_confirm()
for order in self:
order.procurement_group_id.stock_move_ids.created_production_id.write(
{"analytic_account_id": order.analytic_account_id}
)
return res<|fim▁end|> | _inherit = "sale.order"
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
The library provides a simple datastructure to access geolocated labels with an additional
elimination time t and a label size factor. The library provides method to query a set of
such labels with a bounding box and a minimum elimination time.
Copyright (C) {2017} {Filip Krumpe <[email protected]}
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#[macro_use]
extern crate lazy_static;
extern crate rand;
extern crate regex;
///
/// A module providing some primitive geo types.
///
/// A BoundingBox (BBox) is a 2 dimensional bounding box.
///
/// A Label is a point label with a given 2 dimensional position. It is linked to an osm object via
/// its osm_id and has a certain priority.
///
pub mod primitives;
///
/// A module that implements a 3 dimensional priority search tree on label data.
///
/// The 3 dimensional PST is a priority search tree where the elements are splitted alternating by
/// their x and y coordinate - similar to kd trees.
///
/// The 3d PST allows to find all labels within an half open interval:
///
/// ```text
/// (\infty, t] x [x_min, x_max] x [y_min, y_max]
/// ```
///
pub mod pst_3d;
///
/// A simple module to import data of label elimination sequences.
///
/// The module imports label elimination sequences from files of the form:
///
/// ```text
/// 5
/// lat lon osm_id priority collision_time label_length size_factor label
/// 53.143155300000004 8.9351249 3627273522 1 1.4922737369836614 3300.0 11.0 'Timmersloh'
/// 53.200157000000004 8.528893 253042611 2 1.5769136968447124 1650.0 11.0 'Farge'
/// 53.170524900000004 8.6238803 2147118476 3 2.2440622447579543 2880.0 12.0 'Vegesack'
/// 53.5522264 8.5865509 660314734 4 4.751763965397364 7260.0 22.0 'Bremerhaven'
/// 53.0758196 8.8071646 20982927 5 3686.835042292192 4320.0 24.0 'Bremen'
/// ```
///
/// Where the first line contains the number of elements<br>
///
/// The second line is a standard header<br>
///
/// Each of the following lines defines a label:<br>
/// * its position (lat, lon)<br>
/// * its collision time<br>
/// * its length<br>
/// * its size factor<br>
/// * the label string<br>
///
pub mod input;
use std::ffi::CStr;
use std::ffi::CString;
use std::os::raw::c_char;
use std::error::Error;
use std::io::prelude::*;
use std::fs::File;
///
/// C representation of a pst instance.
///
/// After initializing the pst by the C interface, a pointer DataStructure object will be returned
/// caller. The pointer should not be modified from outside!
///
/// To get data, the struct pointer must be given to the corresponding function as an argument.
///
#[repr(C)]
pub struct DataStructure {
pst: Option<pst_3d::GeoPst3d>,
}
///
/// A C representation of a label and its data.
///
/// The result of requests of the data structure will be returned as an c-array of these structs.
///
#[repr(C)]
pub struct C_Label {
x: f64,
y: f64,
t: f64,
osm_id: i64,
prio: i32,
lbl_fac: f64,
label: *mut c_char,
}
///
/// A struct represents a basic C_Label vector, i.e. its size and the data (the contained C_Label
/// objects).
///
#[repr(C)]
pub struct C_Result {
size: u64,
data: *mut C_Label,
}
///
/// Initialize a 3D PST from the file defined by input_path.
///
/// The returned pointer to the DataStructure object can be used to request data from the 3D PST.
///
/// The given file must match the format specified in the [Input Module](input/index.html).
///
#[no_mangle]
pub extern "C" fn init(input_path: *const c_char) -> Box<DataStructure> {
let c_string = unsafe { CStr::from_ptr(input_path) };
let input_path = match c_string.to_str() {
Ok(path) => path.to_string(),
Err(_) => return Box::new(DataStructure { pst: None }),
};
// debug
let log_path = "log_ds.txt";
match File::create(&log_path) {
Err(why) => println!("couldn't create {}: {}", log_path, why.description()),
Ok(mut file) => {
match file.write_all(format!("Reading ds from {}", input_path).as_bytes()) {
Err(why) => panic!("couldn't write to {}: {}", log_path, why.description()),
Ok(_) => println!("successfully wrote to {}", log_path),
};
}
}
let tree: Option<pst_3d::GeoPst3d> = match input::import_labels(&input_path) {
Ok(res) => {
println!("Successfully imported {} labels", res.len());
Some(pst_3d::GeoPst3d::new(res))
}
Err(e) => {
println!("Could not read the given input file:{}\n\t{:?}\n",
input_path,
e);
None
}
};
Box::new(DataStructure { pst: tree })
}
///
/// Check if the initialization was successfull and the returned DataStructure object is valid.
///
#[no_mangle]
pub extern "C" fn is_good(ds: &mut DataStructure) -> bool {
return ds.pst.is_some();<|fim▁hole|>}
///
/// Get the labels contained in the specified bounding box with a t value >= min_t.
///
/// The ownership of the result returned by this function is passed to the caller.
/// To safely deallocate the result pass it to the function `free_result`.
#[no_mangle]
pub extern "C" fn get_data(ds: &DataStructure,
min_t: f64,
min_x: f64,
max_x: f64,
min_y: f64,
max_y: f64)
-> C_Result {
use std::mem::forget;
let mut result;
let pointer;
let pst = match ds.pst {
Some(ref pst) => pst,
None => {
result = Vec::with_capacity(0);
let len = 0;
pointer = result.as_mut_ptr();
forget(result);
return C_Result {
size: len,
data: pointer,
};
}
};
let bb = primitives::bbox::BBox::new(min_x, min_y, max_x, max_y);
let r = pst.get(&bb, min_t);
result = Vec::with_capacity(r.len());
for e in &r {
let c_label = CString::new(e.get_label().as_str()).unwrap();
result.push(C_Label {
x: e.get_x(),
y: e.get_y(),
t: e.get_t(),
osm_id: e.get_osm_id(),
prio: e.get_prio(),
lbl_fac: e.get_label_factor(),
label: c_label.into_raw(),
});
}
result.shrink_to_fit();
let pointer = result.as_mut_ptr();
forget(result);
C_Result {
size: r.len() as u64,
data: pointer,
}
}
///
/// Deallocate a result returned by `get_data`.
///
#[no_mangle]
pub extern "C" fn free_result(res: C_Result) {
unsafe {
let vec = Vec::from_raw_parts(res.data, res.size as usize, res.size as usize);
for label in vec {
let _ = CString::from_raw(label.label);
}
}
drop(res);
}
#[cfg(test)]
mod tests {
extern crate rand;
const TEST_SIZE: usize = 500;
const TEST_COUNT: usize = 1;
use rand::{thread_rng, Rng};
use std::collections::HashSet;
use super::primitives::{bbox, label};
use super::pst_3d;
// create a random floating point number in the range -180 to 180
fn rand_lat() -> f64 {
180. * rand::random::<f64>() - 90.
}
// create a random floating point number in the range -90 to 90
fn rand_lon() -> f64 {
360. * rand::random::<f64>() - 180.
}
// create a random level instance of count many elements
fn random_label_instance(count: usize) -> Vec<label::Label> {
let mut v: Vec<label::Label> = Vec::new();
for counter in 1..count {
let lat = rand_lat();
let lon = rand_lon();
let t = rand::random::<f64>();
v.push(label::Label::new(lon,
lat,
t,
counter as i64,
counter as i32,
1.0, // label factor is not of interesst
format!("T {}", counter)));
}
v
}
// get a hash set of ids of the labels in the label list
fn get_id_set(v: &Vec<&label::Label>) -> HashSet<i64> {
let mut res = HashSet::new();
for id in v.iter().map(|l| l.get_osm_id()) {
res.insert(id);
}
res
}
// get a hash set of ids of the labels in the label list
fn get_id_set_filtered(v: &Vec<label::Label>, bbox: &bbox::BBox, t: f64) -> HashSet<i64> {
let mut res = HashSet::new();
for id in v.iter()
.filter(|l| l.get_t() >= t)
.filter(|l| bbox.is_contained(l))
.map(|l| l.get_osm_id()) {
res.insert(id);
}
res
}
#[test]
fn randomized_test() {
let instance = random_label_instance(TEST_SIZE);
let mut data_box = bbox::BBox::new_empty();
for l in &instance {
data_box.add_to_box(l);
}
let pskdt = pst_3d::Pst3d::new(instance.clone());
let mut rng = rand::thread_rng();
for _ in 0..TEST_COUNT {
let t = rand::random::<f64>();
let min_x = rng.gen_range(data_box.get_min_x(), data_box.get_max_x());
let max_x = rng.gen_range(min_x, data_box.get_max_x());
let min_y = rng.gen_range(data_box.get_min_y(), data_box.get_max_y());
let max_y = rng.gen_range(min_y, data_box.get_max_y());
let bbox = bbox::BBox::new(min_x, min_y, max_x, max_y);
let res = pskdt.get(&bbox, t);
assert!(get_id_set(&res) == get_id_set_filtered(&instance, &bbox, t));
}
}
}<|fim▁end|> | |
<|file_name|>jquery.simpler-sidebar.js<|end_file_name|><|fim▁begin|>/*! simpler-sidebar v1.4.9 (https://github.com/dcdeiv/simpler-sidebar)
** Copyright (c) 2015 - 2016 Davide Di Criscito
** Dual licensed under MIT and GPL-2.0
*/
( function( $ ) {
$.fn.simplerSidebar = function( options ) {
var cfg = $.extend( true, $.fn.simplerSidebar.settings, options );
return this.each( function() {
var align, sbw, ssbInit, ssbStyle, maskInit, maskStyle,
attr = cfg.attr,
$sidebar = $( this ),
$opener = $( cfg.opener ),
$links = cfg.sidebar.closingLinks,
duration = cfg.animation.duration,
sbMaxW = cfg.sidebar.width,
gap = cfg.sidebar.gap,
winMaxW = sbMaxW + gap,
w = $( window ).width(),
animationStart = {},
animationReset = {},
hiddenFlow = function() {
$( "body, html" ).css( "overflow", "hidden" );
},
autoFlow = function() {
$( "body, html" ).css( "overflow", "auto" );
},
activate = {
duration: duration,
easing: cfg.animation.easing,
complete: hiddenFlow
},
deactivate = {
duration: duration,
easing: cfg.animation.easing,
complete: autoFlow
},
animateOpen = function() {
$sidebar
.animate( animationStart, activate )
.attr( "data-" + attr, "active" );
$mask.fadeIn( duration );
},
animateClose = function() {
$sidebar
.animate( animationReset, deactivate )
.attr( "data-" + attr, "disabled" );
$mask.fadeOut( duration );
},
closeSidebar = function() {
var isWhat = $sidebar.attr( "data-" + attr ),
csbw = $sidebar.width();
animationReset[ align ] = -csbw;
if ( isWhat === "active" ) {
animateClose();
}
},
$mask = $( "<div>" ).attr( "data-" + attr, "mask" );
//Checking sidebar align
if ( [ undefined, "right" ].indexOf( cfg.sidebar.align ) !== -1 ) {
align = "right";
} else if ( cfg.sidebar.align === "left" ) {
align = "left";
} else {
console.log( "ERR sidebar.align: you typed \"" + cfg.sidebar.align + "\". You should choose between \"right\" or \"left\"." );
}
//Sidebar style
if ( w < winMaxW ) {
sbw = w - gap;
} else {
sbw = sbMaxW;
}
ssbInit = {
position: "fixed",
top: cfg.top,
bottom: 0,
width: sbw
};
ssbInit[ align ] = -sbw;
animationStart[ align ] = 0;
ssbStyle = $.extend( true, ssbInit, cfg.sidebar.css );
$sidebar.css( ssbStyle )
.attr( "data-" + attr, "disabled" );
//Mask style
maskInit = {
position: "fixed",
top: cfg.top,
right: 0,
bottom: 0,
left: 0,
zIndex: cfg.sidebar.css.zIndex - 1,
display: "none"
};
maskStyle = $.extend( true, maskInit, cfg.mask.css );
//Appending Mask if mask.display is true
if ( [ true, "true", false, "false" ].indexOf( cfg.mask.display) !== -1 ) {
if ( [ true, "true" ].indexOf( cfg.mask.display ) !== -1 ) {
$mask.appendTo( "body" ).css( maskStyle );
}
} else {
console.log( "ERR mask.display: you typed \"" + cfg.mask.display + "\". You should choose between true or false." );
}
//Opening and closing the Sidebar when $opener is clicked
$opener.click( function() {
var isWhat = $sidebar.attr( "data-" + attr ),
csbw = $sidebar.width();
animationReset[ align ] = -csbw;
if ( isWhat === "disabled" ) {
animateOpen();
} else if ( isWhat === "active" ) {
animateClose();<|fim▁hole|>
//Closing Sidebar when the mask is clicked
$mask.click( closeSidebar );
//Closing Sidebar when a link inside of it is clicked
$sidebar.on( "click", $links, closeSidebar );
//Adjusting width;
$( window ).resize( function() {
var rsbw, update,
isWhat = $sidebar.attr( "data-" + attr ),
nw = $( window ).width();
if ( nw < winMaxW ) {
rsbw = nw - gap;
} else {
rsbw = sbMaxW;
}
update = {
width: rsbw
};
if ( isWhat === "disabled" ) {
update[ align ] = -rsbw;
$sidebar.css( update );
} else if ( isWhat === "active" ) {
$sidebar.css( update );
}
});
});
};
$.fn.simplerSidebar.settings = {
attr: "simplersidebar",
top: 0,
animation: {
duration: 500,
easing: "swing"
},
sidebar: {
width: 300,
gap: 64,
closingLinks: "a",
css: {
zIndex: 3000
}
},
mask: {
display: true,
css: {
backgroundColor: "black",
opacity: 0.5,
filter: "Alpha(opacity=50)"
}
}
};
} )( jQuery );<|fim▁end|> | }
}); |
<|file_name|>posts.ts<|end_file_name|><|fim▁begin|>import express from 'express';
import passport from 'passport';
const router = express.Router();
import getLogger from '../log';
const log = getLogger(module);
import util from 'util';
import mongoose from '../db/mongoose';
import Post from '../model/post';
const pageSize = 10;
router.get('/index/:page', function(req, res) {
var page = Number(req.params.page);
Post.find({published : true}).select('-content').limit(pageSize)
.skip(pageSize * page).sort({ created: -1 }).exec(function (err, posts) {
if (!err) {
return res.json(posts);
} else {
res.statusCode = 500;
log.error(util.format('Internal error(%d): %s',res.statusCode,err.message));
return res.json({
error: 'Server error.'
});
}
});
});
router.get('/', function(_req, res) {
Post.find().sort({ created: -1 }).exec(function (err, posts) {
if (!err) {
return res.json(posts);
} else {
res.statusCode = 500;
log.error(util.format('Internal error(%d): %s',res.statusCode,err.message));
return res.json({
error: 'Server error.'
});
}
});
});
router.get('/:id', function(req, res) {
Post.findById(req.params.id, function (err, post) {
if(!post) {
res.statusCode = 404;
return res.json({
error: 'Not found.'
});
}
if (!err) {
return res.json({<|fim▁hole|> });
} else {
res.statusCode = 500;
log.error(util.format('Internal error(%d): %s',res.statusCode,err.message));
return res.json({
error: 'Server error.'
});
}
});
});
router.get('/slug/:slug', function(req, res) {
Post.find({ slug: req.params.slug }, function (err, post) {
if(!post) {
res.statusCode = 404;
return res.json({
error: 'Not found.'
});
}
if (!err) {
return res.json({
status: 'OK',
post: post[0]
});
} else {
res.statusCode = 500;
log.error(util.format('Internal error(%d): %s',res.statusCode,err.message));
return res.json({
error: 'Server error.'
});
}
});
});
router.post('/', passport.authenticate('bearer', { session: false }), function(req, res) {
var post = new Post({
title: req.body.title,
description: req.body.description,
content: req.body.content,
userId: mongoose.Types.ObjectId((req.user as any).id),
tags: req.body.tags,
published: req.body.published,
slug: req.body.slug
});
if (req.body.categoryId != null && req.body.categoryId != '')
post.categoryId = mongoose.Types.ObjectId(req.body.categoryId);
post.save(function (err) {
if (!err) {
log.info(util.format("New post created with id: %s", post.id));
return res.json({
status: 'OK',
post:post
});
} else {
if(err.name === 'ValidationError') {
res.statusCode = 400;
res.json({
error: 'Validation error.'
});
} else if(err.message.startsWith('E11000')) {
res.statusCode = 400;
return res.json({
error: 'Slug must be unique.'
});
} else {
res.statusCode = 500;
log.error(util.format('Internal error(%d): %s', res.statusCode, err.message));
res.json({
error: 'Server error.'
});
}
}
});
});
router.put('/:id', passport.authenticate('bearer', { session: false }), function (req, res){
var postId = req.params.id;
Post.findById(postId, function (_err, post) {
if(!post) {
res.statusCode = 404;
log.error(util.format('Post with id: %s Not Found', postId));
return res.json({
error: 'Not found.'
});
}
post.title = req.body.title;
post.description = req.body.description;
post.content = req.body.content;
post.userId = mongoose.Types.ObjectId((req.user as any).id);
post.tags = req.body.tags;
post.published = req.body.published;
post.slug = req.body.slug;
post.modified = new Date();
if (req.body.categoryId != null && req.body.categoryId != '')
post.categoryId = mongoose.Types.ObjectId(req.body.categoryId);
post.save(function (err) {
if (!err) {
log.info(util.format("Post with id: %s updated", post.id));
return res.json({
status: 'OK',
post:post
});
} else {
if(err.name === 'ValidationError') {
res.statusCode = 400;
return res.json({
error: 'Validation error.'
});
} else if(err.message.startsWith('E11000')) {
res.statusCode = 400;
return res.json({
error: 'Slug must be unique.'
});
} else {
res.statusCode = 500;
return res.json({
error: 'Server error.'
});
}
}
});
});
});
router.delete('/:id', passport.authenticate('bearer', { session: false }), function (req, res){
Post.deleteOne({ _id: req.params.id },function (err) {
if (!err) {
log.info(util.format("Post with id: %s deleted", req.params.id));
return res.json({
status: 'OK'
});
} else {
if(err.name === 'ValidationError') {
res.statusCode = 400;
return res.json({
error: 'Validation error.'
});
} else {
res.statusCode = 500;
return res.json({
error: 'Server error.'
});
}
}
});
});
export default router;<|fim▁end|> | status: 'OK',
post:post |
<|file_name|>psus.rs<|end_file_name|><|fim▁begin|>#[doc = "Register `PSUS` reader"]
pub struct R(crate::R<PSUS_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<PSUS_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<PSUS_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<PSUS_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `PSUS` writer"]
pub struct W(crate::W<PSUS_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<PSUS_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<PSUS_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<PSUS_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Quadrature Mode Suspend Config\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum QSUS_A {
#[doc = "0: Suspend request ignored"]
VALUE1 = 0,
#[doc = "1: Stop immediately"]
VALUE2 = 1,
#[doc = "2: Suspend in the next index occurrence"]
VALUE3 = 2,
#[doc = "3: Suspend in the next phase (PhaseA or PhaseB) occurrence"]
VALUE4 = 3,
}
impl From<QSUS_A> for u8 {
#[inline(always)]
fn from(variant: QSUS_A) -> Self {
variant as _
}
}
#[doc = "Field `QSUS` reader - Quadrature Mode Suspend Config"]
pub struct QSUS_R(crate::FieldReader<u8, QSUS_A>);
impl QSUS_R {
pub(crate) fn new(bits: u8) -> Self {
QSUS_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> QSUS_A {
match self.bits {
0 => QSUS_A::VALUE1,
1 => QSUS_A::VALUE2,
2 => QSUS_A::VALUE3,
3 => QSUS_A::VALUE4,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == QSUS_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == QSUS_A::VALUE2
}
#[doc = "Checks if the value of the field is `VALUE3`"]
#[inline(always)]
pub fn is_value3(&self) -> bool {
**self == QSUS_A::VALUE3
}
#[doc = "Checks if the value of the field is `VALUE4`"]
#[inline(always)]
pub fn is_value4(&self) -> bool {
**self == QSUS_A::VALUE4
}
}
impl core::ops::Deref for QSUS_R {
type Target = crate::FieldReader<u8, QSUS_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `QSUS` writer - Quadrature Mode Suspend Config"]
pub struct QSUS_W<'a> {
w: &'a mut W,
}
impl<'a> QSUS_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: QSUS_A) -> &'a mut W {
self.bits(variant.into())
}
#[doc = "Suspend request ignored"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(QSUS_A::VALUE1)
}
#[doc = "Stop immediately"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(QSUS_A::VALUE2)
}
#[doc = "Suspend in the next index occurrence"]
#[inline(always)]
pub fn value3(self) -> &'a mut W {
self.variant(QSUS_A::VALUE3)
}
#[doc = "Suspend in the next phase (PhaseA or PhaseB) occurrence"]
#[inline(always)]
pub fn value4(self) -> &'a mut W {
self.variant(QSUS_A::VALUE4)
}
#[doc = r"Writes raw bits to the field"]<|fim▁hole|> self.w.bits = (self.w.bits & !0x03) | (value as u32 & 0x03);
self.w
}
}
#[doc = "Multi-Channel Mode Suspend Config\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum MSUS_A {
#[doc = "0: Suspend request ignored"]
VALUE1 = 0,
#[doc = "1: Stop immediately. Multi-Channel pattern is not set to the reset value."]
VALUE2 = 1,
#[doc = "2: Stop immediately. Multi-Channel pattern is set to the reset value."]
VALUE3 = 2,
#[doc = "3: Suspend with the synchronization of the PWM signal. Multi-Channel pattern is set to the reset value at the same time of the synchronization."]
VALUE4 = 3,
}
impl From<MSUS_A> for u8 {
#[inline(always)]
fn from(variant: MSUS_A) -> Self {
variant as _
}
}
#[doc = "Field `MSUS` reader - Multi-Channel Mode Suspend Config"]
pub struct MSUS_R(crate::FieldReader<u8, MSUS_A>);
impl MSUS_R {
pub(crate) fn new(bits: u8) -> Self {
MSUS_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> MSUS_A {
match self.bits {
0 => MSUS_A::VALUE1,
1 => MSUS_A::VALUE2,
2 => MSUS_A::VALUE3,
3 => MSUS_A::VALUE4,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == MSUS_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == MSUS_A::VALUE2
}
#[doc = "Checks if the value of the field is `VALUE3`"]
#[inline(always)]
pub fn is_value3(&self) -> bool {
**self == MSUS_A::VALUE3
}
#[doc = "Checks if the value of the field is `VALUE4`"]
#[inline(always)]
pub fn is_value4(&self) -> bool {
**self == MSUS_A::VALUE4
}
}
impl core::ops::Deref for MSUS_R {
type Target = crate::FieldReader<u8, MSUS_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `MSUS` writer - Multi-Channel Mode Suspend Config"]
pub struct MSUS_W<'a> {
w: &'a mut W,
}
impl<'a> MSUS_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: MSUS_A) -> &'a mut W {
self.bits(variant.into())
}
#[doc = "Suspend request ignored"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(MSUS_A::VALUE1)
}
#[doc = "Stop immediately. Multi-Channel pattern is not set to the reset value."]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(MSUS_A::VALUE2)
}
#[doc = "Stop immediately. Multi-Channel pattern is set to the reset value."]
#[inline(always)]
pub fn value3(self) -> &'a mut W {
self.variant(MSUS_A::VALUE3)
}
#[doc = "Suspend with the synchronization of the PWM signal. Multi-Channel pattern is set to the reset value at the same time of the synchronization."]
#[inline(always)]
pub fn value4(self) -> &'a mut W {
self.variant(MSUS_A::VALUE4)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 2)) | ((value as u32 & 0x03) << 2);
self.w
}
}
impl R {
#[doc = "Bits 0:1 - Quadrature Mode Suspend Config"]
#[inline(always)]
pub fn qsus(&self) -> QSUS_R {
QSUS_R::new((self.bits & 0x03) as u8)
}
#[doc = "Bits 2:3 - Multi-Channel Mode Suspend Config"]
#[inline(always)]
pub fn msus(&self) -> MSUS_R {
MSUS_R::new(((self.bits >> 2) & 0x03) as u8)
}
}
impl W {
#[doc = "Bits 0:1 - Quadrature Mode Suspend Config"]
#[inline(always)]
pub fn qsus(&mut self) -> QSUS_W {
QSUS_W { w: self }
}
#[doc = "Bits 2:3 - Multi-Channel Mode Suspend Config"]
#[inline(always)]
pub fn msus(&mut self) -> MSUS_W {
MSUS_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Service Request Processing Suspend Config\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [psus](index.html) module"]
pub struct PSUS_SPEC;
impl crate::RegisterSpec for PSUS_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [psus::R](R) reader structure"]
impl crate::Readable for PSUS_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [psus::W](W) writer structure"]
impl crate::Writable for PSUS_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets PSUS to value 0"]
impl crate::Resettable for PSUS_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}<|fim▁end|> | #[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W { |
<|file_name|>checkpoints_file_test.go<|end_file_name|><|fim▁begin|>package checkpoints_test
import (
"context"
"path/filepath"
"sort"
"testing"
. "github.com/pingcap/check"
"github.com/pingcap/errors"
"github.com/pingcap/tidb/br/pkg/lightning/checkpoints"
"github.com/pingcap/tidb/br/pkg/lightning/config"
"github.com/pingcap/tidb/br/pkg/lightning/mydump"
"github.com/pingcap/tidb/br/pkg/lightning/verification"
)
func Test(t *testing.T) {
TestingT(t)
}
var _ = Suite(&cpFileSuite{})
type cpFileSuite struct {
cpdb *checkpoints.FileCheckpointsDB
}
func newTestConfig() *config.Config {
cfg := config.NewConfig()
cfg.Mydumper.SourceDir = "/data"
cfg.TaskID = 123
cfg.TiDB.Port = 4000
cfg.TiDB.PdAddr = "127.0.0.1:2379"
cfg.TikvImporter.Backend = config.BackendLocal
cfg.TikvImporter.Addr = "127.0.0.1:8287"
cfg.TikvImporter.SortedKVDir = "/tmp/sorted-kv"
return cfg
}
func (s *cpFileSuite) SetUpTest(c *C) {
dir := c.MkDir()
s.cpdb = checkpoints.NewFileCheckpointsDB(filepath.Join(dir, "cp.pb"))
ctx := context.Background()
cpdb := s.cpdb
// 2. initialize with checkpoint data.
cfg := newTestConfig()
err := cpdb.Initialize(ctx, cfg, map[string]*checkpoints.TidbDBInfo{
"db1": {
Name: "db1",
Tables: map[string]*checkpoints.TidbTableInfo{
"t1": {Name: "t1"},
"t2": {Name: "t2"},
},
},
"db2": {
Name: "db2",
Tables: map[string]*checkpoints.TidbTableInfo{
"t3": {Name: "t3"},
},
},
})
c.Assert(err, IsNil)
// 3. set some checkpoints
err = cpdb.InsertEngineCheckpoints(ctx, "`db1`.`t2`", map[int32]*checkpoints.EngineCheckpoint{
0: {
Status: checkpoints.CheckpointStatusLoaded,
Chunks: []*checkpoints.ChunkCheckpoint{{
Key: checkpoints.ChunkCheckpointKey{
Path: "/tmp/path/1.sql",
Offset: 0,
},
FileMeta: mydump.SourceFileMeta{
Path: "/tmp/path/1.sql",
Type: mydump.SourceTypeSQL,
FileSize: 12345,
},
Chunk: mydump.Chunk{
Offset: 12,
EndOffset: 102400,
PrevRowIDMax: 1,
RowIDMax: 5000,
},
}},
},
-1: {
Status: checkpoints.CheckpointStatusLoaded,
Chunks: nil,
},
})
c.Assert(err, IsNil)
err = cpdb.InsertEngineCheckpoints(ctx, "`db2`.`t3`", map[int32]*checkpoints.EngineCheckpoint{
-1: {
Status: checkpoints.CheckpointStatusLoaded,
Chunks: nil,
},
})
c.Assert(err, IsNil)
// 4. update some checkpoints
cpd := checkpoints.NewTableCheckpointDiff()
scm := checkpoints.StatusCheckpointMerger{
EngineID: 0,
Status: checkpoints.CheckpointStatusImported,
}
scm.MergeInto(cpd)
scm = checkpoints.StatusCheckpointMerger{
EngineID: checkpoints.WholeTableEngineID,
Status: checkpoints.CheckpointStatusAllWritten,
}
scm.MergeInto(cpd)
rcm := checkpoints.RebaseCheckpointMerger{
AllocBase: 132861,
}
rcm.MergeInto(cpd)
cksum := checkpoints.TableChecksumMerger{
Checksum: verification.MakeKVChecksum(4492, 686, 486070148910),
}
cksum.MergeInto(cpd)
ccm := checkpoints.ChunkCheckpointMerger{
EngineID: 0,
Key: checkpoints.ChunkCheckpointKey{Path: "/tmp/path/1.sql", Offset: 0},
Checksum: verification.MakeKVChecksum(4491, 586, 486070148917),
Pos: 55904,
RowID: 681,
}
ccm.MergeInto(cpd)
cpdb.Update(map[string]*checkpoints.TableCheckpointDiff{"`db1`.`t2`": cpd})
}
func (s *cpFileSuite) TearDownTest(c *C) {
c.Assert(s.cpdb.Close(), IsNil)
}
func (s *cpFileSuite) setInvalidStatus() {
cpd := checkpoints.NewTableCheckpointDiff()
scm := checkpoints.StatusCheckpointMerger{
EngineID: -1,
Status: checkpoints.CheckpointStatusAllWritten,
}
scm.SetInvalid()
scm.MergeInto(cpd)
s.cpdb.Update(map[string]*checkpoints.TableCheckpointDiff{
"`db1`.`t2`": cpd,
"`db2`.`t3`": cpd,
})
}
func (s *cpFileSuite) TestGet(c *C) {
ctx := context.Background()
// 5. get back the checkpoints
cp, err := s.cpdb.Get(ctx, "`db1`.`t2`")
c.Assert(err, IsNil)
c.Assert(cp, DeepEquals, &checkpoints.TableCheckpoint{
Status: checkpoints.CheckpointStatusAllWritten,
AllocBase: 132861,
Checksum: verification.MakeKVChecksum(4492, 686, 486070148910),
Engines: map[int32]*checkpoints.EngineCheckpoint{
-1: {
Status: checkpoints.CheckpointStatusLoaded,
Chunks: []*checkpoints.ChunkCheckpoint{},
},
0: {
Status: checkpoints.CheckpointStatusImported,
Chunks: []*checkpoints.ChunkCheckpoint{{
Key: checkpoints.ChunkCheckpointKey{
Path: "/tmp/path/1.sql",
Offset: 0,
},
FileMeta: mydump.SourceFileMeta{
Path: "/tmp/path/1.sql",
Type: mydump.SourceTypeSQL,
FileSize: 12345,
},
ColumnPermutation: []int{},
Chunk: mydump.Chunk{
Offset: 55904,
EndOffset: 102400,
PrevRowIDMax: 681,
RowIDMax: 5000,
},
Checksum: verification.MakeKVChecksum(4491, 586, 486070148917),
}},
},
},
})
cp, err = s.cpdb.Get(ctx, "`db2`.`t3`")
c.Assert(err, IsNil)
c.Assert(cp, DeepEquals, &checkpoints.TableCheckpoint{
Status: checkpoints.CheckpointStatusLoaded,
Engines: map[int32]*checkpoints.EngineCheckpoint{
-1: {
Status: checkpoints.CheckpointStatusLoaded,
Chunks: []*checkpoints.ChunkCheckpoint{},
},
},
})
cp, err = s.cpdb.Get(ctx, "`db3`.`not-exists`")
c.Assert(cp, IsNil)
c.Assert(errors.IsNotFound(err), IsTrue)
}
func (s *cpFileSuite) TestRemoveAllCheckpoints(c *C) {
ctx := context.Background()
err := s.cpdb.RemoveCheckpoint(ctx, "all")
c.Assert(err, IsNil)
cp, err := s.cpdb.Get(ctx, "`db1`.`t2`")
c.Assert(cp, IsNil)
c.Assert(errors.IsNotFound(err), IsTrue)
cp, err = s.cpdb.Get(ctx, "`db2`.`t3`")
c.Assert(cp, IsNil)
c.Assert(errors.IsNotFound(err), IsTrue)
}
func (s *cpFileSuite) TestRemoveOneCheckpoint(c *C) {
ctx := context.Background()
err := s.cpdb.RemoveCheckpoint(ctx, "`db1`.`t2`")
c.Assert(err, IsNil)
cp, err := s.cpdb.Get(ctx, "`db1`.`t2`")
c.Assert(cp, IsNil)
c.Assert(errors.IsNotFound(err), IsTrue)
cp, err = s.cpdb.Get(ctx, "`db2`.`t3`")
c.Assert(err, IsNil)
c.Assert(cp.Status, Equals, checkpoints.CheckpointStatusLoaded)
}
func (s *cpFileSuite) TestIgnoreAllErrorCheckpoints(c *C) {
ctx := context.Background()
s.setInvalidStatus()
err := s.cpdb.IgnoreErrorCheckpoint(ctx, "all")
c.Assert(err, IsNil)
cp, err := s.cpdb.Get(ctx, "`db1`.`t2`")<|fim▁hole|> c.Assert(cp.Status, Equals, checkpoints.CheckpointStatusLoaded)
cp, err = s.cpdb.Get(ctx, "`db2`.`t3`")
c.Assert(err, IsNil)
c.Assert(cp.Status, Equals, checkpoints.CheckpointStatusLoaded)
}
func (s *cpFileSuite) TestIgnoreOneErrorCheckpoints(c *C) {
ctx := context.Background()
s.setInvalidStatus()
err := s.cpdb.IgnoreErrorCheckpoint(ctx, "`db1`.`t2`")
c.Assert(err, IsNil)
cp, err := s.cpdb.Get(ctx, "`db1`.`t2`")
c.Assert(err, IsNil)
c.Assert(cp.Status, Equals, checkpoints.CheckpointStatusLoaded)
cp, err = s.cpdb.Get(ctx, "`db2`.`t3`")
c.Assert(err, IsNil)
c.Assert(cp.Status, Equals, checkpoints.CheckpointStatusAllWritten/10)
}
func (s *cpFileSuite) TestDestroyAllErrorCheckpoints(c *C) {
ctx := context.Background()
s.setInvalidStatus()
dtc, err := s.cpdb.DestroyErrorCheckpoint(ctx, "all")
c.Assert(err, IsNil)
sort.Slice(dtc, func(i, j int) bool { return dtc[i].TableName < dtc[j].TableName })
c.Assert(dtc, DeepEquals, []checkpoints.DestroyedTableCheckpoint{
{
TableName: "`db1`.`t2`",
MinEngineID: -1,
MaxEngineID: 0,
},
{
TableName: "`db2`.`t3`",
MinEngineID: -1,
MaxEngineID: -1,
},
})
cp, err := s.cpdb.Get(ctx, "`db1`.`t2`")
c.Assert(cp, IsNil)
c.Assert(errors.IsNotFound(err), IsTrue)
cp, err = s.cpdb.Get(ctx, "`db2`.`t3`")
c.Assert(cp, IsNil)
c.Assert(errors.IsNotFound(err), IsTrue)
}
func (s *cpFileSuite) TestDestroyOneErrorCheckpoint(c *C) {
ctx := context.Background()
s.setInvalidStatus()
dtc, err := s.cpdb.DestroyErrorCheckpoint(ctx, "`db1`.`t2`")
c.Assert(err, IsNil)
c.Assert(dtc, DeepEquals, []checkpoints.DestroyedTableCheckpoint{
{
TableName: "`db1`.`t2`",
MinEngineID: -1,
MaxEngineID: 0,
},
})
cp, err := s.cpdb.Get(ctx, "`db1`.`t2`")
c.Assert(cp, IsNil)
c.Assert(errors.IsNotFound(err), IsTrue)
cp, err = s.cpdb.Get(ctx, "`db2`.`t3`")
c.Assert(err, IsNil)
c.Assert(cp.Status, Equals, checkpoints.CheckpointStatusAllWritten/10)
}<|fim▁end|> | c.Assert(err, IsNil) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import unittest
<|fim▁hole|><|fim▁end|> | import pymq.tests.test_item |
<|file_name|>version.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 zulily, llc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""node_status version"""
__version__ = '0.1.0'<|fim▁end|> | # distributed under the License is distributed on an "AS IS" BASIS, |
<|file_name|>classes-cross-crate.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_4.rs
extern crate cci_class_4;
use cci_class_4::kitties::cat;
<|fim▁hole|> assert!((!nyan.eat()));
for _ in range(1u, 10u) { nyan.speak(); };
assert!((nyan.eat()));
}<|fim▁end|> | pub fn main() {
let mut nyan = cat(0u, 2, ~"nyan");
nyan.eat(); |
<|file_name|>bsefilter.cc<|end_file_name|><|fim▁begin|>// CC0 Public Domain: http://creativecommons.org/publicdomain/zero/1.0/
#include "bsefilter.hh"
#include <sfi/sfi.hh>
using namespace Bse;
const gchar*
bse_iir_filter_kind_string (BseIIRFilterKind fkind)
{<|fim▁hole|> {
case BSE_IIR_FILTER_BUTTERWORTH: return "Butterworth";
case BSE_IIR_FILTER_BESSEL: return "Bessel";
case BSE_IIR_FILTER_CHEBYSHEV1: return "Chebyshev1";
case BSE_IIR_FILTER_CHEBYSHEV2: return "Chebyshev2";
case BSE_IIR_FILTER_ELLIPTIC: return "Elliptic";
default: return "?unknown?";
}
}
const gchar*
bse_iir_filter_type_string (BseIIRFilterType ftype)
{
switch (ftype)
{
case BSE_IIR_FILTER_LOW_PASS: return "Low-pass";
case BSE_IIR_FILTER_BAND_PASS: return "Band-pass";
case BSE_IIR_FILTER_HIGH_PASS: return "High-pass";
case BSE_IIR_FILTER_BAND_STOP: return "Band-stop";
default: return "?unknown?";
}
}
gchar*
bse_iir_filter_request_string (const BseIIRFilterRequest *ifr)
{
String s;
s += bse_iir_filter_kind_string (ifr->kind);
s += " ";
s += bse_iir_filter_type_string (ifr->type);
s += " order=" + string_from_int (ifr->order);
s += " sample-rate=" + string_from_float (ifr->sampling_frequency);
if (ifr->kind == BSE_IIR_FILTER_CHEBYSHEV1 || ifr->kind == BSE_IIR_FILTER_ELLIPTIC)
s += " passband-ripple-db=" + string_from_float (ifr->passband_ripple_db);
s += " passband-edge=" + string_from_float (ifr->passband_edge);
if (ifr->type == BSE_IIR_FILTER_BAND_PASS || ifr->type == BSE_IIR_FILTER_BAND_STOP)
s += " passband-edge2=" + string_from_float (ifr->passband_edge2);
if (ifr->kind == BSE_IIR_FILTER_ELLIPTIC && ifr->stopband_db < 0)
s += " stopband-db=" + string_from_float (ifr->stopband_db);
if (ifr->kind == BSE_IIR_FILTER_ELLIPTIC && ifr->stopband_edge > 0)
s += " stopband-edge=" + string_from_float (ifr->stopband_edge);
return g_strdup (s.c_str());
}
gchar*
bse_iir_filter_design_string (const BseIIRFilterDesign *fid)
{
String s;
s += "order=" + string_from_int (fid->order);
s += " sampling-frequency=" + string_from_float (fid->sampling_frequency);
s += " center-frequency=" + string_from_float (fid->center_frequency);
s += " gain=" + string_from_double (fid->gain);
s += " n_zeros=" + string_from_int (fid->n_zeros);
s += " n_poles=" + string_from_int (fid->n_poles);
for (uint i = 0; i < fid->n_zeros; i++)
{
String u ("Zero:");
u += " " + string_from_double (fid->zz[i].re);
u += " + " + string_from_double (fid->zz[i].im) + "*i";
s += "\n" + u;
}
for (uint i = 0; i < fid->n_poles; i++)
{
String u ("Pole:");
u += " " + string_from_double (fid->zp[i].re);
u += " + " + string_from_double (fid->zp[i].im) + "*i";
s += "\n" + u;
}
String u;
#if 0
uint o = fid->order;
u = string_from_double (fid->zn[o]);
while (o--)
u = "(" + u + ") * z + " + string_from_double (fid->zn[o]);
s += "\nNominator: " + u;
o = fid->order;
u = string_from_double (fid->zd[o]);
while (o--)
u = "(" + u + ") * z + " + string_from_double (fid->zd[o]);
s += "\nDenominator: " + u;
#endif
return g_strdup (s.c_str());
}
bool
bse_iir_filter_design (const BseIIRFilterRequest *filter_request,
BseIIRFilterDesign *filter_design)
{
if (filter_request->kind == BSE_IIR_FILTER_BUTTERWORTH ||
filter_request->kind == BSE_IIR_FILTER_CHEBYSHEV1 ||
filter_request->kind == BSE_IIR_FILTER_ELLIPTIC)
return _bse_filter_design_ellf (filter_request, filter_design);
return false;
}<|fim▁end|> | switch (fkind) |
<|file_name|>Agent.tsx<|end_file_name|><|fim▁begin|>import React, { FC } from 'react';
const Agent: FC = () => {
return (
<>
<h2>Prometheus Agent</h2>
<p>
This Prometheus instance is running in <strong>agent mode</strong>. In this mode, Prometheus is only used to scrape
discovered targets and forward the scraped metrics to remote write endpoints.<|fim▁hole|>};
export default Agent;<|fim▁end|> | </p>
<p>Some features are not available in this mode, such as querying and alerting.</p>
</>
); |
<|file_name|>operadores.py<|end_file_name|><|fim▁begin|># para os tipos numericos temos os seguintes operadores:
# + - * / % **
print "Numeros inteiros:"
x = 10
y = 3
print x, "+", y, "=", x + y
print x, "+", y, "=", x - y
print x, "+", y, "=", x*y
print x, "+", y, "=", x/y # repare como o resultado eh um inteiro
print x, "+", y, "=", x % y # esse eh o resto da divisao
print x, "+", y, "=", x**y # esse eh o operador potencia, x elevado a potencia de y
print x, "(",bin(x),") & ",y,"(",bin(y),") =", x&y # operador binario E
print x, "(",bin(x),") | ",y,"(",bin(y),") =", x|y # operador binario OU
print x, "(",bin(x),") ^ ",y,"(",bin(y),") =", x^y # operador binario XOU
print x," igual a ",y,"? ", x==y
print x," diferente de ",y,"? ", x!=y
print x," maior que ",y,"? ", x>y<|fim▁hole|>print x," menor que ",y,"? ", x<y
print x," maior ou igual a ",y,"? ", x>=y
print x," menor ou igual a ",y,"? ", x<=y
print "\nNumeros em ponto flutuante: "
x = 10.0
y = 3.0
print x, "+", y, "=", x + y
print x, "+", y, "=", x - y
print x, "+", y, "=", x*y
print x, "+", y, "=", x/y # agora eh um numero real
print x, "+", y, "=", x % y # esse eh o resto da divisao
print x, "+", y, "=", x**y # esse eh o operador potencia, x elevado a potencia de y
print "\nNumeros complexos:"
x = 1 + 1j
y = 2 + 1j
print x, "+", y, "=", x + y
print x, "+", y, "=", x - y
print x, "+", y, "=", x*y
print x, "+", y, "=", x/y # agora eh um numero real
print x, "+", y, "=", x % y # esse eh o resto da divisao
print x, "+", y, "=", x**y # esse eh o operador potencia, x elevado a potencia de y
print "\nVariaveis Booleanas:"
# agora x eh uma variavel booleana (logica)
x = True
y = False
print "Nao ", x, "=", not x
print x," ou ",y,"=",x or y
print x," e ",y,"=",x and y
x = 10
y = 3
print x, " maior que ", y, " OU ", x, " menor que ", y, "? ", x>y or x<y
print x, " maior que ", y, " E ", x, " menor que ", y, "? ", x>y and x<y
print "\nOperacao com Strings:"
x = "Ola "
y = "Mundo"
print x," + ",y," = ",x+y
print x," *2 = ",x*2
print x,"*2 + ",y," = ",x*2 + y
print "Letra na posicao 0 de x = ",x[0]
print "Concatenar as 3 primeiras letras de x com y = ",x[0:3] + y
# Operadores Relacionais
print "Tem 'a' em Ola? ", "a" in x
print "Nao tem 'b' em Ola? ", "b" not in x<|fim▁end|> | |
<|file_name|>brython_fourbar1.py<|end_file_name|><|fim▁begin|>#要注意 javascript 轉 python 語法差異
#document.getElementById -> doc[]
#module Math -> math
#Math.PI -> math.pi
#abs -> fabs
#array 可用 list代替
import math
import time
from browser import doc
import browser.timer
# 點類別
class Point(object):
# 起始方法
def __init__(self, x, y):
self.x = x
self.y = y
# 繪製方法
def drawMe(self, g, r):
self.g = g
self.r = r
self.g.save()
self.g.moveTo(self.x,self.y)
self.g.beginPath()
# 根據 r 半徑繪製一個圓代表點的所在位置
self.g.arc(self.x, self.y, self.r, 0, 2*math.pi, true)
self.g.moveTo(self.x,self.y)
self.g.lineTo(self.x+self.r, self.y)
self.g.moveTo(self.x, self.y)
self.g.lineTo(self.x-self.r, self.y)
self.g.moveTo(self.x, self.y)
self.g.lineTo(self.x, self.y+self.r)
self.g.moveTo(self.x, self.y)
self.g.lineTo(self.x, self.y-self.r)
self.g.restore()
self.g.stroke()
# 加入 Eq 方法
def Eq(self, pt):
self.x = pt.x
self.y = pt.y
# 加入 setPoint 方法
def setPoint(self, px, py):
self.x = px
self.y = py
# 加上 distance(pt) 方法, 計算點到 pt 的距離
def distance(self, pt):
self.pt = pt
x = self.x - self.pt.x
y = self.y - self.pt.y
return math.sqrt(x * x + y * y)
# 利用文字標示點的座標位置
def tag(self, g):
self.g = g
self.g.beginPath()
self.g.fillText("%d, %d"%(self.x, self.y),self.x, self.y)
self.g.stroke()
# Line 類別物件
class Line(object):
# 起始方法
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
# 直線的第一點, 設為線尾
self.Tail = self.p1
# 直線組成的第二點, 設為線頭
self.Head = self.p2
# 直線的長度屬性
self.length = math.sqrt(math.pow(self.p2.x-self.p1.x, 2)+math.pow(self.p2.y-self.p1.y,2))
# setPP 以指定頭尾座標點來定義直線
def setPP(self, p1, p2):
self.p1 = p1
self.p2 = p2
self.Tail = self.p1
self.Head = self.p2
self.length = math.sqrt(math.pow(self.p2.x-self.p1.x, 2)+math.pow(self.p2.y-self.p1.y,2))
# setRT 方法 for Line, 應該已經確定 Tail 點, 然後以 r, t 作為設定 Head 的參考
def setRT(self, r, t):
self.r = r
self.t = t
x = self.r * math.cos(self.t)
y = self.r * math.sin(self.t)
self.Tail.Eq(self.p1)
self.Head.setPoint(self.Tail.x + x,self.Tail.y + y)
# getR 方法 for Line
def getR(self):
# x 分量與 y 分量
x = self.p1.x - self.p2.x
y = self.p1.y - self.p2.y
return math.sqrt(x * x + y * y)
# 根據定義 atan2(y,x), 表示 (x,y) 與 正 x 軸之間的夾角, 介於 pi 與 -pi 間
def getT(self):
x = self.p2.x - self.p1.x
y = self.p2.y - self.p1.y
if (math.fabs(x) < math.pow(10,-100)):
if(y < 0.0):
return (-math.pi/2)
else:
return (math.pi/2)
else:
return math.atan2(y, x)
# setTail 方法 for Line
def setTail(self, pt):
self.pt = pt
self.Tail.Eq(pt)
self.Head.setPoint(self.pt.x + self.x, self.pt.y + self.y)
# getHead 方法 for Line
def getHead(self):
return self.Head
def getTail(self):
return self.Tail
def drawMe(self, g):
self.g = g
self.g.beginPath()
self.g.moveTo(self.p1.x,self.p1.y)
self.g.lineTo(self.p2.x,self.p2.y)
self.g.stroke()
def test(self):
return ("this is pure test to Inherit")
class Link(Line):
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
self.length = math.sqrt(math.pow((self.p2.x - self.p1.x), 2) + math.pow((self.p2.y - self.p1.y), 2))
#g context
def drawMe(self, g):
self.g = g
hole = 5
radius = 10
length = self.getR()
# alert(length)
# 儲存先前的繪圖狀態
self.g.save()
self.g.translate(self.p1.x,self.p1.y)<|fim▁hole|> #self.g.rotate(-((math.pi/2)-self.getT()))
self.g.rotate(-math.pi*0.5 + self.getT())
#alert(str(self.getT()))
#self.g.rotate(10*math.pi/180)
#this.g.rotate(-(Math.PI/2-this.getT()));
# 必須配合畫在 y 軸上的 Link, 進行座標轉換, 也可以改為畫在 x 軸上...
self.g.beginPath()
self.g.moveTo(0,0)
self.g.arc(0, 0, hole, 0, 2*math.pi, true)
self.g.stroke()
self.g.moveTo(0,length)
self.g.beginPath()
self.g.arc(0,length, hole, 0, 2*math.pi, true)
self.g.stroke()
self.g.moveTo(0,0)
self.g.beginPath()
self.g.arc(0,0, radius, 0, math.pi, true)
self.g.moveTo(0+radius,0)
self.g.lineTo(0+radius,0+length)
self.g.stroke()
self.g.moveTo(0,0+length)
self.g.beginPath()
self.g.arc(0, 0+length, radius, math.pi, 0, true)
self.g.moveTo(0-radius,0+length)
self.g.lineTo(0-radius,0)
self.g.stroke()
self.g.restore()
self.g.beginPath()
self.g.fillStyle = "red"
self.g.font = "bold 18px sans-serif"
self.g.fillText("%d, %d"%(self.p2.x, self.p2.y),self.p2.x, self.p2.y)
self.g.stroke()
self.g.restore()
class Triangle(object):
def __init__(self, p1, p2, p3):
self.p1 = p1
self.p2 = p2
self.p3 = p3
def getLenp3(self):
p1 = self.p1
ret = p1.distance(self.p2)
return ret
def getLenp1(self):
p2 = self.p2
ret = p2.distance(self.p3)
return ret
def getLenp2(self):
p1 = self.p1
ret = p1.distance(self.p3)
return ret
# 角度
def getAp1(self):
ret = math.acos(((self.getLenp2() * self.getLenp2() + self.getLenp3() * self.getLenp3()) - self.getLenp1() * self.getLenp1()) / (2* self.getLenp2() * self.getLenp3()))
return ret
#
def getAp2(self):
ret =math.acos(((self.getLenp1() * self.getLenp1() + self.getLenp3() * self.getLenp3()) - self.getLenp2() * self.getLenp2()) / (2* self.getLenp1() * self.getLenp3()))
return ret
def getAp3(self):
ret = math.acos(((self.getLenp1() * self.getLenp1() + self.getLenp2() * self.getLenp2()) - self.getLenp3() * self.getLenp3()) / (2* self.getLenp1() * self.getLenp2()))
return ret
def drawMe(self, g):
self.g = g
r = 5
# 繪出三個頂點
self.p1.drawMe(self.g,r)
self.p2.drawMe(self.g,r)
self.p3.drawMe(self.g,r)
line1 = Line(self.p1,self.p2)
line2 = Line(self.p1,self.p3)
line3 = Line(self.p2,self.p3)
# 繪出三邊線
line1.drawMe(self.g)
line2.drawMe(self.g)
line3.drawMe(self.g)
# ends Triangle def
# 透過三個邊長定義三角形
def setSSS(self, lenp3, lenp1, lenp2):
self.lenp3 = lenp3
self.lenp1 = lenp1
self.lenp2 = lenp2
self.ap1 = math.acos(((self.lenp2 * self.lenp2 + self.lenp3 * self.lenp3) - self.lenp1 * self.lenp1) / (2* self.lenp2 * self.lenp3))
self.ap2 = math.acos(((self.lenp1 * self.lenp1 + self.lenp3 * self.lenp3) - self.lenp2 * self.lenp2) / (2* self.lenp1 * self.lenp3))
self.ap3 = math.acos(((self.lenp1 * self.lenp1 + self.lenp2 * self.lenp2) - self.lenp3 * self.lenp3) / (2* self.lenp1 * self.lenp2))
# 透過兩個邊長與夾角定義三角形
def setSAS(self, lenp3, ap2, lenp1):
self.lenp3 = lenp3
self.ap2 = ap2
self.lenp1 = lenp1
self.lenp2 = math.sqrt((self.lenp3 * self.lenp3 + self.lenp1 * self.lenp1) - 2* self.lenp3 * self.lenp1 * math.cos(self.ap2))
#等於 SSS(AB, BC, CA)
def setSaSS(self, lenp2, lenp3, lenp1):
self.lenp2 = lenp2
self.lenp3 = lenp3
self.lenp1 = lenp1
if(self.lenp1 > (self.lenp2 + self.lenp3)):
#<CAB 夾角為 180 度, 三點共線且 A 介於 BC 之間
ret = math.pi
else :
# <CAB 夾角為 0, 三點共線且 A 不在 BC 之間
if((self.lenp1 < (self.lenp2 - self.lenp3)) or (self.lenp1 < (self.lenp3 - self.lenp2))):
ret = 0.0
else :
# 透過餘絃定理求出夾角 <CAB
ret = math.acos(((self.lenp2 * self.lenp2 + self.lenp3 * self.lenp3) - self.lenp1 * self.lenp1) / (2 * self.lenp2 * self.lenp3))
return ret
# 取得三角形的三個邊長值
def getSSS(self):
temp = []
temp.append( self.getLenp1() )
temp.append( self.getLenp2() )
temp.append( self.getLenp3() )
return temp
# 取得三角形的三個角度值
def getAAA(self):
temp = []
temp.append( self.getAp1() )
temp.append( self.getAp2() )
temp.append( self.getAp3() )
return temp
# 取得三角形的三個角度與三個邊長
def getASASAS(self):
temp = []
temp.append(self.getAp1())
temp.append(self.getLenp1())
temp.append(self.getAp2())
temp.append(self.getLenp2())
temp.append(self.getAp3())
temp.append(self.getLenp3())
return temp
#2P 2L return mid P
def setPPSS(self, p1, p3, lenp1, lenp3):
temp = []
self.p1 = p1
self.p3 = p3
self.lenp1 = lenp1
self.lenp3 = lenp3
#bp3 is the angle beside p3 point, cp3 is the angle for line23, p2 is the output
line31 = Line(p3, p1)
self.lenp2 = line31.getR()
#self.lenp2 = self.p3.distance(self.p1)
#這裡是求角3
ap3 = math.acos(((self.lenp1 * self.lenp1 + self.lenp2 * self.lenp2) - self.lenp3 * self.lenp3) / (2 * self.lenp1 * self.lenp2))
#ap3 = math.acos(((self.lenp1 * self.lenp1 + self.lenp3 * self.lenp3) - self.lenp2 * self.lenp2) / (2 * self.lenp1 * self.lenp3))
bp3 = line31.getT()
cp3 = bp3 - ap3
temp.append(p3.x + self.lenp1*math.cos(cp3))#p2.x
temp.append(p3.y + self.lenp1*math.sin(cp3))#p2.y
return temp
def tag(g, p):
None
# 執行繪圖流程, 注意 x, y 為 global variables
def draw():
global theta
context.clearRect(0, 0, canvas.width, canvas.height)
line1.drawMe(context)
line2.drawMe(context)
line3.drawMe(context)
#triangle1.drawMe(context)
#triangle2.drawMe(context)
theta += dx
p2.x = p1.x + line1.length*math.cos(theta*degree)
p2.y = p1.y - line1.length*math.sin(theta*degree)
p3.x, p3.y = triangle2.setPPSS(p2,p4,link2_len,link3_len)
p1.tag(context)
# 以上為相關函式物件的定義區
# 全域變數
# 幾何位置輸入變數
x=10
y=10
r=10
# 畫布與繪圖內容
# 其他輸入變數
theta = 0
degree = math.pi/180.0
dx = 2
dy = 4
#set p1.p2.p3.p4 position
p1 = Point(150,100)
p2 = Point(150,200)
p3 = Point(300,300)
p4 = Point(350,100)
#accord position create link
line1 = Link(p1,p2)
line2 = Link(p2,p3)
line3 = Link(p3,p4)
line4 = Link(p1,p4)
line5 = Link(p2,p4)
link2_len = p2.distance(p3)
link3_len = p3.distance(p4)
#link2_len = line1.getR()
#link3_len = line3.getR()
#alert(str(link2_len)+','+str(link3_len))
triangle1 = Triangle(p1,p2,p4)
triangle2 = Triangle(p2,p3,p4)
# 視窗載入時執行內容
# 繪圖畫布設定
canvas = doc["plotarea"]
context = canvas.getContext("2d")
# 座標轉換, 移動 canvas.height 並且 y 座標變號, 也就是將原點座標移到畫面左下角
context.translate(0,canvas.height)
context.scale(1,-1)
#以間隔 10 micro seconds 重複呼叫 draw()
#time.set_interval(draw,20)
browser.timer.set_interval(draw,10)<|fim▁end|> | #alert(str(self.p1.x)+","+str(self.p1.y)) |
<|file_name|>front.js<|end_file_name|><|fim▁begin|>var mdf_range_update = false;
jQuery(function() {
if (!jQuery('#pn_html_buffer').length) {
jQuery('body').append('<div id="pn_html_buffer" class="mdf_info_popup" style="display: none;"></div>');
jQuery('body').append('<div id="pn_html_buffer2" style="display: none;"></div>');
}
//+++
mdf_hide_empty_blocks_titles();
//***
if (post_features_panel_auto == 1) {
var mdf_title_data = jQuery('.mdf_title_data');
jQuery.each(mdf_title_data, function(index, value) {
var clone = jQuery(value).clone();
jQuery(value).parents(':eq(' + under_title_out + ')').after(clone);
jQuery(value).remove();
});
}
jQuery('.mdf_title_data').show();
//+++
mdf_tooltip_init();
mdf_init_checkboxes_scroll();
mdf_init_selects();
//+++
jQuery('.mdf_range_min').life('change', function() {
var slider_id = jQuery(this).data('slider-id');
mdf_range_update = true;
jQuery("#" + slider_id).ionRangeSlider("update", {
from: parseInt(jQuery(this).val(), 10)
});
mdf_range_update = false;
//jQuery("#" + slider_id).slider("values", 0, parseInt(jQuery(this).val(), 10));
});
jQuery('.mdf_range_max').life('change', function() {
var slider_id = jQuery(this).data('slider-id');
mdf_range_update = true;
jQuery("#" + slider_id).ionRangeSlider("update", {
to: parseInt(jQuery(this).val(), 10)
});
mdf_range_update = false;
//jQuery("#" + slider_id).slider("values", 1, parseInt(jQuery(this).val(), 10));
});
//css selects
mdf_init_selects();
//work with taxonomy
//select
jQuery('.mdf_taxonomy').life('change', function() {
mdf_deinit_chosen_selects();
var tax_name = jQuery(this).data('tax-name');
if (tax_name == 'post_tag') {
//return true;
}
//+++
jQuery(this).next('.mdf_taxonomy_child_container').show(200);
var _this = this;
var is_auto_submit = jQuery(this).parents('.mdf_input_container').hasClass('mdf_tax_auto_submit');
var slug = jQuery(this).parents('form').find('input[name="mdf[mdf_widget_options][slug]"]').val();
var form = jQuery(this).parents('form');
var data = {
action: "mdf_draw_term_childs",
type: 'select',
tax_name: tax_name,
mdf_parent_id: jQuery(this).val(),
hide: jQuery(this).data('hide'),
page_mdf: jQuery(this).parents('form').find('.hidden_page_mdf_for_ajax').val(),
meta_data_filter_cat: jQuery(this).parents('form').find('input[name="mdf[mdf_widget_options][meta_data_filter_cat]"]').val(),
slug: slug,
is_auto_submit: is_auto_submit
};
jQuery.post(ajaxurl, data, function(content) {
if (is_auto_submit) {
jQuery(_this).next('.mdf_taxonomy_child_container').hide();
}
jQuery(_this).next('.mdf_taxonomy_child_container').html(content);
if (!content) {
jQuery(_this).next('.mdf_taxonomy_child_container').hide().html(mdf_tax_loader);
}
if (jQuery(_this).parents('.mdf_input_container').hasClass('mdf_tax_auto_submit')) {
jQuery(_this).parents('form').submit();
}
//ajax recount
if (jQuery(form).hasClass('mdf_ajax_auto_recount')) {
mdf_ajax_data_recount(jQuery(form).attr('id'), slug);
}
});
return true;
});
//checkbox
jQuery('.mdf_taxonomy_checkbox').life('click', function() {
var tax_name = jQuery(this).data('tax-name');
var is_auto_submit = jQuery(this).parents('.mdf_input_container').hasClass('mdf_tax_auto_submit');
var form = jQuery(this).parents('form');
if (!jQuery(this).hasClass('mdf_has_childs') && !jQuery(form).hasClass('mdf_ajax_auto_recount')) {
if (is_auto_submit) {
jQuery(this).parents('form').submit();
}
return true;
}
//+++
if (tax_name == 'post_tag') {
//return true;
}
//+++
var _this = this;
var term_id = jQuery(this).val();
var slug = jQuery(this).parents('form').find('input[name="mdf[mdf_widget_options][slug]"]').val();
//+++
if (jQuery(this).is(":checked")) {
jQuery(this).prev("input[type=hidden]").val(term_id);
jQuery(_this).parent().find('.mdf_taxonomy_child_container').show(200);
var data = {
action: "mdf_draw_term_childs",
type: 'checkbox',
tax_name: tax_name,
mdf_parent_id: term_id,
hide: jQuery(this).data('hide'),
page_mdf: jQuery(this).parents('form').find('.hidden_page_mdf_for_ajax').val(),
meta_data_filter_cat: jQuery(this).parents('form').find('input[name="mdf[mdf_widget_options][meta_data_filter_cat]"]').val(),
slug: slug,
is_auto_submit: is_auto_submit
};
jQuery.post(ajaxurl, data, function(content) {
if (is_auto_submit) {
jQuery(_this).parent().find('.mdf_taxonomy_child_container').hide();
}
jQuery(_this).parent().find('.mdf_taxonomy_child_container').html(content);
if (!content) {
jQuery(_this).parent().find('.mdf_taxonomy_child_container').hide().html(mdf_tax_loader);
}
if (jQuery(_this).parents('.mdf_input_container').hasClass('mdf_tax_auto_submit')) {
jQuery(_this).parents('form').submit();
}
//ajax recount
if (jQuery(form).hasClass('mdf_ajax_auto_recount')) {
mdf_ajax_data_recount(jQuery(form).attr('id'), slug);
}
});
} else {
jQuery(_this).parent().find('.mdf_taxonomy_child_container').hide().html(mdf_tax_loader);
if (jQuery(this).parents('.mdf_input_container').hasClass('mdf_tax_auto_submit')) {
jQuery(this).parents('form').submit();
}
//ajax recount
if (jQuery(form).hasClass('mdf_ajax_auto_recount')) {
mdf_ajax_data_recount(jQuery(form).attr('id'), slug);
}
}
return true;
});
//+++
//for shortcode
try {
jQuery('.mdf_shortcode_container .mdf_widget_found_count span').html(mdf_found_totally);
} catch (e) {
}
});
function mdf_draw_ui_slider_items(act_without_button, uniqid) {
var items = jQuery(".ui_slider_item_" + uniqid);
jQuery.each(items, function(key, item) {
var input = jQuery(item).next('input');
mdf_init_range_sliders(item, input, act_without_button, uniqid);
});
}
function mdf_get_ui_slider_step(input) {
var step = jQuery(input).data('step');
if (!step) {
        step = Math.ceil((jQuery(input).data('max') - jQuery(input).data('min')) / 100);
}
return step;
}
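// Example (hypothetical attribute values): with data-min="0", data-max="450"
// and no data-step attribute, the fallback above yields Math.ceil(450 / 100) = 5,
// i.e. roughly one percent of the slider range per step.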
function mdf_init_range_sliders(item, input, act_without_button, uniqid) {
try {
jQuery(item).ionRangeSlider({
min: jQuery(input).data('min'),
max: jQuery(input).data('max'),
from: jQuery(input).data('min-now'),
to: jQuery(input).data('max-now'),
type: 'double',
prefix: jQuery(input).data('slider-prefix'),
postfix: jQuery(input).data('slider-postfix'),
//maxPostfix: "+",
prettify: jQuery(input).data('slider-prettify'),
hideMinMax: false,
hideFromTo: false,
hasGrid: true,
step: mdf_get_ui_slider_step(input),
onFinish: function(ui) {
jQuery(input).val(ui.fromNumber + '^' + ui.toNumber);
jQuery(input).parent().find('.mdf_range .mdf_range_min').val(ui.fromNumber);
jQuery(input).parent().find('.mdf_range .mdf_range_max').val(ui.toNumber);
if (act_without_button) {
jQuery("#meta_data_filter_" + uniqid).submit();
}
//ajax recount
if (jQuery("#meta_data_filter_" + uniqid).hasClass('mdf_ajax_auto_recount')) {
mdf_ajax_data_recount(jQuery("#meta_data_filter_" + uniqid).attr('id'), jQuery("#meta_data_filter_" + uniqid).data('slug'));
}
return false;
},
onChange: function(ui) {
jQuery(input).val(ui.fromNumber + '^' + ui.toNumber);
jQuery(input).parent().find('.mdf_range .mdf_range_min').val(ui.fromNumber);
jQuery(input).parent().find('.mdf_range .mdf_range_max').val(ui.toNumber);
},
onLoad: function(ui) {
if (mdf_range_update) {
jQuery(input).val(ui.fromNumber + '^' + ui.toNumber);
jQuery(input).parent().find('.mdf_range .mdf_range_min').val(ui.fromNumber);
jQuery(input).parent().find('.mdf_range .mdf_range_max').val(ui.toNumber);
if (act_without_button) {
jQuery("#meta_data_filter_" + uniqid).submit();
}
//ajax recount
if (jQuery("#meta_data_filter_" + uniqid).hasClass('mdf_ajax_auto_recount')) {
mdf_ajax_data_recount(jQuery("#meta_data_filter_" + uniqid).attr('id'), jQuery("#meta_data_filter_" + uniqid).data('slug'));
}
return false;
}
}
});
} catch (e) {
/*
jQuery(item).slider({
min: jQuery(input).data('min'),
max: jQuery(input).data('max'),
values: [jQuery(input).data('min-now'), jQuery(input).data('max-now')],
range: true,
step: mdf_get_ui_slider_step(input),
slide: function(event, ui) {
jQuery(input).val(ui.values[0] + '^' + ui.values[1]);
jQuery(input).parent().find('.mdf_range .mdf_range_min').val(ui.values[0]);
jQuery(input).parent().find('.mdf_range .mdf_range_max').val(ui.values[1]);
},
change: function(event, ui) {
jQuery(input).val(ui.values[0] + '^' + ui.values[1]);
jQuery(input).parent().find('.mdf_range .mdf_range_min').val(ui.values[0]);
jQuery(input).parent().find('.mdf_range .mdf_range_max').val(ui.values[1]);
if (act_without_button) {
jQuery("#meta_data_filter_" + uniqid).submit();
}
//ajax recount
if (jQuery("#meta_data_filter_" + uniqid).hasClass('mdf_ajax_auto_recount')) {
mdf_ajax_data_recount(jQuery("#meta_data_filter_" + uniqid).attr('id'), jQuery("#meta_data_filter_" + uniqid).data('slug'));
}
}
});
*/
}
}
function mdf_click_checkbox(_this) {
if (jQuery(_this).is(":checked")) {
jQuery(_this).prev("input[type=hidden]").val(1);
jQuery(_this).next("input[type=hidden]").val(1);
jQuery(_this).val(1);
} else {
jQuery(_this).prev("input[type=hidden]").val('~');
jQuery(_this).next("input[type=hidden]").val('~');
jQuery(_this).val('~');
}
return true;
}
function mdf_init_search_form(uniqid, slug, search_url, act_without_button, ajax_searching) {
if (act_without_button === 1) {
//checkbox actions
jQuery("#meta_data_filter_" + uniqid + " .mdf_option_checkbox").life('click', function() {
mdf_click_checkbox(this);
jQuery("#meta_data_filter_" + uniqid).submit();
return true;
});
//select actions
jQuery("#meta_data_filter_" + uniqid + " .mdf_filter_select").life('change', function() {
jQuery("#meta_data_filter_" + uniqid).submit();
return true;
});
} else {
jQuery("#meta_data_filter_" + uniqid + " .mdf_option_checkbox").unbind('click');
jQuery("#meta_data_filter_" + uniqid + " .mdf_option_checkbox").life('click', function() {
mdf_click_checkbox(this);
//recount items count by ajax
if (ajax_searching) {
mdf_ajax_data_recount("meta_data_filter_" + uniqid, slug);
}
});
jQuery("#meta_data_filter_" + uniqid + " .mdf_filter_select").unbind('change');
jQuery("#meta_data_filter_" + uniqid + " .mdf_filter_select").life('change', function() {
//recount items count by ajax
if (ajax_searching) {
mdf_ajax_data_recount("meta_data_filter_" + uniqid, slug);
}
});
}
//+++
mdf_draw_ui_slider_items(act_without_button, uniqid);
//+++
mdf_init_submit_button(uniqid, slug, search_url);
}
function mdf_init_submit_button(uniqid, slug, search_url) {
var submit_mode = 'submit';
jQuery('#meta_data_filter_' + uniqid + ' .mdf_reset_button').click(function() {
submit_mode = 'reset';
jQuery("#meta_data_filter_" + uniqid).submit();
return false;
});
var form_id = "meta_data_filter_" + uniqid;
var is_ajaxed_reset = false;
    //check whether the form is inside a popup
var is_in_popup = false;
    if (jQuery(this).parents('.advanced_wp_popup_content').length) {
is_in_popup = true;
is_ajaxed_reset = true;
}
//***
var type = 'widget';
var shortcode_id = 0;
var widget_id = 0;
var sidebar_name = "";
var sidebar_id = 0;
if (jQuery("#" + form_id).hasClass('mdf_shortcode_form')) {
type = 'shortcode';
shortcode_id = jQuery("#" + form_id).data('shortcode-id');
}
if (type == 'widget') {
sidebar_name = jQuery("#" + form_id).data('sidebar-name');
sidebar_id = jQuery("#" + form_id).data('sidebar-id');
widget_id = jQuery("#" + form_id).data('widget-id');
}
jQuery("#meta_data_filter_" + uniqid).submit(function() {
jQuery(this).find("input[type='submit'], .mdf_reset_button").replaceWith(mdf_tax_loader);
jQuery("#meta_data_filter_" + uniqid + " .mdf_one_moment_txt span").show();
var mdf_widget_search_url = search_url + "slg=" + slug + "&";
var data = {
action: "mdf_encode_search_get_params",
vars: jQuery(this).serialize(),
mode: submit_mode,
mdf_front_qtrans_lang: mdf_front_qtrans_lang,
type: type,
shortcode_id: shortcode_id,
sidebar_id: sidebar_id,
sidebar_name: sidebar_name,
widget_id: widget_id,
is_ajaxed_reset: is_ajaxed_reset
};
jQuery.post(ajaxurl, data, function(response) {
if (is_ajaxed_reset && submit_mode == 'reset' && type == 'shortcode') {
jQuery("#meta_data_filter_" + uniqid).parents('.mdf_shortcode_container').replaceWith(response);
mdf_init_selects();
//mdf_ajax_data_recount(form_id, slug);
} else {
if (mdf_widget_search_url.substring(0, 4) == 'self') {
mdf_widget_search_url = mdf_widget_search_url.replace('self', (mdf_current_page_url.length>0 ? mdf_current_page_url : window.location.href));
}
if (mdf_widget_search_url.match(/\?/g).length > 1) {
var index = mdf_widget_search_url.lastIndexOf('?');
mdf_widget_search_url = mdf_widget_search_url.substr(0, index) + '&' + mdf_widget_search_url.substr(index + 1);
}
//only for project TODO
//mdf_widget_search_url = mdf_widget_search_url.replace("#butique_woo_products", "");
window.location = mdf_widget_search_url + response;
}
});
return false;
});
}
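// Flow sketch (URL values are hypothetical): with search_url "http://example.com/?"
// and slug "cars", the serialized form is posted to mdf_encode_search_get_params
// and the browser is then sent to "http://example.com/?slg=cars&" + <encoded response>.
// A search_url starting with "self" is first resolved against the current page URL.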
var mdf_ajax_lock = false; //prevents a second ajax request from firing while one is in flight
function mdf_ajax_data_recount(form_id, slug) {
if (mdf_ajax_lock) {
return;
}
mdf_ajax_lock = true;
//+++
mdf_show_stat_info_popup(lang_one_moment);
var type = 'widget';
var shortcode_id = 0;
var widget_id = 0;
var sidebar_name = "";
var sidebar_id = 0;
if (jQuery("#" + form_id).hasClass('mdf_shortcode_form')) {
type = 'shortcode';
shortcode_id = jQuery("#" + form_id).data('shortcode-id');
}
if (type == 'widget') {
sidebar_id = jQuery("#" + form_id).data('sidebar-id');
sidebar_name = jQuery("#" + form_id).data('sidebar-name');
widget_id = jQuery("#" + form_id).data('widget-id');
}
var data = {
action: "mdf_get_ajax_auto_recount_data",
vars: jQuery("#" + form_id).serialize(),
slug: slug,
type: type,
shortcode_id: shortcode_id,
sidebar_id: sidebar_id,
sidebar_name: sidebar_name,
widget_id: widget_id,
mode: 'submit',
mdf_front_qtrans_lang: mdf_front_qtrans_lang,
mdf_front_wpml_lang: mdf_front_wpml_lang
};
jQuery.post(ajaxurl, data, function(response) {
mdf_hide_stat_info_popup();
if (type == 'shortcode') {
jQuery("#" + form_id).parents('.mdf_shortcode_container').replaceWith(response);
} else {
jQuery('#pn_html_buffer2').html(response);
var widget = jQuery('#pn_html_buffer2').find('.widget-meta-data-filter').clone();
//jQuery("#" + form_id).parents('.widget-meta-data-filter').replaceWith(widget);
jQuery("#" + form_id).parents('.widget-meta-data-filter').replaceWith(response);
jQuery('#pn_html_buffer2').html("");
mdf_draw_ui_slider_items(false, jQuery(widget).find('form').data('unique-id'));
mdf_hide_empty_blocks_titles();
mdf_init_submit_button(jQuery(widget).find('form').data('unique-id'), slug, jQuery(widget).find('form').data('search-url'));
}
mdf_tooltip_init();
mdf_init_checkboxes_scroll();
mdf_init_selects();
mdf_ajax_lock = false;
});
}
<|fim▁hole|> var count = jQuery(value).find('table').find('tr').size();
if (!count) {
jQuery(value).hide();
jQuery(value).find('table').hide();
jQuery(value).prev('h4.data-filter-section-title').hide();
}
});
}
function mdf_tooltip_init() {
try {
jQuery('.mdf_tooltip').tooltipster({
maxWidth: tooltip_max_width,
//iconDesktop:true,
animation: 'fade',
delay: 200,
theme: 'tooltipster-' + mdf_tooltip_theme,
touchDevices: false,
trigger: 'hover',
contentAsHTML: true
//content: jQuery('<span><strong>' + jQuery(this).find('i').html() + '</strong></span>')
});
} catch (e) {
console.log(e);
}
}
function mdf_init_checkboxes_scroll() {
try {
jQuery(".mdf_tax_filter_section, .mdf_filter_section").mCustomScrollbar('destroy');
jQuery(".mdf_tax_filter_section, .mdf_filter_section").mCustomScrollbar({
scrollButtons: {
enable: true
},
advanced: {
updateOnContentResize: true,
updateOnBrowserResize: true
},
theme: "dark-2",
horizontalScroll: false,
mouseWheel: true,
scrollType: 'pixels',
contentTouchScroll: true
});
} catch (e) {
console.log(e);
}
}
//by chosen js
function mdf_init_selects() {
mdf_deinit_chosen_selects();
/*
if (mdf_use_chosen_js_w) {
jQuery(".mdf_widget_form select").chosen();
}
if (mdf_use_chosen_js_s) {
jQuery(".mdf_shortcode_container select").chosen();
}
*/
}
function mdf_deinit_chosen_selects() {
try {
if (mdf_use_chosen_js_w) {
jQuery(".mdf_widget_form select").chosen('destroy').trigger("liszt:updated");
}
if (mdf_use_chosen_js_s) {
jQuery(".mdf_shortcode_container select").chosen('destroy').trigger("liszt:updated");
}
} catch (e) {
}
//jQuery(".mdf_shortcode_form select, .mdf_widget_form select").removeClass("chzn-done").css('display', 'inline').data('chosen', null);
//jQuery("*[class*=chzn]").remove();
}
function mdf_show_stat_info_popup(text) {
jQuery("#pn_html_buffer").text(text);
jQuery("#pn_html_buffer").fadeTo(200, 0.9);
}
function mdf_hide_stat_info_popup() {
window.setTimeout(function() {
jQuery("#pn_html_buffer").fadeOut(400);
}, 500);
}<|fim▁end|> |
function mdf_hide_empty_blocks_titles() {
var section = jQuery('.widget-meta-data-filter .mdf_filter_section');
jQuery.each(section, function(index, value) { |
<|file_name|>demo.py<|end_file_name|><|fim▁begin|>import asyncio
import websockets<|fim▁hole|>import duplex
rpc = duplex.RPC("json")
@asyncio.coroutine
def echo(ch):
obj, _ = yield from ch.recv()
yield from ch.send(obj)
rpc.register("echo", echo)
@asyncio.coroutine
def do_msgbox(ch):
text, _ = yield from ch.recv()
yield from ch.call("msgbox", text, async=True)
rpc.register("doMsgbox", do_msgbox)
@asyncio.coroutine
def server(conn, path):
peer = yield from rpc.accept(conn)
yield from peer.route()
start_server = websockets.serve(server, 'localhost', 8001)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()<|fim▁end|> | |
<|file_name|>InstrumentType.java<|end_file_name|><|fim▁begin|>/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,<|fim▁hole|> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.data.types;
import org.spongepowered.api.CatalogType;
import org.spongepowered.api.util.annotation.CatalogedBy;
/**
* Represents a type of instrument.
*/
@CatalogedBy(InstrumentTypes.class)
public interface InstrumentType extends CatalogType {
}<|fim▁end|> | |
<|file_name|>mempty.rs<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 Sreejith Krishnan R
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use std::any::Any;
use super::{Layout};
use ::platform::Context;
use ::draw::{Drawable, Empty};
pub struct MemptyLayout {}
impl MemptyLayout {
pub fn new() -> MemptyLayout {
MemptyLayout {}
}<|fim▁hole|>impl Layout for MemptyLayout {
fn layout<'a>(&'a self, _: &Context) -> Box<Drawable + 'a> {
Box::new(Empty::new())
}
fn as_any(&self) -> &Any {
self
}
fn as_any_mut(&mut self) -> &mut Any {
self
}
}<|fim▁end|> | }
|
<|file_name|>backend_xmlrpc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- Mode: Python; tab-width: 4 -*-
#
# Netfarm Mail Archiver - release 2
#
# Copyright (C) 2005-2007 Gianluigi Tiesi <[email protected]><|fim▁hole|># Free Software Foundation; either version 2, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
# ======================================================================
## @file backend_xmlrpc.py
## XMLrpc Storage and Archive Backend
__doc__ = '''Netfarm Archiver - release 2.1.0 - XmlRpc backend'''
__version__ = '2.1.0'
__all__ = [ 'Backend' ]
from archiver import *
from sys import exc_info
from xmlrpclib import ServerProxy, Error
from urlparse import urlparse
from time import mktime
_prefix = 'XmlRpc Backend: '
##
class BadUrlSyntax(Exception):
"""BadUrlSyntax Bad url syntax in config file"""
pass
class Backend(BackendBase):
"""XMLrpc Backend using python-xmlrpc
    This backend can be used with an XML-RPC capable server such as Zope"""
def __init__(self, config, stage_type, ar_globals):
"""The constructor"""
self.config = config
self.type = stage_type
self.LOG = ar_globals['LOG']
try:
self.url = config.get(self.type, 'url')
self.method = config.get(self.type, 'method')
self.server = ServerProxy(self.url)
except:
raise BadConfig, 'Bad config in xmlrpc backend'
self.LOG(E_ALWAYS, 'XmlRpc Backend (%s) at %s' % (self.type, self.url))
def process(self, data):
"""Archive backend proces
@param data: The data argument is a dict containing mail info and the mail itself
@return: year as status and pid as code"""
## FIXME wrap with xmlrpc DateTime - time.struct_time objects cannot be marshalled
data['m_date'] = mktime(data['m_date'])
self.LOG(E_TRACE, 'XmlRpc Backend (%s): ready to process %s' % (self.type, data))
try:
getattr(self.server, self.method)({'data': data})
except Error, v:
del v ## FIXME Fill error
return 0, 443, 'Error'
return 0, 200, 'Ok'
def shutdown(self):
"""Backend Shutdown callback"""
self.LOG(E_ALWAYS, 'XmlRpc Backend (%s): closing connection' % self.type)
self.server = None<|fim▁end|> | # Copyright (C) 2005-2007 NetFarm S.r.l. [http://www.netfarm.it]
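# A minimal config sketch (section name and values are assumptions, not taken
# from a real deployment) matching the keys read in __init__ above; the backend
# then calls getattr(ServerProxy(url), method)({'data': data}) in process():
#
#   [storage]
#   url = http://localhost:8080/archiver
#   method = archive_mail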
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the |
<|file_name|>InspectorDebuggerAgent.js<|end_file_name|><|fim▁begin|>'use strict';
var format = require('util').format
, scripts = require('./scripts')
, loadScriptSource = require('./load-script-source')
// Ports: https://code.google.com/p/chromium/codesearch#chromium/src/third_party/WebKit/Source/core/inspector/InspectorDebuggerAgent.cpp
function ignore(cb) { cb() }
function InspectorDebuggerAgent() {
if (!(this instanceof InspectorDebuggerAgent)) return new InspectorDebuggerAgent();
this._enabled = false;
this._breakpointsCookie = {}
}
module.exports = InspectorDebuggerAgent;
var proto = InspectorDebuggerAgent.prototype;
proto.enable = function enable(cb) {
this._enabled = true;
cb()
}
proto.disable = function disable(cb) {
this._enabled = false;
cb()
}
// https://code.google.com/p/chromium/codesearch#chromium/src/third_party/WebKit/Source/core/inspector/InspectorDebuggerAgent.cpp&l=606
proto._resolveBreakpoint = function _resolveBreakpoint(breakpointId, script, breakpoint, cb) {
var result = { breakpointId: breakpointId, locations: [ ] };
  // If a breakpoint registers on startup, the script's source may not have been loaded yet.
  // In that case we load it; the script's source is set automatically during that step.
  // This should not be needed once the other debugger methods are implemented.
if (script.source) onensuredSource();
else loadScriptSource(script.url, onensuredSource)
function onensuredSource(err) {
if (err) return cb(err);
if (breakpoint.lineNumber < script.startLine || script.endLine < breakpoint.lineNumber) return cb(null, result);
// TODO: scriptDebugServer().setBreakpoint(scriptId, breakpoint, &actualLineNumber, &actualColumnNumber, false);
// https://code.google.com/p/chromium/codesearch#chromium/src/third_party/WebKit/Source/bindings/core/v8/ScriptDebugServer.cpp&l=89
var debugServerBreakpointId = 'TBD'
if (!debugServerBreakpointId) return cb(null, result);
// will be returned from call to script debug server
var actualLineNumber = breakpoint.lineNumber
, actualColumnNumber = breakpoint.columnNumber
result.locations.push({
scriptId : script.id
, lineNumber : actualLineNumber
, columnNumber : actualColumnNumber
})
cb(null, result);
}
}
// https://code.google.com/p/chromium/codesearch#chromium/src/third_party/WebKit/Source/core/inspector/InspectorDebuggerAgent.cpp&l=333
proto.setBreakpointByUrl = function setBreakpointByUrl(opts, cb) {
if (opts.urlRegex) return cb(new Error('Not supporting setBreakpointByUrl with urlRegex'));
var isAntibreakpoint = !!opts.isAntibreakpoint
, url = opts.url
, condition = opts.condition || ''
, lineNumber = opts.lineNumber
, columnNumber
  if (typeof opts.columnNumber === 'number') {
columnNumber = opts.columnNumber;
if (columnNumber < 0) return cb(new Error('Incorrect column number.'));
} else {
columnNumber = isAntibreakpoint ? -1 : 0;
}
var breakpointId = format('%s:%d:%d', url, lineNumber, columnNumber);
if (this._breakpointsCookie[breakpointId]) return cb(new Error('Breakpoint at specified location already exists.'));
this._breakpointsCookie[breakpointId] = {
url : url
, lineNumber : lineNumber
, columnNumber : columnNumber
, condition : condition
, isAntibreakpoint : isAntibreakpoint
}
if (isAntibreakpoint) return cb(null, { breakpointId: breakpointId });
var match = scripts.byUrl[url];
if (!match) return cb(null, { breakpointId: breakpointId, locations: [] })
var breakpoint = { lineNumber: lineNumber, columnNumber: columnNumber, condition: condition }
this._resolveBreakpoint(breakpointId, match, breakpoint, cb)
}
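// A hedged usage sketch (the opts values are hypothetical):
//
//   agent.setBreakpointByUrl({ url: 'app.js', lineNumber: 12 }, cb);
//
// registers the cookie under the id 'app.js:12:0' and, when 'app.js' is a
// known script, resolves the breakpoint against it via _resolveBreakpoint.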
proto._removeBreakpoint = function _removeBreakpoint(breakpointId, cb) {
// todo
cb()
}
// https://code.google.com/p/chromium/codesearch#chromium/src/third_party/WebKit/Source/core/inspector/InspectorDebuggerAgent.cpp&l=416
proto.removeBreakpoint = function removeBreakpoint(breakpointId, cb) {
var breakpoint = this._breakpointsCookie[breakpointId];
  if (!breakpoint) return cb();
this._breakpointsCookie[breakpointId] = undefined;
if (!breakpoint.isAntibreakpoint) this._removeBreakpoint(breakpointId, cb);
else cb()
}
proto.getScriptSource = function getScriptSource(id, cb) {
var script = scripts.byId[id];
  if (!script) return cb(new Error('Script with id ' + id + ' was not found'))
cb(null, { scriptSource: script.source })
}
<|fim▁hole|>proto.setBreakpoint = ignore
proto.continueToLocation = ignore
proto.stepOver = ignore
proto.stepInto = ignore
proto.stepOut = ignore
proto.pause = ignore
proto.resume = ignore
proto.searchInContent = ignore
proto.canSetScriptSource = ignore
proto.setScriptSource = ignore
proto.restartFrame = ignore
proto.getFunctionDetails = ignore
proto.getCollectionEntries = ignore
proto.setPauseOnExceptions = ignore
proto.evaluateOnCallFrame = ignore
proto.compileScript = ignore
proto.runScript = ignore
proto.setOverlayMessage = ignore
proto.setVariableValue = ignore
proto.getStepInPositions = ignore
proto.getBacktrace = ignore
proto.skipStackFrames = ignore
proto.setAsyncCallStackDepth = ignore
proto.enablePromiseTracker = ignore
proto.disablePromiseTracker = ignore
proto.getPromises = ignore
proto.getPromiseById = ignore<|fim▁end|> | proto.setBreakpointsActive = ignore
proto.setSkipAllPauses = ignore |
<|file_name|>simple_joint_control_test_sm.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
###########################################################
# WARNING: Generated code! #
# ************************** #
# Manual changes may get lost if file is generated again. #
# Only code inside the [MANUAL] tags will be kept. #
###########################################################
import roslib; roslib.load_manifest('vigir_behavior_simple_joint_control_test')
from flexbe_core import Behavior, Autonomy, OperatableStateMachine, Logger
from vigir_flexbe_states.check_current_control_mode_state import CheckCurrentControlModeState
from vigir_flexbe_states.change_control_mode_action_state import ChangeControlModeActionState
from vigir_flexbe_states.moveit_move_group_state import MoveitMoveGroupState
from flexbe_states.decision_state import DecisionState
from flexbe_states.calculation_state import CalculationState
from flexbe_states.wait_state import WaitState
from vigir_flexbe_states.execute_trajectory_state import ExecuteTrajectoryState
from flexbe_states.flexible_calculation_state import FlexibleCalculationState
from vigir_flexbe_states.update_dynamic_parameter_state import UpdateDynamicParameterState
from vigir_flexbe_states.read_dynamic_parameter_state import ReadDynamicParameterState
from flexbe_states.start_record_logs_state import StartRecordLogsState
from flexbe_states.stop_record_logs_state import StopRecordLogsState
# Additional imports can be added inside the following tags
# [MANUAL_IMPORT]
import time
import os
import rospy
# [/MANUAL_IMPORT]
'''
Created on Mon Nov 03 2014
@author: Philipp and Spyros
'''
class SimpleJointControlTestSM(Behavior):
'''
Get step response of joint controllers by varying PID gains.
'''
def __init__(self):
super(SimpleJointControlTestSM, self).__init__()
self.name = 'Simple Joint Control Test'
# parameters of this behavior
self.add_parameter('topics_to_record', '')
self.add_parameter('joint_upper_bounds', 0.6)
self.add_parameter('joint_lower_bounds', 0.4)
self.add_parameter('real_robot', True)
# references to used behaviors
# Additional initialization code can be added inside the following tags
# [MANUAL_INIT]
# 0-5 left arm
# 6-11 right arm
# for each: wrx, wry, elx, ely, shx, shz
# simulation
self._joint_limits_sim = [ \
[-0.44, 1.57], \
[-1.57, 1.57], \
[0.00, 2.35], \
[0.00, 3.14], \
[-1.40, 1.75], \
[-1.96, 1.96], \
\
[-1.57, 0.44], \
[-1.57, 1.57], \
[-2.35, 0.00], \
[0.00, 3.14], \
[-1.75, 1.40], \
[-1.96, 1.96] \
]
# real robot
self._joint_limits_rob = [ \
[-1.18, 1.18], \
[0.00, 3.14], \
[0.00, 2.36], \
[0.00, 3.14], \
[-1.57, 1.57], \
[-1.57, 0.79], \
\
[-1.18, 1.18], \
[0.00, 3.14], \
[-2.36, 0.00], \
[0.00, 3.14], \
[-1.57, 1.57], \
[-1.57, 0.79], \
]
self._joint_limits = []
# joint order: shz, shx, ely, elx, wry, wrx
self._joint_configs_down = []
self._joint_configs_up = []
self._traj_controllers = [ \
UpdateDynamicParameterState.LEFT_ARM_WRX, \
UpdateDynamicParameterState.LEFT_ARM_WRY, \
UpdateDynamicParameterState.LEFT_ARM_ELX, \
UpdateDynamicParameterState.LEFT_ARM_ELY, \
UpdateDynamicParameterState.LEFT_ARM_SHX, \
UpdateDynamicParameterState.LEFT_ARM_SHZ, \
\
UpdateDynamicParameterState.RIGHT_ARM_WRX, \
UpdateDynamicParameterState.RIGHT_ARM_WRY, \
UpdateDynamicParameterState.RIGHT_ARM_ELX, \
UpdateDynamicParameterState.RIGHT_ARM_ELY, \
UpdateDynamicParameterState.RIGHT_ARM_SHX, \
UpdateDynamicParameterState.RIGHT_ARM_SHZ \
]
# [/MANUAL_INIT]
# Behavior comments:
def create(self):
joint_names_left = ["l_arm_shz", "l_arm_shx", "l_arm_ely", "l_arm_elx", "l_arm_wry", "l_arm_wrx"]
joint_names_right = ["r_arm_shz", "r_arm_shx", "r_arm_ely", "r_arm_elx", "r_arm_wry", "r_arm_wrx"]
wait_time = 3.0
bagfolder = "" # calculated
gains_list = {'pid_gains': ['p', 'i', 'd'], 'bdi_gains': ['k_qd_p', 'ff_qd_d'], 'vigir_gains': ['ff_bang', 'ff_effort', 'ff_friction']}
# x:30 y:365, x:130 y:365
_state_machine = OperatableStateMachine(outcomes=['finished', 'failed'])
_state_machine.userdata.joints_left_up = [] # calculated
_state_machine.userdata.joints_right_up = [] # calculated
_state_machine.userdata.joint_index = 0
_state_machine.userdata.zero_time = [0.02]
_state_machine.userdata.joint_positions_up = [] # calculated
_state_machine.userdata.joint_positions_down = [] # calculated
_state_machine.userdata.joint_index = 0
_state_machine.userdata.none = None
_state_machine.userdata.init_time = [3.0]
# Additional creation code can be added inside the following tags
# [MANUAL_CREATE]
# 'Basic' configuration for SIMULATION
#_state_machine.userdata.joints_left_up = [0.00, 0.18, 1.57, 1.18, 0.00, 0.57]
#_state_machine.userdata.joints_right_up = [0.00, -0.18, 1.57, -1.18, 0.00, -0.57]
logs_folder = os.path.expanduser('~/joint_control_tests/')
if not os.path.exists(logs_folder):
os.makedirs(logs_folder)
bagfolder = os.path.join(logs_folder, "run_" + time.strftime("%Y-%m-%d-%H_%M"))
os.makedirs(bagfolder)
self._joint_limits = self._joint_limits_rob if self.real_robot else self._joint_limits_sim
# standard config
joints_left_up = [0] * 6
for i in range(6):
joint_range = self._joint_limits[i][1] - self._joint_limits[i][0]
joints_left_up[5-i] = self._joint_limits[i][0] + joint_range * 0.5
joints_right_up = [0] * 6
for i in range(6):
joint_range = self._joint_limits[i+6][1] - self._joint_limits[i+6][0]
joints_right_up[5-i] = self._joint_limits[i+6][0] + joint_range * 0.5
_state_machine.userdata.joints_left_up = joints_left_up
_state_machine.userdata.joints_right_up = joints_right_up
rospy.loginfo('Average left joint positions: ' + ' '.join(map(str, joints_left_up)))
rospy.loginfo('Average right joint positions: ' + ' '.join(map(str, joints_right_up)))
# left
for i in range(6):
joint_config_up = list(_state_machine.userdata.joints_left_up)
joint_config_down = list(_state_machine.userdata.joints_left_up)
joint_range = self._joint_limits[i][1] - self._joint_limits[i][0]
joint_config_up[5-i] = self._joint_limits[i][0] + joint_range * self.joint_upper_bounds
joint_config_down[5-i] = self._joint_limits[i][0] + joint_range * self.joint_lower_bounds
self._joint_configs_up.append([joint_config_up])
self._joint_configs_down.append([joint_config_down])
rospy.loginfo('Left Joint Config Up: ' + ' '.join(map(str, joint_config_up)))
rospy.loginfo('Left Joint Config Dn: ' + ' '.join(map(str, joint_config_down)))
# right
for i in range(6):
joint_config_up = list(_state_machine.userdata.joints_right_up)
joint_config_down = list(_state_machine.userdata.joints_right_up)
joint_range = self._joint_limits[i+6][1] - self._joint_limits[i+6][0]
joint_config_up[5-i] = self._joint_limits[i+6][0] + joint_range * self.joint_upper_bounds
joint_config_down[5-i] = self._joint_limits[i+6][0] + joint_range * self.joint_lower_bounds
self._joint_configs_up.append([joint_config_up])
self._joint_configs_down.append([joint_config_down])
rospy.loginfo('Right Joint Config Up: ' + ' '.join(map(str, joint_config_up)))
rospy.loginfo('Right Joint Config Dn: ' + ' '.join(map(str, joint_config_down)))
# [/MANUAL_CREATE]
# x:30 y:365, x:130 y:365
_sm_move_joint_down_0 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['joint_index', 'joint_positions_down', 'zero_time', 'joints_right_up', 'joints_left_up', 'init_time'])
with _sm_move_joint_down_0:
# x:71 y:145
OperatableStateMachine.add('Move_Left_Arm_Back',
MoveitMoveGroupState(planning_group="l_arm_group", joint_names=joint_names_left),
transitions={'reached': 'Move_Right_Arm_Back', 'failed': 'failed'},
autonomy={'reached': Autonomy.Low, 'failed': Autonomy.High},
remapping={'target_joint_config': 'joints_left_up'})
# x:639 y:69
OperatableStateMachine.add('Move_Left_Joint_Down',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_LEFT_ARM, joint_names=joint_names_left),
transitions={'done': 'finished', 'failed': 'failed'},
autonomy={'done': Autonomy.Low, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_down', 'time': 'init_time'})
# x:631 y:200
OperatableStateMachine.add('Move_Right_Joint_Down',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_RIGHT_ARM, joint_names=joint_names_right),
transitions={'done': 'finished', 'failed': 'failed'},
autonomy={'done': Autonomy.High, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_down', 'time': 'init_time'})
# x:201 y:54
OperatableStateMachine.add('Move_Right_Arm_Back',
MoveitMoveGroupState(planning_group="r_arm_group", joint_names=joint_names_right),
transitions={'reached': 'Decide_Left_Or_Right', 'failed': 'failed'},
autonomy={'reached': Autonomy.Low, 'failed': Autonomy.High},
remapping={'target_joint_config': 'joints_right_up'})
# x:429 y:62
OperatableStateMachine.add('Decide_Left_Or_Right',
DecisionState(outcomes=["left", "right"], conditions=lambda it: "left" if it < 6 else "right"),
transitions={'left': 'Move_Left_Joint_Down', 'right': 'Move_Right_Joint_Down'},
autonomy={'left': Autonomy.High, 'right': Autonomy.High},
remapping={'input_value': 'joint_index'})
# x:30 y:365, x:130 y:365
_sm_perform_gain_test_right_1 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['joint_positions_up', 'joint_positions_down', 'zero_time'])
with _sm_perform_gain_test_right_1:
# x:84 y:39
OperatableStateMachine.add('Initial_Wait',
WaitState(wait_time=wait_time),
transitions={'done': 'Perform_Step_Up'},
autonomy={'done': Autonomy.Off})
# x:80 y:218
OperatableStateMachine.add('Wait_Up',
WaitState(wait_time=wait_time),
transitions={'done': 'Perform_Step_Down'},
autonomy={'done': Autonomy.Off})
# x:44 y:331
OperatableStateMachine.add('Perform_Step_Down',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_RIGHT_ARM, joint_names=joint_names_right),
transitions={'done': 'Wait_Down', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_down', 'time': 'zero_time'})
# x:73 y:440
OperatableStateMachine.add('Wait_Down',
WaitState(wait_time=wait_time),
                        transitions={'done': 'Perform_Step_Up_2'},
autonomy={'done': Autonomy.Off})
# x:414 y:401
            OperatableStateMachine.add('Perform_Step_Up_2',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_RIGHT_ARM, joint_names=joint_names_right),
transitions={'done': 'Wait_Up_2', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_up', 'time': 'zero_time'})
# x:442 y:291
OperatableStateMachine.add('Wait_Up_2',
WaitState(wait_time=wait_time),
transitions={'done': 'Perform_Step_Down_2'},
autonomy={'done': Autonomy.Off})
# x:416 y:167
OperatableStateMachine.add('Perform_Step_Down_2',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_RIGHT_ARM, joint_names=joint_names_right),
transitions={'done': 'Wait_Down_2', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_down', 'time': 'zero_time'})
# x:449 y:62
OperatableStateMachine.add('Wait_Down_2',
WaitState(wait_time=wait_time),
transitions={'done': 'finished'},
autonomy={'done': Autonomy.Off})
# x:48 y:113
OperatableStateMachine.add('Perform_Step_Up',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_RIGHT_ARM, joint_names=joint_names_right),
transitions={'done': 'Wait_Up', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_up', 'time': 'zero_time'})
# x:30 y:365, x:130 y:365
_sm_perform_gain_test_left_2 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['joint_positions_up', 'joint_positions_down', 'zero_time'])
with _sm_perform_gain_test_left_2:
# x:84 y:39
OperatableStateMachine.add('Initial_Wait',
WaitState(wait_time=wait_time),
transitions={'done': 'Perform_Step_Up_1'},
autonomy={'done': Autonomy.Off})
# x:87 y:232
OperatableStateMachine.add('Wait_Up_1',
WaitState(wait_time=wait_time),
transitions={'done': 'Perform_Step_Down_1'},
autonomy={'done': Autonomy.Off})
# x:50 y:321
OperatableStateMachine.add('Perform_Step_Down_1',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_LEFT_ARM, joint_names=joint_names_left),
transitions={'done': 'Wait_Down_1', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_down', 'time': 'zero_time'})
# x:77 y:415
OperatableStateMachine.add('Wait_Down_1',
WaitState(wait_time=wait_time),
                        transitions={'done': 'Perform_Step_Up_2'},
autonomy={'done': Autonomy.Off})
# x:51 y:131
OperatableStateMachine.add('Perform_Step_Up_1',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_LEFT_ARM, joint_names=joint_names_left),
transitions={'done': 'Wait_Up_1', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_up', 'time': 'zero_time'})
# x:414 y:401
            OperatableStateMachine.add('Perform_Step_Up_2',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_LEFT_ARM, joint_names=joint_names_left),
transitions={'done': 'Wait_Up_2', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_up', 'time': 'zero_time'})
# x:442 y:291
OperatableStateMachine.add('Wait_Up_2',
WaitState(wait_time=wait_time),
transitions={'done': 'Perform_Step_Down_2'},
autonomy={'done': Autonomy.Off})
# x:416 y:167
OperatableStateMachine.add('Perform_Step_Down_2',
ExecuteTrajectoryState(controller=ExecuteTrajectoryState.CONTROLLER_LEFT_ARM, joint_names=joint_names_left),
transitions={'done': 'Wait_Down_2', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'joint_positions': 'joint_positions_down', 'time': 'zero_time'})
# x:449 y:62
OperatableStateMachine.add('Wait_Down_2',
WaitState(wait_time=wait_time),
transitions={'done': 'finished'},
autonomy={'done': Autonomy.Off})
# x:30 y:365, x:130 y:365
_sm_test_individual_joint_3 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['joint_positions_up', 'joint_positions_down', 'joint_index', 'traj_controller', 'none', 'zero_time', 'joints_right_up', 'joints_left_up', 'init_time'])
with _sm_test_individual_joint_3:
# x:45 y:60
OperatableStateMachine.add('Initialize_Iteration',
CalculationState(calculation=lambda x: 0),
transitions={'done': 'Move_Joint_Down'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'none', 'output_value': 'iteration'})
# x:520 y:555
OperatableStateMachine.add('Perform_Gain_Test_Left',
_sm_perform_gain_test_left_2,
transitions={'finished': 'Stop_Gain_Logs', 'failed': 'Stop_Gain_Logs'},
autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
remapping={'joint_positions_up': 'joint_positions_up', 'joint_positions_down': 'joint_positions_down', 'zero_time': 'zero_time'})
# x:176 y:388
OperatableStateMachine.add('Decide_If_Tests_To_Go',
DecisionState(outcomes=["done", "continue"], conditions=lambda it: "done" if it == 5 else "continue"),
transitions={'done': 'Reset_Joint_Gains', 'continue': 'Calculate_Next_Gain_Value'},
autonomy={'done': Autonomy.Off, 'continue': Autonomy.Off},
remapping={'input_value': 'iteration'})
# x:144 y:298
OperatableStateMachine.add('Calculate_Next_Gain_Value',
FlexibleCalculationState(calculation=self.calculate_gains, input_keys=["iteration", "nominal_gain"]),
transitions={'done': 'Set_Joint_Gain'},
autonomy={'done': Autonomy.Off},
remapping={'iteration': 'iteration', 'nominal_gain': 'nominal_gains', 'output_value': 'altered_gains'})
# x:395 y:268
OperatableStateMachine.add('Set_Joint_Gain',
UpdateDynamicParameterState(param=gains_list),
transitions={'updated': 'Set_Logfile_Name', 'failed': 'failed'},
autonomy={'updated': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'traj_controller': 'traj_controller', 'parameter_value': 'altered_gains'})
# x:190 y:193
OperatableStateMachine.add('Get_Joint_Gains',
ReadDynamicParameterState(param=gains_list),
transitions={'read': 'Calculate_Next_Gain_Value', 'failed': 'failed'},
autonomy={'read': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'traj_controller': 'traj_controller', 'parameter_value': 'nominal_gains'})
# x:158 y:505
OperatableStateMachine.add('Increment_Iteration_Counter',
CalculationState(calculation=lambda it: it + 1),
transitions={'done': 'Decide_If_Tests_To_Go'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'iteration', 'output_value': 'iteration'})
# x:798 y:435
OperatableStateMachine.add('Decide_Left_Or_Right',
DecisionState(outcomes=["left", "right"], conditions=lambda it: "left" if it < 6 else "right"),
transitions={'left': 'Perform_Gain_Test_Left', 'right': 'Perform_Gain_Test_Right'},
autonomy={'left': Autonomy.High, 'right': Autonomy.High},
remapping={'input_value': 'joint_index'})
# x:811 y:624
OperatableStateMachine.add('Perform_Gain_Test_Right',
_sm_perform_gain_test_right_1,
transitions={'finished': 'Stop_Gain_Logs', 'failed': 'Stop_Gain_Logs'},
autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
remapping={'joint_positions_up': 'joint_positions_up', 'joint_positions_down': 'joint_positions_down', 'zero_time': 'zero_time'})
# x:545 y:458
OperatableStateMachine.add('Start_Gain_Logs',
StartRecordLogsState(topics_to_record=self.topics_to_record),
transitions={'logging': 'Decide_Left_Or_Right'},
autonomy={'logging': Autonomy.Off},
remapping={'bagfile_name': 'bagfile_name', 'rosbag_process': 'rosbag_process'})
# x:184 y:616
OperatableStateMachine.add('Stop_Gain_Logs',
StopRecordLogsState(),
transitions={'stopped': 'Increment_Iteration_Counter'},
autonomy={'stopped': Autonomy.Off},
remapping={'rosbag_process': 'rosbag_process'})
# x:576 y:346
OperatableStateMachine.add('Set_Logfile_Name',
FlexibleCalculationState(calculation=lambda i: bagfolder + self._traj_controllers[i[1]][1] + "_k_p_" + str(i[0][0]) + ".bag", input_keys=["gain_percentage", "joint_index"]),
transitions={'done': 'Start_Gain_Logs'},
autonomy={'done': Autonomy.Off},
remapping={'gain_percentage': 'altered_gains', 'joint_index': 'joint_index', 'output_value': 'bagfile_name'})
# x:210 y:53
OperatableStateMachine.add('Move_Joint_Down',
_sm_move_joint_down_0,
transitions={'finished': 'Get_Joint_Gains', 'failed': 'failed'},
autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
remapping={'joint_index': 'joint_index', 'joint_positions_down': 'joint_positions_down', 'zero_time': 'zero_time', 'joints_right_up': 'joints_right_up', 'joints_left_up': 'joints_left_up', 'init_time': 'init_time'})
# x:365 y:430
OperatableStateMachine.add('Reset_Joint_Gains',
UpdateDynamicParameterState(param=gains_list),
transitions={'updated': 'finished', 'failed': 'failed'},
autonomy={'updated': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'traj_controller': 'traj_controller', 'parameter_value': 'nominal_gains'})
# x:30 y:365, x:130 y:365
_sm_test_joint_controls_4 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['joint_index', 'none', 'zero_time', 'joints_right_up', 'joints_left_up', 'init_time'])
with _sm_test_joint_controls_4:
# x:47 y:121
OperatableStateMachine.add('Decide_Joints_To_Go',
DecisionState(outcomes=["done", "continue"], conditions=lambda idx: "done" if idx == len(self._joint_configs_down) else "continue"),
transitions={'done': 'finished', 'continue': 'Select_Next_Joint_Up'},
autonomy={'done': Autonomy.High, 'continue': Autonomy.Low},
remapping={'input_value': 'joint_index'})
# x:257 y:290
OperatableStateMachine.add('Select_Next_Joint_Up',
CalculationState(calculation=lambda idx: self._joint_configs_up[idx]),
transitions={'done': 'Select_Next_Joint_Down'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'joint_index', 'output_value': 'joint_positions_up'})
# x:571 y:68
OperatableStateMachine.add('Test_Individual_Joint',
_sm_test_individual_joint_3,
transitions={'finished': 'Increment_Joint_Index', 'failed': 'failed'},
autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
remapping={'joint_positions_up': 'joint_positions_up', 'joint_positions_down': 'joint_positions_down', 'joint_index': 'joint_index', 'traj_controller': 'traj_controller', 'none': 'none', 'zero_time': 'zero_time', 'joints_right_up': 'joints_right_up', 'joints_left_up': 'joints_left_up', 'init_time': 'init_time'})
# x:529 y:324
OperatableStateMachine.add('Select_Next_Joint_Down',
CalculationState(calculation=lambda idx: self._joint_configs_down[idx]),
transitions={'done': 'Set_Trajectory_Controller'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'joint_index', 'output_value': 'joint_positions_down'})
# x:222 y:51<|fim▁hole|> remapping={'input_value': 'joint_index', 'output_value': 'joint_index'})
# x:559 y:189
OperatableStateMachine.add('Set_Trajectory_Controller',
CalculationState(calculation=lambda idx: self._traj_controllers[idx]),
transitions={'done': 'Test_Individual_Joint'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'joint_index', 'output_value': 'traj_controller'})
with _state_machine:
# x:112 y:38
OperatableStateMachine.add('Check_Initial_Stand',
CheckCurrentControlModeState(target_mode=CheckCurrentControlModeState.STAND, wait=False),
transitions={'correct': 'Switch_To_Manipulate', 'incorrect': 'Set_Initial_Stand'},
autonomy={'correct': Autonomy.Low, 'incorrect': Autonomy.Low},
remapping={'control_mode': 'control_mode'})
# x:336 y:123
OperatableStateMachine.add('Set_Initial_Stand',
ChangeControlModeActionState(target_mode=ChangeControlModeActionState.STAND),
transitions={'changed': 'Switch_To_Manipulate', 'failed': 'failed'},
autonomy={'changed': Autonomy.Low, 'failed': Autonomy.High})
# x:60 y:235
OperatableStateMachine.add('Switch_To_Manipulate',
ChangeControlModeActionState(target_mode=ChangeControlModeActionState.MANIPULATE),
transitions={'changed': 'Bring_Left_Arm_Up', 'failed': 'failed'},
autonomy={'changed': Autonomy.Low, 'failed': Autonomy.High})
# x:105 y:428
OperatableStateMachine.add('Bring_Left_Arm_Up',
MoveitMoveGroupState(planning_group="l_arm_group", joint_names=joint_names_left),
transitions={'reached': 'Bring_Right_Arm_Up', 'failed': 'failed'},
autonomy={'reached': Autonomy.Low, 'failed': Autonomy.High},
remapping={'target_joint_config': 'joints_left_up'})
# x:323 y:482
OperatableStateMachine.add('Bring_Right_Arm_Up',
MoveitMoveGroupState(planning_group="r_arm_group", joint_names=joint_names_right),
transitions={'reached': 'Test_Joint_Controls', 'failed': 'failed'},
autonomy={'reached': Autonomy.High, 'failed': Autonomy.High},
remapping={'target_joint_config': 'joints_right_up'})
# x:620 y:465
OperatableStateMachine.add('Test_Joint_Controls',
_sm_test_joint_controls_4,
transitions={'finished': 'Change_Back_To_Stand', 'failed': 'failed'},
autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
remapping={'joint_index': 'joint_index', 'none': 'none', 'zero_time': 'zero_time', 'joints_right_up': 'joints_right_up', 'joints_left_up': 'joints_left_up', 'init_time': 'init_time'})
# x:831 y:349
OperatableStateMachine.add('Change_Back_To_Stand',
ChangeControlModeActionState(target_mode=ChangeControlModeActionState.STAND),
transitions={'changed': 'finished', 'failed': 'failed'},
autonomy={'changed': Autonomy.Off, 'failed': Autonomy.Off})
return _state_machine
# Private functions can be added inside the following tags
# [MANUAL_FUNC]
def calculate_gains(self, input_values):
iteration = input_values[0]
nominal_gains = input_values[1]
gain_percentage = nominal_gains[0] * (0.4 + 0.2 * iteration)
altered_gains = [gain_percentage]
for gain in nominal_gains[1:]:
altered_gains.append(0)
return altered_gains
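        # Worked example (values are illustrative): with nominal_gains[0] == 100
        # and the iteration counter sweeping 0..4 before Decide_If_Tests_To_Go
        # reports done, the proportional gain takes 100 * (0.4 + 0.2 * i),
        # i.e. 40, 60, 80, 100, 120, while the remaining gains are zeroed.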
# [/MANUAL_FUNC]<|fim▁end|> | OperatableStateMachine.add('Increment_Joint_Index',
CalculationState(calculation=lambda it: it + 1),
transitions={'done': 'Decide_Joints_To_Go'},
autonomy={'done': Autonomy.Off}, |
<|file_name|>media.component.ts<|end_file_name|><|fim▁begin|>import { SingleFileUploadModal } from './modal-single';
import { Component, OnInit } from '@angular/core';
import { MdDialog } from '@angular/material';
@Component({
selector: 'media-library',
templateUrl: './media.component.html',
styleUrls: ['./media.component.css']
})
export class MediaComponent implements OnInit {
selectedOption: string;
constructor(public dialog: MdDialog) { }
<|fim▁hole|> this.selectedOption = result;
});
}
ngOnInit() {
}
}<|fim▁end|> | openDialog() {
let dialogRef = this.dialog.open(SingleFileUploadModal);
dialogRef.afterClosed().subscribe(result => {
|
<|file_name|>source.d.ts<|end_file_name|><|fim▁begin|>interface Location {
line: number;
column: number;
}
/**
* A representation of source input to GraphQL. The `name` and `locationOffset` parameters are
* optional, but they are useful for clients who store GraphQL documents in source files.
* For example, if the GraphQL input starts at line 40 in a file named `Foo.graphql`, it might
* be useful for `name` to be `"Foo.graphql"` and location to be `{ line: 40, column: 1 }`.
* The `line` and `column` properties in `locationOffset` are 1-indexed.
*/
export class Source {
body: string;
name: string;
locationOffset: Location;
constructor(body: string, name?: string, locationOffset?: Location);
}
/**
* Test if the given value is a Source object.
*
* @internal<|fim▁hole|><|fim▁end|> | */
export function isSource(source: any): source is Source; |
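// A usage sketch based on the doc comment above (the document body and offsets
// are illustrative, not from the GraphQL repository). Shown as a comment since
// a .d.ts file cannot contain executable statements:
//
//   const source = new Source('{ hero { name } }', 'Foo.graphql',
//                             { line: 40, column: 1 });
//   isSource(source);         // true
//   isSource('plain string'); // false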
<|file_name|>0015_auto_20160503_0248.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import
# Generated by Django 1.9 on 2016-05-03 02:48
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20160503_0247'),
]
operations = [
migrations.RemoveField(
model_name='podcast',<|fim▁hole|> name='tip_last_payout_amount',
),
migrations.RemoveField(
model_name='podcast',
name='tip_value',
),
]<|fim▁end|> | name='tip_last_payout',
),
migrations.RemoveField(
model_name='podcast', |
<|file_name|>scheme.rs<|end_file_name|><|fim▁begin|>use core::ops::{Deref, DerefMut};
use core::{mem, slice};
use super::error::*;
use super::syscall::*;
use super::c_string_to_str;
#[derive(Copy, Clone, Debug, Default)]
#[repr(packed)]
pub struct Packet {
pub id: usize,
pub a: usize,
pub b: usize,
pub c: usize,
pub d: usize
}
impl Deref for Packet {
type Target = [u8];
fn deref(&self) -> &[u8] {
unsafe {
slice::from_raw_parts(self as *const Packet as *const u8, mem::size_of::<Packet>()) as &[u8]
}
}
}
impl DerefMut for Packet {
fn deref_mut(&mut self) -> &mut [u8] {
unsafe {
slice::from_raw_parts_mut(self as *mut Packet as *mut u8, mem::size_of::<Packet>()) as &mut [u8]<|fim▁hole|>pub trait Scheme {
fn handle(&mut self, packet: &mut Packet) {
packet.a = Error::mux(match packet.a {
SYS_OPEN => self.open(c_string_to_str(packet.b as *const u8), packet.c, packet.d),
SYS_MKDIR => self.mkdir(c_string_to_str(packet.b as *const u8), packet.c),
SYS_RMDIR => self.rmdir(c_string_to_str(packet.b as *const u8)),
SYS_STAT => self.stat(c_string_to_str(packet.b as *const u8), unsafe { &mut *(packet.c as *mut Stat) }),
SYS_UNLINK => self.unlink(c_string_to_str(packet.b as *const u8)),
SYS_READ => self.read(packet.b, unsafe { slice::from_raw_parts_mut(packet.c as *mut u8, packet.d) }),
SYS_WRITE => self.write(packet.b, unsafe { slice::from_raw_parts(packet.c as *const u8, packet.d) }),
SYS_LSEEK => self.seek(packet.b, packet.c, packet.d),
SYS_FPATH => self.fpath(packet.b, unsafe { slice::from_raw_parts_mut(packet.c as *mut u8, packet.d) }),
SYS_FSTAT => self.fstat(packet.b, unsafe { &mut *(packet.c as *mut Stat) }),
SYS_FSYNC => self.fsync(packet.b),
SYS_FTRUNCATE => self.ftruncate(packet.b, packet.c),
SYS_CLOSE => self.close(packet.b),
_ => Err(Error::new(ENOSYS))
});
}
/* Scheme operations */
#[allow(unused_variables)]
fn open(&mut self, path: &str, flags: usize, mode: usize) -> Result<usize> {
Err(Error::new(ENOENT))
}
#[allow(unused_variables)]
fn mkdir(&mut self, path: &str, mode: usize) -> Result<usize> {
Err(Error::new(ENOENT))
}
#[allow(unused_variables)]
fn rmdir(&mut self, path: &str) -> Result<usize> {
Err(Error::new(ENOENT))
}
#[allow(unused_variables)]
fn stat(&mut self, path: &str, stat: &mut Stat) -> Result<usize> {
Err(Error::new(ENOENT))
}
#[allow(unused_variables)]
fn unlink(&mut self, path: &str) -> Result<usize> {
Err(Error::new(ENOENT))
}
/* Resource operations */
#[allow(unused_variables)]
fn read(&mut self, id: usize, buf: &mut [u8]) -> Result<usize> {
Err(Error::new(EBADF))
}
#[allow(unused_variables)]
fn write(&mut self, id: usize, buf: &[u8]) -> Result<usize> {
Err(Error::new(EBADF))
}
#[allow(unused_variables)]
fn seek(&mut self, id: usize, pos: usize, whence: usize) -> Result<usize> {
Err(Error::new(EBADF))
}
#[allow(unused_variables)]
fn fpath(&self, id: usize, buf: &mut [u8]) -> Result<usize> {
Err(Error::new(EBADF))
}
#[allow(unused_variables)]
fn fstat(&self, id: usize, stat: &mut Stat) -> Result<usize> {
Err(Error::new(EBADF))
}
#[allow(unused_variables)]
fn fsync(&mut self, id: usize) -> Result<usize> {
Err(Error::new(EBADF))
}
#[allow(unused_variables)]
fn ftruncate(&mut self, id: usize, len: usize) -> Result<usize> {
Err(Error::new(EBADF))
}
#[allow(unused_variables)]
fn close(&mut self, id: usize) -> Result<usize> {
Err(Error::new(EBADF))
}
}<|fim▁end|> | }
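// A minimal sketch of an implementor (the struct name and payload are
// hypothetical, not from the original crate): a read-only scheme that serves
// one in-memory buffer and falls back on the trait's default ENOENT/EBADF
// answers for everything else. Assumes the Scheme, Result and Error items
// above are in scope.
struct HelloScheme;

impl Scheme for HelloScheme {
    fn open(&mut self, _path: &str, _flags: usize, _mode: usize) -> Result<usize> {
        Ok(0) // single resource, so every open yields id 0
    }

    fn read(&mut self, _id: usize, buf: &mut [u8]) -> Result<usize> {
        let data = b"hello";
        let n = core::cmp::min(data.len(), buf.len());
        buf[..n].copy_from_slice(&data[..n]);
        Ok(n)
    }

    fn close(&mut self, _id: usize) -> Result<usize> {
        Ok(0)
    }
}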
}
}
|
<|file_name|>test_readline.py<|end_file_name|><|fim▁begin|># vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2017 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for qutebrowser.misc.readline."""
import re
import inspect
from PyQt5.QtWidgets import QLineEdit, QApplication
import pytest<|fim▁hole|>
# Some functions aren't 100% readline compatible:
# https://github.com/qutebrowser/qutebrowser/issues/678
# Those are marked with fixme and have another value marked with '# wrong'
# which marks the current behavior.
fixme = pytest.mark.xfail(reason='readline compatibility - see #678')
class LineEdit(QLineEdit):
"""QLineEdit with some methods to make testing easier."""
def _get_index(self, haystack, needle):
"""Get the index of a char (needle) in a string (haystack).
Return:
The position where needle was found, or None if it wasn't found.
"""
try:
return haystack.index(needle)
except ValueError:
return None
def set_aug_text(self, text):
"""Set a text with </> markers for selected text and | as cursor."""
real_text = re.sub('[<>|]', '', text)
self.setText(real_text)
cursor_pos = self._get_index(text, '|')
sel_start_pos = self._get_index(text, '<')
sel_end_pos = self._get_index(text, '>')
if sel_start_pos is not None and sel_end_pos is None:
raise ValueError("< given without >!")
if sel_start_pos is None and sel_end_pos is not None:
raise ValueError("> given without <!")
if cursor_pos is not None:
if sel_start_pos is not None or sel_end_pos is not None:
raise ValueError("Can't mix | and </>!")
self.setCursorPosition(cursor_pos)
elif sel_start_pos is not None:
if sel_start_pos > sel_end_pos:
raise ValueError("< given after >!")
sel_len = sel_end_pos - sel_start_pos - 1
self.setSelection(sel_start_pos, sel_len)
def aug_text(self):
"""Get a text with </> markers for selected text and | as cursor."""
text = self.text()
chars = list(text)
cur_pos = self.cursorPosition()
assert cur_pos >= 0
chars.insert(cur_pos, '|')
if self.hasSelectedText():
selected_text = self.selectedText()
sel_start = self.selectionStart()
sel_end = sel_start + len(selected_text)
            assert sel_start >= 0  # a selection may legitimately start at 0
assert sel_end > 0
assert sel_end > sel_start
assert cur_pos == sel_end
assert text[sel_start:sel_end] == selected_text
chars.insert(sel_start, '<')
chars.insert(sel_end + 1, '>')
return ''.join(chars)
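# Illustrative sketch (not part of the original suite; requires a running
# QApplication): how the marker notation maps onto cursor/selection state.
def _demo_aug_notation():
    le = LineEdit()
    le.set_aug_text('foo|bar')   # '|' marks the cursor position
    assert le.cursorPosition() == 3
    le.set_aug_text('f<oo>bar')  # '<' and '>' delimit the selection
    assert le.selectedText() == 'oo'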
def _validate_deletion(lineedit, bridge, method, text, deleted, rest):
"""Run and validate a text deletion method on the ReadLine bridge.
Args:
lineedit: The LineEdit instance.
bridge: The ReadlineBridge instance.
method: Reference to the method on the bridge to test.
text: The starting 'augmented' text (see LineEdit.set_aug_text)
deleted: The text that should be deleted when the method is invoked.
rest: The augmented text that should remain after method is invoked.
"""
lineedit.set_aug_text(text)
method()
assert bridge._deleted[lineedit] == deleted
assert lineedit.aug_text() == rest
lineedit.clear()
bridge.rl_yank()
assert lineedit.aug_text() == deleted + '|'
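# For orientation, a toy model of the kill/yank behaviour validated below
# (assumed semantics, not qutebrowser's actual implementation): each kill
# remembers the last deleted text per widget, and yank re-inserts it.
class _ToyKillRing:
    """Single-slot kill buffer keyed by widget (illustrative only)."""
    def __init__(self):
        self._deleted = {}  # widget -> last killed text
    def kill(self, widget, text):
        self._deleted[widget] = text  # overwritten on every kill
    def yank(self, widget):
        return self._deleted.get(widget, '')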
@pytest.fixture
def lineedit(qtbot, monkeypatch):
"""Fixture providing a LineEdit."""
le = LineEdit()
qtbot.add_widget(le)
monkeypatch.setattr(QApplication.instance(), 'focusWidget', lambda: le)
return le
@pytest.fixture
def bridge():
"""Fixture providing a ReadlineBridge."""
return readline.ReadlineBridge()
def test_none(bridge, qtbot):
"""Call each rl_* method with a None focusWidget."""
assert QApplication.instance().focusWidget() is None
for name, method in inspect.getmembers(bridge, inspect.ismethod):
if name.startswith('rl_'):
method()
@pytest.mark.parametrize('text, expected', [('f<oo>bar', 'fo|obar'),
('|foobar', '|foobar')])
def test_rl_backward_char(text, expected, lineedit, bridge):
"""Test rl_backward_char."""
lineedit.set_aug_text(text)
bridge.rl_backward_char()
assert lineedit.aug_text() == expected
@pytest.mark.parametrize('text, expected', [('f<oo>bar', 'foob|ar'),
('foobar|', 'foobar|')])
def test_rl_forward_char(text, expected, lineedit, bridge):
"""Test rl_forward_char."""
lineedit.set_aug_text(text)
bridge.rl_forward_char()
assert lineedit.aug_text() == expected
@pytest.mark.parametrize('text, expected', [('one <tw>o', 'one |two'),
('<one >two', '|one two'),
('|one two', '|one two')])
def test_rl_backward_word(text, expected, lineedit, bridge):
"""Test rl_backward_word."""
lineedit.set_aug_text(text)
bridge.rl_backward_word()
assert lineedit.aug_text() == expected
@pytest.mark.parametrize('text, expected', [
pytest.param('<o>ne two', 'one| two', marks=fixme),
('<o>ne two', 'one |two'), # wrong
pytest.param('<one> two', 'one two|', marks=fixme),
('<one> two', 'one |two'), # wrong
('one t<wo>', 'one two|')
])
def test_rl_forward_word(text, expected, lineedit, bridge):
"""Test rl_forward_word."""
lineedit.set_aug_text(text)
bridge.rl_forward_word()
assert lineedit.aug_text() == expected
def test_rl_beginning_of_line(lineedit, bridge):
"""Test rl_beginning_of_line."""
lineedit.set_aug_text('f<oo>bar')
bridge.rl_beginning_of_line()
assert lineedit.aug_text() == '|foobar'
def test_rl_end_of_line(lineedit, bridge):
"""Test rl_end_of_line."""
lineedit.set_aug_text('f<oo>bar')
bridge.rl_end_of_line()
assert lineedit.aug_text() == 'foobar|'
@pytest.mark.parametrize('text, expected', [('foo|bar', 'foo|ar'),
('foobar|', 'foobar|'),
('|foobar', '|oobar'),
('f<oo>bar', 'f|bar')])
def test_rl_delete_char(text, expected, lineedit, bridge):
"""Test rl_delete_char."""
lineedit.set_aug_text(text)
bridge.rl_delete_char()
assert lineedit.aug_text() == expected
@pytest.mark.parametrize('text, expected', [('foo|bar', 'fo|bar'),
('foobar|', 'fooba|'),
('|foobar', '|foobar'),
('f<oo>bar', 'f|bar')])
def test_rl_backward_delete_char(text, expected, lineedit, bridge):
"""Test rl_backward_delete_char."""
lineedit.set_aug_text(text)
bridge.rl_backward_delete_char()
assert lineedit.aug_text() == expected
@pytest.mark.parametrize('text, deleted, rest', [
('delete this| test', 'delete this', '| test'),
pytest.param('delete <this> test', 'delete this', '| test', marks=fixme),
('delete <this> test', 'delete ', '|this test'), # wrong
pytest.param('f<oo>bar', 'foo', '|bar', marks=fixme),
('f<oo>bar', 'f', '|oobar'), # wrong
])
def test_rl_unix_line_discard(lineedit, bridge, text, deleted, rest):
"""Delete from the cursor to the beginning of the line and yank back."""
_validate_deletion(lineedit, bridge, bridge.rl_unix_line_discard, text,
deleted, rest)
@pytest.mark.parametrize('text, deleted, rest', [
('test |delete this', 'delete this', 'test |'),
pytest.param('<test >delete this', 'test delete this', 'test |',
marks=fixme),
('<test >delete this', 'test delete this', '|'), # wrong
])
def test_rl_kill_line(lineedit, bridge, text, deleted, rest):
"""Delete from the cursor to the end of line and yank back."""
_validate_deletion(lineedit, bridge, bridge.rl_kill_line, text, deleted,
rest)
@pytest.mark.parametrize('text, deleted, rest', [
('test delete|foobar', 'delete', 'test |foobar'),
('test delete |foobar', 'delete ', 'test |foobar'),
('open -t github.com/foo/bar |', 'github.com/foo/bar ', 'open -t |'),
('open -t |github.com/foo/bar', '-t ', 'open |github.com/foo/bar'),
pytest.param('test del<ete>foobar', 'delete', 'test |foobar',
marks=fixme),
('test del<ete >foobar', 'del', 'test |ete foobar'), # wrong
])
def test_rl_unix_word_rubout(lineedit, bridge, text, deleted, rest):
"""Delete to word beginning and see if it comes back with yank."""
_validate_deletion(lineedit, bridge, bridge.rl_unix_word_rubout, text,
deleted, rest)
@pytest.mark.parametrize('text, deleted, rest', [
('test delete|foobar', 'delete', 'test |foobar'),
('test delete |foobar', 'delete ', 'test |foobar'),
('open -t github.com/foo/bar |', 'bar ', 'open -t github.com/foo/|'),
('open -t |github.com/foo/bar', '-t ', 'open |github.com/foo/bar'),
('open foo/bar.baz|', 'bar.baz', 'open foo/|'),
])
def test_rl_unix_filename_rubout(lineedit, bridge, text, deleted, rest):
"""Delete filename segment and see if it comes back with yank."""
_validate_deletion(lineedit, bridge, bridge.rl_unix_filename_rubout, text,
deleted, rest)
@pytest.mark.parametrize('text, deleted, rest', [
pytest.param('test foobar| delete', ' delete', 'test foobar|',
marks=fixme),
('test foobar| delete', ' ', 'test foobar|delete'), # wrong
pytest.param('test foo|delete bar', 'delete', 'test foo| bar',
marks=fixme),
('test foo|delete bar', 'delete ', 'test foo|bar'), # wrong
pytest.param('test foo<bar> delete', ' delete', 'test foobar|',
marks=fixme),
('test foo<bar>delete', 'bardelete', 'test foo|'), # wrong
])
def test_rl_kill_word(lineedit, bridge, text, deleted, rest):
"""Delete to word end and see if it comes back with yank."""
_validate_deletion(lineedit, bridge, bridge.rl_kill_word, text, deleted,
rest)
@pytest.mark.parametrize('text, deleted, rest', [
('test delete|foobar', 'delete', 'test |foobar'),
('test delete |foobar', 'delete ', 'test |foobar'),
('open -t github.com/foo/bar |', 'bar ', 'open -t github.com/foo/|'),
('open -t |github.com/foo/bar', 't ', 'open -|github.com/foo/bar'),
pytest.param('test del<ete>foobar', 'delete', 'test |foobar', marks=fixme),
('test del<ete >foobar', 'del', 'test |ete foobar'), # wrong
('open foo/bar.baz|', 'baz', 'open foo/bar.|'),
])
def test_rl_backward_kill_word(lineedit, bridge, text, deleted, rest):
"""Delete to word beginning and see if it comes back with yank."""
_validate_deletion(lineedit, bridge, bridge.rl_backward_kill_word, text,
deleted, rest)
def test_rl_yank_no_text(lineedit, bridge):
"""Test yank without having deleted anything."""
lineedit.clear()
bridge.rl_yank()
assert lineedit.aug_text() == '|'<|fim▁end|> |
from qutebrowser.misc import readline |
<|file_name|>v1.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* jshint maxlen: false */
'use strict';
var apirequest = require('../../lib/apirequest');
var createAPIRequest = apirequest.createAPIRequest;
/**
* Cloud Storage API
*
* @classdesc Lets you store and retrieve potentially-large, immutable data objects.
* @namespace storage
* @version v1
* @variation v1
* @this Storage
* @param {object=} options Options for Storage
*/
function Storage(options) {
var self = this;
this._options = options || {};
this.bucketAccessControls = {
/**
* storage.bucketAccessControls.delete
*
* @desc Permanently deletes the ACL entry for the specified entity on the specified bucket.
*
* @alias storage.bucketAccessControls.delete
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/acl/' + params.entity,
method: 'DELETE'
},
params: params,
requiredParams: ['bucket', 'entity'],
pathParams: ['bucket', 'entity'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.bucketAccessControls.get
*
* @desc Returns the ACL entry for the specified entity on the specified bucket.
*
* @alias storage.bucketAccessControls.get
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/acl/' + params.entity,
method: 'GET'
},
params: params,
requiredParams: ['bucket', 'entity'],
pathParams: ['bucket', 'entity'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.bucketAccessControls.insert
*
* @desc Creates a new ACL entry on the specified bucket.
*
* @alias storage.bucketAccessControls.insert
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/acl',
method: 'POST'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.bucketAccessControls.list
*
* @desc Retrieves ACL entries on the specified bucket.
*
* @alias storage.bucketAccessControls.list
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/acl',
method: 'GET'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.bucketAccessControls.patch
*
* @desc Updates an ACL entry on the specified bucket. This method supports patch semantics.
*
* @alias storage.bucketAccessControls.patch
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/acl/' + params.entity,
method: 'PATCH'
},
params: params,
requiredParams: ['bucket', 'entity'],
pathParams: ['bucket', 'entity'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.bucketAccessControls.update
*
* @desc Updates an ACL entry on the specified bucket.
*
* @alias storage.bucketAccessControls.update
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/acl/' + params.entity,
method: 'PUT'
},
params: params,
requiredParams: ['bucket', 'entity'],
pathParams: ['bucket', 'entity'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.buckets = {
/**
* storage.buckets.delete
*
* @desc Permanently deletes an empty bucket.
*
* @alias storage.buckets.delete
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string=} params.ifMetagenerationMatch - If set, only deletes the bucket if its metageneration matches this value.
* @param {string=} params.ifMetagenerationNotMatch - If set, only deletes the bucket if its metageneration does not match this value.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket,
method: 'DELETE'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.buckets.get
*
* @desc Returns metadata for the specified bucket.
*
* @alias storage.buckets.get
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string=} params.ifMetagenerationMatch - Makes the return of the bucket metadata conditional on whether the bucket's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the return of the bucket metadata conditional on whether the bucket's current metageneration does not match the given value.
* @param {string=} params.projection - Set of properties to return. Defaults to noAcl.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket,
method: 'GET'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.buckets.insert
*
* @desc Creates a new bucket.
*
* @alias storage.buckets.insert
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string=} params.predefinedAcl - Apply a predefined set of access controls to this bucket.
* @param {string} params.project - A valid API project identifier.
* @param {string=} params.projection - Set of properties to return. Defaults to noAcl, unless the bucket resource specifies acl or defaultObjectAcl properties, when it defaults to full.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b',
method: 'POST'
},
params: params,
requiredParams: ['project'],<|fim▁hole|> },
/**
* storage.buckets.list
*
* @desc Retrieves a list of buckets for a given project.
*
* @alias storage.buckets.list
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {integer=} params.maxResults - Maximum number of buckets to return.
* @param {string=} params.pageToken - A previously-returned page token representing part of the larger set of results to view.
* @param {string} params.project - A valid API project identifier.
* @param {string=} params.projection - Set of properties to return. Defaults to noAcl.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b',
method: 'GET'
},
params: params,
requiredParams: ['project'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.buckets.patch
*
* @desc Updates a bucket. This method supports patch semantics.
*
* @alias storage.buckets.patch
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string=} params.ifMetagenerationMatch - Makes the return of the bucket metadata conditional on whether the bucket's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the return of the bucket metadata conditional on whether the bucket's current metageneration does not match the given value.
* @param {string=} params.predefinedAcl - Apply a predefined set of access controls to this bucket.
* @param {string=} params.projection - Set of properties to return. Defaults to full.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket,
method: 'PATCH'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.buckets.update
*
* @desc Updates a bucket.
*
* @alias storage.buckets.update
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string=} params.ifMetagenerationMatch - Makes the return of the bucket metadata conditional on whether the bucket's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the return of the bucket metadata conditional on whether the bucket's current metageneration does not match the given value.
* @param {string=} params.predefinedAcl - Apply a predefined set of access controls to this bucket.
* @param {string=} params.projection - Set of properties to return. Defaults to full.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket,
method: 'PUT'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.channels = {
/**
* storage.channels.stop
*
* @desc Stop watching resources through this channel
*
* @alias storage.channels.stop
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
stop: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/channels/stop',
method: 'POST'
},
params: params,
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.defaultObjectAccessControls = {
/**
* storage.defaultObjectAccessControls.delete
*
* @desc Permanently deletes the default object ACL entry for the specified entity on the specified bucket.
*
* @alias storage.defaultObjectAccessControls.delete
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/defaultObjectAcl/' + params.entity,
method: 'DELETE'
},
params: params,
requiredParams: ['bucket', 'entity'],
pathParams: ['bucket', 'entity'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.defaultObjectAccessControls.get
*
* @desc Returns the default object ACL entry for the specified entity on the specified bucket.
*
* @alias storage.defaultObjectAccessControls.get
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/defaultObjectAcl/' + params.entity,
method: 'GET'
},
params: params,
requiredParams: ['bucket', 'entity'],
pathParams: ['bucket', 'entity'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.defaultObjectAccessControls.insert
*
* @desc Creates a new default object ACL entry on the specified bucket.
*
* @alias storage.defaultObjectAccessControls.insert
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/defaultObjectAcl',
method: 'POST'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.defaultObjectAccessControls.list
*
* @desc Retrieves default object ACL entries on the specified bucket.
*
* @alias storage.defaultObjectAccessControls.list
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string=} params.ifMetagenerationMatch - If present, only return default ACL listing if the bucket's current metageneration matches this value.
* @param {string=} params.ifMetagenerationNotMatch - If present, only return default ACL listing if the bucket's current metageneration does not match the given value.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/defaultObjectAcl',
method: 'GET'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.defaultObjectAccessControls.patch
*
* @desc Updates a default object ACL entry on the specified bucket. This method supports patch semantics.
*
* @alias storage.defaultObjectAccessControls.patch
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/defaultObjectAcl/' + params.entity,
method: 'PATCH'
},
params: params,
requiredParams: ['bucket', 'entity'],
pathParams: ['bucket', 'entity'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.defaultObjectAccessControls.update
*
* @desc Updates a default object ACL entry on the specified bucket.
*
* @alias storage.defaultObjectAccessControls.update
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/defaultObjectAcl/' + params.entity,
method: 'PUT'
},
params: params,
requiredParams: ['bucket', 'entity'],
pathParams: ['bucket', 'entity'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.objectAccessControls = {
/**
* storage.objectAccessControls.delete
*
* @desc Permanently deletes the ACL entry for the specified entity on the specified object.
*
* @alias storage.objectAccessControls.delete
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string} params.object - Name of the object.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object + '/acl/' + params.entity,
method: 'DELETE'
},
params: params,
requiredParams: ['bucket', 'object', 'entity'],
pathParams: ['bucket', 'entity', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objectAccessControls.get
*
* @desc Returns the ACL entry for the specified entity on the specified object.
*
* @alias storage.objectAccessControls.get
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string} params.object - Name of the object.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object + '/acl/' + params.entity,
method: 'GET'
},
params: params,
requiredParams: ['bucket', 'object', 'entity'],
pathParams: ['bucket', 'entity', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objectAccessControls.insert
*
* @desc Creates a new ACL entry on the specified object.
*
* @alias storage.objectAccessControls.insert
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string} params.object - Name of the object.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object + '/acl',
method: 'POST'
},
params: params,
requiredParams: ['bucket', 'object'],
pathParams: ['bucket', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objectAccessControls.list
*
* @desc Retrieves ACL entries on the specified object.
*
* @alias storage.objectAccessControls.list
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string} params.object - Name of the object.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object + '/acl',
method: 'GET'
},
params: params,
requiredParams: ['bucket', 'object'],
pathParams: ['bucket', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objectAccessControls.patch
*
* @desc Updates an ACL entry on the specified object. This method supports patch semantics.
*
* @alias storage.objectAccessControls.patch
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string} params.object - Name of the object.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object + '/acl/' + params.entity,
method: 'PATCH'
},
params: params,
requiredParams: ['bucket', 'object', 'entity'],
pathParams: ['bucket', 'entity', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objectAccessControls.update
*
* @desc Updates an ACL entry on the specified object.
*
* @alias storage.objectAccessControls.update
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of a bucket.
* @param {string} params.entity - The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string} params.object - Name of the object.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object + '/acl/' + params.entity,
method: 'PUT'
},
params: params,
requiredParams: ['bucket', 'object', 'entity'],
pathParams: ['bucket', 'entity', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.objects = {
/**
* storage.objects.compose
*
* @desc Concatenates a list of existing objects into a new object in the same bucket.
*
* @alias storage.objects.compose
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.destinationBucket - Name of the bucket in which to store the new object.
* @param {string} params.destinationObject - Name of the new object.
* @param {string=} params.destinationPredefinedAcl - Apply a predefined set of access controls to the destination object.
* @param {string=} params.ifGenerationMatch - Makes the operation conditional on whether the object's current generation matches the given value.
* @param {string=} params.ifMetagenerationMatch - Makes the operation conditional on whether the object's current metageneration matches the given value.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
compose: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.destinationBucket + '/o/' + params.destinationObject + '/compose',
method: 'POST'
},
params: params,
requiredParams: ['destinationBucket', 'destinationObject'],
pathParams: ['destinationBucket', 'destinationObject'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objects.copy
*
* @desc Copies an object to a specified location. Optionally overrides metadata.
*
* @alias storage.objects.copy
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.destinationBucket - Name of the bucket in which to store the new object. Overrides the provided object metadata's bucket value, if any.
* @param {string} params.destinationObject - Name of the new object. Required when the object metadata is not otherwise provided. Overrides the object metadata's name value, if any.
* @param {string=} params.destinationPredefinedAcl - Apply a predefined set of access controls to the destination object.
* @param {string=} params.ifGenerationMatch - Makes the operation conditional on whether the destination object's current generation matches the given value.
* @param {string=} params.ifGenerationNotMatch - Makes the operation conditional on whether the destination object's current generation does not match the given value.
* @param {string=} params.ifMetagenerationMatch - Makes the operation conditional on whether the destination object's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the operation conditional on whether the destination object's current metageneration does not match the given value.
* @param {string=} params.ifSourceGenerationMatch - Makes the operation conditional on whether the source object's generation matches the given value.
* @param {string=} params.ifSourceGenerationNotMatch - Makes the operation conditional on whether the source object's generation does not match the given value.
* @param {string=} params.ifSourceMetagenerationMatch - Makes the operation conditional on whether the source object's current metageneration matches the given value.
* @param {string=} params.ifSourceMetagenerationNotMatch - Makes the operation conditional on whether the source object's current metageneration does not match the given value.
* @param {string=} params.projection - Set of properties to return. Defaults to noAcl, unless the object resource specifies the acl property, when it defaults to full.
* @param {string} params.sourceBucket - Name of the bucket in which to find the source object.
* @param {string=} params.sourceGeneration - If present, selects a specific revision of the source object (as opposed to the latest version, the default).
* @param {string} params.sourceObject - Name of the source object.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
copy: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.sourceBucket + '/o/' + params.sourceObject + '/copyTo/b/' + params.destinationBucket + '/o/' + params.destinationObject,
method: 'POST'
},
params: params,
requiredParams: ['sourceBucket', 'sourceObject', 'destinationBucket', 'destinationObject'],
pathParams: ['destinationBucket', 'destinationObject', 'sourceBucket', 'sourceObject'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objects.delete
*
* @desc Deletes an object and its metadata. Deletions are permanent if versioning is not enabled for the bucket, or if the generation parameter is used.
*
* @alias storage.objects.delete
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of the bucket in which the object resides.
* @param {string=} params.generation - If present, permanently deletes a specific revision of this object (as opposed to the latest version, the default).
* @param {string=} params.ifGenerationMatch - Makes the operation conditional on whether the object's current generation matches the given value.
* @param {string=} params.ifGenerationNotMatch - Makes the operation conditional on whether the object's current generation does not match the given value.
* @param {string=} params.ifMetagenerationMatch - Makes the operation conditional on whether the object's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the operation conditional on whether the object's current metageneration does not match the given value.
* @param {string} params.object - Name of the object.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object,
method: 'DELETE'
},
params: params,
requiredParams: ['bucket', 'object'],
pathParams: ['bucket', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objects.get
*
* @desc Retrieves objects or their metadata.
*
* @alias storage.objects.get
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of the bucket in which the object resides.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string=} params.ifGenerationMatch - Makes the operation conditional on whether the object's generation matches the given value.
* @param {string=} params.ifGenerationNotMatch - Makes the operation conditional on whether the object's generation does not match the given value.
* @param {string=} params.ifMetagenerationMatch - Makes the operation conditional on whether the object's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the operation conditional on whether the object's current metageneration does not match the given value.
* @param {string} params.object - Name of the object.
* @param {string=} params.projection - Set of properties to return. Defaults to noAcl.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object,
method: 'GET'
},
params: params,
requiredParams: ['bucket', 'object'],
pathParams: ['bucket', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objects.insert
*
* @desc Stores a new object and metadata.
*
* @alias storage.objects.insert
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of the bucket in which to store the new object. Overrides the provided object metadata's bucket value, if any.
* @param {string=} params.contentEncoding - If set, sets the contentEncoding property of the final object to this value. Setting this parameter is equivalent to setting the contentEncoding metadata property. This can be useful when uploading an object with uploadType=media to indicate the encoding of the content being uploaded.
* @param {string=} params.ifGenerationMatch - Makes the operation conditional on whether the object's current generation matches the given value.
* @param {string=} params.ifGenerationNotMatch - Makes the operation conditional on whether the object's current generation does not match the given value.
* @param {string=} params.ifMetagenerationMatch - Makes the operation conditional on whether the object's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the operation conditional on whether the object's current metageneration does not match the given value.
* @param {string=} params.name - Name of the object. Required when the object metadata is not otherwise provided. Overrides the object metadata's name value, if any.
* @param {string=} params.predefinedAcl - Apply a predefined set of access controls to this object.
* @param {string=} params.projection - Set of properties to return. Defaults to noAcl, unless the object resource specifies the acl property, when it defaults to full.
* @param {object} params.resource - Media resource metadata
* @param {object} params.media - Media object
* @param {string} params.media.mimeType - Media mime-type
* @param {string|object} params.media.body - Media body contents
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o',
method: 'POST'
},
params: params,
mediaUrl: 'https://www.googleapis.com/upload/storage/v1/b/' + params.bucket + '/o',
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
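    // Upload sketch (illustrative values only): object metadata goes in
    // `resource`, the bytes themselves in `media`:
    //
    //   storage.objects.insert({
    //     bucket: 'my-bucket',
    //     name: 'hello.txt',
    //     media: { mimeType: 'text/plain', body: 'hello world' }
    //   }, function(err, obj) { /* ... */ });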
/**
* storage.objects.list
*
* @desc Retrieves a list of objects matching the criteria.
*
* @alias storage.objects.list
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of the bucket in which to look for objects.
* @param {string=} params.delimiter - Returns results in a directory-like mode. items will contain only objects whose names, aside from the prefix, do not contain delimiter. Objects whose names, aside from the prefix, contain delimiter will have their name, truncated after the delimiter, returned in prefixes. Duplicate prefixes are omitted.
* @param {integer=} params.maxResults - Maximum number of items plus prefixes to return. As duplicate prefixes are omitted, fewer total results may be returned than requested.
* @param {string=} params.pageToken - A previously-returned page token representing part of the larger set of results to view.
* @param {string=} params.prefix - Filter results to objects whose names begin with this prefix.
* @param {string=} params.projection - Set of properties to return. Defaults to noAcl.
* @param {boolean=} params.versions - If true, lists all versions of a file as distinct results.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o',
method: 'GET'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objects.patch
*
* @desc Updates an object's metadata. This method supports patch semantics.
*
* @alias storage.objects.patch
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of the bucket in which the object resides.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string=} params.ifGenerationMatch - Makes the operation conditional on whether the object's current generation matches the given value.
* @param {string=} params.ifGenerationNotMatch - Makes the operation conditional on whether the object's current generation does not match the given value.
* @param {string=} params.ifMetagenerationMatch - Makes the operation conditional on whether the object's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the operation conditional on whether the object's current metageneration does not match the given value.
* @param {string} params.object - Name of the object.
* @param {string=} params.predefinedAcl - Apply a predefined set of access controls to this object.
* @param {string=} params.projection - Set of properties to return. Defaults to full.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object,
method: 'PATCH'
},
params: params,
requiredParams: ['bucket', 'object'],
pathParams: ['bucket', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objects.update
*
* @desc Updates an object's metadata.
*
* @alias storage.objects.update
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of the bucket in which the object resides.
* @param {string=} params.generation - If present, selects a specific revision of this object (as opposed to the latest version, the default).
* @param {string=} params.ifGenerationMatch - Makes the operation conditional on whether the object's current generation matches the given value.
* @param {string=} params.ifGenerationNotMatch - Makes the operation conditional on whether the object's current generation does not match the given value.
* @param {string=} params.ifMetagenerationMatch - Makes the operation conditional on whether the object's current metageneration matches the given value.
* @param {string=} params.ifMetagenerationNotMatch - Makes the operation conditional on whether the object's current metageneration does not match the given value.
* @param {string} params.object - Name of the object.
* @param {string=} params.predefinedAcl - Apply a predefined set of access controls to this object.
* @param {string=} params.projection - Set of properties to return. Defaults to full.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/' + params.object,
method: 'PUT'
},
params: params,
requiredParams: ['bucket', 'object'],
pathParams: ['bucket', 'object'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* storage.objects.watchAll
*
* @desc Watch for changes on all objects in a bucket.
*
* @alias storage.objects.watchAll
* @memberOf! storage(v1)
*
* @param {object} params - Parameters for request
* @param {string} params.bucket - Name of the bucket in which to look for objects.
* @param {string=} params.delimiter - Returns results in a directory-like mode. items will contain only objects whose names, aside from the prefix, do not contain delimiter. Objects whose names, aside from the prefix, contain delimiter will have their name, truncated after the delimiter, returned in prefixes. Duplicate prefixes are omitted.
* @param {integer=} params.maxResults - Maximum number of items plus prefixes to return. As duplicate prefixes are omitted, fewer total results may be returned than requested.
* @param {string=} params.pageToken - A previously-returned page token representing part of the larger set of results to view.
* @param {string=} params.prefix - Filter results to objects whose names begin with this prefix.
* @param {string=} params.projection - Set of properties to return. Defaults to noAcl.
* @param {boolean=} params.versions - If true, lists all versions of a file as distinct results.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
watchAll: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/storage/v1/b/' + params.bucket + '/o/watch',
method: 'POST'
},
params: params,
requiredParams: ['bucket'],
pathParams: ['bucket'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
}
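// Usage sketch (illustrative; the project name is hypothetical and auth setup
// is omitted — callers normally reach this client through the googleapis
// entry point rather than instantiating Storage directly):
//
//   var storage = new Storage();
//   storage.buckets.list({ project: 'my-project' }, function(err, res) {
//     if (err) { return console.error(err); }
//     console.log(res.items);
//   });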
/**
* Exports Storage object
* @type Storage
*/
module.exports = Storage;<|fim▁end|> | context: self
};
return createAPIRequest(parameters, callback); |
<|file_name|>details.controller.ts<|end_file_name|><|fim▁begin|>import {IDetailsService} from '../services/details.service';
class DetailsController implements ng.IComponentController {
private detailsService: IDetailsService;
private detailsData: any;
private previousState: any;
    constructor($stateParams: any, detailsService: IDetailsService) {
        this.detailsService = detailsService;
        console.log('details controller');
console.log("$stateParams.id=", $stateParams.id);
console.log("this.previousState=", this.previousState);
let id = $stateParams.id;
detailsService.searchByIMDbID(id, 'full').then((result) => {
console.log("detailsData = ", result.data);
this.detailsData = result.data;
});
}
}
export default DetailsController;<|fim▁hole|><|fim▁end|> | DetailsController.$inject = ['$stateParams', 'detailsService']; |
<|file_name|>test_enum.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from tests.utils import ConverterTestCase
class EnumTestCase(ConverterTestCase):
def test_empty(self):
self.assertGeneratedOutput(
"""
enum Bar {
};
""",
"""
from enum import Enum
class Bar(Enum):
pass
"""
)
def test_without_values(self):
self.assertGeneratedOutput(
"""
enum Bar {
TOP,
RIGHT,
BOTTOM,
LEFT
};
""",
"""
from enum import Enum
class Bar(Enum):
TOP = 0
RIGHT = 1
BOTTOM = 2
LEFT = 3
"""
)
def test_values(self):
self.assertGeneratedOutput(
"""
enum Bar {
TOP = 37,
RIGHT = 42,
BOTTOM = 55,
LEFT = 69
};
""",
"""
from enum import Enum
class Bar(Enum):
TOP = 37
RIGHT = 42
BOTTOM = 55
LEFT = 69
"""
)
def test_initial_values(self):
self.assertGeneratedOutput(
"""
enum Bar {
TOP = 37,
RIGHT,
BOTTOM,
LEFT
};
""",
"""
from enum import Enum
class Bar(Enum):
TOP = 37
RIGHT = 38
BOTTOM = 39
LEFT = 40
"""
)
def test_multiple_initial_values(self):
self.assertGeneratedOutput(
"""
enum Bar {
TOP = 37,
RIGHT,
BOTTOM = 42,
LEFT
};
""",
"""
from enum import Enum
class Bar(Enum):
TOP = 37
RIGHT = 38
BOTTOM = 42
LEFT = 43
"""
)
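    def _implicit_value_model(self, members):
        # Reference model (assumed, mirroring C semantics) of the rule the
        # tests above rely on: an uninitialised member gets the previous
        # value plus one, counting from zero. Illustrative helper only, e.g.
        # [('TOP', 37), ('RIGHT', None), ('BOTTOM', 42), ('LEFT', None)]
        # -> {'TOP': 37, 'RIGHT': 38, 'BOTTOM': 42, 'LEFT': 43}.
        values, nxt = {}, 0
        for name, explicit in members:  # explicit is an int or None
            if explicit is not None:
                nxt = explicit
            values[name] = nxt
            nxt += 1
        return values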
def test_expressions_for_values(self):
self.assertGeneratedOutput(
"""
enum Bar {
TOP = 1 << 0,
RIGHT = 1 << 1,
BOTTOM = 1 << 2,
LEFT = 1 << 3
};
""",
"""
from enum import Enum
class Bar(Enum):
TOP = 1
RIGHT = 2
BOTTOM = 4
LEFT = 8
"""
)
def test_local_enum_reference(self):
self.assertGeneratedOutput(
"""
enum Bar {<|fim▁hole|> };
void test() {
Bar position = TOP;
}
""",
"""
from enum import Enum
class Bar(Enum):
TOP = 0
RIGHT = 1
BOTTOM = 2
LEFT = 3
def test():
position = Bar.TOP
"""
)<|fim▁end|> | TOP,
RIGHT,
BOTTOM,
LEFT |
<|file_name|>names.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import re
from lxml.etree import XPath as X
from calibre.utils.filenames import ascii_text
DOCUMENT = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/officeDocument'
DOCPROPS = 'http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties'
APPPROPS = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/extended-properties'
STYLES = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/styles'
NUMBERING = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/numbering'
FONTS = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/fontTable'
IMAGES = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/image'
LINKS = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/hyperlink'
FOOTNOTES = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/footnotes'
ENDNOTES = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/endnotes'
THEMES = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/theme'
namespaces = {<|fim▁hole|> 've': 'http://schemas.openxmlformats.org/markup-compatibility/2006',
# Text Content
'w': 'http://schemas.openxmlformats.org/wordprocessingml/2006/main',
'w10': 'urn:schemas-microsoft-com:office:word',
'wne': 'http://schemas.microsoft.com/office/word/2006/wordml',
'xml': 'http://www.w3.org/XML/1998/namespace',
# Drawing
'a': 'http://schemas.openxmlformats.org/drawingml/2006/main',
'm': 'http://schemas.openxmlformats.org/officeDocument/2006/math',
'mv': 'urn:schemas-microsoft-com:mac:vml',
'pic': 'http://schemas.openxmlformats.org/drawingml/2006/picture',
'v': 'urn:schemas-microsoft-com:vml',
'wp': 'http://schemas.openxmlformats.org/drawingml/2006/wordprocessingDrawing',
# Properties (core and extended)
'cp': 'http://schemas.openxmlformats.org/package/2006/metadata/core-properties',
'dc': 'http://purl.org/dc/elements/1.1/',
'ep': 'http://schemas.openxmlformats.org/officeDocument/2006/extended-properties',
'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
# Content Types
'ct': 'http://schemas.openxmlformats.org/package/2006/content-types',
# Package Relationships
'r': 'http://schemas.openxmlformats.org/officeDocument/2006/relationships',
'pr': 'http://schemas.openxmlformats.org/package/2006/relationships',
# Dublin Core document properties
'dcmitype': 'http://purl.org/dc/dcmitype/',
'dcterms': 'http://purl.org/dc/terms/'
}
xpath_cache = {}
def XPath(expr):
ans = xpath_cache.get(expr, None)
if ans is None:
xpath_cache[expr] = ans = X(expr, namespaces=namespaces)
return ans
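# Usage sketch: expressions are compiled once and memoised, so repeated
# lookups stay cheap, e.g. XPath('descendant::w:p')(root) — with a
# hypothetical `root` element — returns all paragraph descendants.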
def is_tag(x, q):
tag = getattr(x, 'tag', x)
ns, name = q.partition(':')[0::2]
return '{%s}%s' % (namespaces.get(ns, None), name) == tag
def barename(x):
return x.rpartition('}')[-1]
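# e.g. barename('{http://www.w3.org/XML/1998/namespace}lang') -> 'lang'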
def XML(x):
return '{%s}%s' % (namespaces['xml'], x)
def expand(name):
ns, tag = name.partition(':')[0::2]
if ns:
tag = '{%s}%s' % (namespaces[ns], tag)
return tag
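# e.g. expand('xml:lang') -> '{http://www.w3.org/XML/1998/namespace}lang'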
def get(x, attr, default=None):
return x.attrib.get(expand(attr), default)
def ancestor(elem, name):
try:
return XPath('ancestor::%s[1]' % name)(elem)[0]
except IndexError:
return None
def generate_anchor(name, existing):
x = y = 'id_' + re.sub(r'[^0-9a-zA-Z_]', '', ascii_text(name)).lstrip('_')
c = 1
while y in existing:
y = '%s_%d' % (x, c)
c += 1
return y
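# e.g. generate_anchor('Chapter 1', {'id_Chapter1'}) -> 'id_Chapter1_1'
# (the space is stripped by the regex and the clash gets a numeric suffix)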
def children(elem, *args):
return XPath('|'.join('child::%s' % a for a in args))(elem)
def descendants(elem, *args):
return XPath('|'.join('descendant::%s' % a for a in args))(elem)<|fim▁end|> | 'mo': 'http://schemas.microsoft.com/office/mac/office/2008/main',
'o': 'urn:schemas-microsoft-com:office:office', |
<|file_name|>clearpart_test.py<|end_file_name|><|fim▁begin|>import unittest
import mock
import blivet
from pykickstart.constants import CLEARPART_TYPE_ALL, CLEARPART_TYPE_LINUX, CLEARPART_TYPE_NONE
from parted import PARTITION_NORMAL
from blivet.flags import flags
DEVICE_CLASSES = [
blivet.devices.DiskDevice,
blivet.devices.PartitionDevice
]
@unittest.skipUnless(not any(x.unavailable_type_dependencies() for x in DEVICE_CLASSES), "some unsupported device classes required for this test")
class ClearPartTestCase(unittest.TestCase):
def setUp(self):
flags.testing = True
def test_should_clear(self):
""" Test the Blivet.should_clear method. """
b = blivet.Blivet()
DiskDevice = blivet.devices.DiskDevice
PartitionDevice = blivet.devices.PartitionDevice
# sda is a disk with an existing disklabel containing two partitions
sda = DiskDevice("sda", size=100000, exists=True)
sda.format = blivet.formats.get_format("disklabel", device=sda.path,
exists=True)
sda.format._parted_disk = mock.Mock()
sda.format._parted_device = mock.Mock()
sda.format._parted_disk.configure_mock(partitions=[])
b.devicetree._add_device(sda)
# sda1 is a partition containing an existing ext4 filesystem
sda1 = PartitionDevice("sda1", size=500, exists=True,
parents=[sda])
sda1._parted_partition = mock.Mock(**{'type': PARTITION_NORMAL,
'getFlag.return_value': 0})
sda1.format = blivet.formats.get_format("ext4", mountpoint="/boot",
device=sda1.path,
exists=True)
b.devicetree._add_device(sda1)
# sda2 is a partition containing an existing vfat filesystem
sda2 = PartitionDevice("sda2", size=10000, exists=True,
parents=[sda])
sda2._parted_partition = mock.Mock(**{'type': PARTITION_NORMAL,
'getFlag.return_value': 0})
sda2.format = blivet.formats.get_format("vfat", mountpoint="/foo",
device=sda2.path,
exists=True)
b.devicetree._add_device(sda2)
# sdb is an unpartitioned disk containing an xfs filesystem
sdb = DiskDevice("sdb", size=100000, exists=True)
sdb.format = blivet.formats.get_format("xfs", device=sdb.path,
exists=True)
b.devicetree._add_device(sdb)
# sdc is an unformatted/uninitialized/empty disk
sdc = DiskDevice("sdc", size=100000, exists=True)
b.devicetree._add_device(sdc)
# sdd is a disk containing an existing disklabel with no partitions
sdd = DiskDevice("sdd", size=100000, exists=True)
sdd.format = blivet.formats.get_format("disklabel", device=sdd.path,
exists=True)
b.devicetree._add_device(sdd)
#
# clearpart type none
#
b.config.clear_part_type = CLEARPART_TYPE_NONE
self.assertFalse(b.should_clear(sda1),
msg="type none should not clear any partitions")
self.assertFalse(b.should_clear(sda2),
msg="type none should not clear any partitions")
b.config.initialize_disks = False
self.assertFalse(b.should_clear(sda),
msg="type none should not clear non-empty disks")
self.assertFalse(b.should_clear(sdb),
msg="type none should not clear formatting from "
"unpartitioned disks")
self.assertFalse(b.should_clear(sdc),
msg="type none should not clear empty disk without "
"initlabel")
self.assertFalse(b.should_clear(sdd),
msg="type none should not clear empty partition table "
"without initlabel")
b.config.initialize_disks = True
self.assertFalse(b.should_clear(sda),
msg="type none should not clear non-empty disks even "
"with initlabel")
self.assertFalse(b.should_clear(sdb),
msg="type non should not clear formatting from "
"unpartitioned disks even with initlabel")
self.assertTrue(b.should_clear(sdc),
msg="type none should clear empty disks when initlabel "
"is set")
self.assertTrue(b.should_clear(sdd),
msg="type none should clear empty partition table when "
"initlabel is set")
#
# clearpart type linux
#
b.config.clear_part_type = CLEARPART_TYPE_LINUX
self.assertTrue(b.should_clear(sda1),
msg="type linux should clear partitions containing "
"ext4 filesystems")
self.assertFalse(b.should_clear(sda2),
msg="type linux should not clear partitions "
"containing vfat filesystems")
b.config.initialize_disks = False
self.assertFalse(b.should_clear(sda),
msg="type linux should not clear non-empty disklabels")
self.assertTrue(b.should_clear(sdb),
msg="type linux should clear linux-native whole-disk "
"formatting regardless of initlabel setting")
self.assertFalse(b.should_clear(sdc),
msg="type linux should not clear unformatted disks "
"unless initlabel is set")
self.assertFalse(b.should_clear(sdd),
msg="type linux should not clear disks with empty "
"partition tables unless initlabel is set")
b.config.initialize_disks = True
self.assertFalse(b.should_clear(sda),
msg="type linux should not clear non-empty disklabels")
self.assertTrue(b.should_clear(sdb),
msg="type linux should clear linux-native whole-disk "
"formatting regardless of initlabel setting")
self.assertTrue(b.should_clear(sdc),
msg="type linux should clear unformatted disks when "
"initlabel is set")
self.assertTrue(b.should_clear(sdd),<|fim▁hole|> msg="type linux should clear disks with empty "
"partition tables when initlabel is set")
sda1.protected = True
self.assertFalse(b.should_clear(sda1),
msg="protected devices should never be cleared")
self.assertFalse(b.should_clear(sda),
msg="disks containing protected devices should never "
"be cleared")
sda1.protected = False
#
# clearpart type all
#
b.config.clear_part_type = CLEARPART_TYPE_ALL
self.assertTrue(b.should_clear(sda1),
msg="type all should clear all partitions")
self.assertTrue(b.should_clear(sda2),
msg="type all should clear all partitions")
b.config.initialize_disks = False
self.assertTrue(b.should_clear(sda),
msg="type all should initialize all disks")
self.assertTrue(b.should_clear(sdb),
msg="type all should initialize all disks")
self.assertTrue(b.should_clear(sdc),
msg="type all should initialize all disks")
self.assertTrue(b.should_clear(sdd),
msg="type all should initialize all disks")
b.config.initialize_disks = True
self.assertTrue(b.should_clear(sda),
msg="type all should initialize all disks")
self.assertTrue(b.should_clear(sdb),
msg="type all should initialize all disks")
self.assertTrue(b.should_clear(sdc),
msg="type all should initialize all disks")
self.assertTrue(b.should_clear(sdd),
msg="type all should initialize all disks")
sda1.protected = True
self.assertFalse(b.should_clear(sda1),
msg="protected devices should never be cleared")
self.assertFalse(b.should_clear(sda),
msg="disks containing protected devices should never "
"be cleared")
sda1.protected = False
#
# clearpart type list
#
# TODO
def tearDown(self):
flags.testing = False
def test_initialize_disk(self):
"""
magic partitions
non-empty partition table
"""
pass
def test_recursive_remove(self):
"""
protected device at various points in stack
"""
pass<|fim▁end|> | |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bitcamp.settings")
<|fim▁hole|> execute_from_command_line(sys.argv)<|fim▁end|> | from django.core.management import execute_from_command_line
|
<|file_name|>main.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.<|fim▁hole|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def testFunction(request):
return "PASS"
import os
# os.environ["FOO"] is only available at runtime.
print(os.environ["FOO"])<|fim▁end|> | # You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>qmlprofilertraceview.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
****************************************************************************/
#include "qmlprofilertraceview.h"
#include "qmlprofilertool.h"
#include "qmlprofilerstatemanager.h"
#include "qmlprofilerdatamodel.h"
// Needed for the load&save actions in the context menu
#include <analyzerbase/ianalyzertool.h>
// Communication with the other views (limit events to range)
#include "qmlprofilerviewmanager.h"
#include <utils/styledbar.h>
#include <QDeclarativeContext>
#include <QToolButton>
#include <QEvent>
#include <QVBoxLayout>
#include <QGraphicsObject>
#include <QScrollBar>
#include <QSlider>
#include <QMenu>
#include <math.h>
using namespace QmlDebug;
namespace QmlProfiler {
namespace Internal {
const int sliderTicks = 10000;
const qreal sliderExp = 3;
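// The slider position maps to the visible window length through a cubic curve:
// windowLength ~ (tick / sliderTicks) ^ sliderExp (see setZoomLevel() and
// updateRange() below), which gives finer-grained control at deep zoom levels.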
/////////////////////////////////////////////////////////
bool MouseWheelResizer::eventFilter(QObject *obj, QEvent *event)
{
if (event->type() == QEvent::Wheel) {
QWheelEvent *ev = static_cast<QWheelEvent *>(event);
if (ev->modifiers() & Qt::ControlModifier) {
emit mouseWheelMoved(ev->pos().x(), ev->pos().y(), ev->delta());
return true;
}
}
return QObject::eventFilter(obj, event);
}
/////////////////////////////////////////////////////////
void ZoomControl::setRange(qint64 startTime, qint64 endTime)
{
if (m_startTime != startTime || m_endTime != endTime) {
m_startTime = startTime;
m_endTime = endTime;
emit rangeChanged();
}
}
/////////////////////////////////////////////////////////
ScrollableDeclarativeView::ScrollableDeclarativeView(QWidget *parent)
: QDeclarativeView(parent)
{
}
ScrollableDeclarativeView::~ScrollableDeclarativeView()
{
}
void ScrollableDeclarativeView::scrollContentsBy(int dx, int dy)
{
// special workaround to track the scrollbar
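    // mirror the widget's scroll offset into the QML root object's "scrollY"
    // property so the QML scene stays in sync with the widget's scrollbar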
if (rootObject()) {
int scrollY = rootObject()->property("scrollY").toInt();
rootObject()->setProperty("scrollY", QVariant(scrollY - dy));
}
    QDeclarativeView::scrollContentsBy(dx, dy);
}
/////////////////////////////////////////////////////////
class QmlProfilerTraceView::QmlProfilerTraceViewPrivate
{
public:
QmlProfilerTraceViewPrivate(QmlProfilerTraceView *qq) : q(qq) {}
QmlProfilerTraceView *q;
QmlProfilerStateManager *m_profilerState;
Analyzer::IAnalyzerTool *m_profilerTool;
QmlProfilerViewManager *m_viewContainer;
QSize m_sizeHint;
ScrollableDeclarativeView *m_mainView;
QDeclarativeView *m_timebar;
QDeclarativeView *m_overview;
QmlProfilerDataModel *m_profilerDataModel;
ZoomControl *m_zoomControl;
QToolButton *m_buttonRange;
QToolButton *m_buttonLock;
QWidget *m_zoomToolbar;
int m_currentZoomLevel;
};
QmlProfilerTraceView::QmlProfilerTraceView(QWidget *parent, Analyzer::IAnalyzerTool *profilerTool, QmlProfilerViewManager *container, QmlProfilerDataModel *model, QmlProfilerStateManager *profilerState)
: QWidget(parent), d(new QmlProfilerTraceViewPrivate(this))
{
setObjectName(QLatin1String("QML Profiler"));
d->m_zoomControl = new ZoomControl(this);
connect(d->m_zoomControl, SIGNAL(rangeChanged()), this, SLOT(updateRange()));
QVBoxLayout *groupLayout = new QVBoxLayout;
groupLayout->setContentsMargins(0, 0, 0, 0);
groupLayout->setSpacing(0);
d->m_mainView = new ScrollableDeclarativeView(this);
d->m_mainView->setResizeMode(QDeclarativeView::SizeViewToRootObject);
d->m_mainView->setVerticalScrollBarPolicy(Qt::ScrollBarAsNeeded);
d->m_mainView->setBackgroundBrush(QBrush(Qt::white));
d->m_mainView->setAlignment(Qt::AlignLeft | Qt::AlignTop);
d->m_mainView->setFocus();
MouseWheelResizer *resizer = new MouseWheelResizer(this);
    connect(resizer, SIGNAL(mouseWheelMoved(int,int,int)), this, SLOT(mouseWheelMoved(int,int,int)));
d->m_mainView->viewport()->installEventFilter(resizer);
QHBoxLayout *toolsLayout = new QHBoxLayout;
d->m_timebar = new QDeclarativeView(this);
d->m_timebar->setResizeMode(QDeclarativeView::SizeRootObjectToView);
d->m_timebar->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Fixed);
d->m_timebar->setFixedHeight(24);
d->m_overview = new QDeclarativeView(this);
d->m_overview->setResizeMode(QDeclarativeView::SizeRootObjectToView);
d->m_overview->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Fixed);
d->m_overview->setMaximumHeight(50);
d->m_zoomToolbar = createZoomToolbar();
d->m_zoomToolbar->move(0, d->m_timebar->height());
d->m_zoomToolbar->setVisible(false);
toolsLayout->addWidget(createToolbar());
toolsLayout->addWidget(d->m_timebar);
emit enableToolbar(false);
groupLayout->addLayout(toolsLayout);
groupLayout->addWidget(d->m_mainView);
groupLayout->addWidget(d->m_overview);
setLayout(groupLayout);
d->m_profilerTool = profilerTool;
d->m_viewContainer = container;
d->m_profilerDataModel = model;
connect(d->m_profilerDataModel, SIGNAL(stateChanged()),
this, SLOT(profilerDataModelStateChanged()));
d->m_mainView->rootContext()->setContextProperty(QLatin1String("qmlProfilerDataModel"),
d->m_profilerDataModel);
d->m_overview->rootContext()->setContextProperty(QLatin1String("qmlProfilerDataModel"),
d->m_profilerDataModel);
d->m_profilerState = profilerState;
connect(d->m_profilerState, SIGNAL(stateChanged()),
this, SLOT(profilerStateChanged()));
connect(d->m_profilerState, SIGNAL(clientRecordingChanged()),
this, SLOT(clientRecordingChanged()));
connect(d->m_profilerState, SIGNAL(serverRecordingChanged()),
this, SLOT(serverRecordingChanged()));
// Minimum height: 5 rows of 20 pixels + scrollbar of 50 pixels + 20 pixels margin
setMinimumHeight(170);
d->m_currentZoomLevel = 0;
}
QmlProfilerTraceView::~QmlProfilerTraceView()
{
delete d;
}
/////////////////////////////////////////////////////////
// Initialize widgets
void QmlProfilerTraceView::reset()
{
d->m_mainView->rootContext()->setContextProperty(QLatin1String("zoomControl"), d->m_zoomControl);
d->m_timebar->rootContext()->setContextProperty(QLatin1String("zoomControl"), d->m_zoomControl);
d->m_overview->rootContext()->setContextProperty(QLatin1String("zoomControl"), d->m_zoomControl);
d->m_timebar->setSource(QUrl(QLatin1String("qrc:/qmlprofiler/TimeDisplay.qml")));
d->m_overview->setSource(QUrl(QLatin1String("qrc:/qmlprofiler/Overview.qml")));
d->m_mainView->setSource(QUrl(QLatin1String("qrc:/qmlprofiler/MainView.qml")));
QGraphicsObject *rootObject = d->m_mainView->rootObject();
rootObject->setProperty("width", QVariant(width()));
rootObject->setProperty("candidateHeight", QVariant(height() - d->m_timebar->height() - d->m_overview->height()));
connect(rootObject, SIGNAL(updateCursorPosition()), this, SLOT(updateCursorPosition()));
connect(rootObject, SIGNAL(updateRangeButton()), this, SLOT(updateRangeButton()));
connect(rootObject, SIGNAL(updateLockButton()), this, SLOT(updateLockButton()));
connect(this, SIGNAL(jumpToPrev()), rootObject, SLOT(prevEvent()));
connect(this, SIGNAL(jumpToNext()), rootObject, SLOT(nextEvent()));
connect(rootObject, SIGNAL(selectedEventChanged(int)), this, SIGNAL(selectedEventChanged(int)));
connect(rootObject, SIGNAL(changeToolTip(QString)), this, SLOT(updateToolTip(QString)));
connect(rootObject, SIGNAL(updateVerticalScroll(int)), this, SLOT(updateVerticalScroll(int)));
}
QWidget *QmlProfilerTraceView::createToolbar()
{
Utils::StyledBar *bar = new Utils::StyledBar(this);
bar->setStyleSheet(QLatin1String("background: #9B9B9B"));
bar->setSingleRow(true);
bar->setFixedWidth(150);
bar->setFixedHeight(24);
QHBoxLayout *toolBarLayout = new QHBoxLayout(bar);
toolBarLayout->setMargin(0);
toolBarLayout->setSpacing(0);
    QToolButton *buttonPrev = new QToolButton;
buttonPrev->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_prev.png")));
buttonPrev->setToolTip(tr("Jump to previous event"));
connect(buttonPrev, SIGNAL(clicked()), this, SIGNAL(jumpToPrev()));
connect(this, SIGNAL(enableToolbar(bool)), buttonPrev, SLOT(setEnabled(bool)));
    QToolButton *buttonNext = new QToolButton;
buttonNext->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_next.png")));
buttonNext->setToolTip(tr("Jump to next event"));
connect(buttonNext, SIGNAL(clicked()), this, SIGNAL(jumpToNext()));
connect(this, SIGNAL(enableToolbar(bool)), buttonNext, SLOT(setEnabled(bool)));
QToolButton *buttonZoomControls = new QToolButton;
buttonZoomControls->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_zoom.png")));
buttonZoomControls->setToolTip(tr("Show zoom slider"));
buttonZoomControls->setCheckable(true);
buttonZoomControls->setChecked(false);
connect(buttonZoomControls, SIGNAL(toggled(bool)), d->m_zoomToolbar, SLOT(setVisible(bool)));
connect(this, SIGNAL(enableToolbar(bool)), buttonZoomControls, SLOT(setEnabled(bool)));
d->m_buttonRange = new QToolButton;
d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselection.png")));
d->m_buttonRange->setToolTip(tr("Select range"));
d->m_buttonRange->setCheckable(true);
d->m_buttonRange->setChecked(false);
connect(d->m_buttonRange, SIGNAL(clicked(bool)), this, SLOT(toggleRangeMode(bool)));
connect(this, SIGNAL(enableToolbar(bool)), d->m_buttonRange, SLOT(setEnabled(bool)));
connect(this, SIGNAL(rangeModeChanged(bool)), d->m_buttonRange, SLOT(setChecked(bool)));
d->m_buttonLock = new QToolButton;
d->m_buttonLock->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_selectionmode.png")));
d->m_buttonLock->setToolTip(tr("View event information on mouseover"));
d->m_buttonLock->setCheckable(true);
d->m_buttonLock->setChecked(false);
connect(d->m_buttonLock, SIGNAL(clicked(bool)), this, SLOT(toggleLockMode(bool)));
connect(this, SIGNAL(enableToolbar(bool)), d->m_buttonLock, SLOT(setEnabled(bool)));
connect(this, SIGNAL(lockModeChanged(bool)), d->m_buttonLock, SLOT(setChecked(bool)));
toolBarLayout->addWidget(buttonPrev);
toolBarLayout->addWidget(buttonNext);
toolBarLayout->addWidget(new Utils::StyledSeparator());
toolBarLayout->addWidget(buttonZoomControls);
toolBarLayout->addWidget(new Utils::StyledSeparator());
toolBarLayout->addWidget(d->m_buttonRange);
toolBarLayout->addWidget(d->m_buttonLock);
return bar;
}
QWidget *QmlProfilerTraceView::createZoomToolbar()
{
Utils::StyledBar *bar = new Utils::StyledBar(this);
bar->setStyleSheet(QLatin1String("background: #9B9B9B"));
bar->setSingleRow(true);
bar->setFixedWidth(150);
bar->setFixedHeight(24);
QHBoxLayout *toolBarLayout = new QHBoxLayout(bar);
toolBarLayout->setMargin(0);
toolBarLayout->setSpacing(0);
QSlider *zoomSlider = new QSlider(Qt::Horizontal);
zoomSlider->setFocusPolicy(Qt::NoFocus);
zoomSlider->setRange(1, sliderTicks);
zoomSlider->setInvertedAppearance(true);
zoomSlider->setPageStep(sliderTicks/100);
connect(this, SIGNAL(enableToolbar(bool)), zoomSlider, SLOT(setEnabled(bool)));
connect(zoomSlider, SIGNAL(valueChanged(int)), this, SLOT(setZoomLevel(int)));
connect(this, SIGNAL(zoomLevelChanged(int)), zoomSlider, SLOT(setValue(int)));
zoomSlider->setStyleSheet(QLatin1String("\
QSlider:horizontal {\
background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #444444, stop: 1 #5a5a5a);\
border: 1px #313131;\
height: 20px;\
margin: 0px 0px 0px 0px;\<|fim▁hole|> border: 1px #313131;\
}\
QSlider::sub-page:horizontal {\
background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #5a5a5a, stop: 1 #444444);\
border: 1px #313131;\
}\
"));
toolBarLayout->addWidget(zoomSlider);
return bar;
}
/////////////////////////////////////////////////////////
bool QmlProfilerTraceView::hasValidSelection() const
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
if (rootObject)
return rootObject->property("selectionRangeReady").toBool();
return false;
}
qint64 QmlProfilerTraceView::selectionStart() const
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
if (rootObject)
return rootObject->property("selectionRangeStart").toLongLong();
return 0;
}
qint64 QmlProfilerTraceView::selectionEnd() const
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
if (rootObject)
return rootObject->property("selectionRangeEnd").toLongLong();
return 0;
}
void QmlProfilerTraceView::clearDisplay()
{
d->m_zoomControl->setRange(0,0);
updateVerticalScroll(0);
d->m_mainView->rootObject()->setProperty("scrollY", QVariant(0));
QMetaObject::invokeMethod(d->m_mainView->rootObject(), "clearAll");
QMetaObject::invokeMethod(d->m_overview->rootObject(), "clearDisplay");
}
void QmlProfilerTraceView::selectNextEventWithId(int eventId)
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
if (rootObject)
QMetaObject::invokeMethod(rootObject, "selectNextWithId",
Q_ARG(QVariant,QVariant(eventId)));
}
/////////////////////////////////////////////////////////
// Goto source location
void QmlProfilerTraceView::updateCursorPosition()
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
emit gotoSourceLocation(rootObject->property("fileName").toString(),
rootObject->property("lineNumber").toInt(),
rootObject->property("columnNumber").toInt());
}
/////////////////////////////////////////////////////////
// Toolbar buttons
void QmlProfilerTraceView::toggleRangeMode(bool active)
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
bool rangeMode = rootObject->property("selectionRangeMode").toBool();
if (active != rangeMode) {
if (active)
d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselected.png")));
else
d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselection.png")));
rootObject->setProperty("selectionRangeMode", QVariant(active));
}
}
void QmlProfilerTraceView::updateRangeButton()
{
bool rangeMode = d->m_mainView->rootObject()->property("selectionRangeMode").toBool();
if (rangeMode)
d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselected.png")));
else
d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselection.png")));
emit rangeModeChanged(rangeMode);
}
void QmlProfilerTraceView::toggleLockMode(bool active)
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
bool lockMode = !rootObject->property("selectionLocked").toBool();
if (active != lockMode) {
rootObject->setProperty("selectionLocked", QVariant(!active));
rootObject->setProperty("selectedItem", QVariant(-1));
}
}
void QmlProfilerTraceView::updateLockButton()
{
bool lockMode = !d->m_mainView->rootObject()->property("selectionLocked").toBool();
emit lockModeChanged(lockMode);
}
////////////////////////////////////////////////////////
// Zoom control
void QmlProfilerTraceView::setZoomLevel(int zoomLevel)
{
if (d->m_currentZoomLevel != zoomLevel && d->m_mainView->rootObject()) {
QVariant newFactor = pow(qreal(zoomLevel) / qreal(sliderTicks), sliderExp);
d->m_currentZoomLevel = zoomLevel;
QMetaObject::invokeMethod(d->m_mainView->rootObject(), "updateWindowLength", Q_ARG(QVariant, newFactor));
}
}
void QmlProfilerTraceView::updateRange()
{
if (!d->m_profilerDataModel)
return;
qreal duration = d->m_zoomControl->endTime() - d->m_zoomControl->startTime();
if (duration <= 0)
return;
if (d->m_profilerDataModel->traceDuration() <= 0)
return;
int newLevel = pow(duration / d->m_profilerDataModel->traceDuration(), 1/sliderExp) * sliderTicks;
if (d->m_currentZoomLevel != newLevel) {
d->m_currentZoomLevel = newLevel;
emit zoomLevelChanged(newLevel);
}
}
void QmlProfilerTraceView::mouseWheelMoved(int mouseX, int mouseY, int wheelDelta)
{
Q_UNUSED(mouseY);
QGraphicsObject *rootObject = d->m_mainView->rootObject();
if (rootObject) {
QMetaObject::invokeMethod(rootObject, "wheelZoom",
Q_ARG(QVariant, QVariant(mouseX)),
Q_ARG(QVariant, QVariant(wheelDelta)));
}
}
////////////////////////////////////////////////////////
void QmlProfilerTraceView::updateToolTip(const QString &text)
{
setToolTip(text);
}
void QmlProfilerTraceView::updateVerticalScroll(int newPosition)
{
d->m_mainView->verticalScrollBar()->setValue(newPosition);
}
void QmlProfilerTraceView::resizeEvent(QResizeEvent *event)
{
QWidget::resizeEvent(event);
QGraphicsObject *rootObject = d->m_mainView->rootObject();
if (rootObject) {
rootObject->setProperty("width", QVariant(event->size().width()));
int newHeight = event->size().height() - d->m_timebar->height() - d->m_overview->height();
rootObject->setProperty("candidateHeight", QVariant(newHeight));
}
emit resized();
}
////////////////////////////////////////////////////////////////
// Context menu
void QmlProfilerTraceView::contextMenuEvent(QContextMenuEvent *ev)
{
QMenu menu;
QAction *viewAllAction = 0;
QmlProfilerTool *profilerTool = qobject_cast<QmlProfilerTool *>(d->m_profilerTool);
if (profilerTool)
menu.addActions(profilerTool->profilerContextMenuActions());
menu.addSeparator();
QAction *getLocalStatsAction = menu.addAction(tr("Limit Events Pane to Current Range"));
if (!d->m_viewContainer->hasValidSelection())
getLocalStatsAction->setEnabled(false);
QAction *getGlobalStatsAction = menu.addAction(tr("Reset Events Pane"));
if (d->m_viewContainer->hasGlobalStats())
getGlobalStatsAction->setEnabled(false);
if (d->m_profilerDataModel->count() > 0) {
menu.addSeparator();
viewAllAction = menu.addAction(tr("Reset Zoom"));
}
QAction *selectedAction = menu.exec(ev->globalPos());
if (selectedAction) {
if (selectedAction == viewAllAction) {
d->m_zoomControl->setRange(
d->m_profilerDataModel->traceStartTime(),
d->m_profilerDataModel->traceEndTime());
}
if (selectedAction == getLocalStatsAction) {
d->m_viewContainer->getStatisticsInRange(
d->m_viewContainer->selectionStart(),
d->m_viewContainer->selectionEnd());
}
if (selectedAction == getGlobalStatsAction) {
d->m_viewContainer->getStatisticsInRange(
d->m_profilerDataModel->traceStartTime(),
d->m_profilerDataModel->traceEndTime());
}
}
}
/////////////////////////////////////////////////
// Tell QML the state of the profiler
void QmlProfilerTraceView::setRecording(bool recording)
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
if (rootObject)
rootObject->setProperty("recordingEnabled", QVariant(recording));
}
void QmlProfilerTraceView::setAppKilled()
{
QGraphicsObject *rootObject = d->m_mainView->rootObject();
if (rootObject)
rootObject->setProperty("appKilled",QVariant(true));
}
////////////////////////////////////////////////////////////////
// Profiler State
void QmlProfilerTraceView::profilerDataModelStateChanged()
{
switch (d->m_profilerDataModel->currentState()) {
case QmlProfilerDataModel::Empty :
emit enableToolbar(false);
break;
case QmlProfilerDataModel::AcquiringData :
// nothing to be done
break;
case QmlProfilerDataModel::ProcessingData :
// nothing to be done
break;
case QmlProfilerDataModel::Done :
emit enableToolbar(true);
break;
default:
break;
}
}
void QmlProfilerTraceView::profilerStateChanged()
{
switch (d->m_profilerState->currentState()) {
case QmlProfilerStateManager::AppKilled : {
if (d->m_profilerDataModel->currentState() == QmlProfilerDataModel::AcquiringData)
setAppKilled();
break;
}
default:
// no special action needed for other states
break;
}
}
void QmlProfilerTraceView::clientRecordingChanged()
{
// nothing yet
}
void QmlProfilerTraceView::serverRecordingChanged()
{
setRecording(d->m_profilerState->serverRecording());
}
} // namespace Internal
} // namespace QmlProfiler<|fim▁end|> | }\
QSlider::add-page:horizontal {\
background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #5a5a5a, stop: 1 #444444);\ |
<|file_name|>update.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Module that update the software and its databases
'''
import os
import shutil
from sys import exit
import os.path
import tarfile
import requests
from bs4 import BeautifulSoup
from ...base import *
from ...sentry import sentry
from ...clint import progress
class Updater(object):
def __init__(self, path, ver, url):
self.inst_path = path
self.repo_url = url
self.version = ver
def update_all(self):
'''
Upgrade BigBrother completely
'''
print(color.info.info("Fetching version from Github..."))
# Retrieving github releases
try:
response = requests.get(self.repo_url)
except requests.exceptions.RequestException as e:
print(color.info.error(e))
return
# Getting latest release
soup = BeautifulSoup(response.content, 'html.parser')
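        # NOTE: these CSS selectors are tied to GitHub's release-page markup
        # and will break if that page layout changes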
try: # Parsing info from page
version = soup.select("ul.tag-references > li > a > span")[0].text
download_url = "https://github.com" + \
soup.select(".release-downloads > li > a")[1]['href']
except Exception as e:
sentry.client.captureException()
print(color.info.error(e))
return
# check version
if version == self.version:
print(color.info.info("You have already the latest version"))
else:
print(color.info.info("New version " + color.bold(
"{ver}".format(ver=version)) + " found"))
# install
if self.install(self.inst_path, download_url):
print(color.info.info("Need to be restarted for changes to be effective"))
exit()
def install(self, path, url):
try:
# downloaded file name
dl_file = self.download(url, path)
# change directory
os.chdir(path)
# extract in path directory
inst_module = self.extract(dl_file)
            # normalize name: GitHub tarballs extract to '<repo>-<version>',
            # keep only the '<repo>' part
inst_module_norm = inst_module[:inst_module.find('-')]
if inst_module_norm in os.listdir():
shutil.rmtree(inst_module_norm)
shutil.move(inst_module, inst_module_norm)
print(color.info.info(color.info.success("Installation completed")))
return 1
except Exception as e:
print(color.info.info(color.info.fail("Installation failed")))
print(color.info.error(e))
return 0
def download(self, url, path):
'''
Download module from [url] to [path]
'''
        # get the name of the file to download
local_filename = url.split('/')[-1]
try:
stream = requests.get(url, stream=True)
total_length = int(stream.headers['Content-Length'])
except requests.exceptions.RequestException as e:
print(color.info.error(e))
return
        # change to download dir
try:<|fim▁hole|> except Exception as e:
print(color.info.error(e))
return
# write on file
with open(local_filename, 'wb') as f:
for chunk in progress.bar(stream.iter_content(chunk_size=1024),
label=local_filename, expected_size=(total_length/1024)):
if chunk:
f.write(chunk)
f.flush()
return local_filename
def extract(self, filename):
        tar = None
        try:
            tar = tarfile.open(filename)
repo = tar.getnames()[0]
# remove old repo
if repo in os.listdir():
shutil.rmtree(repo)
# extract in current directory
tar.extractall()
return repo
except Exception as e:
print(color.info.error(e))
return
finally:
            # tar is still None if tarfile.open() failed above
            if tar is not None:
                tar.close()
os.remove(filename)<|fim▁end|> | os.chdir(path) |
<|file_name|>0003_auto_20200417_1418.py<|end_file_name|><|fim▁begin|># Generated by Django 2.2.12 on 2020-04-17 14:18
from django.db import migrations, models
<|fim▁hole|> ]
operations = [
migrations.RemoveField(
model_name='commoncontrol',
name='legacy_impl_smt',
),
migrations.AddField(
model_name='commoncontrol',
name='legacy_imp_stm',
field=models.TextField(blank=True, help_text='Legacy large implementation statement', null=True),
),
]<|fim▁end|> | class Migration(migrations.Migration):
dependencies = [
('controls', '0002_commoncontrol_common_control_provider'), |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.validators import validate_email, validate_slug, validate_ipv46_address
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from ava.core.models import TimeStampedModel
from ava.core_group.models import Group
from ava.core_identity.validators import validate_skype, validate_twitter
class Identity(TimeStampedModel):
# An identity is an online persona that can map to a single person, a group
# of people, or an automated service.<|fim▁hole|> GROUP = 'GROUP'
PERSON = 'PERSON'
IDENTITY_TYPE_CHOICES = (
(GROUP, 'Group'),
(PERSON, 'Person'),
)
name = models.CharField(max_length=100, verbose_name='Name', null=True, blank=True)
description = models.TextField(max_length=500, verbose_name='Description', null=True, blank=True)
identity_type = models.CharField(max_length=10,
choices=IDENTITY_TYPE_CHOICES,
default=PERSON,
verbose_name='Identity Type')
groups = models.ManyToManyField(Group,
blank=True,
related_name='identities')
def __str__(self):
return self.name or ''
def get_absolute_url(self):
return reverse('identity-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'identity'
verbose_name_plural = 'identities'
ordering = ['name']
class Person(TimeStampedModel):
first_name = models.CharField(max_length=75, validators=[validate_slug])
surname = models.CharField(max_length=75, validators=[validate_slug])
identity = models.ManyToManyField('Identity', blank=True)
def __str__(self):
return (self.first_name + " " + self.surname).strip() or ''
def get_absolute_url(self):
return reverse('person-detail', kwargs={'pk': self.id})
class Meta:
verbose_name = 'person'
verbose_name_plural = 'people'
ordering = ['surname', 'first_name']
class Identifier(TimeStampedModel):
"""
TODO: DocString
"""
EMAIL = 'EMAIL'
SKYPE = 'SKYPE'
IP = 'IPADD'
UNAME = 'UNAME'
TWITTER = 'TWITTER'
NAME = 'NAME'
IDENTIFIER_TYPE_CHOICES = (
(EMAIL, 'Email Address'),
(SKYPE, 'Skype ID'),
(IP, 'IP Address'),
(UNAME, 'Username'),
(TWITTER, 'Twitter ID'),
(NAME, 'Other name'),
)
identifier = models.CharField(max_length=100)
identifier_type = models.CharField(max_length=10,
choices=IDENTIFIER_TYPE_CHOICES,
default=EMAIL,
verbose_name='Identifier Type')
identity = models.ForeignKey('Identity', related_name='identifiers')
def __str__(self):
return self.identifier or ''
def get_absolute_url(self):
return reverse('identifier-detail', kwargs={'pk': self.id})
def clean(self):
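        # typically invoked via Model.full_clean(); re-check the identifier
        # with the validator matching its declared type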
        if self.identifier_type == self.EMAIL:
try:
validate_email(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid email address')
        if self.identifier_type == self.IP:
try:
validate_ipv46_address(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid IPv4/IPv6 address')
        if self.identifier_type == self.UNAME or self.identifier_type == self.NAME:
try:
validate_slug(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid username or name')
        if self.identifier_type == self.SKYPE:
try:
validate_skype(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Skype user name')
        if self.identifier_type == self.TWITTER:
try:
validate_twitter(self.identifier)
except ValidationError:
raise ValidationError('Identifier is not a valid Twitter user name')
class Meta:
unique_together = ("identifier", "identifier_type", "identity")
ordering = ['identifier', 'identifier_type']<|fim▁end|> | |
<|file_name|>binary_sensor.py<|end_file_name|><|fim▁begin|>"""Support for MyQ gateways."""
from pymyq.const import (
DEVICE_STATE as MYQ_DEVICE_STATE,
DEVICE_STATE_ONLINE as MYQ_DEVICE_STATE_ONLINE,
KNOWN_MODELS,
MANUFACTURER,
)
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
BinarySensorEntity,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MYQ_COORDINATOR, MYQ_GATEWAY
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up mysq covers."""
data = hass.data[DOMAIN][config_entry.entry_id]
myq = data[MYQ_GATEWAY]
coordinator = data[MYQ_COORDINATOR]
entities = []
for device in myq.gateways.values():
entities.append(MyQBinarySensorEntity(coordinator, device))
async_add_entities(entities)
class MyQBinarySensorEntity(CoordinatorEntity, BinarySensorEntity):
"""Representation of a MyQ gateway."""
_attr_device_class = DEVICE_CLASS_CONNECTIVITY
def __init__(self, coordinator, device):
"""Initialize with API object, device id."""
super().__init__(coordinator)
self._device = device
@property
def name(self):
"""Return the name of the garage door if any."""
return f"{self._device.name} MyQ Gateway"
@property
def is_on(self):
"""Return if the device is online."""
if not self.coordinator.last_update_success:
return False
        # Not all devices report online, so assume True if it's missing
return self._device.device_json[MYQ_DEVICE_STATE].get(
MYQ_DEVICE_STATE_ONLINE, True
)
@property
def available(self) -> bool:
"""Entity is always available."""
return True
@property
def unique_id(self):
"""Return a unique, Home Assistant friendly identifier for this entity."""
return self._device.device_id
@property<|fim▁hole|> "name": self.name,
"manufacturer": MANUFACTURER,
"sw_version": self._device.firmware_version,
}
model = KNOWN_MODELS.get(self._device.device_id[2:4])
if model:
device_info["model"] = model
return device_info<|fim▁end|> | def device_info(self):
"""Return the device_info of the device."""
device_info = {
"identifiers": {(DOMAIN, self._device.device_id)}, |
<|file_name|>fpll.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2004,2005 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|>#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
from gnuradio import gr, atsc
import math
def main():
fg = gr.flow_graph()
u = gr.file_source(gr.sizeof_float,"/tmp/atsc_pipe_2")
input_rate = 19.2e6
IF_freq = 5.75e6
    # use half the symbol rate: we design a low-pass prototype, and shifting
    # it to band-pass below doubles its effective width
symbol_rate = atsc.ATSC_SYMBOL_RATE/2.
NTAPS = 279
tt = gr.firdes.root_raised_cosine (1.0, input_rate, symbol_rate, .115, NTAPS)
# heterodyne the low pass coefficients up to the specified bandpass
# center frequency. Note that when we do this, the filter bandwidth
# is effectively twice the low pass (2.69 * 2 = 5.38) and hence
# matches the diagram in the ATSC spec.
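    # (modulation theorem: multiplying h[n] by 2*cos(2*pi*f_c*n/f_s) shifts
    # H(f) to +/-f_c, so a low-pass of one-sided width B becomes a band-pass
    # of width 2B centered on f_c)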
arg = 2. * math.pi * IF_freq / input_rate
t=[]
for i in range(len(tt)):
t += [tt[i] * 2. * math.cos(arg * i)]
rrc = gr.fir_filter_fff(1, t)
fpll = atsc.fpll()
pilot_freq = IF_freq - 3e6 + 0.31e6
lower_edge = 6e6 - 0.31e6
upper_edge = IF_freq - 3e6 + pilot_freq
transition_width = upper_edge - lower_edge
lp_coeffs = gr.firdes.low_pass (1.0,
input_rate,
(lower_edge + upper_edge) * 0.5,
transition_width,
gr.firdes.WIN_HAMMING);
lp_filter = gr.fir_filter_fff (1,lp_coeffs)
alpha = 1e-5
iir = gr.single_pole_iir_filter_ff(alpha)
remove_dc = gr.sub_ff()
out = gr.file_sink(gr.sizeof_float,"/tmp/atsc_pipe_3")
# out = gr.file_sink(gr.sizeof_float,"/mnt/sata/atsc_data_float")
fg.connect(u, fpll, lp_filter)
fg.connect(lp_filter, iir)
fg.connect(lp_filter, (remove_dc,0))
fg.connect(iir, (remove_dc,1))
fg.connect(remove_dc, out)
fg.run()
if __name__ == '__main__':
main ()<|fim▁end|> | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details. |
<|file_name|>test_basic.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
tests.basic
~~~~~~~~~~~~~~~~~~~~~
The basic functionality.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import re
import uuid
import time
import flask
import pickle
from datetime import datetime
from threading import Thread
from flask._compat import text_type
from werkzeug.exceptions import BadRequest, NotFound, Forbidden
from werkzeug.http import parse_date
from werkzeug.routing import BuildError
import werkzeug.serving
def test_options_work():
app = flask.Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'Hello World'
rv = app.test_client().open('/', method='OPTIONS')
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS', 'POST']
assert rv.data == b''
def test_options_on_multiple_rules():
app = flask.Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'Hello World'
@app.route('/', methods=['PUT'])
def index_put():
return 'Aha!'
rv = app.test_client().open('/', method='OPTIONS')
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS', 'POST', 'PUT']
def test_options_handling_disabled():
app = flask.Flask(__name__)
def index():
return 'Hello World!'
index.provide_automatic_options = False
app.route('/')(index)
rv = app.test_client().open('/', method='OPTIONS')
assert rv.status_code == 405
app = flask.Flask(__name__)
def index2():
return 'Hello World!'
index2.provide_automatic_options = True
app.route('/', methods=['OPTIONS'])(index2)
rv = app.test_client().open('/', method='OPTIONS')
assert sorted(rv.allow) == ['OPTIONS']
def test_request_dispatching():
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.request.method
@app.route('/more', methods=['GET', 'POST'])
def more():<|fim▁hole|> rv = c.post('/')
assert rv.status_code == 405
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS']
rv = c.head('/')
assert rv.status_code == 200
assert not rv.data # head truncates
assert c.post('/more').data == b'POST'
assert c.get('/more').data == b'GET'
rv = c.delete('/more')
assert rv.status_code == 405
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS', 'POST']
def test_disallow_string_for_allowed_methods():
app = flask.Flask(__name__)
with pytest.raises(TypeError):
@app.route('/', methods='GET POST')
def index():
return "Hey"
def test_url_mapping():
app = flask.Flask(__name__)
random_uuid4 = "7eb41166-9ebf-4d26-b771-ea3f54f8b383"
def index():
return flask.request.method
def more():
return flask.request.method
def options():
return random_uuid4
app.add_url_rule('/', 'index', index)
app.add_url_rule('/more', 'more', more, methods=['GET', 'POST'])
# Issue 1288: Test that automatic options are not added when non-uppercase 'options' in methods
app.add_url_rule('/options', 'options', options, methods=['options'])
c = app.test_client()
assert c.get('/').data == b'GET'
rv = c.post('/')
assert rv.status_code == 405
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS']
rv = c.head('/')
assert rv.status_code == 200
assert not rv.data # head truncates
assert c.post('/more').data == b'POST'
assert c.get('/more').data == b'GET'
rv = c.delete('/more')
assert rv.status_code == 405
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS', 'POST']
rv = c.open('/options', method='OPTIONS')
assert rv.status_code == 200
assert random_uuid4 in rv.data.decode("utf-8")
def test_werkzeug_routing():
from werkzeug.routing import Submount, Rule
app = flask.Flask(__name__)
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
def bar():
return 'bar'
def index():
return 'index'
app.view_functions['bar'] = bar
app.view_functions['index'] = index
c = app.test_client()
assert c.get('/foo/').data == b'index'
assert c.get('/foo/bar').data == b'bar'
def test_endpoint_decorator():
from werkzeug.routing import Submount, Rule
app = flask.Flask(__name__)
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
@app.endpoint('bar')
def bar():
return 'bar'
@app.endpoint('index')
def index():
return 'index'
c = app.test_client()
assert c.get('/foo/').data == b'index'
assert c.get('/foo/bar').data == b'bar'
def test_session():
app = flask.Flask(__name__)
app.secret_key = 'testkey'
@app.route('/set', methods=['POST'])
def set():
flask.session['value'] = flask.request.form['value']
return 'value set'
@app.route('/get')
def get():
return flask.session['value']
c = app.test_client()
assert c.post('/set', data={'value': '42'}).data == b'value set'
assert c.get('/get').data == b'42'
def test_session_using_server_name():
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com/')
assert 'domain=.example.com' in rv.headers['set-cookie'].lower()
assert 'httponly' in rv.headers['set-cookie'].lower()
def test_session_using_server_name_and_port():
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com:8080'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/')
assert 'domain=.example.com' in rv.headers['set-cookie'].lower()
assert 'httponly' in rv.headers['set-cookie'].lower()
def test_session_using_server_name_port_and_path():
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com:8080',
APPLICATION_ROOT='/foo'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/foo')
assert 'domain=example.com' in rv.headers['set-cookie'].lower()
assert 'path=/foo' in rv.headers['set-cookie'].lower()
assert 'httponly' in rv.headers['set-cookie'].lower()
def test_session_using_application_root():
class PrefixPathMiddleware(object):
def __init__(self, app, prefix):
self.app = app
self.prefix = prefix
def __call__(self, environ, start_response):
environ['SCRIPT_NAME'] = self.prefix
return self.app(environ, start_response)
app = flask.Flask(__name__)
app.wsgi_app = PrefixPathMiddleware(app.wsgi_app, '/bar')
app.config.update(
SECRET_KEY='foo',
APPLICATION_ROOT='/bar'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/')
assert 'path=/bar' in rv.headers['set-cookie'].lower()
def test_session_using_session_settings():
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='www.example.com:8080',
APPLICATION_ROOT='/test',
SESSION_COOKIE_DOMAIN='.example.com',
SESSION_COOKIE_HTTPONLY=False,
SESSION_COOKIE_SECURE=True,
SESSION_COOKIE_PATH='/'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://www.example.com:8080/test/')
cookie = rv.headers['set-cookie'].lower()
assert 'domain=.example.com' in cookie
assert 'path=/' in cookie
assert 'secure' in cookie
assert 'httponly' not in cookie
def test_missing_session():
app = flask.Flask(__name__)
def expect_exception(f, *args, **kwargs):
try:
f(*args, **kwargs)
except RuntimeError as e:
assert e.args and 'session is unavailable' in e.args[0]
else:
assert False, 'expected exception'
with app.test_request_context():
assert flask.session.get('missing_key') is None
expect_exception(flask.session.__setitem__, 'foo', 42)
expect_exception(flask.session.pop, 'foo')
def test_session_expiration():
permanent = True
app = flask.Flask(__name__)
app.secret_key = 'testkey'
@app.route('/')
def index():
flask.session['test'] = 42
flask.session.permanent = permanent
return ''
@app.route('/test')
def test():
return text_type(flask.session.permanent)
client = app.test_client()
rv = client.get('/')
assert 'set-cookie' in rv.headers
    match = re.search(r'(?i)\bexpires=([^;]+)', rv.headers['set-cookie'])
expires = parse_date(match.group())
expected = datetime.utcnow() + app.permanent_session_lifetime
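    # compare only the date fields; the seconds spent handling the request
    # would make an exact datetime comparison flaky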
assert expires.year == expected.year
assert expires.month == expected.month
assert expires.day == expected.day
rv = client.get('/test')
assert rv.data == b'True'
permanent = False
rv = app.test_client().get('/')
assert 'set-cookie' in rv.headers
match = re.search(r'\bexpires=([^;]+)', rv.headers['set-cookie'])
assert match is None
def test_session_stored_last():
app = flask.Flask(__name__)
app.secret_key = 'development-key'
app.testing = True
@app.after_request
def modify_session(response):
flask.session['foo'] = 42
return response
@app.route('/')
def dump_session_contents():
return repr(flask.session.get('foo'))
c = app.test_client()
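    # the session is serialized after the after_request hooks run, so the
    # value set there only becomes visible on the following request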
assert c.get('/').data == b'None'
assert c.get('/').data == b'42'
def test_session_special_types():
app = flask.Flask(__name__)
app.secret_key = 'development-key'
app.testing = True
now = datetime.utcnow().replace(microsecond=0)
the_uuid = uuid.uuid4()
@app.after_request
def modify_session(response):
flask.session['m'] = flask.Markup('Hello!')
flask.session['u'] = the_uuid
flask.session['dt'] = now
flask.session['b'] = b'\xff'
flask.session['t'] = (1, 2, 3)
return response
@app.route('/')
def dump_session_contents():
return pickle.dumps(dict(flask.session))
c = app.test_client()
c.get('/')
rv = pickle.loads(c.get('/').data)
assert rv['m'] == flask.Markup('Hello!')
assert type(rv['m']) == flask.Markup
assert rv['dt'] == now
assert rv['u'] == the_uuid
assert rv['b'] == b'\xff'
assert type(rv['b']) == bytes
assert rv['t'] == (1, 2, 3)
def test_session_cookie_setting():
app = flask.Flask(__name__)
app.testing = True
app.secret_key = 'dev key'
is_permanent = True
@app.route('/bump')
def bump():
rv = flask.session['foo'] = flask.session.get('foo', 0) + 1
flask.session.permanent = is_permanent
return str(rv)
@app.route('/read')
def read():
return str(flask.session.get('foo', 0))
def run_test(expect_header):
with app.test_client() as c:
assert c.get('/bump').data == b'1'
assert c.get('/bump').data == b'2'
assert c.get('/bump').data == b'3'
rv = c.get('/read')
set_cookie = rv.headers.get('set-cookie')
assert (set_cookie is not None) == expect_header
assert rv.data == b'3'
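    # a Set-Cookie header on the read-only request is expected only when the
    # session is permanent *and* SESSION_REFRESH_EACH_REQUEST is enabled: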
is_permanent = True
app.config['SESSION_REFRESH_EACH_REQUEST'] = True
run_test(expect_header=True)
is_permanent = True
app.config['SESSION_REFRESH_EACH_REQUEST'] = False
run_test(expect_header=False)
is_permanent = False
app.config['SESSION_REFRESH_EACH_REQUEST'] = True
run_test(expect_header=False)
is_permanent = False
app.config['SESSION_REFRESH_EACH_REQUEST'] = False
run_test(expect_header=False)
def test_flashes():
app = flask.Flask(__name__)
app.secret_key = 'testkey'
with app.test_request_context():
assert not flask.session.modified
flask.flash('Zap')
flask.session.modified = False
flask.flash('Zip')
assert flask.session.modified
assert list(flask.get_flashed_messages()) == ['Zap', 'Zip']
def test_extended_flashing():
# Be sure app.testing=True below, else tests can fail silently.
#
# Specifically, if app.testing is not set to True, the AssertionErrors
# in the view functions will cause a 500 response to the test client
# instead of propagating exceptions.
app = flask.Flask(__name__)
app.secret_key = 'testkey'
app.testing = True
@app.route('/')
def index():
flask.flash(u'Hello World')
flask.flash(u'Hello World', 'error')
flask.flash(flask.Markup(u'<em>Testing</em>'), 'warning')
return ''
@app.route('/test/')
def test():
messages = flask.get_flashed_messages()
assert list(messages) == [
u'Hello World',
u'Hello World',
flask.Markup(u'<em>Testing</em>')
]
return ''
@app.route('/test_with_categories/')
def test_with_categories():
messages = flask.get_flashed_messages(with_categories=True)
assert len(messages) == 3
assert list(messages) == [
('message', u'Hello World'),
('error', u'Hello World'),
('warning', flask.Markup(u'<em>Testing</em>'))
]
return ''
@app.route('/test_filter/')
def test_filter():
messages = flask.get_flashed_messages(
category_filter=['message'], with_categories=True)
assert list(messages) == [('message', u'Hello World')]
return ''
@app.route('/test_filters/')
def test_filters():
messages = flask.get_flashed_messages(
category_filter=['message', 'warning'], with_categories=True)
assert list(messages) == [
('message', u'Hello World'),
('warning', flask.Markup(u'<em>Testing</em>'))
]
return ''
@app.route('/test_filters_without_returning_categories/')
def test_filters2():
messages = flask.get_flashed_messages(
category_filter=['message', 'warning'])
assert len(messages) == 2
assert messages[0] == u'Hello World'
assert messages[1] == flask.Markup(u'<em>Testing</em>')
return ''
# Create new test client on each test to clean flashed messages.
c = app.test_client()
c.get('/')
c.get('/test/')
c = app.test_client()
c.get('/')
c.get('/test_with_categories/')
c = app.test_client()
c.get('/')
c.get('/test_filter/')
c = app.test_client()
c.get('/')
c.get('/test_filters/')
c = app.test_client()
c.get('/')
c.get('/test_filters_without_returning_categories/')
def test_request_processing():
app = flask.Flask(__name__)
evts = []
@app.before_request
def before_request():
evts.append('before')
@app.after_request
def after_request(response):
response.data += b'|after'
evts.append('after')
return response
@app.route('/')
def index():
assert 'before' in evts
assert 'after' not in evts
return 'request'
assert 'after' not in evts
rv = app.test_client().get('/').data
assert 'after' in evts
assert rv == b'request|after'
def test_request_preprocessing_early_return():
app = flask.Flask(__name__)
evts = []
@app.before_request
def before_request1():
evts.append(1)
@app.before_request
def before_request2():
evts.append(2)
return "hello"
@app.before_request
def before_request3():
evts.append(3)
return "bye"
@app.route('/')
def index():
evts.append('index')
return "damnit"
rv = app.test_client().get('/').data.strip()
assert rv == b'hello'
assert evts == [1, 2]
def test_after_request_processing():
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
@flask.after_this_request
def foo(response):
response.headers['X-Foo'] = 'a header'
return response
return 'Test'
c = app.test_client()
resp = c.get('/')
assert resp.status_code == 200
assert resp.headers['X-Foo'] == 'a header'
def test_teardown_request_handler():
called = []
app = flask.Flask(__name__)
@app.teardown_request
def teardown_request(exc):
called.append(True)
return "Ignored"
@app.route('/')
def root():
return "Response"
rv = app.test_client().get('/')
assert rv.status_code == 200
assert b'Response' in rv.data
assert len(called) == 1
def test_teardown_request_handler_debug_mode():
called = []
app = flask.Flask(__name__)
app.testing = True
@app.teardown_request
def teardown_request(exc):
called.append(True)
return "Ignored"
@app.route('/')
def root():
return "Response"
rv = app.test_client().get('/')
assert rv.status_code == 200
assert b'Response' in rv.data
assert len(called) == 1
def test_teardown_request_handler_error():
called = []
app = flask.Flask(__name__)
app.config['LOGGER_HANDLER_POLICY'] = 'never'
@app.teardown_request
def teardown_request1(exc):
assert type(exc) == ZeroDivisionError
called.append(True)
# This raises a new error and blows away sys.exc_info(), so we can
# test that all teardown_requests get passed the same original
# exception.
try:
raise TypeError()
except:
pass
@app.teardown_request
def teardown_request2(exc):
assert type(exc) == ZeroDivisionError
called.append(True)
# This raises a new error and blows away sys.exc_info(), so we can
# test that all teardown_requests get passed the same original
# exception.
try:
raise TypeError()
except:
pass
@app.route('/')
def fails():
1 // 0
rv = app.test_client().get('/')
assert rv.status_code == 500
assert b'Internal Server Error' in rv.data
assert len(called) == 2
def test_before_after_request_order():
called = []
app = flask.Flask(__name__)
@app.before_request
def before1():
called.append(1)
@app.before_request
def before2():
called.append(2)
@app.after_request
def after1(response):
called.append(4)
return response
@app.after_request
def after2(response):
called.append(3)
return response
@app.teardown_request
def finish1(exc):
called.append(6)
@app.teardown_request
def finish2(exc):
called.append(5)
@app.route('/')
def index():
return '42'
rv = app.test_client().get('/')
assert rv.data == b'42'
assert called == [1, 2, 3, 4, 5, 6]
def test_error_handling():
app = flask.Flask(__name__)
app.config['LOGGER_HANDLER_POLICY'] = 'never'
@app.errorhandler(404)
def not_found(e):
return 'not found', 404
@app.errorhandler(500)
def internal_server_error(e):
return 'internal server error', 500
@app.errorhandler(Forbidden)
def forbidden(e):
return 'forbidden', 403
@app.route('/')
def index():
flask.abort(404)
@app.route('/error')
def error():
1 // 0
@app.route('/forbidden')
def error2():
flask.abort(403)
c = app.test_client()
rv = c.get('/')
assert rv.status_code == 404
assert rv.data == b'not found'
rv = c.get('/error')
assert rv.status_code == 500
assert b'internal server error' == rv.data
rv = c.get('/forbidden')
assert rv.status_code == 403
assert b'forbidden' == rv.data
def test_before_request_and_routing_errors():
app = flask.Flask(__name__)
@app.before_request
def attach_something():
flask.g.something = 'value'
@app.errorhandler(404)
def return_something(error):
return flask.g.something, 404
rv = app.test_client().get('/')
assert rv.status_code == 404
assert rv.data == b'value'
def test_user_error_handling():
class MyException(Exception):
pass
app = flask.Flask(__name__)
@app.errorhandler(MyException)
def handle_my_exception(e):
assert isinstance(e, MyException)
return '42'
@app.route('/')
def index():
raise MyException()
c = app.test_client()
assert c.get('/').data == b'42'
def test_http_error_subclass_handling():
class ForbiddenSubclass(Forbidden):
pass
app = flask.Flask(__name__)
@app.errorhandler(ForbiddenSubclass)
def handle_forbidden_subclass(e):
assert isinstance(e, ForbiddenSubclass)
return 'banana'
@app.errorhandler(403)
    def handle_403(e):
assert not isinstance(e, ForbiddenSubclass)
assert isinstance(e, Forbidden)
return 'apple'
@app.route('/1')
def index1():
raise ForbiddenSubclass()
@app.route('/2')
def index2():
flask.abort(403)
@app.route('/3')
def index3():
raise Forbidden()
c = app.test_client()
assert c.get('/1').data == b'banana'
assert c.get('/2').data == b'apple'
assert c.get('/3').data == b'apple'
def test_trapping_of_bad_request_key_errors():
app = flask.Flask(__name__)
app.testing = True
@app.route('/fail')
def fail():
flask.request.form['missing_key']
c = app.test_client()
assert c.get('/fail').status_code == 400
app.config['TRAP_BAD_REQUEST_ERRORS'] = True
c = app.test_client()
try:
c.get('/fail')
except KeyError as e:
assert isinstance(e, BadRequest)
else:
assert False, 'Expected exception'
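# TRAP_BAD_REQUEST_ERRORS only re-raises the KeyError-style BadRequests from
# missing form keys; TRAP_HTTP_EXCEPTIONS (next test) re-raises every
# HTTPException instead of turning it into an error response.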
def test_trapping_of_all_http_exceptions():
app = flask.Flask(__name__)
app.testing = True
app.config['TRAP_HTTP_EXCEPTIONS'] = True
@app.route('/fail')
def fail():
flask.abort(404)
c = app.test_client()
with pytest.raises(NotFound):
c.get('/fail')
def test_enctype_debug_helper():
from flask.debughelpers import DebugFilesKeyError
app = flask.Flask(__name__)
app.debug = True
@app.route('/fail', methods=['POST'])
def index():
return flask.request.files['foo'].filename
# with statement is important because we leave an exception on the
# stack otherwise and we want to ensure that this is not the case
# to not negatively affect other tests.
with app.test_client() as c:
try:
c.post('/fail', data={'foo': 'index.txt'})
except DebugFilesKeyError as e:
assert 'no file contents were transmitted' in str(e)
assert 'This was submitted: "index.txt"' in str(e)
else:
assert False, 'Expected exception'
def test_response_creation():
app = flask.Flask(__name__)
@app.route('/unicode')
def from_unicode():
return u'Hällo Wörld'
@app.route('/string')
def from_string():
return u'Hällo Wörld'.encode('utf-8')
@app.route('/args')
def from_tuple():
return 'Meh', 400, {
'X-Foo': 'Testing',
'Content-Type': 'text/plain; charset=utf-8'
}
@app.route('/two_args')
def from_two_args_tuple():
return 'Hello', {
'X-Foo': 'Test',
'Content-Type': 'text/plain; charset=utf-8'
}
@app.route('/args_status')
def from_status_tuple():
return 'Hi, status!', 400
@app.route('/args_header')
def from_response_instance_status_tuple():
return flask.Response('Hello world', 404), {
"X-Foo": "Bar",
"X-Bar": "Foo"
}
c = app.test_client()
assert c.get('/unicode').data == u'Hällo Wörld'.encode('utf-8')
assert c.get('/string').data == u'Hällo Wörld'.encode('utf-8')
rv = c.get('/args')
assert rv.data == b'Meh'
assert rv.headers['X-Foo'] == 'Testing'
assert rv.status_code == 400
assert rv.mimetype == 'text/plain'
rv2 = c.get('/two_args')
assert rv2.data == b'Hello'
assert rv2.headers['X-Foo'] == 'Test'
assert rv2.status_code == 200
assert rv2.mimetype == 'text/plain'
rv3 = c.get('/args_status')
assert rv3.data == b'Hi, status!'
assert rv3.status_code == 400
assert rv3.mimetype == 'text/html'
rv4 = c.get('/args_header')
assert rv4.data == b'Hello world'
assert rv4.headers['X-Foo'] == 'Bar'
assert rv4.headers['X-Bar'] == 'Foo'
assert rv4.status_code == 404
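# flask.make_response accepts the same return shapes view functions may use
# (body, (body, status), (body, headers), Response instances) and normalizes
# all of them into a Response object, as the next two tests show.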
def test_make_response():
app = flask.Flask(__name__)
with app.test_request_context():
rv = flask.make_response()
assert rv.status_code == 200
assert rv.data == b''
assert rv.mimetype == 'text/html'
rv = flask.make_response('Awesome')
assert rv.status_code == 200
assert rv.data == b'Awesome'
assert rv.mimetype == 'text/html'
rv = flask.make_response('W00t', 404)
assert rv.status_code == 404
assert rv.data == b'W00t'
assert rv.mimetype == 'text/html'
def test_make_response_with_response_instance():
app = flask.Flask(__name__)
with app.test_request_context():
rv = flask.make_response(
flask.jsonify({'msg': 'W00t'}), 400)
assert rv.status_code == 400
        assert rv.data == b'{\n  "msg": "W00t"\n}\n'
assert rv.mimetype == 'application/json'
rv = flask.make_response(
flask.Response(''), 400)
assert rv.status_code == 400
assert rv.data == b''
assert rv.mimetype == 'text/html'
rv = flask.make_response(
flask.Response('', headers={'Content-Type': 'text/html'}),
400, [('X-Foo', 'bar')])
assert rv.status_code == 400
assert rv.headers['Content-Type'] == 'text/html'
assert rv.headers['X-Foo'] == 'bar'
def test_jsonify_no_prettyprint():
app = flask.Flask(__name__)
app.config.update({"JSONIFY_PRETTYPRINT_REGULAR": False})
with app.test_request_context():
compressed_msg = b'{"msg":{"submsg":"W00t"},"msg2":"foobar"}\n'
uncompressed_msg = {
"msg": {
"submsg": "W00t"
},
"msg2": "foobar"
}
rv = flask.make_response(
flask.jsonify(uncompressed_msg), 200)
assert rv.data == compressed_msg
def test_jsonify_prettyprint():
app = flask.Flask(__name__)
app.config.update({"JSONIFY_PRETTYPRINT_REGULAR": True})
with app.test_request_context():
        uncompressed_msg = {"msg": {"submsg": "W00t"}, "msg2": "foobar"}
        pretty_response = \
            b'{\n  "msg": {\n    "submsg": "W00t"\n  }, \n  "msg2": "foobar"\n}\n'
        rv = flask.make_response(
            flask.jsonify(uncompressed_msg), 200)
        assert rv.data == pretty_response
def test_url_generation():
app = flask.Flask(__name__)
@app.route('/hello/<name>', methods=['POST'])
    def hello(name):
pass
with app.test_request_context():
assert flask.url_for('hello', name='test x') == '/hello/test%20x'
assert flask.url_for('hello', name='test x', _external=True) == \
'http://localhost/hello/test%20x'
def test_build_error_handler():
app = flask.Flask(__name__)
# Test base case, a URL which results in a BuildError.
with app.test_request_context():
pytest.raises(BuildError, flask.url_for, 'spam')
# Verify the error is re-raised if not the current exception.
try:
with app.test_request_context():
flask.url_for('spam')
except BuildError as err:
error = err
try:
raise RuntimeError('Test case where BuildError is not current.')
except RuntimeError:
pytest.raises(
BuildError, app.handle_url_build_error, error, 'spam', {})
# Test a custom handler.
def handler(error, endpoint, values):
# Just a test.
return '/test_handler/'
app.url_build_error_handlers.append(handler)
with app.test_request_context():
assert flask.url_for('spam') == '/test_handler/'
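# Custom URL converters are registered on app.url_map.converters and must
# implement to_python (URL segment -> value) and to_url (value -> URL
# segment), as the ListConverter below does.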
def test_custom_converters():
from werkzeug.routing import BaseConverter
class ListConverter(BaseConverter):
def to_python(self, value):
return value.split(',')
def to_url(self, value):
base_to_url = super(ListConverter, self).to_url
return ','.join(base_to_url(x) for x in value)
app = flask.Flask(__name__)
app.url_map.converters['list'] = ListConverter
@app.route('/<list:args>')
def index(args):
return '|'.join(args)
c = app.test_client()
assert c.get('/1,2,3').data == b'1|2|3'
def test_static_files():
app = flask.Flask(__name__)
app.testing = True
rv = app.test_client().get('/static/index.html')
assert rv.status_code == 200
assert rv.data.strip() == b'<h1>Hello World!</h1>'
with app.test_request_context():
assert flask.url_for('static', filename='index.html') == \
'/static/index.html'
rv.close()
def test_none_response():
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def test():
return None
try:
app.test_client().get('/')
except ValueError as e:
assert str(e) == 'View function did not return a response'
    else:
        assert False, 'Expected ValueError'
def test_request_locals():
assert repr(flask.g) == '<LocalProxy unbound>'
assert not flask.g
def test_test_app_proper_environ():
app = flask.Flask(__name__)
app.config.update(
SERVER_NAME='localhost.localdomain:5000'
)
@app.route('/')
def index():
return 'Foo'
@app.route('/', subdomain='foo')
def subdomain():
return 'Foo SubDomain'
rv = app.test_client().get('/')
assert rv.data == b'Foo'
rv = app.test_client().get('/', 'http://localhost.localdomain:5000')
assert rv.data == b'Foo'
rv = app.test_client().get('/', 'https://localhost.localdomain:5000')
assert rv.data == b'Foo'
app.config.update(SERVER_NAME='localhost.localdomain')
rv = app.test_client().get('/', 'https://localhost.localdomain')
assert rv.data == b'Foo'
try:
app.config.update(SERVER_NAME='localhost.localdomain:443')
rv = app.test_client().get('/', 'https://localhost.localdomain')
# Werkzeug 0.8
assert rv.status_code == 404
except ValueError as e:
# Werkzeug 0.7
assert str(e) == (
"the server name provided "
"('localhost.localdomain:443') does not match the "
"server name from the WSGI environment ('localhost.localdomain')"
)
try:
app.config.update(SERVER_NAME='localhost.localdomain')
rv = app.test_client().get('/', 'http://foo.localhost')
# Werkzeug 0.8
assert rv.status_code == 404
except ValueError as e:
# Werkzeug 0.7
assert str(e) == (
"the server name provided "
"('localhost.localdomain') does not match the "
"server name from the WSGI environment ('foo.localhost')"
)
rv = app.test_client().get('/', 'http://foo.localhost.localdomain')
assert rv.data == b'Foo SubDomain'
def test_exception_propagation():
def apprunner(config_key):
app = flask.Flask(__name__)
app.config['LOGGER_HANDLER_POLICY'] = 'never'
@app.route('/')
def index():
1 // 0
c = app.test_client()
if config_key is not None:
app.config[config_key] = True
try:
c.get('/')
except Exception:
pass
else:
assert False, 'expected exception'
else:
assert c.get('/').status_code == 500
# we have to run this test in an isolated thread because if the
# debug flag is set to true and an exception happens the context is
# not torn down. This causes other tests that run after this fail
# when they expect no exception on the stack.
for config_key in 'TESTING', 'PROPAGATE_EXCEPTIONS', 'DEBUG', None:
t = Thread(target=apprunner, args=(config_key,))
t.start()
t.join()
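# MAX_CONTENT_LENGTH makes form parsing raise RequestEntityTooLarge (413)
# before handler code touches the data, so the asserts below are never
# reached and the registered 413 handler produces the response.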
def test_max_content_length():
app = flask.Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 64
@app.before_request
def always_first():
flask.request.form['myfile']
assert False
@app.route('/accept', methods=['POST'])
def accept_file():
flask.request.form['myfile']
assert False
@app.errorhandler(413)
def catcher(error):
return '42'
c = app.test_client()
rv = c.post('/accept', data={'myfile': 'foo' * 100})
assert rv.data == b'42'
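# url_value_preprocessor pulls shared URL values (here lang_code) out of the
# matched values before they reach the view, and url_defaults injects them
# back into url_for calls, so views never handle lang_code explicitly.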
def test_url_processors():
app = flask.Flask(__name__)
@app.url_defaults
def add_language_code(endpoint, values):
if flask.g.lang_code is not None and \
app.url_map.is_endpoint_expecting(endpoint, 'lang_code'):
values.setdefault('lang_code', flask.g.lang_code)
@app.url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop('lang_code', None)
@app.route('/<lang_code>/')
def index():
return flask.url_for('about')
@app.route('/<lang_code>/about')
def about():
return flask.url_for('something_else')
@app.route('/foo')
def something_else():
return flask.url_for('about', lang_code='en')
c = app.test_client()
assert c.get('/de/').data == b'/de/about'
assert c.get('/de/about').data == b'/foo'
assert c.get('/foo').data == b'/en/about'
def test_inject_blueprint_url_defaults():
app = flask.Flask(__name__)
bp = flask.Blueprint('foo.bar.baz', __name__,
template_folder='template')
@bp.url_defaults
def bp_defaults(endpoint, values):
values['page'] = 'login'
@bp.route('/<page>')
def view(page):
pass
app.register_blueprint(bp)
values = dict()
app.inject_url_defaults('foo.bar.baz.view', values)
expected = dict(page='login')
assert values == expected
with app.test_request_context('/somepage'):
url = flask.url_for('foo.bar.baz.view')
expected = '/login'
assert url == expected
def test_nonascii_pathinfo():
app = flask.Flask(__name__)
app.testing = True
@app.route(u'/киртест')
def index():
return 'Hello World!'
c = app.test_client()
rv = c.get(u'/киртест')
assert rv.data == b'Hello World!'
def test_debug_mode_complains_after_first_request():
app = flask.Flask(__name__)
app.debug = True
@app.route('/')
def index():
return 'Awesome'
assert not app.got_first_request
assert app.test_client().get('/').data == b'Awesome'
try:
@app.route('/foo')
def broken():
return 'Meh'
except AssertionError as e:
assert 'A setup function was called' in str(e)
else:
assert False, 'Expected exception'
app.debug = False
@app.route('/foo')
def working():
return 'Meh'
assert app.test_client().get('/foo').data == b'Meh'
assert app.got_first_request
def test_before_first_request_functions():
got = []
app = flask.Flask(__name__)
@app.before_first_request
def foo():
got.append(42)
c = app.test_client()
c.get('/')
assert got == [42]
c.get('/')
assert got == [42]
assert app.got_first_request
def test_before_first_request_functions_concurrent():
got = []
app = flask.Flask(__name__)
@app.before_first_request
def foo():
time.sleep(0.2)
got.append(42)
c = app.test_client()
def get_and_assert():
c.get("/")
assert got == [42]
t = Thread(target=get_and_assert)
t.start()
get_and_assert()
t.join()
assert app.got_first_request
def test_routing_redirect_debugging():
app = flask.Flask(__name__)
app.debug = True
@app.route('/foo/', methods=['GET', 'POST'])
def foo():
return 'success'
with app.test_client() as c:
try:
c.post('/foo', data={})
except AssertionError as e:
assert 'http://localhost/foo/' in str(e)
assert ('Make sure to directly send '
'your POST-request to this URL') in str(e)
else:
assert False, 'Expected exception'
rv = c.get('/foo', data={}, follow_redirects=True)
assert rv.data == b'success'
app.debug = False
with app.test_client() as c:
rv = c.post('/foo', data={}, follow_redirects=True)
assert rv.data == b'success'
def test_route_decorator_custom_endpoint():
app = flask.Flask(__name__)
app.debug = True
@app.route('/foo/')
def foo():
return flask.request.endpoint
@app.route('/bar/', endpoint='bar')
def for_bar():
return flask.request.endpoint
@app.route('/bar/123', endpoint='123')
def for_bar_foo():
return flask.request.endpoint
with app.test_request_context():
assert flask.url_for('foo') == '/foo/'
assert flask.url_for('bar') == '/bar/'
assert flask.url_for('123') == '/bar/123'
c = app.test_client()
assert c.get('/foo/').data == b'foo'
assert c.get('/bar/').data == b'bar'
assert c.get('/bar/123').data == b'123'
def test_preserve_only_once():
app = flask.Flask(__name__)
app.debug = True
@app.route('/fail')
def fail_func():
1 // 0
c = app.test_client()
for x in range(3):
with pytest.raises(ZeroDivisionError):
c.get('/fail')
assert flask._request_ctx_stack.top is not None
assert flask._app_ctx_stack.top is not None
# implicit appctx disappears too
flask._request_ctx_stack.top.pop()
assert flask._request_ctx_stack.top is None
assert flask._app_ctx_stack.top is None
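# In debug mode the context of a failing request is preserved for the
# debugger, so its teardown handlers only fire when the next request (or an
# explicit pop) clears it; the following test relies on exactly that.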
def test_preserve_remembers_exception():
app = flask.Flask(__name__)
app.debug = True
errors = []
@app.route('/fail')
def fail_func():
1 // 0
@app.route('/success')
def success_func():
return 'Okay'
@app.teardown_request
def teardown_handler(exc):
errors.append(exc)
c = app.test_client()
# After this failure we did not yet call the teardown handler
with pytest.raises(ZeroDivisionError):
c.get('/fail')
assert errors == []
    # But this request flushes the preserved context, so two teardowns run:
    # the preserved failing request's (the error) and this request's own.
c.get('/success')
assert len(errors) == 2
assert isinstance(errors[0], ZeroDivisionError)
    # With nothing preserved anymore, a further request just appends its own
    # teardown value (exc is None).
c.get('/success')
assert len(errors) == 3
assert errors[1] is None
def test_get_method_on_g():
app = flask.Flask(__name__)
app.testing = True
with app.app_context():
assert flask.g.get('x') is None
assert flask.g.get('x', 11) == 11
flask.g.x = 42
assert flask.g.get('x') == 42
assert flask.g.x == 42
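# A minimal additional sketch, not part of the original suite: g also gains
# a dict-style setdefault helper in Flask 0.11+. The test name is
# illustrative; skip it on older Flask versions.
def test_setdefault_method_on_g_sketch():
    app = flask.Flask(__name__)
    app.testing = True
    with app.app_context():
        # First call stores and returns the default ...
        assert flask.g.setdefault('x', 3) == 3
        # ... later calls return the stored value unchanged.
        assert flask.g.setdefault('x', 7) == 3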
def test_g_iteration_protocol():
app = flask.Flask(__name__)
app.testing = True
with app.app_context():
flask.g.foo = 23
flask.g.bar = 42
assert 'foo' in flask.g
assert 'foos' not in flask.g
assert sorted(flask.g) == ['bar', 'foo']
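# Subdomain routing only works with SERVER_NAME configured, since the URL
# map needs a known host (and optional port) to split the subdomain from.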
def test_subdomain_basic_support():
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost'
@app.route('/')
def normal_index():
return 'normal index'
@app.route('/', subdomain='test')
def test_index():
return 'test index'
c = app.test_client()
rv = c.get('/', 'http://localhost/')
assert rv.data == b'normal index'
rv = c.get('/', 'http://test.localhost/')
assert rv.data == b'test index'
def test_subdomain_matching():
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost'
@app.route('/', subdomain='<user>')
def index(user):
return 'index for %s' % user
c = app.test_client()
rv = c.get('/', 'http://mitsuhiko.localhost/')
assert rv.data == b'index for mitsuhiko'
def test_subdomain_matching_with_ports():
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost:3000'
@app.route('/', subdomain='<user>')
def index(user):
return 'index for %s' % user
c = app.test_client()
rv = c.get('/', 'http://mitsuhiko.localhost:3000/')
assert rv.data == b'index for mitsuhiko'
def test_multi_route_rules():
app = flask.Flask(__name__)
@app.route('/')
@app.route('/<test>/')
def index(test='a'):
return test
rv = app.test_client().open('/')
assert rv.data == b'a'
rv = app.test_client().open('/b/')
assert rv.data == b'b'
def test_multi_route_class_views():
class View(object):
def __init__(self, app):
app.add_url_rule('/', 'index', self.index)
app.add_url_rule('/<test>/', 'index', self.index)
def index(self, test='a'):
return test
app = flask.Flask(__name__)
_ = View(app)
rv = app.test_client().open('/')
assert rv.data == b'a'
rv = app.test_client().open('/b/')
assert rv.data == b'b'
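# app.run delegates to werkzeug.serving.run_simple; monkeypatching it lets
# the last two tests verify the wiring without binding a real socket.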
def test_run_defaults(monkeypatch):
rv = {}
# Mocks werkzeug.serving.run_simple method
def run_simple_mock(*args, **kwargs):
rv['result'] = 'running...'
app = flask.Flask(__name__)
monkeypatch.setattr(werkzeug.serving, 'run_simple', run_simple_mock)
app.run()
assert rv['result'] == 'running...'
def test_run_server_port(monkeypatch):
rv = {}
# Mocks werkzeug.serving.run_simple method
def run_simple_mock(hostname, port, application, *args, **kwargs):
rv['result'] = 'running on %s:%s ...' % (hostname, port)
app = flask.Flask(__name__)
monkeypatch.setattr(werkzeug.serving, 'run_simple', run_simple_mock)
hostname, port = 'localhost', 8000
app.run(hostname, port, debug=True)
assert rv['result'] == 'running on %s:%s ...' % (hostname, port)<|fim▁end|> | return flask.request.method
c = app.test_client()
assert c.get('/').data == b'GET' |