<|file_name|>Rx.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var rx_1 = require("rx");
/* tslint:enable */
function cache(callback) {
var cached$ = this.replay(undefined, 1);
var subscription = cached$.connect();<|fim▁hole|>rx_1.Observable.prototype.cache = cache;
//# sourceMappingURL=Rx.js.map<|fim▁end|> | callback(function () { return subscription.dispose(); });
return cached$;
} |
<|file_name|>fetch_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The rkt Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"archive/tar"
"bytes"
"encoding/base64"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"os"
"path/filepath"
"reflect"
"strings"
"testing"
"time"
"github.com/coreos/rkt/pkg/aci"
"github.com/coreos/rkt/pkg/keystore"
"github.com/coreos/rkt/pkg/keystore/keystoretest"
"github.com/coreos/rkt/rkt/config"
"github.com/coreos/rkt/store"
"github.com/coreos/rkt/Godeps/_workspace/src/github.com/appc/spec/discovery"
"github.com/coreos/rkt/Godeps/_workspace/src/github.com/appc/spec/schema/types"
)
type httpError struct {
code int
message string
}
func (e *httpError) Error() string {
return fmt.Sprintf("%d: %s", e.code, e.message)
}
type serverHandler struct {
body []byte
t *testing.T
auth string
}<|fim▁hole|> switch h.auth {
case "deny":
if _, ok := r.Header[http.CanonicalHeaderKey("Authorization")]; ok {
w.WriteHeader(http.StatusBadRequest)
return
}
case "none":
// no auth to do.
case "basic":
payload, httpErr := getAuthPayload(r, "Basic")
if httpErr != nil {
w.WriteHeader(httpErr.code)
return
}
creds, err := base64.StdEncoding.DecodeString(string(payload))
if err != nil {
w.WriteHeader(http.StatusBadRequest)
return
}
parts := strings.Split(string(creds), ":")
if len(parts) != 2 {
w.WriteHeader(http.StatusBadRequest)
return
}
user := parts[0]
password := parts[1]
if user != "bar" || password != "baz" {
w.WriteHeader(http.StatusUnauthorized)
return
}
case "bearer":
payload, httpErr := getAuthPayload(r, "Bearer")
if httpErr != nil {
w.WriteHeader(httpErr.code)
return
}
if payload != "sometoken" {
w.WriteHeader(http.StatusUnauthorized)
return
}
default:
panic("bug in test")
}
w.Write(h.body)
}
func getAuthPayload(r *http.Request, authType string) (string, *httpError) {
auth := r.Header.Get("Authorization")
if auth == "" {
err := &httpError{
code: http.StatusUnauthorized,
message: "No auth",
}
return "", err
}
parts := strings.Split(auth, " ")
if len(parts) != 2 {
err := &httpError{
code: http.StatusBadRequest,
message: "Malformed auth",
}
return "", err
}
if parts[0] != authType {
err := &httpError{
code: http.StatusUnauthorized,
message: "Wrong auth",
}
return "", err
}
return parts[1], nil
}
type testHeaderer struct {
h http.Header
}
func (h *testHeaderer) Header() http.Header {
return h.h
}
func TestNewDiscoveryApp(t *testing.T) {
tests := []struct {
in string
w *discovery.App
}{
// not a valid AC name
{
"bad AC name",
nil,
},
// simple case - default arch, os should be substituted
{
"foo.com/bar",
&discovery.App{
Name: "foo.com/bar",
Labels: map[types.ACIdentifier]string{
"arch": defaultArch,
"os": defaultOS,
},
},
},
// overriding arch, os should work
{
"www.abc.xyz/my/app,os=freebsd,arch=i386",
&discovery.App{
Name: "www.abc.xyz/my/app",
Labels: map[types.ACIdentifier]string{
"arch": "i386",
"os": "freebsd",
},
},
},
// setting version should work
{
"yes.com/no:v1.2.3",
&discovery.App{
Name: "yes.com/no",
Labels: map[types.ACIdentifier]string{
"version": "v1.2.3",
"arch": defaultArch,
"os": defaultOS,
},
},
},
// arbitrary user-supplied labels
{
"example.com/foo/haha,val=one",
&discovery.App{
Name: "example.com/foo/haha",
Labels: map[types.ACIdentifier]string{
"val": "one",
"arch": defaultArch,
"os": defaultOS,
},
},
},
// combinations
{
"one.two/appname:three,os=four,foo=five,arch=six",
&discovery.App{
Name: "one.two/appname",
Labels: map[types.ACIdentifier]string{
"version": "three",
"os": "four",
"foo": "five",
"arch": "six",
},
},
},
}
for i, tt := range tests {
g := newDiscoveryApp(tt.in)
if !reflect.DeepEqual(g, tt.w) {
t.Errorf("#%d: got %v, want %v", i, g, tt.w)
}
}
}
func TestDownloading(t *testing.T) {
dir, err := ioutil.TempDir("", "download-image")
if err != nil {
t.Fatalf("error creating tempdir: %v", err)
}
defer os.RemoveAll(dir)
imj := `{
"acKind": "ImageManifest",
"acVersion": "0.5.2",
"name": "example.com/test01"
}`
entries := []*aci.ACIEntry{
// An empty file
{
Contents: "hello",
Header: &tar.Header{
Name: "rootfs/file01.txt",
Size: 5,
},
},
}
aci, err := aci.NewACI(dir, imj, entries)
if err != nil {
t.Fatalf("error creating test tar: %v", err)
}
// Rewind the ACI
if _, err := aci.Seek(0, 0); err != nil {
t.Fatalf("unexpected error: %v", err)
}
body, err := ioutil.ReadAll(aci)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
noauthServer := &serverHandler{
body: body,
t: t,
auth: "none",
}
basicServer := &serverHandler{
body: body,
t: t,
auth: "basic",
}
oauthServer := &serverHandler{
body: body,
t: t,
auth: "bearer",
}
denyServer := &serverHandler{
body: body,
t: t,
auth: "deny",
}
noAuthTS := httptest.NewTLSServer(noauthServer)
defer noAuthTS.Close()
basicTS := httptest.NewTLSServer(basicServer)
defer basicTS.Close()
oauthTS := httptest.NewTLSServer(oauthServer)
defer oauthTS.Close()
denyAuthTS := httptest.NewServer(denyServer)
noAuth := http.Header{}
// YmFyOmJheg== is base64(bar:baz)
basicAuth := http.Header{"Authorization": {"Basic YmFyOmJheg=="}}
bearerAuth := http.Header{"Authorization": {"Bearer sometoken"}}
urlToName := map[string]string{
noAuthTS.URL: "no auth",
basicTS.URL: "basic",
oauthTS.URL: "oauth",
denyAuthTS.URL: "deny auth",
}
tests := []struct {
ACIURL string
hit bool
options http.Header
authFail bool
}{
{noAuthTS.URL, false, noAuth, false},
{noAuthTS.URL, true, noAuth, false},
{noAuthTS.URL, true, bearerAuth, false},
{noAuthTS.URL, true, basicAuth, false},
{basicTS.URL, false, noAuth, true},
{basicTS.URL, false, bearerAuth, true},
{basicTS.URL, false, basicAuth, false},
{oauthTS.URL, false, noAuth, true},
{oauthTS.URL, false, basicAuth, true},
{oauthTS.URL, false, bearerAuth, false},
{denyAuthTS.URL, false, basicAuth, false},
{denyAuthTS.URL, true, bearerAuth, false},
{denyAuthTS.URL, true, noAuth, false},
}
s, err := store.NewStore(dir)
if err != nil {
t.Fatalf("unexpected error %v", err)
}
for _, tt := range tests {
_, ok, err := s.GetRemote(tt.ACIURL)
if err != nil {
t.Fatalf("unexpected err: %v", err)
}
if tt.hit == false && ok {
t.Fatalf("expected miss got a hit")
}
if tt.hit == true && !ok {
t.Fatalf("expected a hit got a miss")
}
parsed, err := url.Parse(tt.ACIURL)
if err != nil {
panic(fmt.Sprintf("Invalid url from test server: %s", tt.ACIURL))
}
headers := map[string]config.Headerer{
parsed.Host: &testHeaderer{tt.options},
}
ft := &fetcher{
imageActionData: imageActionData{
s: s,
headers: headers,
insecureSkipVerify: true,
},
}
_, aciFile, _, err := ft.fetch("", tt.ACIURL, "", nil, "")
if err == nil {
defer os.Remove(aciFile.Name())
}
if err != nil && !tt.authFail {
t.Fatalf("expected download to succeed, it failed: %v (server: %q, headers: `%v`)", err, urlToName[tt.ACIURL], tt.options)
}
if err == nil && tt.authFail {
t.Fatalf("expected download to fail, it succeeded (server: %q, headers: `%v`)", urlToName[tt.ACIURL], tt.options)
}
if err != nil {
continue
}
key, err := s.WriteACI(aciFile, false)
if err != nil {
t.Fatalf("unexpected err: %v", err)
}
rem := store.NewRemote(tt.ACIURL, "")
rem.BlobKey = key
err = s.WriteRemote(rem)
if err != nil {
t.Fatalf("unexpected err: %v", err)
}
}
s.Dump(false)
}
func TestFetchImage(t *testing.T) {
dir, err := ioutil.TempDir("", "fetch-image")
if err != nil {
t.Fatalf("error creating tempdir: %v", err)
}
defer os.RemoveAll(dir)
s, err := store.NewStore(dir)
if err != nil {
t.Fatalf("unexpected error %v", err)
}
defer s.Dump(false)
ks, ksPath, err := keystore.NewTestKeystore()
if err != nil {
t.Errorf("unexpected error %v", err)
}
defer os.RemoveAll(ksPath)
key := keystoretest.KeyMap["example.com/app"]
if _, err := ks.StoreTrustedKeyPrefix("example.com/app", bytes.NewBufferString(key.ArmoredPublicKey)); err != nil {
t.Fatalf("unexpected error %v", err)
}
a, err := aci.NewBasicACI(dir, "example.com/app")
defer a.Close()
if err != nil {
t.Fatalf("unexpected error %v", err)
}
// Rewind the ACI
if _, err := a.Seek(0, 0); err != nil {
t.Fatalf("unexpected error %v", err)
}
asc, err := aci.NewDetachedSignature(key.ArmoredPrivateKey, a)
if err != nil {
t.Fatalf("unexpected error %v", err)
}
// Rewind the ACI.
if _, err := a.Seek(0, 0); err != nil {
t.Fatalf("unexpected error %v", err)
}
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
switch filepath.Ext(r.URL.Path) {
case ".aci":
io.Copy(w, a)
return
case ".asc":
io.Copy(w, asc)
return
default:
t.Fatalf("unknown extension %v", r.URL.Path)
}
}))
defer ts.Close()
ft := &fetcher{
imageActionData: imageActionData{
s: s,
ks: ks,
},
}
_, err = ft.fetchImage(fmt.Sprintf("%s/app.aci", ts.URL), "", true)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
}
func TestFetchImageFromStore(t *testing.T) {
dir, err := ioutil.TempDir("", "fetch-image")
if err != nil {
t.Fatalf("error creating tempdir: %v", err)
}
defer os.RemoveAll(dir)
s, err := store.NewStore(dir)
if err != nil {
t.Fatalf("unexpected error %v", err)
}
defer s.Dump(false)
// Test an aci without os and arch labels
a, err := aci.NewBasicACI(dir, "example.com/app")
defer a.Close()
if err != nil {
t.Fatalf("unexpected error %v", err)
}
// Rewind the ACI
if _, err := a.Seek(0, 0); err != nil {
t.Fatalf("unexpected error %v", err)
}
_, err = s.WriteACI(a, false)
if err != nil {
t.Fatalf("unexpected error %v", err)
}
ft := &fetcher{
imageActionData: imageActionData{
s: s,
},
}
_, err = ft.fetchImageFromStore("example.com/app")
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
}
type redirectingServerHandler struct {
destServer string
}
func (h *redirectingServerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Location", fmt.Sprintf("%s/%s", h.destServer, r.URL.Path))
w.WriteHeader(http.StatusTemporaryRedirect)
}
type cachingServerHandler struct {
aciBody []byte
ascBody []byte
etag string
maxAge int
t *testing.T
}
func (h *cachingServerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
switch filepath.Ext(r.URL.Path) {
case ".aci":
if h.maxAge > 0 {
w.Header().Set("Cache-Control", fmt.Sprintf("max-age=%d", h.maxAge))
}
if h.etag != "" {
w.Header().Set("ETag", h.etag)
if cc := r.Header.Get("If-None-Match"); cc == h.etag {
w.WriteHeader(http.StatusNotModified)
return
}
}
w.Write(h.aciBody)
return
case ".asc":
w.Write(h.ascBody)
return
}
}
func TestFetchImageCache(t *testing.T) {
dir, err := ioutil.TempDir("", "fetch-image-cache")
if err != nil {
t.Fatalf("error creating tempdir: %v", err)
}
defer os.RemoveAll(dir)
s, err := store.NewStore(dir)
if err != nil {
t.Fatalf("unexpected error %v", err)
}
defer s.Dump(false)
ks, ksPath, err := keystore.NewTestKeystore()
if err != nil {
t.Errorf("unexpected error %v", err)
}
defer os.RemoveAll(ksPath)
key := keystoretest.KeyMap["example.com/app"]
if _, err := ks.StoreTrustedKeyPrefix("example.com/app", bytes.NewBufferString(key.ArmoredPublicKey)); err != nil {
t.Fatalf("unexpected error %v", err)
}
a, err := aci.NewBasicACI(dir, "example.com/app")
defer a.Close()
if err != nil {
t.Fatalf("unexpected error %v", err)
}
// Rewind the ACI
if _, err := a.Seek(0, 0); err != nil {
t.Fatalf("unexpected error %v", err)
}
asc, err := aci.NewDetachedSignature(key.ArmoredPrivateKey, a)
if err != nil {
t.Fatalf("unexpected error %v", err)
}
// Rewind the ACI
if _, err := a.Seek(0, 0); err != nil {
t.Fatalf("unexpected error %v", err)
}
aciBody, err := ioutil.ReadAll(a)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
ascBody, err := ioutil.ReadAll(asc)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
nocacheServer := &cachingServerHandler{
aciBody: aciBody,
ascBody: ascBody,
etag: "",
maxAge: 0,
t: t,
}
etagServer := &cachingServerHandler{
aciBody: aciBody,
ascBody: ascBody,
etag: "123456789",
maxAge: 0,
t: t,
}
maxAgeServer := &cachingServerHandler{
aciBody: aciBody,
ascBody: ascBody,
etag: "",
maxAge: 10,
t: t,
}
etagMaxAgeServer := &cachingServerHandler{
aciBody: aciBody,
ascBody: ascBody,
etag: "123456789",
maxAge: 10,
t: t,
}
nocacheTS := httptest.NewServer(nocacheServer)
defer nocacheTS.Close()
etagTS := httptest.NewServer(etagServer)
defer etagTS.Close()
maxAgeTS := httptest.NewServer(maxAgeServer)
defer maxAgeTS.Close()
etagMaxAgeTS := httptest.NewServer(etagMaxAgeServer)
defer etagMaxAgeTS.Close()
type testData struct {
URL string
etag string
cacheMaxAge int
shouldUseCached bool
}
tests := []testData{
{nocacheTS.URL, "", 0, false},
{etagTS.URL, "123456789", 0, true},
{maxAgeTS.URL, "", 10, true},
{etagMaxAgeTS.URL, "123456789", 10, true},
}
testFn := func(tt testData, useRedirect bool) {
aciURL := fmt.Sprintf("%s/app.aci", tt.URL)
if useRedirect {
redirectingTS := httptest.NewServer(&redirectingServerHandler{destServer: tt.URL})
defer redirectingTS.Close()
aciURL = fmt.Sprintf("%s/app.aci", redirectingTS.URL)
}
ft := &fetcher{
imageActionData: imageActionData{
s: s,
ks: ks,
},
}
_, err = ft.fetchImage(aciURL, "", true)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
rem, _, err := s.GetRemote(aciURL)
if err != nil {
t.Fatalf("Error getting remote info: %v\n", err)
}
if rem.ETag != tt.etag {
t.Errorf("expected remote to have a ETag header argument")
}
if rem.CacheMaxAge != tt.cacheMaxAge {
t.Errorf("expected max-age header argument to be %q", tt.cacheMaxAge)
}
downloadTime := rem.DownloadTime
_, err = ft.fetchImage(aciURL, "", true)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
rem, _, err = s.GetRemote(aciURL)
if err != nil {
t.Fatalf("Error getting remote info: %v\n", err)
}
if rem.ETag != tt.etag {
t.Errorf("expected remote to have a ETag header argument")
}
if rem.CacheMaxAge != tt.cacheMaxAge {
t.Errorf("expected max-age header argument to be %q", tt.cacheMaxAge)
}
if tt.shouldUseCached {
if downloadTime != rem.DownloadTime {
t.Errorf("expected current download time to be the same of the previous one (no download) but they differ")
}
} else {
if downloadTime == rem.DownloadTime {
t.Errorf("expected current download time to be different from the previous one (new image download) but they are the same")
}
}
if err := s.RemoveACI(rem.BlobKey); err != nil {
t.Fatalf("unexpected error: %v", err)
}
}
// repeat the tests with and without a redirecting server
for i := 0; i <= 1; i++ {
useRedirect := false
if i == 1 {
useRedirect = true
}
for _, tt := range tests {
testFn(tt, useRedirect)
}
}
}
func TestSigURLFromImgURL(t *testing.T) {
tests := []struct {
in, out string
}{
{
"http://localhost/aci-latest-linux-amd64.aci",
"http://localhost/aci-latest-linux-amd64.aci.asc",
},
}
for i, tt := range tests {
out := ascURLFromImgURL(tt.in)
if out != tt.out {
t.Errorf("#%d: got %v, want %v", i, out, tt.out)
}
}
}
func TestGetMaxAge(t *testing.T) {
ma := getMaxAge("max-age=10")
if ma != 10 {
t.Errorf("got max-age: %d, want %d", ma, 10)
}
ma = getMaxAge("no-cache")
if ma != 0 {
t.Errorf("got max-age: %d, want %d", ma, 0)
}
ma = getMaxAge("no-store")
if ma != 0 {
t.Errorf("got max-age: %d, want %d", ma, 0)
}
}
func TestUseCached(t *testing.T) {
ma := 10
t1 := time.Now().Add(-11 * time.Second)
if useCached(t1, ma) {
t.Errorf("expected useCached to return false")
}
t1 = time.Now().Add(-1 * time.Second)
if !useCached(t1, ma) {
t.Errorf("expected useCached to return true")
}
}<|fim▁end|> |
func (h *serverHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { |
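The serverHandler completion above dispatches on h.auth and validates Basic and Bearer credentials by splitting the Authorization header into a scheme and a payload. A minimal Python sketch of the same parsing and Basic-credential check, for illustration only (the bar/baz credentials and status codes mirror the Go test above):

```python
import base64

def get_auth_payload(headers, auth_type):
    # Mirrors getAuthPayload: expects "Authorization: <type> <payload>".
    auth = headers.get("Authorization", "")
    if not auth:
        return None, (401, "No auth")
    parts = auth.split(" ")
    if len(parts) != 2:
        return None, (400, "Malformed auth")
    if parts[0] != auth_type:
        return None, (401, "Wrong auth")
    return parts[1], None

def check_basic(headers):
    payload, err = get_auth_payload(headers, "Basic")
    if err:
        return err[0]
    creds = base64.b64decode(payload).decode().split(":")
    if len(creds) != 2 or creds != ["bar", "baz"]:
        return 401
    return 200

# "YmFyOmJheg==" is base64("bar:baz"), as noted in TestDownloading above;
# "bm90OnJpZ2h0" is base64("not:right"), a deliberately wrong credential.
assert check_basic({"Authorization": "Basic YmFyOmJheg=="}) == 200
assert check_basic({"Authorization": "Basic bm90OnJpZ2h0"}) == 401
```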
<|file_name|>test.sql.js<|end_file_name|><|fim▁begin|>if (typeof require != 'undefined' || !jsyrup)
var jsyrup = require('../../../jsyrup');
describe('when creating', function() {
var ds, modelClass, callback;
beforeEach(function() {
ds = new jsyrup.SQLDataSource();
callback = jasmine.createSpy();
modelClass = jsyrup.ModelFactory({
key: 'id',
datasources: {
sql: 'items'
},
schema: {
id: { type: 'Integer' },
name: { type: 'Text' }
}
});
});
it('should exists', function() {
expect(ds).toBeDefined();
});
it('should have a create method', function() {
expect(ds.create).toBeFunction();
});
describe('when creating', function() {
var instance;
beforeEach(function() {
instance = new modelClass();
instance.set('name', 'Fred');
});
it('should create', function() {
spyOn(ds, '_execute');
ds.create(instance, callback);
expect(ds._execute).toHaveBeenCalledWith(
"INSERT INTO items (items.name) VALUES ($1)",
['Fred'], callback);
});
});
describe('when updating', function() {
var instance;
beforeEach(function() {
instance = new modelClass();
instance.load({ id: 3, name: 'Frank' });<|fim▁hole|> it('should update', function() {
spyOn(ds, '_execute');
ds.update(instance, callback);
expect(ds._execute).toHaveBeenCalledWith(
"UPDATE items SET items.name = $1 WHERE items.id = $2",
['Frank', 3], callback);
});
});
});<|fim▁end|> | });
|
<|file_name|>client.py<|end_file_name|><|fim▁begin|>#-*-coding: utf-8-*-
from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPError, HTTPRequest
from tornado.options import options
from functools import wraps
from tornado import escape
import tornado.ioloop
import base64
import time
import datetime
import json
from math import exp
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
formula = lambda x: 2 ** 10 / (1 + pow(exp(1), -(x - 2 ** 7) / 2 ** 5))
def loop_call(delta=60 * 1000):
def wrap_loop(func):
@wraps(func)
def wrap_func(*args, **kwargs):
func(*args, **kwargs)
tornado.ioloop.IOLoop.instance().add_timeout(
datetime.timedelta(milliseconds=delta),
wrap_func)
return wrap_func
return wrap_loop
def sync_loop_call(delta=60 * 1000):
"""
Wait for func to finish before scheduling the next add_timeout
"""
def wrap_loop(func):
@wraps(func)
@gen.coroutine
def wrap_func(*args, **kwargs):
options.logger.info("function %r start at %d" %
(func.__name__, int(time.time())))
try:
yield func(*args, **kwargs)
except Exception, e:
options.logger.error("function %r error: %s" %
(func.__name__, e))
options.logger.info("function %r end at %d" %
(func.__name__, int(time.time())))
tornado.ioloop.IOLoop.instance().add_timeout(
datetime.timedelta(milliseconds=delta),
wrap_func)
return wrap_func
return wrap_loop
class TornadoDataRequest(HTTPRequest):
def __init__(self, url, **kwargs):
super(TornadoDataRequest, self).__init__(url, **kwargs)
self.auth_username = options.username
self.auth_password = options.password
self.user_agent = "Tornado-data"
@gen.coroutine
def GetPage(url):
client = AsyncHTTPClient()
request = TornadoDataRequest(url, method='GET')
try:
response = yield client.fetch(request)
except HTTPError, e:
response = e
raise gen.Return(response)
@gen.coroutine
def PutPage(url, body):
client = AsyncHTTPClient()
request = TornadoDataRequest(url, method='PUT', body=body)
try:
response = yield client.fetch(request)
except HTTPError, e:
response = e
raise gen.Return(response)
@gen.coroutine
def PatchPage(url, body):
client = AsyncHTTPClient.configurable_default()()
request = TornadoDataRequest(url, method="PATCH", body=body)
try:
response = yield client.fetch(request)
except HTTPError, e:
response = e
raise gen.Return(response)
@gen.coroutine
def commit(url, message, data):
resp = yield GetPage(url)
if resp.code == 200:
resp = escape.json_decode(resp.body)
sha = resp["sha"]
body = json.dumps({
"message": message,
"content": base64.b64encode(json.dumps(data)),
"committer": {"name": "cloudaice", "email": "[email protected]"},
"sha": sha
})
resp = yield PutPage(url, body)
raise gen.Return(resp)
else:
raise gen.Return(resp)
<|fim▁hole|> try:
body = json.dumps({
"description": "update file at utctime %s" %
datetime.datetime.utcfromtimestamp(time.time()),
"files": {
filename: {
"content": json.dumps(data, indent=4, separators=(',', ': '))
}
}
})
except Exception, e:
options.logger.error("Error: %s" % e)
resp = yield PatchPage(gist_url, body)
raise gen.Return(resp)<|fim▁end|> | @gen.coroutine
def update_file(gist_url, filename, data): |
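sync_loop_call above differs from loop_call in that it waits for the wrapped coroutine to finish (and logs any exception) before scheduling the next run, so slow iterations never overlap. A minimal usage sketch under the same Tornado setup this file assumes (fetch_front_page and the 30-second interval are made-up illustrations; options.logger, GetPage, and the imports come from the module above):

```python
@sync_loop_call(delta=30 * 1000)         # re-run 30 s after each run *finishes*
@gen.coroutine
def fetch_front_page():
    resp = yield GetPage("https://api.github.com/")   # coroutine defined above
    options.logger.info("fetched, status %s" % resp.code)

fetch_front_page()                        # kick off the first iteration
tornado.ioloop.IOLoop.instance().start()  # the decorator keeps rescheduling it
```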
<|file_name|>Bills.py<|end_file_name|><|fim▁begin|>import time
from Facturacion.Objects.Bill import Bill, ItemLine
from Facturacion.Controllers.ControlDB import BillsController
from Facturacion.Collections.Vendors import Vendors
from Facturacion.Collections.Clients import Clients
from Facturacion.Collections.Items import Items
class Bills:
facturas = {}
controller = BillsController()
@classmethod
def load_bills(cls):
for bill in cls.controller.get_bills():
cls.facturas[bill[0]] = Bill(bill[0], Vendors.vendedores[bill[1]], Clients.clientes[bill[2]], bill[3])
cls.load_items(bill[0])
@classmethod
def load_items(cls, cod_factura):
factura = cls.facturas[cod_factura]
for item in cls.controller.get_items(cod_factura):
factura.add_item(ItemLine(factura, Items.articulos[item[0]], item[1]))
@classmethod
def get_max_code(cls):
if not cls.facturas:<|fim▁hole|> def items_to_il(cls, cod_factura, dict_articulos):
il_list = []
for item, cant in dict_articulos.items():
il_list.append(ItemLine(Bills.facturas[cod_factura], item, cant))
return il_list
@classmethod
def add_bill(cls, cod_factura, cif_vendedor, cif_cliente, dict_articulos):
if cod_factura in cls.facturas:
return
vendedor = Vendors.vendedores[cif_vendedor]
cliente = Clients.clientes[cif_cliente]
fecha = time.strftime("%d/%m/%Y")
factura = Bill(cod_factura, vendedor, cliente, fecha)
cls.facturas[cod_factura] = factura
cls.controller.add_bill(factura)
for linea in cls.items_to_il(cod_factura, dict_articulos):
factura.add_item(linea)
cls.controller.add_line(linea)<|fim▁end|> | return 1
return max(cls.facturas, key=int) + 1
@classmethod |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>// EXPRESS SERVER HERE //
// BASE SETUP
var express = require('express'),
app = express(),
bodyParser = require('body-parser'),
cookieParser = require('cookie-parser'),
session = require('express-session'),
methodOverride = require('method-override'),
// routes = require('./routes/routes'),
morgan = require('morgan'),
serveStatic = require('serve-static'),
errorHandler = require('errorhandler');
// =========================CONFIGURATION===========================//
// =================================================================//
app.set('port', process.env.PORT || 9001);
/*
* Set to 9001 to not interfere with Gulp 9000.
* If you're using Cloud9, or an IDE that uses a different port, process.env.PORT will
* take care of your problems. You don't need to set a new port.
*/
<|fim▁hole|>app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use(methodOverride());
app.use(morgan('dev'));
app.use(cookieParser('secret'));
app.use(session({secret: 'evernote now', resave: true, saveUninitialized: true}));
app.use(function(req, res, next) {
res.locals.session = req.session;
next();
});
if (process.env.NODE_ENV === 'development') {
app.use(errorHandler());
}
// ==========================ROUTER=================================//
// =================================================================//
// ROUTES FOR THE API - RUN IN THE ORDER LISTED
var router = express.Router();
// ------------- ROUTES ---------------- //
// REGISTERING THE ROUTES
app.use('/', router);
// STARTING THE SERVER
console.log('Serving on port ' + app.get('port') + '. Serving more Nodes than Big Macs!');
app.listen(app.get('port')); // Not used if Gulp is activated - it is bypassed
exports = module.exports = app; // This is needed otherwise the index.js for routes will not work<|fim▁end|> | app.use(serveStatic('app', {'index': 'true'})); // Set to True or False if you want to start on Index or not
app.use('/bower_components', express.static(__dirname + '/bower_components')); |
<|file_name|>0003_auto_20160921_1608.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def forwards(apps, schema_editor):
"""Create Peers group."""
Group = apps.get_model('auth', 'Group')
Group.objects.create(name='Peers')
def backwards(apps, schema_editor):
"""Delete Peers group."""
Group = apps.get_model('auth', 'Group')
Group.objects.filter(name='Peers').delete()
class Migration(migrations.Migration):
dependencies = [
('profiles', '0002_auto_20160908_1534'),
]
<|fim▁hole|> ]<|fim▁end|> | operations = [
migrations.RunPython(forwards, backwards) |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
{
'name': 'test-import-export',
'version': '0.1',
'category': 'Tests',
'description': """A module to test import/export.""",
'author': 'OpenERP SA',
'maintainer': 'OpenERP SA',
'website': 'http://www.openerp.com',
'depends': ['base'],<|fim▁hole|> 'auto_install': False,
}<|fim▁end|> | 'data': ['ir.model.access.csv'],
'installable': True, |
<|file_name|>Scope.java<|end_file_name|><|fim▁begin|>package ru.mephi.interpreter;
import org.antlr.v4.runtime.tree.ParseTree;
import java.math.BigInteger;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Anton_Chkadua
*/
public class Scope {
static Scope GLOBAL = new Scope(null);
private Scope parent;
private Map<BigInteger, Variable> variables = new HashMap<>();
private Map<Function, ParseTree> functions = new HashMap<>();
private BigInteger memoryCounter = BigInteger.ZERO;
Scope(Scope parent) {
this.parent = parent;
if (parent != null) {
memoryCounter = parent.getMemoryCounter();
}
}
private BigInteger getMemoryCounter() {
return memoryCounter;
}
void add(Variable variable) throws RuntimeLangException {
if (variables.values().contains(variable)) {
throw new RuntimeLangException(RuntimeLangException.Type.DUPLICATE_IDENTIFIER);
}
if (variable instanceof Array) {
((Array) variable).setScope(this);
for (int i = 0; i < ((Array) variable).memoryLength; i++) {
variables.put(memoryCounter, variable);
memoryCounter = memoryCounter.add(BigInteger.ONE);
}
} else {
variables.put(memoryCounter, variable);
memoryCounter = memoryCounter.add(BigInteger.ONE);
}
}
public void remove(String name) throws RuntimeLangException {
Variable toBeRemoved = get(name);
BigInteger address = variables.keySet().stream().filter(key -> variables.get(key).equals(toBeRemoved)).findFirst().orElseThrow(() -> new RuntimeLangException(
RuntimeLangException.Type.NO_SUCH_VARIABLE));
variables.remove(address);
}
Scope getParent() {
return parent;
}
Variable get(String name) throws RuntimeLangException {
Variable candidate =
variables.values().stream().filter(variable -> variable.getName().equals(name)).findAny()
.orElse(null);
if (candidate == null) {
if (parent != null) {
candidate = parent.get(name);
} else {
throw new RuntimeLangException(RuntimeLangException.Type.NO_SUCH_VARIABLE);
}
}
return candidate;
}
Variable getByAddress(Pointer pointer) throws RuntimeLangException {
Variable variable = variables.get(pointer.getValue());
System.out.println(variable);
if (variable instanceof Array)
{
int address = getVariableAddress(variable).intValue();
int index = pointer.getValue().intValue() - address;
return variable.getElement(index);
} else {
return variable;<|fim▁hole|> void setValueByAddress(Pointer pointer, BigInteger value) throws RuntimeLangException {
if (pointer.constantValue) throw new RuntimeLangException(RuntimeLangException.Type.ILLEGAL_MODIFICATION);
variables.get(pointer.getValue()).setValue(value);
}
BigInteger getVariableAddress(Variable variable) throws RuntimeLangException {
if (!variables.values().contains(variable)) {
throw new RuntimeLangException(RuntimeLangException.Type.NO_SUCH_VARIABLE);
}
for (Map.Entry<BigInteger, Variable> entry : variables.entrySet()) {
if (entry.getValue().name.equals(variable.name)) return entry.getKey();
}
return null;
}
void addFunction(Function function, ParseTree functionTree) throws RuntimeLangException {
if (functions.containsKey(function)) {
throw new RuntimeLangException(RuntimeLangException.Type.DUPLICATE_IDENTIFIER);
}
functions.put(function, functionTree);
}
ParseTree getFunctionTree(String name, List<Class> types) throws RuntimeLangException {
ParseTree tree = functions.get(getFunction(name, types));
if (tree == null) {
if (parent != null) {
tree = parent.getFunctionTree(name, types);
} else {
throw new RuntimeLangException(RuntimeLangException.Type.NO_SUCH_FUNCTION);
}
}
return tree;
}
Function getFunction(String name, List<Class> types) throws RuntimeLangException {
Map.Entry<Function, ParseTree> entryCandidate =
functions.entrySet().stream().filter(entry -> entry.getKey().name.equals(name)).findAny()
.orElse(null);
Function candidate = null;
if (entryCandidate == null) {
if (parent != null) {
candidate = parent.getFunction(name, types);
}
} else {
candidate = entryCandidate.getKey();
}
if (candidate == null) {
if (name.equals("main")) {
throw new RuntimeLangException(RuntimeLangException.Type.NO_MAIN_FUNCTION);
} else {
throw new RuntimeLangException(RuntimeLangException.Type.NO_SUCH_FUNCTION);
}
}
if (candidate.args.size() != types.size()) {
throw new RuntimeLangException(RuntimeLangException.Type.NO_SUCH_FUNCTION);
}
for (int i = 0; i < candidate.args.size(); i++) {
if (!candidate.args.get(i).getType().equals(types.get(i))) {
throw new RuntimeLangException(RuntimeLangException.Type.NO_SUCH_FUNCTION);
}
}
return candidate;
}
@Override
public String toString() {
String parent = this.parent != null ? this.parent.toString() : "";
StringBuilder builder = new StringBuilder();
for (Variable variable : variables.values()) {
builder.append(variable.getName()).append("-").append(variable.getType()).append("-length-")
.append(variable.getLength());
try {
builder.append("-value-").append(variable.getValue()).append('-').append(variable.constantValue)
.append("\r\n");
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
}
return builder.insert(0, parent).toString();
}
}<|fim▁end|> | }
}
|
<|file_name|>test_trace.py<|end_file_name|><|fim▁begin|># Testing the line trace facility.
from test import test_support
import unittest
import sys
import difflib
# A very basic example. If this fails, we're in deep trouble.
def basic():
return 1
basic.events = [(0, 'call'),
(1, 'line'),
(1, 'return')]
# Many of the tests below are tricky because they involve pass statements.
# If there is implicit control flow around a pass statement (in an except
# clause or else clause) under what conditions do you set a line number
# following that clause?
# The entire "while 0:" statement is optimized away. No code
# exists for it, so the line numbers skip directly from "del x"
# to "x = 1".
def arigo_example():
x = 1
del x
while 0:
pass
x = 1
arigo_example.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(5, 'line'),
(5, 'return')]
# check that lines consisting of just one instruction get traced:
def one_instr_line():
x = 1
del x
x = 1
one_instr_line.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(3, 'return')]
def no_pop_tops(): # 0
x = 1 # 1
for a in range(2): # 2
if a: # 3
x = 1 # 4
else: # 5
x = 1 # 6
no_pop_tops.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(6, 'line'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(2, 'line'),
(2, 'return')]
def no_pop_blocks():
y = 1
while not y:
bla
x = 1
no_pop_blocks.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(4, 'line'),
(4, 'return')]
def called(): # line -3
x = 1
def call(): # line 0
called()
call.events = [(0, 'call'),
(1, 'line'),
(-3, 'call'),
(-2, 'line'),
(-2, 'return'),
(1, 'return')]
def raises():
raise Exception
def test_raise():
try:
raises()
except Exception, exc:
x = 1
test_raise.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(-3, 'call'),
(-2, 'line'),
(-2, 'exception'),
(-2, 'return'),
(2, 'exception'),
(3, 'line'),
(4, 'line'),
(4, 'return')]
def _settrace_and_return(tracefunc):
sys.settrace(tracefunc)
sys._getframe().f_back.f_trace = tracefunc
def settrace_and_return(tracefunc):
_settrace_and_return(tracefunc)
settrace_and_return.events = [(1, 'return')]
def _settrace_and_raise(tracefunc):
sys.settrace(tracefunc)
sys._getframe().f_back.f_trace = tracefunc
raise RuntimeError
def settrace_and_raise(tracefunc):
try:
_settrace_and_raise(tracefunc)
except RuntimeError, exc:
pass
settrace_and_raise.events = [(2, 'exception'),
(3, 'line'),
(4, 'line'),
(4, 'return')]
# implicit return example
# This test is interesting because of the else: pass
# part of the code. The code generate for the true
# part of the if contains a jump past the else branch.
# The compiler then generates an implicit "return None"
# Internally, the compiler visits the pass statement
# and stores its line number for use on the next instruction.
# The next instruction is the implicit return None.
def ireturn_example():
a = 5
b = 5
if a == b:
b = a+1
else:
pass
ireturn_example.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(6, 'line'),
(6, 'return')]
# Tight loop with while(1) example (SF #765624)
def tightloop_example():
items = range(0, 3)
try:
i = 0
while 1:
b = items[i]; i+=1
except IndexError:<|fim▁hole|>tightloop_example.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(5, 'line'),
(5, 'line'),
(5, 'line'),
(5, 'line'),
(5, 'exception'),
(6, 'line'),
(7, 'line'),
(7, 'return')]
def tighterloop_example():
items = range(1, 4)
try:
i = 0
while 1: i = items[i]
except IndexError:
pass
tighterloop_example.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(4, 'line'),
(4, 'line'),
(4, 'line'),
(4, 'exception'),
(5, 'line'),
(6, 'line'),
(6, 'return')]
def generator_function():
try:
yield True
"continued"
finally:
"finally"
def generator_example():
# any() will leave the generator before its end
x = any(generator_function())
# the following lines were not traced
for x in range(10):
y = x
generator_example.events = ([(0, 'call'),
(2, 'line'),
(-6, 'call'),
(-5, 'line'),
(-4, 'line'),
(-4, 'return'),
(-4, 'call'),
(-4, 'exception'),
(-1, 'line'),
(-1, 'return')] +
[(5, 'line'), (6, 'line')] * 10 +
[(5, 'line'), (5, 'return')])
class Tracer:
def __init__(self):
self.events = []
def trace(self, frame, event, arg):
self.events.append((frame.f_lineno, event))
return self.trace
def traceWithGenexp(self, frame, event, arg):
(o for o in [1])
self.events.append((frame.f_lineno, event))
return self.trace
class TraceTestCase(unittest.TestCase):
def compare_events(self, line_offset, events, expected_events):
events = [(l - line_offset, e) for (l, e) in events]
if events != expected_events:
self.fail(
"events did not match expectation:\n" +
"\n".join(difflib.ndiff([str(x) for x in expected_events],
[str(x) for x in events])))
def run_and_compare(self, func, events):
tracer = Tracer()
sys.settrace(tracer.trace)
func()
sys.settrace(None)
self.compare_events(func.func_code.co_firstlineno,
tracer.events, events)
def run_test(self, func):
self.run_and_compare(func, func.events)
def run_test2(self, func):
tracer = Tracer()
func(tracer.trace)
sys.settrace(None)
self.compare_events(func.func_code.co_firstlineno,
tracer.events, func.events)
def test_01_basic(self):
self.run_test(basic)
def test_02_arigo(self):
self.run_test(arigo_example)
def test_03_one_instr(self):
self.run_test(one_instr_line)
def test_04_no_pop_blocks(self):
self.run_test(no_pop_blocks)
def test_05_no_pop_tops(self):
self.run_test(no_pop_tops)
def test_06_call(self):
self.run_test(call)
def test_07_raise(self):
self.run_test(test_raise)
def test_08_settrace_and_return(self):
self.run_test2(settrace_and_return)
def test_09_settrace_and_raise(self):
self.run_test2(settrace_and_raise)
def test_10_ireturn(self):
self.run_test(ireturn_example)
def test_11_tightloop(self):
self.run_test(tightloop_example)
def test_12_tighterloop(self):
self.run_test(tighterloop_example)
def test_13_genexp(self):
self.run_test(generator_example)
# issue1265: if the trace function contains a generator,
# and if the traced function contains another generator
# that is not completely exhausted, the trace stopped.
# Worse: the 'finally' clause was not invoked.
tracer = Tracer()
sys.settrace(tracer.traceWithGenexp)
generator_example()
sys.settrace(None)
self.compare_events(generator_example.func_code.co_firstlineno,
tracer.events, generator_example.events)
def test_14_onliner_if(self):
def onliners():
if True: False
else: True
return 0
self.run_and_compare(
onliners,
[(0, 'call'),
(1, 'line'),
(3, 'line'),
(3, 'return')])
def test_15_loops(self):
# issue1750076: "while" expression is skipped by debugger
def for_example():
for x in range(2):
pass
self.run_and_compare(
for_example,
[(0, 'call'),
(1, 'line'),
(2, 'line'),
(1, 'line'),
(2, 'line'),
(1, 'line'),
(1, 'return')])
def while_example():
# While expression should be traced on every loop
x = 2
while x > 0:
x -= 1
self.run_and_compare(
while_example,
[(0, 'call'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(3, 'line'),
(4, 'line'),
(3, 'line'),
(3, 'return')])
def test_16_blank_lines(self):
exec("def f():\n" + "\n" * 256 + " pass")
self.run_and_compare(
f,
[(0, 'call'),
(257, 'line'),
(257, 'return')])
class RaisingTraceFuncTestCase(unittest.TestCase):
def trace(self, frame, event, arg):
"""A trace function that raises an exception in response to a
specific trace event."""
if event == self.raiseOnEvent:
raise ValueError # just something that isn't RuntimeError
else:
return self.trace
def f(self):
"""The function to trace; raises an exception if that's the case
we're testing, so that the 'exception' trace event fires."""
if self.raiseOnEvent == 'exception':
x = 0
y = 1/x
else:
return 1
def run_test_for_event(self, event):
"""Tests that an exception raised in response to the given event is
handled OK."""
self.raiseOnEvent = event
try:
for i in xrange(sys.getrecursionlimit() + 1):
sys.settrace(self.trace)
try:
self.f()
except ValueError:
pass
else:
self.fail("exception not thrown!")
except RuntimeError:
self.fail("recursion counter not reset")
# Test the handling of exceptions raised by each kind of trace event.
def test_call(self):
self.run_test_for_event('call')
def test_line(self):
self.run_test_for_event('line')
def test_return(self):
self.run_test_for_event('return')
def test_exception(self):
self.run_test_for_event('exception')
def test_trash_stack(self):
def f():
for i in range(5):
print i # line tracing will raise an exception at this line
def g(frame, why, extra):
if (why == 'line' and
frame.f_lineno == f.func_code.co_firstlineno + 2):
raise RuntimeError, "i am crashing"
return g
sys.settrace(g)
try:
f()
except RuntimeError:
# the test is really that this doesn't segfault:
import gc
gc.collect()
else:
self.fail("exception not propagated")
# 'Jump' tests: assigning to frame.f_lineno within a trace function
# moves the execution position - it's how debuggers implement a Jump
# command (aka. "Set next statement").
class JumpTracer:
"""Defines a trace function that jumps from one place to another,
with the source and destination lines of the jump being defined by
the 'jump' property of the function under test."""
def __init__(self, function):
self.function = function
self.jumpFrom = function.jump[0]
self.jumpTo = function.jump[1]
self.done = False
def trace(self, frame, event, arg):
if not self.done and frame.f_code == self.function.func_code:
firstLine = frame.f_code.co_firstlineno
if frame.f_lineno == firstLine + self.jumpFrom:
# Cope with non-integer self.jumpTo (because of
# no_jump_to_non_integers below).
try:
frame.f_lineno = firstLine + self.jumpTo
except TypeError:
frame.f_lineno = self.jumpTo
self.done = True
return self.trace
# The first set of 'jump' tests are for things that are allowed:
def jump_simple_forwards(output):
output.append(1)
output.append(2)
output.append(3)
jump_simple_forwards.jump = (1, 3)
jump_simple_forwards.output = [3]
def jump_simple_backwards(output):
output.append(1)
output.append(2)
jump_simple_backwards.jump = (2, 1)
jump_simple_backwards.output = [1, 1, 2]
def jump_out_of_block_forwards(output):
for i in 1, 2:
output.append(2)
for j in [3]: # Also tests jumping over a block
output.append(4)
output.append(5)
jump_out_of_block_forwards.jump = (3, 5)
jump_out_of_block_forwards.output = [2, 5]
def jump_out_of_block_backwards(output):
output.append(1)
for i in [1]:
output.append(3)
for j in [2]: # Also tests jumping over a block
output.append(5)
output.append(6)
output.append(7)
jump_out_of_block_backwards.jump = (6, 1)
jump_out_of_block_backwards.output = [1, 3, 5, 1, 3, 5, 6, 7]
def jump_to_codeless_line(output):
output.append(1)
# Jumping to this line should skip to the next one.
output.append(3)
jump_to_codeless_line.jump = (1, 2)
jump_to_codeless_line.output = [3]
def jump_to_same_line(output):
output.append(1)
output.append(2)
output.append(3)
jump_to_same_line.jump = (2, 2)
jump_to_same_line.output = [1, 2, 3]
# Tests jumping within a finally block, and over one.
def jump_in_nested_finally(output):
try:
output.append(2)
finally:
output.append(4)
try:
output.append(6)
finally:
output.append(8)
output.append(9)
jump_in_nested_finally.jump = (4, 9)
jump_in_nested_finally.output = [2, 9]
# The second set of 'jump' tests are for things that are not allowed:
def no_jump_too_far_forwards(output):
try:
output.append(2)
output.append(3)
except ValueError, e:
output.append('after' in str(e))
no_jump_too_far_forwards.jump = (3, 6)
no_jump_too_far_forwards.output = [2, True]
def no_jump_too_far_backwards(output):
try:
output.append(2)
output.append(3)
except ValueError, e:
output.append('before' in str(e))
no_jump_too_far_backwards.jump = (3, -1)
no_jump_too_far_backwards.output = [2, True]
# Test each kind of 'except' line.
def no_jump_to_except_1(output):
try:
output.append(2)
except:
e = sys.exc_info()[1]
output.append('except' in str(e))
no_jump_to_except_1.jump = (2, 3)
no_jump_to_except_1.output = [True]
def no_jump_to_except_2(output):
try:
output.append(2)
except ValueError:
e = sys.exc_info()[1]
output.append('except' in str(e))
no_jump_to_except_2.jump = (2, 3)
no_jump_to_except_2.output = [True]
def no_jump_to_except_3(output):
try:
output.append(2)
except ValueError, e:
output.append('except' in str(e))
no_jump_to_except_3.jump = (2, 3)
no_jump_to_except_3.output = [True]
def no_jump_to_except_4(output):
try:
output.append(2)
except (ValueError, RuntimeError), e:
output.append('except' in str(e))
no_jump_to_except_4.jump = (2, 3)
no_jump_to_except_4.output = [True]
def no_jump_forwards_into_block(output):
try:
output.append(2)
for i in 1, 2:
output.append(4)
except ValueError, e:
output.append('into' in str(e))
no_jump_forwards_into_block.jump = (2, 4)
no_jump_forwards_into_block.output = [True]
def no_jump_backwards_into_block(output):
try:
for i in 1, 2:
output.append(3)
output.append(4)
except ValueError, e:
output.append('into' in str(e))
no_jump_backwards_into_block.jump = (4, 3)
no_jump_backwards_into_block.output = [3, 3, True]
def no_jump_into_finally_block(output):
try:
try:
output.append(3)
x = 1
finally:
output.append(6)
except ValueError, e:
output.append('finally' in str(e))
no_jump_into_finally_block.jump = (4, 6)
no_jump_into_finally_block.output = [3, 6, True] # The 'finally' still runs
def no_jump_out_of_finally_block(output):
try:
try:
output.append(3)
finally:
output.append(5)
output.append(6)
except ValueError, e:
output.append('finally' in str(e))
no_jump_out_of_finally_block.jump = (5, 1)
no_jump_out_of_finally_block.output = [3, True]
# This verifies the line-numbers-must-be-integers rule.
def no_jump_to_non_integers(output):
try:
output.append(2)
except ValueError, e:
output.append('integer' in str(e))
no_jump_to_non_integers.jump = (2, "Spam")
no_jump_to_non_integers.output = [True]
# This verifies that you can't set f_lineno via _getframe or similar
# trickery.
def no_jump_without_trace_function():
try:
previous_frame = sys._getframe().f_back
previous_frame.f_lineno = previous_frame.f_lineno
except ValueError, e:
# This is the exception we wanted; make sure the error message
# talks about trace functions.
if 'trace' not in str(e):
raise
else:
# Something's wrong - the expected exception wasn't raised.
raise RuntimeError, "Trace-function-less jump failed to fail"
class JumpTestCase(unittest.TestCase):
def compare_jump_output(self, expected, received):
if received != expected:
self.fail( "Outputs don't match:\n" +
"Expected: " + repr(expected) + "\n" +
"Received: " + repr(received))
def run_test(self, func):
tracer = JumpTracer(func)
sys.settrace(tracer.trace)
output = []
func(output)
sys.settrace(None)
self.compare_jump_output(func.output, output)
def test_01_jump_simple_forwards(self):
self.run_test(jump_simple_forwards)
def test_02_jump_simple_backwards(self):
self.run_test(jump_simple_backwards)
def test_03_jump_out_of_block_forwards(self):
self.run_test(jump_out_of_block_forwards)
def test_04_jump_out_of_block_backwards(self):
self.run_test(jump_out_of_block_backwards)
def test_05_jump_to_codeless_line(self):
self.run_test(jump_to_codeless_line)
def test_06_jump_to_same_line(self):
self.run_test(jump_to_same_line)
def test_07_jump_in_nested_finally(self):
self.run_test(jump_in_nested_finally)
def test_08_no_jump_too_far_forwards(self):
self.run_test(no_jump_too_far_forwards)
def test_09_no_jump_too_far_backwards(self):
self.run_test(no_jump_too_far_backwards)
def test_10_no_jump_to_except_1(self):
self.run_test(no_jump_to_except_1)
def test_11_no_jump_to_except_2(self):
self.run_test(no_jump_to_except_2)
def test_12_no_jump_to_except_3(self):
self.run_test(no_jump_to_except_3)
def test_13_no_jump_to_except_4(self):
self.run_test(no_jump_to_except_4)
def test_14_no_jump_forwards_into_block(self):
self.run_test(no_jump_forwards_into_block)
def test_15_no_jump_backwards_into_block(self):
self.run_test(no_jump_backwards_into_block)
def test_16_no_jump_into_finally_block(self):
self.run_test(no_jump_into_finally_block)
def test_17_no_jump_out_of_finally_block(self):
self.run_test(no_jump_out_of_finally_block)
def test_18_no_jump_to_non_integers(self):
self.run_test(no_jump_to_non_integers)
def test_19_no_jump_without_trace_function(self):
no_jump_without_trace_function()
def test_main():
test_support.run_unittest(
TraceTestCase,
RaisingTraceFuncTestCase,
JumpTestCase
)
if __name__ == "__main__":
test_main()<|fim▁end|> | pass
|
<|file_name|>publish.js<|end_file_name|><|fim▁begin|>module.exports = publish
var url = require('url')
var semver = require('semver')
var Stream = require('stream').Stream
var assert = require('assert')
var fixer = require('normalize-package-data').fixer
var concat = require('concat-stream')
var ssri = require('ssri')
function escaped (name) {
return name.replace('/', '%2f')
}
function publish (uri, params, cb) {
assert(typeof uri === 'string', 'must pass registry URI to publish')
assert(params && typeof params === 'object', 'must pass params to publish')
assert(typeof cb === 'function', 'must pass callback to publish')
var access = params.access
assert(
(!access) || ['public', 'restricted'].indexOf(access) !== -1,
"if present, access level must be either 'public' or 'restricted'"
)
var auth = params.auth
assert(auth && typeof auth === 'object', 'must pass auth to publish')
if (!(auth.token ||
(auth.password && auth.username && auth.email))) {
var er = new Error('auth required for publishing')
er.code = 'ENEEDAUTH'
return cb(er)
}
var metadata = params.metadata
assert(
metadata && typeof metadata === 'object',
'must pass package metadata to publish'
)
try {
fixer.fixNameField(metadata, {strict: true, allowLegacyCase: true})
} catch (er) {
return cb(er)
}
var version = semver.clean(metadata.version)
if (!version) return cb(new Error('invalid semver: ' + metadata.version))
metadata.version = version
var body = params.body
assert(body, 'must pass package body to publish')
assert(body instanceof Stream, 'package body passed to publish must be a stream')
var client = this
var sink = concat(function (tarbuffer) {
putFirst.call(client, uri, metadata, tarbuffer, access, auth, cb)
})
sink.on('error', cb)
body.pipe(sink)
}
function putFirst (registry, data, tarbuffer, access, auth, cb) {
// optimistically try to PUT all in one single atomic thing.
<|fim▁hole|> var root = {
_id: data.name,
name: data.name,
description: data.description,
'dist-tags': {},
versions: {},
readme: data.readme || ''
}
if (access) root.access = access
if (!auth.token) {
root.maintainers = [{ name: auth.username, email: auth.email }]
data.maintainers = JSON.parse(JSON.stringify(root.maintainers))
}
root.versions[ data.version ] = data
var tag = data.tag || this.config.defaultTag
root['dist-tags'][tag] = data.version
var tbName = data.name + '-' + data.version + '.tgz'
var tbURI = data.name + '/-/' + tbName
var integrity = ssri.fromData(tarbuffer, {
algorithms: ['sha1', 'sha512']
})
data._id = data.name + '@' + data.version
data.dist = data.dist || {}
// Don't bother having sha1 in the actual integrity field
data.dist.integrity = integrity['sha512'][0].toString()
// Legacy shasum support
data.dist.shasum = integrity['sha1'][0].hexDigest()
data.dist.tarball = url.resolve(registry, tbURI)
.replace(/^https:\/\//, 'http://')
root._attachments = {}
root._attachments[ tbName ] = {
'content_type': 'application/octet-stream',
'data': tarbuffer.toString('base64'),
'length': tarbuffer.length
}
var fixed = url.resolve(registry, escaped(data.name))
var client = this
var options = {
method: 'PUT',
body: root,
auth: auth
}
this.request(fixed, options, function (er, parsed, json, res) {
var r409 = 'must supply latest _rev to update existing package'
var r409b = 'Document update conflict.'
var conflict = res && res.statusCode === 409
if (parsed && (parsed.reason === r409 || parsed.reason === r409b)) {
conflict = true
}
// a 409 is typical here. GET the data and merge in.
if (er && !conflict) {
client.log.error('publish', 'Failed PUT ' + (res && res.statusCode))
return cb(er)
}
if (!er && !conflict) return cb(er, parsed, json, res)
// let's see what versions are already published.
client.request(fixed + '?write=true', { auth: auth }, function (er, current) {
if (er) return cb(er)
putNext.call(client, registry, data.version, root, current, auth, cb)
})
})
}
function putNext (registry, newVersion, root, current, auth, cb) {
// already have the tardata on the root object
// just merge in existing stuff
var curVers = Object.keys(current.versions || {}).map(function (v) {
return semver.clean(v, true)
}).concat(Object.keys(current.time || {}).map(function (v) {
if (semver.valid(v, true)) return semver.clean(v, true)
}).filter(function (v) {
return v
}))
if (curVers.indexOf(newVersion) !== -1) {
return cb(conflictError(root.name, newVersion))
}
current.versions[newVersion] = root.versions[newVersion]
current._attachments = current._attachments || {}
for (var i in root) {
switch (i) {
// objects that copy over the new stuffs
case 'dist-tags':
case 'versions':
case '_attachments':
for (var j in root[i]) {
current[i][j] = root[i][j]
}
break
// ignore these
case 'maintainers':
break
// copy
default:
current[i] = root[i]
}
}
var maint = JSON.parse(JSON.stringify(root.maintainers))
root.versions[newVersion].maintainers = maint
var uri = url.resolve(registry, escaped(root.name))
var options = {
method: 'PUT',
body: current,
auth: auth
}
this.request(uri, options, cb)
}
function conflictError (pkgid, version) {
var e = new Error('cannot modify pre-existing version')
e.code = 'EPUBLISHCONFLICT'
e.pkgid = pkgid
e.version = version
return e
}<|fim▁end|> | // If 409, then GET and merge, try again.
// If other error, then fail.
|
<|file_name|>http_config.go<|end_file_name|><|fim▁begin|>// Copyright 2020 Huawei Technologies Co.,Ltd.
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package config
import (
"context"
"fmt"
"net"
"time"
"k8s.io/autoscaler/cluster-autoscaler/cloudprovider/huaweicloud/huaweicloud-sdk-go-v3/core/httphandler"
)
const DefaultTimeout = 120 * time.Second
const DefaultRetries = 0
const DefaultIgnoreSSLVerification = false
type DialContext func(ctx context.Context, network string, addr string) (net.Conn, error)
type HttpConfig struct {
DialContext DialContext
Timeout time.Duration
Retries int
HttpProxy *Proxy
IgnoreSSLVerification bool
HttpHandler *httphandler.HttpHandler
}
func DefaultHttpConfig() *HttpConfig {
return &HttpConfig{
Timeout: DefaultTimeout,
Retries: DefaultRetries,
IgnoreSSLVerification: DefaultIgnoreSSLVerification,
}
}
func (config *HttpConfig) WithDialContext(dial DialContext) *HttpConfig {
config.DialContext = dial
return config
}
func (config *HttpConfig) WithTimeout(timeout time.Duration) *HttpConfig {
config.Timeout = timeout
return config
}
func (config *HttpConfig) WithRetries(retries int) *HttpConfig {
config.Retries = retries
return config
}
func (config *HttpConfig) WithIgnoreSSLVerification(ignore bool) *HttpConfig {
config.IgnoreSSLVerification = ignore
return config
}
func (config *HttpConfig) WithHttpHandler(handler *httphandler.HttpHandler) *HttpConfig {
config.HttpHandler = handler
return config
}
func (config *HttpConfig) WithProxy(proxy *Proxy) *HttpConfig {
config.HttpProxy = proxy
return config
}
type Proxy struct {
Schema string
Host string
Port int
Username string
Password string
}
func NewProxy() *Proxy {
return &Proxy{}<|fim▁hole|>}
func (p *Proxy) WithSchema(schema string) *Proxy {
p.Schema = schema
return p
}
func (p *Proxy) WithHost(host string) *Proxy {
p.Host = host
return p
}
func (p *Proxy) WithPort(port int) *Proxy {
p.Port = port
return p
}
func (p *Proxy) WithUsername(name string) *Proxy {
p.Username = name
return p
}
func (p *Proxy) WithPassword(pwd string) *Proxy {
p.Password = pwd
return p
}
func (p *Proxy) GetProxyUrl() string {
var proxyUrl string
if p.Username != "" {
proxyUrl = fmt.Sprintf("%s://%s:%s@%s", p.Schema, p.Username, p.Password, p.Host)
} else {
proxyUrl = fmt.Sprintf("%s://%s", p.Schema, p.Host)
}
if p.Port != 0 {
proxyUrl = fmt.Sprintf("%s:%d", proxyUrl, p.Port)
}
return proxyUrl
}<|fim▁end|> | |
<|file_name|>char.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|> let c: char = 'x';
let d: char = 'x';
assert_eq!(c, 'x');
assert_eq!('x', c);
assert_eq!(c, c);
assert_eq!(c, d);
assert_eq!(d, c);
assert_eq!(d, 'x');
assert_eq!('x', d);
}<|fim▁end|> | // pretty-expanded FIXME #23616
pub fn main() { |
<|file_name|>client.cpp<|end_file_name|><|fim▁begin|>#include <onyxudp/udpclient.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
/* This test just makes sure the client library compiles and doens't crash on start. */
void on_error(udp_client_params_t *client, UDPERR code, char const *name) {<|fim▁hole|> fprintf(stderr, "on_error: code %d: %s\n", code, name);
assert(!"should not happen");
}
int main() {
udp_client_params_t params;
memset(¶ms, 0, sizeof(params));
params.app_id = 123;
params.app_version = 321;
params.on_error = on_error;
params.on_idle = NULL;
params.on_payload = NULL;
params.on_disconnect = NULL;
udp_client_t *client = udp_client_initialize(¶ms);
assert(client != NULL);
udp_client_terminate(client);
return 0;
}<|fim▁end|> | |
<|file_name|>laogong21.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
<|fim▁hole|>
SPELL=u'láogōng'
CN=u'劳宫'
NAME=u'laogong21'
CHANNEL='pericardium'
CHANNEL_FULLNAME='PericardiumChannelofHand-Jueyin'
SEQ='PC8'
if __name__ == '__main__':
pass<|fim▁end|> | |
<|file_name|>grid_world_3D_env.py<|end_file_name|><|fim▁begin|>import numpy as np
from .base import Env
from rllab.spaces import Discrete
from rllab.envs.base import Step
from rllab.core.serializable import Serializable
MAPS = {
"chain": [
"GFFFFFFFFFFFFFSFFFFFFFFFFFFFG"
],
"4x4_safe": [
"SFFF",
"FWFW",
"FFFW",
"WFFG"
],
"4x4": [
[
"SFFF",
"FHFH",
"FFFH",
"HFFF"
],
[
"FFFF",
"FHFH",
"FFFH",
"HFFG"
]
],
"8x8": [
"FFFFSFFF",
"FFFFFFFF",
"FFFHFFFF",
"FFFFFHFF",
"FFFHFFFF",
"FHHFFFHF",
"FHFFHFHF",
"FFFHFFFG"
],
}
class GridWorld3DEnv(Env, Serializable):
"""
'S' : starting point
'F' or '.': free space
'W' or 'x': wall
'H' or 'o': hole (terminates episode)
'G' : goal
"""
def __init__(self, desc='4x4'):
Serializable.quick_init(self, locals())
#print("desc before isinstance",desc)
if isinstance(desc, str):
desc = MAPS[desc]
#print("desc before nparray \n",desc)
desc[0] = list(map(list, desc[0]))
#print(desc[0])
desc[1] = list(map(list, desc[1]))
#print(desc[1])
desc= np.array(list(desc))
#print("desc after nparray \n",desc)
desc[desc == '.'] = 'F'
desc[desc == 'o'] = 'H'
desc[desc == 'x'] = 'W'
self.desc = desc
self.levels, self.n_row, self.n_col = desc.shape[:]
#print("desc before search start \n", desc)
(start_z,), (start_x,), (start_y,) = np.nonzero(desc == 'S')
print('x', start_x)
print('y', start_y)
print('z', start_z)
        self.start_state = start_z * (self.n_col * self.n_row) + start_x * self.n_col + start_y
self.state = None
self.domain_fig = None
def reset(self):
self.state = self.start_state
return self.state
@staticmethod
def action_from_direction(d):
"""
Return the action corresponding to the given direction. This is a helper method for debugging and testing
purposes.
:return: the action index corresponding to the given direction
"""
return dict(
left=0,
down=1,
right=2,
up=3,
climb_up=4,
climb_down=5
)[d]
def step(self, action):
"""
action map:
0: left
1: down
2: right
3: up
4: climb_up
5: climb_down
:param action: should be a one-hot vector encoding the action
:return:
"""
possible_next_states = self.get_possible_next_states(self.state, action)
#x = self.state // self.n_col
#y = self.state % self.n_col
#coords = np.array([x, y])
#print(coords)
#now=self.desc
#now[coords[0], coords[1]]='X'
#print(now)
probs = [x[1] for x in possible_next_states]
next_state_idx = np.random.choice(len(probs), p=probs)
next_state = possible_next_states[next_state_idx][0]
print("next state is", next_state)
next_z = next_state // (self.n_col * self.n_row)
        next_x = (next_state - next_z*(self.n_col * self.n_row)) // self.n_col  # decode the row index
next_y = (next_state - next_z*(self.n_col * self.n_row)) % self.n_col
#print(self.n_col)
#print(self.n_row)
#print(self.levels)
#print("the next z is", next_z)
#print("the next x is", next_x)
#print("the next y is", next_y)
next_state_type = self.desc[next_z, next_x, next_y]
#print(next_state_type)
#print(self.desc)
# Here we fix what each position does.
if next_state_type == 'H':
done = True
reward = 0
elif next_state_type in ['F', 'S']:
done = False
reward = 0
elif next_state_type == 'G':
done = True
reward = 1
else:
raise NotImplementedError
self.state = next_state
return Step(observation=self.state, reward=reward, done=done)
def get_possible_next_states(self, state, action):
"""
Given the state and action, return a list of possible next states and their probabilities. Only next states
with nonzero probabilities will be returned
:param state: start state
:param action: action
:return: a list of pairs (s', p(s'|s,a))
"""
# assert self.observation_space.contains(state)
# assert self.action_space.contains(action)
z = self.state // (self.n_col * self.n_row)
        x = (self.state - z*(self.n_col * self.n_row)) // self.n_col  # decode the row index
y = (self.state - z*(self.n_col * self.n_row)) % self.n_col
coords = np.array([z, x, y])
#print('NEW STEP')
#print(coords)
#print(coords)
self.desc[0] = list(map(list, self.desc[0]))
#print(desc[0])
self.desc[1] = list(map(list, self.desc[1]))
#print(desc[1])<|fim▁hole|>
now[z, x, y]='X'
print(now)
#Possible increments produced by the actions.
#print(action)
increments = np.array([[0, 0, -1], [0, 1, 0], [0, 0, 1], [0, -1, 0], [1, 0, 0], [-1, 0, 0]])
next_coords = np.clip(
coords + increments[action],
[0, 0, 0],
[self.levels -1, self.n_row - 1, self.n_col - 1]
)
#print(next_coords)
        next_state = next_coords[0] * (self.n_col * self.n_row) + next_coords[1] * self.n_col + next_coords[2]  # encode (z, x, y) back into a flat state index
#print(next_state)
state_type = self.desc[z, x, y]
next_state_type = self.desc[next_coords[0], next_coords[1], next_coords[2]]
#print(next_state_type)
if next_state_type == 'W' or state_type == 'H' or state_type == 'G':
return [(state, 1.)]
else:
return [(next_state, 1.)]
@property
def action_space(self):
return Discrete(6)
@property
def observation_space(self):
return Discrete(self.n_row * self.n_col * self.levels)<|fim▁end|> | now= np.array(list(self.desc))
#now=np.array(list(map(list, self.desc)))
#print(now) |
<|file_name|>translate-provider.js<|end_file_name|><|fim▁begin|>'use strict';
var storageKey = 'VN_TRANSLATE';
// ReSharper disable once InconsistentNaming
function Translate($translate, $translatePartialLoader, storage, options, disableTranslations) {
this.$translate = $translate;
this.$translatePartialLoader = $translatePartialLoader;
this.storage = storage;
this.disableTranslations = disableTranslations;
this.configure(angular.extend(options, this.getConfig()));
this.addPart = $translatePartialLoader.addPart;
}
Translate.prototype.getConfig = function() {
var storage = this.storage;
var config = JSON.parse(storage.get(storageKey)) || {};
var lang = storage.get('NG_TRANSLATE_LANG_KEY');
if (!this.disableTranslations && lang && lang !== 'undefined') {
config.lang = lang;
}
return config;
};
Translate.prototype.configure = function(config) {
config = angular.extend(this.getConfig(), config);
this.storage.set(storageKey, JSON.stringify(config));
this.$translate.use(config.lang);
};
Translate.prototype.addParts = function() {
if (this.disableTranslations) {
return true;
}
var loader = this.$translatePartialLoader;
angular.forEach(arguments, function(part) {
loader.addPart(part);
});
return this.$translate.refresh();
};
function TranslateProvider($translateProvider) {
this.$translateProvider = $translateProvider;
this.setPreferredLanguage = $translateProvider.preferredLanguage;
}
TranslateProvider.prototype.$get = [
'$translate', '$translatePartialLoader', 'storage',
function($translate, $translatePartialLoader, storage) {
var options = this.options;
return new Translate($translate, $translatePartialLoader, storage, {
region: options.region,
lang: options.lang,
country: options.country
}, options.disableTranslations);
}
];
TranslateProvider.prototype.configure = function(options) {
options = angular.extend({ region: 'us', lang: 'en', country: 'us' }, options);
if (options.lang) {<|fim▁hole|>
if (!options.disableTranslations) {
this.initTranslateProvider(options.lang);
}
};
TranslateProvider.prototype.initTranslateProvider = function(lang) {
var $translateProvider = this.$translateProvider;
$translateProvider.useLoader('$translatePartialLoader', {
urlTemplate: '/translations/{part}/{lang}.json'
});
if (lang === 'en') {
$translateProvider.useMessageFormatInterpolation();
}
$translateProvider.useMissingTranslationHandlerLog();
$translateProvider.useLocalStorage();
};
angular.module('Volusion.toolboxCommon')
.provider('translate', ['$translateProvider', TranslateProvider]);<|fim▁end|> | this.setPreferredLanguage(options.lang);
}
this.options = options; |
<|file_name|>glm_poisson_example.py<|end_file_name|><|fim▁begin|># Used swedish insurance data from smalldata instead of MASS/insurance due to the license of the MASS R package.
import h2o
from h2o.estimators.glm import H2OGeneralizedLinearEstimator
h2o.init()
h2o_df = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/glm_test/Motor_insurance_sweden.txt", sep = '\t')<|fim▁hole|>poisson_fit = H2OGeneralizedLinearEstimator(family = "poisson")
poisson_fit.train(y="Claims", x = ["Payment", "Insured", "Kilometres", "Zone", "Bonus", "Make"], training_frame = h2o_df)<|fim▁end|> | |
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>var ExtractTextPlugin = require("extract-text-webpack-plugin");
var HtmlWebpackPlugin = require("html-webpack-plugin");
var path = require("path");
var webpack = require("webpack");
var projectTemplatesRoot = "../../ppb/templates/";
module.exports = {
context: path.resolve(__dirname, "src"),
entry: {
app: "./js/main.js"
},
output: {
path: path.resolve(__dirname, "dist"),
filename: "js/site.js?[hash]",
publicPath: "/site_media/static"
},
module: {
loaders: [
{
test: /\.(gif|png|ico|jpg|svg)$/,
include: [
path.resolve(__dirname, "src/images")
],
loader: "file-loader?name=/images/[name].[ext]"
},<|fim▁hole|> {
test: /\.(woff|woff2|ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/,
include: [
path.resolve(__dirname, "/src/fonts"),
path.resolve(__dirname, "../node_modules")
],
loader: "file-loader?name=/fonts/[name].[ext]?[hash]"
},
{ test: /\.jsx?$/, loader: "babel-loader", query: {compact: false} },
]
},
resolve: {
extensions: ["", ".js", ".jsx"],
},
plugins: [
new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/),
new ExtractTextPlugin("css/site.css?[hash]"),
new HtmlWebpackPlugin({
filename: projectTemplatesRoot + "_styles.html",
templateContent: function(templateParams, compilation) {
var link = "";
for (var css in templateParams.htmlWebpackPlugin.files.css) {
link += "<link href='" + templateParams.htmlWebpackPlugin.files.css[css] + "' rel='stylesheet' />\n"
}
return link;
}
}),
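        // The plugin above emits _styles.html containing lines like
        // <link href='css/site.css?<hash>' rel='stylesheet' /> (hash illustrative);
        // the plugin below does the same for <script> tags in _scripts.html.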
new HtmlWebpackPlugin({
filename: projectTemplatesRoot + "_scripts.html",
templateContent: function(templateParams, compilation) {
var script = "";
for (var js in templateParams.htmlWebpackPlugin.files.js) {
script += "<script src='" + templateParams.htmlWebpackPlugin.files.js[js] + "'></script>\n"
}
return script;
}
})
]
};<|fim▁end|> | { test: /\.less$/, loader: ExtractTextPlugin.extract("style-loader", "css-loader!less-loader") }, |
<|file_name|>KxDesktopServices.cpp<|end_file_name|><|fim▁begin|>#include "KxDesktopServices.h"
#include <QDesktopServices>
#include <QUrl>
KxDesktopServices::KxDesktopServices(QObject *parent)
: QObject(parent)
{
}
<|fim▁hole|>void KxDesktopServices::openUrl(const QString &url)
{
QDesktopServices::openUrl(QUrl(url));
}<|fim▁end|> | |
<|file_name|>SynchronizerDeadlock.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2005 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
/*
* @summary SynchronizerDeadlock creates threads that are deadlocked
* waiting for JSR-166 synchronizers.
* @author Mandy Chung
* @build Barrier
*/
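// The three threads constructed below form a lock cycle (a sketch of the
// setup in the constructor): Thread-1 holds a and waits for b, Thread-2
// holds b and waits for c, Thread-3 holds c and waits for a.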
import java.lang.management.*;
import java.util.*;
import java.util.concurrent.locks.*;
public class SynchronizerDeadlock {
private Lock a = new ReentrantLock();
private Lock b = new ReentrantLock();
private Lock c = new ReentrantLock();
private final int EXPECTED_THREADS = 3;
private Thread[] dThreads = new Thread[EXPECTED_THREADS];
private Barrier go = new Barrier(1);
private Barrier barr = new Barrier(EXPECTED_THREADS);
public SynchronizerDeadlock() {
dThreads[0] = new DeadlockingThread("Deadlock-Thread-1", a, b);
dThreads[1] = new DeadlockingThread("Deadlock-Thread-2", b, c);
dThreads[2] = new DeadlockingThread("Deadlock-Thread-3", c, a);
// make them daemon threads so that the test will exit
for (int i = 0; i < EXPECTED_THREADS; i++) {
dThreads[i].setDaemon(true);
dThreads[i].start();
}
}
void goDeadlock() {
// Wait until all threads have started
barr.await();
// reset for later signals
barr.set(EXPECTED_THREADS);
while (go.getWaiterCount() != EXPECTED_THREADS) {
synchronized(this) {
try {
wait(100);
} catch (InterruptedException e) {
// ignore
}
}
}
// sleep a little so that all threads are blocked before notified.
try {
Thread.sleep(100);
} catch (InterruptedException e) {
// ignore
}
go.signal();
}
void waitUntilDeadlock() {
barr.await();
for (int i=0; i < 100; i++) {
// sleep a little while to wait until threads are blocked.
try {
Thread.sleep(100);
} catch (InterruptedException e) {
// ignore
}
boolean retry = false;
for (Thread t: dThreads) {
if (t.getState() == Thread.State.RUNNABLE) {
retry = true;
break;
}
}
if (!retry) {
break;
}
}
}
private class DeadlockingThread extends Thread {
private final Lock lock1;
private final Lock lock2;
DeadlockingThread(String name, Lock lock1, Lock lock2) {
super(name);
this.lock1 = lock1;
this.lock2 = lock2;
}
public void run() {
f();
}
private void f() {
lock1.lock();
try {
barr.signal();
go.await();
g();
} finally {
lock1.unlock();
}
}
private void g() {
barr.signal();
lock2.lock();
throw new RuntimeException("should not reach here.");
}
}
void checkResult(long[] threads) {
if (threads.length != EXPECTED_THREADS) {
ThreadDump.threadDump();
throw new RuntimeException("Expected to have " +
EXPECTED_THREADS + " to be in the deadlock list");
}
boolean[] found = new boolean[EXPECTED_THREADS];
for (int i = 0; i < threads.length; i++) {
for (int j = 0; j < dThreads.length; j++) {
if (dThreads[j].getId() == threads[i]) {
found[j] = true;
}
}
}
boolean ok = true;
for (int j = 0; j < found.length; j++) {
ok = ok && found[j];
}
if (!ok) {
System.out.print("Returned result is [");
for (int j = 0; j < threads.length; j++) {
System.out.print(threads[j] + " ");
}
System.out.println("]");
System.out.print("Expected result is [");
for (int j = 0; j < threads.length; j++) {
System.out.print(dThreads[j] + " ");
}
System.out.println("]");
throw new RuntimeException("Unexpected result returned " +
" by findMonitorDeadlockedThreads method.");
}<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.exceptions import ValidationError
from django.db.models.fields.related import ForeignObject
try:
from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor
except ImportError:
from django.db.models.fields.related import ReverseSingleRelatedObjectDescriptor as ForwardManyToOneDescriptor
from django.utils.encoding import python_2_unicode_compatible
import logging
logger = logging.getLogger(__name__)
# Python 3 fixes.
import sys
if sys.version > '3':
long = int
basestring = (str, bytes)
unicode = str
__all__ = ['Country', 'State', 'Locality', 'Address', 'AddressField']
class InconsistentDictError(Exception):
pass
def _to_python(value):
raw = value.get('raw', '')
country = value.get('country', '')
country_code = value.get('country_code', '')
state = value.get('state', '')
state_code = value.get('state_code', '')
locality = value.get('locality', '')
postal_code = value.get('postal_code', '')
street_number = value.get('street_number', '')
route = value.get('route', '')
formatted = value.get('formatted', '')
latitude = value.get('latitude', None)
longitude = value.get('longitude', None)
# If there is no value (empty raw) then return None.
if not raw:
return None
# If we have an inconsistent set of value bail out now.
if (country or state or locality) and not (country and state and locality):
raise InconsistentDictError
# Handle the country.
try:
country_obj = Country.objects.get(name=country)
except Country.DoesNotExist:
if country:
if len(country_code) > Country._meta.get_field('code').max_length:
if country_code != country:
raise ValueError('Invalid country code (too long): %s'%country_code)
country_code = ''
country_obj = Country.objects.create(name=country, code=country_code)
else:
country_obj = None
# Handle the state.
try:
state_obj = State.objects.get(name=state, country=country_obj)
except State.DoesNotExist:
if state:
if len(state_code) > State._meta.get_field('code').max_length:
if state_code != state:
raise ValueError('Invalid state code (too long): %s'%state_code)
state_code = ''
state_obj = State.objects.create(name=state, code=state_code, country=country_obj)
else:
state_obj = None
<|fim▁hole|> except Locality.DoesNotExist:
if locality:
locality_obj = Locality.objects.create(name=locality, postal_code=postal_code, state=state_obj)
else:
locality_obj = None
# Handle the address.
try:
if not (street_number or route or locality):
address_obj = Address.objects.get(raw=raw)
else:
address_obj = Address.objects.get(
street_number=street_number,
route=route,
locality=locality_obj
)
except Address.DoesNotExist:
address_obj = Address(
street_number=street_number,
route=route,
raw=raw,
locality=locality_obj,
formatted=formatted,
latitude=latitude,
longitude=longitude,
)
# If "formatted" is empty try to construct it from other values.
if not address_obj.formatted:
address_obj.formatted = unicode(address_obj)
# Need to save.
address_obj.save()
# Done.
return address_obj
##
## Convert a dictionary to an address.
##
def to_python(value):
# Keep `None`s.
if value is None:
return None
# Is it already an address object?
if isinstance(value, Address):
return value
# If we have an integer, assume it is a model primary key. This is mostly for
# Django being a cunt.
elif isinstance(value, (int, long)):
return value
# A string is considered a raw value.
elif isinstance(value, basestring):
obj = Address(raw=value)
obj.save()
return obj
# A dictionary of named address components.
elif isinstance(value, dict):
# Attempt a conversion.
try:
return _to_python(value)
except InconsistentDictError:
return Address.objects.create(raw=value['raw'])
# Not in any of the formats I recognise.
raise ValidationError('Invalid address value.')
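# A sketch of the input shapes to_python() accepts (values hypothetical):
#
#   to_python(None)                                  # -> None
#   to_python(42)                                    # primary key, passed through
#   to_python('1 Example St, Springfield')           # raw string, saved as-is
#   to_python({'raw': '...', 'country': 'US'})       # decomposed components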
##
## A country.
##
@python_2_unicode_compatible
class Country(models.Model):
name = models.CharField(max_length=40, unique=True, blank=True)
code = models.CharField(max_length=2, blank=True) # not unique as there are duplicates (IT)
class Meta:
verbose_name_plural = 'Countries'
ordering = ('name',)
def __str__(self):
return '%s'%(self.name or self.code)
##
## A state. Google refers to this as `administration_level_1`.
##
@python_2_unicode_compatible
class State(models.Model):
name = models.CharField(max_length=165, blank=True)
code = models.CharField(max_length=3, blank=True)
country = models.ForeignKey(Country, related_name='states')
class Meta:
unique_together = ('name', 'country')
ordering = ('country', 'name')
def __str__(self):
txt = self.to_str()
country = '%s'%self.country
if country and txt:
txt += ', '
txt += country
return txt
def to_str(self):
return '%s'%(self.name or self.code)
##
## A locality (suburb).
##
@python_2_unicode_compatible
class Locality(models.Model):
name = models.CharField(max_length=165, blank=True)
postal_code = models.CharField(max_length=10, blank=True)
state = models.ForeignKey(State, related_name='localities')
class Meta:
verbose_name_plural = 'Localities'
unique_together = ('name', 'state')
ordering = ('state', 'name')
def __str__(self):
txt = '%s'%self.name
state = self.state.to_str() if self.state else ''
if txt and state:
txt += ', '
txt += state
if self.postal_code:
txt += ' %s'%self.postal_code
cntry = '%s'%(self.state.country if self.state and self.state.country else '')
if cntry:
txt += ', %s'%cntry
return txt
##
## An address. If for any reason we are unable to find a matching
## decomposed address we will store the raw address string in `raw`.
##
@python_2_unicode_compatible
class Address(models.Model):
street_number = models.CharField(max_length=20, blank=True)
route = models.CharField(max_length=100, blank=True)
locality = models.ForeignKey(Locality, related_name='addresses', blank=True, null=True)
raw = models.CharField(max_length=200)
formatted = models.CharField(max_length=200, blank=True)
latitude = models.FloatField(blank=True, null=True)
longitude = models.FloatField(blank=True, null=True)
class Meta:
verbose_name_plural = 'Addresses'
ordering = ('locality', 'route', 'street_number')
# unique_together = ('locality', 'route', 'street_number')
def __str__(self):
if self.formatted != '':
txt = '%s'%self.formatted
elif self.locality:
txt = ''
if self.street_number:
txt = '%s'%self.street_number
if self.route:
if txt:
txt += ' %s'%self.route
locality = '%s'%self.locality
if txt and locality:
txt += ', '
txt += locality
else:
txt = '%s'%self.raw
return txt
def clean(self):
if not self.raw:
raise ValidationError('Addresses may not have a blank `raw` field.')
def as_dict(self):
ad = dict(
street_number=self.street_number,
route=self.route,
raw=self.raw,
formatted=self.formatted,
latitude=self.latitude if self.latitude else '',
longitude=self.longitude if self.longitude else '',
)
if self.locality:
ad['locality'] = self.locality.name
ad['postal_code'] = self.locality.postal_code
if self.locality.state:
ad['state'] = self.locality.state.name
ad['state_code'] = self.locality.state.code
if self.locality.state.country:
ad['country'] = self.locality.state.country.name
ad['country_code'] = self.locality.state.country.code
return ad
class AddressDescriptor(ForwardManyToOneDescriptor):
def __set__(self, inst, value):
super(AddressDescriptor, self).__set__(inst, to_python(value))
##
## A field for addresses in other models.
##
class AddressField(models.ForeignKey):
description = 'An address'
def __init__(self, **kwargs):
kwargs['to'] = 'address.Address'
super(AddressField, self).__init__(**kwargs)
def contribute_to_class(self, cls, name, virtual_only=False):
super(ForeignObject, self).contribute_to_class(cls, name, virtual_only=virtual_only)
setattr(cls, self.name, AddressDescriptor(self))
# def deconstruct(self):
# name, path, args, kwargs = super(AddressField, self).deconstruct()
# del kwargs['to']
# return name, path, args, kwargs
def formfield(self, **kwargs):
from .forms import AddressField as AddressFormField
defaults = dict(form_class=AddressFormField)
defaults.update(kwargs)
return super(AddressField, self).formfield(**defaults)<|fim▁end|> | # Handle the locality.
try:
locality_obj = Locality.objects.get(name=locality, state=state_obj) |
<|file_name|>TestPathTool.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2017 sliptonic <[email protected]> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import Path
from PathTests.PathTestUtils import PathTestBase
class TestPathTool(PathTestBase):
def test00(self):
'''Verify templateAttrs'''
name = 'tool 1'
mat = 'Carbide'
typ = 'EndMill'<|fim▁hole|> dia = 1.7
flat = 7.2
offset = 3.2
corner = 4
height = 45.3
angle = 118
tool = Path.Tool()
tool.Name = name
tool.ToolType = typ
tool.Material = mat
tool.Diameter = dia
tool.LengthOffset = offset
tool.FlatRadius = flat
tool.CornerRadius = corner
tool.CuttingEdgeAngle = angle
tool.CuttingEdgeHeight = height
attrs = tool.templateAttrs()
self.assertEqual(attrs['name'], name)
self.assertEqual(attrs['diameter'], dia)
self.assertEqual(attrs['material'], mat)
self.assertEqual(attrs['tooltype'], typ)
self.assertEqual(attrs['lengthOffset'], offset)
self.assertEqual(attrs['flatRadius'], flat)
self.assertEqual(attrs['cornerRadius'], corner)
self.assertEqual(attrs['cuttingEdgeAngle'], angle)
self.assertEqual(attrs['cuttingEdgeHeight'], height)
return tool
def test01(self):
'''Verify template roundtrip'''
t0 = self.test00()
t1 = Path.Tool()
t1.setFromTemplate(t0.templateAttrs())
self.assertEqual(t0.Name, t1.Name)
self.assertEqual(t0.ToolType, t1.ToolType)
self.assertEqual(t0.Material, t1.Material)
self.assertEqual(t0.Diameter, t1.Diameter)
self.assertEqual(t0.LengthOffset, t1.LengthOffset)
self.assertEqual(t0.FlatRadius, t1.FlatRadius)
self.assertEqual(t0.CornerRadius, t1.CornerRadius)
self.assertEqual(t0.CuttingEdgeAngle, t1.CuttingEdgeAngle)
self.assertEqual(t0.CuttingEdgeHeight, t1.CuttingEdgeHeight)
def test02(self):
'''Verify template dictionary construction'''
t0 = self.test00()
t1 = Path.Tool(t0.templateAttrs())
self.assertEqual(t0.Name, t1.Name)
self.assertEqual(t0.ToolType, t1.ToolType)
self.assertEqual(t0.Material, t1.Material)
self.assertEqual(t0.Diameter, t1.Diameter)
self.assertEqual(t0.LengthOffset, t1.LengthOffset)
self.assertEqual(t0.FlatRadius, t1.FlatRadius)
self.assertEqual(t0.CornerRadius, t1.CornerRadius)
self.assertEqual(t0.CuttingEdgeAngle, t1.CuttingEdgeAngle)
self.assertEqual(t0.CuttingEdgeHeight, t1.CuttingEdgeHeight)<|fim▁end|> | |
<|file_name|>StreamInfo.java<|end_file_name|><|fim▁begin|>package chatty.util.api;
import chatty.Helper;
import chatty.util.DateTime;<|fim▁hole|>import java.util.logging.Logger;
/**
* Holds the current info (name, viewers, title, game) of a stream, as well
* as a history of the same information and stuff like when the info was
* last requested, whether it's currently waiting for an API response etc.
*
* @author tduva
*/
public class StreamInfo {
private static final Logger LOGGER = Logger.getLogger(StreamInfo.class.getName());
/**
* All lowercase name of the stream
*/
public final String stream;
/**
* Correctly capitalized name of the stream. May be null if no set.
*/
private String display_name;
private long lastUpdated = 0;
private long lastStatusChange = 0;
private String status = "";
private String game = "";
private int viewers = 0;
private long startedAt = -1;
private long lastOnline = -1;
private long startedAtWithPicnic = -1;
private boolean online = false;
private boolean updateSucceeded = false;
private int updateFailedCounter = 0;
private boolean requested = false;
private boolean followed = false;
/**
* The time the stream was changed from online -> offline, so recheck if
* that actually is correct after some time. If this is -1, then do nothing.
* Should be set to -1 with EVERY update (received data), except when it's
* not already -1 on the change from online -> offline (to avoid request
* spam if recheckOffline() is always true).
*/
private long recheckOffline = -1;
    // When the viewer stats were last calculated
private long lastViewerStats;
// How long at least between viewer stats calculations
private static final int VIEWERSTATS_DELAY = 30*60*1000;
// How long a stats range can be at most
private static final int VIEWERSTATS_MAX_LENGTH = 35*60*1000;
private static final int RECHECK_OFFLINE_DELAY = 10*1000;
/**
* Maximum length in seconds of what should count as a PICNIC (short stream
* offline period), to set the online time with PICNICs correctly.
*/
private static final int MAX_PICNIC_LENGTH = 600;
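    // E.g. (hypothetical timeline) a 4-minute dropout counts as a PICNIC, so
    // getTimeStartedWithPicnic() keeps the original start time across it.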
/**
* The current full status (title + game), updated when new data is set.
*/
private String currentFullStatus;
private String prevFullStatus;
private final LinkedHashMap<Long,StreamInfoHistoryItem> history = new LinkedHashMap<>();
private int expiresAfter = 300;
private final StreamInfoListener listener;
public StreamInfo(String stream, StreamInfoListener listener) {
this.listener = listener;
this.stream = stream.toLowerCase(Locale.ENGLISH);
}
private void streamInfoUpdated() {
if (listener != null) {
listener.streamInfoUpdated(this);
}
}
public void setRequested() {
this.requested = true;
}
public boolean isRequested() {
return requested;
}
private void streamInfoStatusChanged() {
lastStatusChange = System.currentTimeMillis();
if (listener != null) {
listener.streamInfoStatusChanged(this, getFullStatus());
}
}
@Override
public String toString() {
return "Online: "+online+
" Status: "+status+
" Game: "+game+
" Viewers: "+viewers;
}
public String getFullStatus() {
return currentFullStatus;
}
private String makeFullStatus() {
if (online) {
String fullStatus = status;
if (status == null) {
fullStatus = "No stream title set";
}
if (game != null) {
fullStatus += " ("+game+")";
}
return fullStatus;
}
else if (!updateSucceeded) {
return "";
}
else {
return "Stream offline";
}
}
public String getStream() {
return stream;
}
/**
* The correctly capitalized name of the stream, or the all lowercase name
* if correctly capitalized name is not set.
*
* @return The correctly capitalized name or all lowercase name
*/
public String getDisplayName() {
return display_name != null ? display_name : stream;
}
/**
* Whether a correctly capitalized name is set, which if true is returned
* by {@see getDisplayName()}.
*
* @return true if a correctly capitalized name is set, false otherwise
*/
public boolean hasDisplayName() {
return display_name != null;
}
/**
* Sets the correctly capitalized name for this stream.
*
* @param name The correctly capitalized name
*/
public void setDisplayName(String name) {
this.display_name = name;
}
/**
* Set stream info from followed streams request.
*
* @param status The current stream title
* @param game The current game being played
* @param viewers The current viewercount
* @param startedAt The timestamp when the stream was started, -1 if not set
*/
public void setFollowed(String status, String game, int viewers, long startedAt) {
//System.out.println(status);
followed = true;
boolean saveToHistory = false;
if (hasExpired()) {
saveToHistory = true;
}
set(status, game, viewers, startedAt, saveToHistory);
}
/**
* Set stream info from a regular stream info request.
*
* @param status The current stream title
* @param game The current game being played
* @param viewers The current viewercount
* @param startedAt The timestamp when the stream was started, -1 if not set
*/
public void set(String status, String game, int viewers, long startedAt) {
set(status, game, viewers, startedAt, true);
}
/**
* This should only be used when the update was successful.
*
* @param status The current title of the stream
* @param game The current game
* @param viewers The current viewercount
* @param startedAt The timestamp when the stream was started, -1 if not set
* @param saveToHistory Whether to save the data to history
*/
private void set(String status, String game, int viewers, long startedAt, boolean saveToHistory) {
this.status = Helper.trim(Helper.removeLinebreaks(status));
this.game = Helper.trim(game);
this.viewers = viewers;
// Always set to -1 (do nothing) when stream is set as online, but also
// output a message if necessary
if (recheckOffline != -1) {
if (this.startedAt < startedAt) {
LOGGER.info("StreamInfo " + stream + ": Stream not offline anymore");
} else {
LOGGER.info("StreamInfo " + stream + ": Stream not offline");
}
}
recheckOffline = -1;
if (lastOnlineAgo() > MAX_PICNIC_LENGTH) {
// Only update online time with PICNICs when offline time was long
// enough (of course also depends on what stream data Chatty has)
this.startedAtWithPicnic = startedAt;
}
this.startedAt = startedAt;
this.lastOnline = System.currentTimeMillis();
this.online = true;
if (saveToHistory) {
addHistoryItem(System.currentTimeMillis(),new StreamInfoHistoryItem(viewers, status, game));
}
setUpdateSucceeded(true);
}
public void setExpiresAfter(int expiresAfter) {
this.expiresAfter = expiresAfter;
}
public void setUpdateFailed() {
setUpdateSucceeded(false);
}
private void setUpdateSucceeded(boolean succeeded) {
updateSucceeded = succeeded;
setUpdated();
if (succeeded) {
updateFailedCounter = 0;
}
else {
updateFailedCounter++;
if (recheckOffline != -1) {
// If an offline check is pending and the update failed, then
// just set as offline now (may of course not be accurate at all
// anymore).
LOGGER.warning("StreamInfo "+stream+": Update failed, delayed setting offline");
setOffline();
}
}
currentFullStatus = makeFullStatus();
if (succeeded && !currentFullStatus.equals(prevFullStatus) ||
lastUpdateLongAgo()) {
prevFullStatus = currentFullStatus;
streamInfoStatusChanged();
}
// Call at the end, so stuff is already updated
streamInfoUpdated();
}
public void setOffline() {
// If switching from online to offline
if (online && recheckOffline == -1) {
LOGGER.info("Waiting to recheck offline status for " + stream);
recheckOffline = System.currentTimeMillis();
} else {
if (recheckOffline != -1) {
//addHistoryItem(recheckOffline, new StreamInfoHistoryItem());
LOGGER.info("Offline after check: "+stream);
}
recheckOffline = -1;
this.online = false;
addHistoryItem(System.currentTimeMillis(), new StreamInfoHistoryItem());
}
setUpdateSucceeded(true);
}
/**
* Whether to recheck the offline status by requesting the stream status
* again earlier than usual.
*
* @return true if it should be checked, false otherwise
*/
public boolean recheckOffline() {
return recheckOffline != -1
&& System.currentTimeMillis() - recheckOffline > RECHECK_OFFLINE_DELAY;
}
public boolean getFollowed() {
return followed;
}
public boolean getOnline() {
return this.online;
}
/**
* The time the stream was started. As always, this may contain stale data
* if the stream info is not valid or the stream offline.
*
* @return The timestamp or -1 if no time was received
*/
public long getTimeStarted() {
return startedAt;
}
/**
* The time the stream was started, including short disconnects (max 10
* minutes). If there was no disconnect, then the time is equal to
* getTimeStarted(). As always, this may contain stale data if the stream
* info is not valid or the stream offline.
*
* @return The timestamp or -1 if not time was received or the time is
* invalid
*/
public long getTimeStartedWithPicnic() {
return startedAtWithPicnic;
}
/**
* How long ago the stream was last online. If the stream was never seen as
* online this session, then a huge number will be returned.
*
* @return The number of seconds that have passed since the stream was last
* seen as online
*/
public long lastOnlineAgo() {
return (System.currentTimeMillis() - lastOnline) / 1000;
}
public long getLastOnlineTime() {
return lastOnline;
}
private void setUpdated() {
lastUpdated = System.currentTimeMillis() / 1000;
requested = false;
}
// Getters
/**
* Gets the status stored for this stream. May not be correct, check
* isValid() before using any data.
*
* @return
*/
public String getStatus() {
return status;
}
/**
* Gets the title stored for this stream, which is the same as the status,
* unless the status is null. As opposed to getStatus() this never returns
* null.
*
* @return
*/
public String getTitle() {
if (status == null) {
return "No stream title set";
}
return status;
}
/**
* Gets the game stored for this stream. May not be correct, check
* isValid() before using any data.
*
* @return
*/
public String getGame() {
return game;
}
/**
* Gets the viewers stored for this stream. May not be correct, check
* isValid() before using any data.
*
* @return
*/
public int getViewers() {
return viewers;
}
/**
* Calculates the number of seconds that passed after the last update
*
* @return Number of seconds that have passed after the last update
*/
public long getUpdatedDelay() {
return (System.currentTimeMillis() / 1000) - lastUpdated;
}
/**
* Checks if the info should be updated. The stream info takes longer
* to expire when there were failed attempts at downloading the info from
* the API. This only affects hasExpired(), not isValid().
*
* @return true if the info should be updated, false otherwise
*/
public boolean hasExpired() {
return getUpdatedDelay() > expiresAfter * (1+ updateFailedCounter / 2);
}
/**
* Checks if the info is valid, taking into account if the last request
* succeeded and how old the data is.
*
* @return true if the info can be used, false otherwise
*/
public boolean isValid() {
if (!updateSucceeded || getUpdatedDelay() > expiresAfter*2) {
return false;
}
return true;
}
public boolean lastUpdateLongAgo() {
if (updateSucceeded && getUpdatedDelay() > expiresAfter*4) {
return true;
}
return false;
}
/**
* Returns the number of seconds the last status change is ago.
*
* @return
*/
public long getStatusChangeTimeAgo() {
return (System.currentTimeMillis() - lastStatusChange) / 1000;
}
public long getStatusChangeTime() {
return lastStatusChange;
}
private void addHistoryItem(Long time, StreamInfoHistoryItem item) {
synchronized(history) {
history.put(time, item);
}
}
public LinkedHashMap<Long,StreamInfoHistoryItem> getHistory() {
synchronized(history) {
return new LinkedHashMap<>(history);
}
}
/**
* Create a summary of the viewercount in the interval that hasn't been
* calculated yet (delay set as a constant).
*
* @param force Get stats even if the delay hasn't passed yet.
* @return
*/
public ViewerStats getViewerStats(boolean force) {
synchronized(history) {
if (lastViewerStats == 0 && !force) {
// No stats output yet, so assume current time as start, so
// it's output after the set delay
lastViewerStats = System.currentTimeMillis() - 5000;
}
long timePassed = System.currentTimeMillis() - lastViewerStats;
if (!force && timePassed < VIEWERSTATS_DELAY) {
return null;
}
long startAt = lastViewerStats+1;
// Only calculate the max length
if (timePassed > VIEWERSTATS_MAX_LENGTH) {
startAt = System.currentTimeMillis() - VIEWERSTATS_MAX_LENGTH;
}
int min = -1;
int max = -1;
int total = 0;
int count = 0;
long firstTime = -1;
long lastTime = -1;
StringBuilder b = new StringBuilder();
// Initiate with -2, because -1 already means offline
int prevViewers = -2;
for (long time : history.keySet()) {
if (time < startAt) {
continue;
}
// Start doing anything for values >= startAt
// Update so that it contains the last value that was looked at
// at the end of this method
lastViewerStats = time;
int viewers = history.get(time).getViewers();
// Append to viewercount development String
if (prevViewers > -1 && viewers != -1) {
// If there is a prevViewers set and if online
int diff = viewers - prevViewers;
if (diff >= 0) {
b.append("+");
}
b.append(Helper.formatViewerCount(diff));
} else if (viewers != -1) {
if (prevViewers == -1) {
// Previous was offline, so show that
b.append("_");
}
b.append(Helper.formatViewerCount(viewers));
}
prevViewers = viewers;
if (viewers == -1) {
continue;
}
// Calculate min/max/sum/count only when online
if (firstTime == -1) {
firstTime = time;
}
lastTime = time;
if (viewers > max) {
max = viewers;
}
if (min == -1 || viewers < min) {
min = viewers;
}
total += viewers;
count++;
}
// After going through all values, do some finishing work
if (prevViewers == -1) {
// Last value was offline, so show that
b.append("_");
}
if (count == 0) {
return null;
}
int avg = total / count;
return new ViewerStats(min, max, avg, firstTime, lastTime, count, b.toString());
}
}
/**
* Holds a set of immutable values that make up viewerstats.
*/
public static class ViewerStats {
public final int max;
public final int min;
public final int avg;
public final long startTime;
public final long endTime;
public final int count;
public final String history;
public ViewerStats(int min, int max, int avg, long startTime,
long endTime, int count, String history) {
this.max = max;
this.min = min;
this.avg = avg;
this.startTime = startTime;
this.endTime = endTime;
this.count = count;
this.history = history;
}
/**
* Which duration the data in this stats covers. This is not necessarily
* the whole duration that was worked with (e.g. if the stream went
* offline at the end, that data may not be included). This is the range
* between the first and last valid data point.
*
* @return The number of seconds this data covers.
*/
public long duration() {
return (endTime - startTime) / 1000;
}
/**
* Checks if these viewerstats contain any viewer data.
*
* @return
*/
public boolean isValid() {
// If min was set to another value than the initial one, then this
// means at least one data point with a viewercount was there.
return min != -1;
}
@Override
public String toString() {
return "Viewerstats ("+DateTime.format2(startTime)
+"-"+DateTime.format2(endTime)+"):"
+ " avg:"+Helper.formatViewerCount(avg)
+ " min:"+Helper.formatViewerCount(min)
+ " max:"+Helper.formatViewerCount(max)
+ " ["+count+"/"+history+"]";
}
}
}<|fim▁end|> | import java.util.LinkedHashMap;
import java.util.Locale; |
<|file_name|>TestUtils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from numpy.testing import assert_approx_equal, assert_allclose, assert_array_equal
from UliEngineering.SignalProcessing.Utils import *
from parameterized import parameterized
import concurrent.futures
import numpy as np
import datetime
import unittest
unstairMethods = [
("left",),
("middle",),
("right",),
("reduce",),
]
class TestRemoveMean(unittest.TestCase):
def testRemoveMean(self):
assert_allclose(remove_mean([]), [])
assert_allclose(remove_mean([1.0, 2.0, 3.0]), [-1.0, 0.0, 1.0])
class TestRMS(unittest.TestCase):
def testRMS(self):
assert_allclose(rms([]), [])
assert_allclose(rms([1.0, 2.0, 3.0]), np.sqrt(np.mean([1*1, 2*2, 3*3])))
class TestPeakToPeak(unittest.TestCase):
def testPeakToPeak(self):
assert_allclose(peak_to_peak(None), 0.0)
assert_allclose(peak_to_peak([]), 0.0)
assert_allclose(peak_to_peak([0.0]), 0.0)
assert_allclose(peak_to_peak([1.0]), 0.0)
assert_allclose(peak_to_peak([1.0, 1.0]), 0.0)
assert_allclose(peak_to_peak([1.0, 2.0]), 1.0)
assert_allclose(peak_to_peak([2.0, 1.0]), 1.0)
assert_allclose(peak_to_peak([0, 1, 3, -3, 0, 5, 0.7, 0.9]), 8)
assert_allclose(peak_to_peak(np.asarray([])), 0.0)
assert_allclose(peak_to_peak(np.asarray([0.0])), 0.0)
assert_allclose(peak_to_peak(np.asarray([1.0])), 0.0)
assert_allclose(peak_to_peak(np.asarray([1.0, 1.0])), 0.0)
assert_allclose(peak_to_peak(np.asarray([1.0, 2.0])), 1.0)
assert_allclose(peak_to_peak(np.asarray([2.0, 1.0])), 1.0)
assert_allclose(peak_to_peak(np.asarray([0, 1, 3, -3, 0, 5, 0.7, 0.9])), 8)
class TestUnstair(unittest.TestCase):
@parameterized.expand(unstairMethods)
def testNoReduction(self, method):
# Test if unstair returns the original array for a non-step function
x = np.arange(10)
y = np.square(x)
xres, yres = unstair(x, y, method=method)
assert_array_equal(xres, x)
assert_array_equal(yres, y)
def testSimpleLeft(self):
y = np.asarray([0, 0, 0, 1, 2, 2, 2, 3, 4, 5, 5])
x = np.arange(y.size)
xexpected = [0, 3, 4, 7, 8, 9, 10]
yexpected = y[xexpected]
xres, yres = unstair(x, y, method="left")
assert_array_equal(xres, xexpected)
assert_array_equal(yres, yexpected)
def testSimpleRight(self):
y = np.asarray([0, 0, 0, 1, 2, 2, 2, 3, 4, 5, 5])
x = np.arange(y.size)
xexpected = [0, 2, 3, 6, 7, 8, 10]
yexpected = y[xexpected]
xres, yres = unstair(x, y, method="right")
assert_array_equal(xres, xexpected)
assert_array_equal(yres, yexpected)
def testSimpleMiddle(self):
y = np.asarray([0, 0, 0, 1, 2, 2, 2, 3, 4, 5, 5])
x = np.arange(y.size)
xexpected = [0, 1, 3, 5, 7, 8, 10]
yexpected = y[xexpected]
xres, yres = unstair(x, y, method="middle")
assert_array_equal(xres, xexpected)
assert_array_equal(yres, yexpected)
def testSimpleReduce(self):
y = np.asarray([0, 0, 0, 1, 2, 2, 2, 3, 4, 5, 5])
x = np.arange(y.size)
xexpected = [0, 2, 3, 4, 6, 7, 8, 9, 10]
yexpected = y[xexpected]
xres, yres = unstair(x, y, method="reduce")
assert_array_equal(xres, xexpected)
assert_array_equal(yres, yexpected)
@parameterized.expand(unstairMethods)
def testSine(self, method):
# Test with a rounded sine function. Data should be reduced
sx = np.arange(1000) * .02
rsine = np.round(np.sin(sx) * 10.) / 10.
rx, ry = unstair(sx, rsine, method=method)
self.assertLess(rx.size, sx.size)
self.assertLess(ry.size, rsine.size)
class TestOptimumPolyfit(unittest.TestCase):
def testBasic(self):
x = np.linspace(-100., 100., 10000)
y = np.square(x)
poly, deg, score = optimum_polyfit(x, y)
self.assertLess(score, 1e-10)
self.assertEqual(np.max(np.abs(y - poly(x))), score)
def testRandom(self):
x = np.linspace(-100., 100., 1000)
y = np.random.random_sample(x.size)
poly, deg, score = optimum_polyfit(x, y)
class TestLinSpace(unittest.TestCase):
@parameterized.expand([
(0.0, 100.0, 101, True),
(0.0, 100.0, 202, True),
(0.0, 100.0, 735, True),
(0.0, 200.0, 101, True),
(12.5, 202.3, 101, True),
(0.0, 100.0, 101, False),
(0.0, 100.0, 202, False),
(0.0, 100.0, 735, False),
(0.0, 200.0, 101, False),
(12.5, 202.3, 101, False),
])
def testBasic(self, start, end, n, endpoint):
params = (start, end, n)
spc = LinRange(*params, endpoint=endpoint)
linspc = np.linspace(*params, endpoint=endpoint)
self.assertEqual(len(spc), params[2])
self.assertEqual(len(spc), linspc.size)
self.assertEqual((len(spc),), linspc.shape)
assert_allclose(spc[:], linspc)
# Test samplerate
assert_approx_equal(spc.samplerate(), (n - 1 if endpoint else n) / (end - start))
# Test some slice
istart, iend = len(spc) // 3, len(spc) // 2
assert_allclose(spc[istart:iend], linspc[istart:iend])
# Test negative indices
assert_allclose(spc[-istart], linspc[-istart])
# Test mid
self.assertEqual(spc.mid, (start + end) / 2.)
# Test view
assert_allclose(spc.view(0, None).size, linspc.size)
assert_allclose(spc.view(0, None)[:], linspc)
def test_equal(self):
l1 = LinRange(0., 100., 100, endpoint=False)<|fim▁hole|> self.assertTrue(l2 == l1)
self.assertFalse(l3 == l1)
self.assertFalse(l3 == l2)
def test_repr(self):
l = LinRange(0., 100., 100, endpoint=False)
self.assertEqual("LinRange(0.0, 100.0, 1.0)", str(l))
l = LinRange(0., 100., 100, endpoint=False, dtype=int)
self.assertEqual("LinRange(0.0, 100.0, 1.0, dtype=int)", str(l))
def testDtype(self):
lin1 = LinRange(0.0, 100.0, 101)
self.assertIsInstance(lin1, LinRange)
self.assertIsInstance(lin1.view(0, 5), LinRange)
class TestAggregate(unittest.TestCase):
def test_aggregate(self):
self.assertEqual([("a", 1), ("b", 1), ("c", 1)], list(aggregate("abc")))
self.assertEqual([], list(aggregate("")))
self.assertEqual([("a", 2), ("b", 1), ("c", 2), ("d", 1)],
list(aggregate("aabccd")))<|fim▁end|> | l2 = LinRange(0., 100., 100, endpoint=False)
l3 = LinRange(0., 100., 100, endpoint=True)
self.assertTrue(l1 == l2) |
<|file_name|>article_server.py<|end_file_name|><|fim▁begin|># coding=utf-8
from __init__ import *
from dao.dbArticle import SolutionArticle
from dao.dbTag import Tag
def generate_tags(data):
tag_list = []
for tag in data:
if tag == '':
continue
has_tag = Tag.query.filter(Tag.name == tag).first()
if not has_tag:
new_tag = Tag(tag)
new_tag.save()
tag_list.append(new_tag)
else:
tag_list.append(has_tag)
return tag_list
def post(form, user, is_draft):
has = SolutionArticle.query.filter(SolutionArticle.id == form.sid.data).first()
tags = generate_tags(form.tags.data)
content_list = form.content.data.split('<-more->')
list_len = len(content_list)
if list_len > 2:
        raise Exception(u'Use of the <-more-> tag exceeds the limit')
if has and has.user != user and user.is_admin == 0:
        raise Exception(u'You do not have permission to modify this article')
if not has:
has = SolutionArticle(form.title.data,user)
else:
has.title = form.title.data
has.last_update_time = datetime.now()
if list_len == 1 :
has.md_shortcut = content_list[0]
has.md_content = ""
elif content_list[0].strip() == "" :
has.md_shortcut = content_list[1]
has.md_content = ""
else:
has.md_shortcut = content_list[0]
has.md_content = content_list[1]
oj = form.problem_oj_name.data
pid = form.problem_pid.data
has.is_top = form.is_top.data
has.is_draft = is_draft
has.problem_oj_name = oj
has.problem_pid = pid
has.tags = tags
has.save()
def filter_query(query_type=None, keyword=''):
if query_type == 'title' and keyword != '':
query = SolutionArticle.query.filter(SolutionArticle.title.like('%' + keyword + '%'))
elif query_type == 'tag' and keyword != '':
tag_row = Tag.query.filter(Tag.name==keyword).first()
query = tag_row.solutions if tag_row else None
else:
query = SolutionArticle.query
return query
def get_list(offset=0, limit=20, user=None, query_type=None, keyword=''):
if not user:
query = filter_query(query_type, keyword)
return query.filter(SolutionArticle.is_draft==0).\
order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc()).\
offset(offset).limit(limit).all() if query else []
elif user.is_admin:
return SolutionArticle.query.\
order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc()).\
offset(offset).limit(limit).all()
elif user.is_coach:
return SolutionArticle.query.join(SolutionArticle.user)\
.filter(User.school==user.school, User.rights < 4).\
order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc()).\
offset(offset).limit(limit).all()
else:
return SolutionArticle.query.filter(SolutionArticle.user==user).\
order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc()).\
offset(offset).limit(limit).all()
def get_count(user=None, query_type=None, keyword=''):
if not user:
query = filter_query(query_type, keyword)
return query.filter(SolutionArticle.is_draft==0).\
order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc()).\
count() if query else 0
elif user.is_admin:
return SolutionArticle.query.\
order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc()).\
count()
elif user.is_coach:
return SolutionArticle.query.join(SolutionArticle.user)\
.filter(User.school==user.school, User.rights < 4).\
order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc()).\
count()
else:
return SolutionArticle.query.filter(SolutionArticle.user==user).\
order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc()).\
count()
def get_recent(limit=5):
return get_list(0, limit)
def get_by_id(sid):
return SolutionArticle.query.filter(SolutionArticle.id == sid).first_or_404()<|fim▁hole|>
def get_archive():
archive = db.session\
.query(SolutionArticle.last_update_time, SolutionArticle.title, SolutionArticle.url, SolutionArticle.is_top)\
.filter(SolutionArticle.is_draft==0)\
.order_by(SolutionArticle.is_top.desc(),SolutionArticle.last_update_time.desc())\
.all()
archives = dict()
for article in archive:
year = article.last_update_time.year
if year not in archives:
archives[year] = []
archives[year].append(article)
return archives
def get_archive_by_tag(tag):
tag_row = Tag.query.filter(Tag.name==tag).first()
if not tag_row:
return None
archive = tag_row.solutions\
.filter(SolutionArticle.is_draft==0)\
.order_by(SolutionArticle.is_top.desc(), SolutionArticle.last_update_time.desc())\
.all()
archives = dict()
for article in archive:
year = article.last_update_time.year
if year not in archives:
archives[year] = []
archives[year].append(article)
return archives
def get_all_tags():
tags_row = Tag.query.filter(Tag.solutions!=None).all()
tags = []
for tag in tags_row:
if tag.solutions.filter(SolutionArticle.is_draft==0).count():
tags.append(tag)
return tags
def related_submits(article, offset=0, limit=10):
if article.problem_oj_name == '' or article.problem_pid == '':
return []
    query = Submit.query.filter(Submit.oj_name==article.problem_oj_name, Submit.pro_id==article.problem_pid)
    # filter(or_(Submit.result == 'OK', Submit.result == 'Accepted')).all()
return query.offset(offset).limit(limit).all()
def related_submits_count(article):
if article.problem_oj_name == '' or article.problem_pid == '':
return 0
    query = Submit.query.filter(Submit.oj_name==article.problem_oj_name, Submit.pro_id==article.problem_pid)
    # filter(or_(Submit.result == 'OK', Submit.result == 'Accepted')).all()
return query.count()<|fim▁end|> |
def delete_by_id(sid):
SolutionArticle.query.filter(SolutionArticle.id == sid).with_lockmode('update').delete()
db.session.commit() |
<|file_name|>ContactTest.java<|end_file_name|><|fim▁begin|>package fr.eurecom.senml.entity;
import javax.jdo.annotations.IdGeneratorStrategy;
import javax.jdo.annotations.PersistenceCapable;
import javax.jdo.annotations.Persistent;
import javax.jdo.annotations.PrimaryKey;
import com.google.appengine.api.datastore.Key;
@PersistenceCapable
public class ContactTest {
@PrimaryKey
@Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY)
private Key key;
@Persistent<|fim▁hole|> private String firstName;
@Persistent
private String lastName;
@Persistent
private String email;
public ContactTest(String firstName, String lastName, String email) {
this.firstName = firstName;
this.lastName = lastName;
this.email = email;
}
    public String getFirstName() {return firstName;}
public String getLastName() {return lastName;}
public String getEmail() {return email;}
public Key getKey() {return key;}
}<|fim▁end|> | |
<|file_name|>0011_auto_20171114_1543.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-11-14 21:43
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('recipe', '0010_auto_20171114_1443'),
]
operations = [
migrations.RemoveField(
model_name='direction',
name='recipe',
),
migrations.DeleteModel(
name='Direction',
),<|fim▁hole|> ]<|fim▁end|> | |
<|file_name|>example_test.go<|end_file_name|><|fim▁begin|>package anydata_test
import (
"bufio"
"fmt"
"strings"
"github.com/pbnjay/anydata"
)
// List matching lines from a species taxonomy inside a remote tarball.<|fim▁hole|> ftch, err := anydata.GetFetcher(taxNames)
if err != nil {
panic(err)
}
// download the tarball (if necessary)
err = ftch.Fetch(taxNames)
if err != nil {
panic(err)
}
// get an io.Reader to read from names.dmp
rdr, err := ftch.GetReader()
if err != nil {
panic(err)
}
// print every line containing "scientific name"
scanner := bufio.NewScanner(rdr)
for scanner.Scan() {
line := scanner.Text()
if strings.Contains(line, "scientific name") {
fmt.Println(line)
}
}
// Output:
// 1 | root | | scientific name |
// 2 | Bacteria | Bacteria <prokaryote> | scientific name |
// 6 | Azorhizobium | | scientific name |
// 7 | Azorhizobium caulinodans | | scientific name |
// 9 | Buchnera aphidicola | | scientific name |
// 10 | Cellvibrio | | scientific name |
// 11 | [Cellvibrio] gilvus | | scientific name |
// 13 | Dictyoglomus | | scientific name |
// 14 | Dictyoglomus thermophilum | | scientific name |
// 16 | Methylophilus | | scientific name |
// 17 | Methylophilus methylotrophus | | scientific name |
// ...
}<|fim▁end|> | func Example_usage() {
// get a Fetcher for names.dmp in the the NCBI Taxonomy tarball
taxNames := "ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz#names.dmp" |
<|file_name|>InvokersTest.java<|end_file_name|><|fim▁begin|>package net.distilledcode.httpclient.impl.metatype.reflection;
import org.apache.http.client.config.RequestConfig;
import org.junit.Test;
import java.util.Map;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertThat;
public class InvokersTest {
private static class TestBean {
private boolean featureEnabled = true;
// getters
public String getFooBar() { return null; }
public void getFooBarVoid() {}
// setters
public void setBarFoo(String fooBar) {}
public void setBarFooNoArgs() {}
        // boolean switch (only called for enabling, disabled by default)
public void enableFeature() {
featureEnabled = true;
}
        // boolean switch (only called for disabling, enabled by default)
void disableFeature() {
featureEnabled = false;
}
}
@Test
public void invokeMethods() throws Exception {
// builder.setMaxRedirects(5)
Invokers.Invoker<Void> setMaxRedirects = new Invokers.Invoker<>(RequestConfig.Builder.class.getDeclaredMethod("setMaxRedirects", int.class));
RequestConfig.Builder builder = RequestConfig.custom();
setMaxRedirects.invoke(builder, 17);<|fim▁hole|> // requestConfig.getMaxRedirects()
Invokers.Invoker<Integer> getMaxRedirects = new Invokers.Invoker<>(RequestConfig.class.getDeclaredMethod("getMaxRedirects"));
RequestConfig requestConfig = builder.build();
assertThat(getMaxRedirects.invoke(requestConfig), is(17));
}
@Test
public void beanGetters() throws Exception {
Map<String, Invokers.Invoker<?>> testBeanGetters = Invokers.beanGetters(TestBean.class);
assertThat(testBeanGetters.keySet(), allOf(
hasItem("foo.bar"),
not(hasItem("foo.bar.void"))
));
}
@Test
public void beanSetters() throws Exception {
        Map<String, Invokers.Invoker<?>> testBeanSetters = Invokers.beanSetters(TestBean.class);
        assertThat(testBeanSetters.keySet(), allOf(
hasItem("bar.foo"),
not(hasItem("bar.foo.no.args"))
));
}
@Test
public void conditionalSetter() throws Exception {
Invokers.Invoker<?> featureDisabler = Invokers.conditionalNoArgsSetter(TestBean.class.getDeclaredMethod("disableFeature"), false);
TestBean testBean = new TestBean();
assertThat(testBean.featureEnabled, is(true));
featureDisabler.invoke(testBean, false);
assertThat(testBean.featureEnabled, is(false));
}
@Test
public void conditionalSetterIgnored() throws Exception {
Invokers.Invoker<?> featureDisabler = Invokers.conditionalNoArgsSetter(TestBean.class.getDeclaredMethod("disableFeature"), true);
TestBean testBean = new TestBean();
assertThat(testBean.featureEnabled, is(true));
featureDisabler.invoke(testBean, false);
assertThat(testBean.featureEnabled, is(true));
}
}<|fim▁end|> | |
<|file_name|>EventNetLookup.java<|end_file_name|><|fim▁begin|>/*
Copyright 2011 Anton Kraievoy [email protected]
This file is part of Holonet.
Holonet is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Holonet is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Holonet. If not, see <http://www.gnu.org/licenses/>.
*/
package algores.holonet.core.events;
import algores.holonet.core.CommunicationException;
import algores.holonet.core.Network;
import algores.holonet.core.Node;
import algores.holonet.core.RequestPair;
import algores.holonet.core.api.Address;
import algores.holonet.core.api.Key;<|fim▁hole|>import algores.holonet.core.api.tier1.delivery.LookupService;
import com.google.common.base.Optional;
import org.akraievoy.cnet.gen.vo.EntropySource;
import java.util.Collection;
/**
* Lookup entry event.
*/
public class EventNetLookup extends Event<EventNetLookup> {
protected int retries = 1;
public Result executeInternal(final Network network, final EntropySource eSource) {
Result aggregateResult = Result.PASSIVE;
for (int sequentialIndex = 0; sequentialIndex < retries; sequentialIndex++) {
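      // Each attempt draws a fresh client/server pair from the network's request model.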
Optional<RequestPair> optRequest =
network.generateRequestPair(eSource);
if (!optRequest.isPresent()) {
if (sequentialIndex > 0) {
throw new IllegalStateException(
"request model became empty amid request generation streak?"
);
}
break;
}
RequestPair request = optRequest.get();
Collection<Key> serverKeys =
request.server.getServices().getStorage().getKeys();
final Key mapping =
serverKeys.isEmpty() ?
// we may also pull other keys from the range, not only the greatest one
request.server.getServices().getRouting().ownRoute().getRange().getRKey().prev() :
eSource.randomElement(serverKeys);
final LookupService lookupSvc =
request.client.getServices().getLookup();
final Address address;
try {
address = lookupSvc.lookup(
mapping.getKey(), true, LookupService.Mode.GET,
Optional.of(request.server.getAddress())
);
} catch (CommunicationException e) {
if (!aggregateResult.equals(Result.FAILURE)) {
aggregateResult = handleEventFailure(e, null);
}
continue;
}
final Node lookupResult = network.getEnv().getNode(address);
      if (!lookupResult.equals(request.server)) {
network.getInterceptor().reportInconsistentLookup(LookupService.Mode.GET);
}
aggregateResult = Result.SUCCESS;
}
return aggregateResult;
}
public void setRetries(int retryCount) {
this.retries = retryCount;
}
public EventNetLookup withRetries(int retryCount) {
setRetries(retryCount);
return this;
}
}<|fim▁end|> | |
<|file_name|>TimeseriesShowController.js<|end_file_name|><|fim▁begin|>'use strict';
let TimeseriesShowController = function($scope, $controller, $timeout,
NpolarApiSecurity, NpolarTranslate,
npdcAppConfig,
Timeseries, TimeseriesModel, TimeseriesCitation, google, Sparkline) {
'ngInject';
let ctrl = this;
ctrl.authors = (t) => {
return t.authors.map(a => {
return { name: a['@id']};
});
};
// @todo NpolarLinkModel?
  ctrl.collection_link = (links, hreflang='en') => {
return links.find(l => l.rel === 'collection' && l.hreflang === hreflang);
};
$controller("NpolarBaseController", {$scope: $scope});
$scope.resource = Timeseries;
let chartElement = Sparkline.getElement();
if (chartElement) {
chartElement.innerHTML = '';
}
$scope.show().$promise.then(timeseries => {
$scope.data = timeseries.data;
$scope.data_not_null = timeseries.data.filter(t => t.value !== undefined);
// Create facet-style links with counts to timeseries with all of the same keywords...
if (!$scope.keywords && timeseries.keywords && timeseries.keywords.length > 0) {
$scope.keywords = {};
let keywords = TimeseriesModel.keywords(timeseries);
['en', 'no'].forEach(l => {
let k = keywords[l];
let href = $scope.resource.uiBase+`?filter-keywords.@value=${ k.join(',') }`;
let link = { keywords: keywords[l], count: 0, href };
Timeseries.feed({"filter-keywords.@value": k.join(','), facets: 'keywords,species,locations.placename', limit: 0}).$promise.then((r) => {
link.count = r.feed.opensearch.totalResults; // All keywords
// Count for all keywords + species
if (timeseries.species) {
let f = r.feed.facets.find(f => f.hasOwnProperty('species'));
if (f && f.species) {
let c = f.species.find(f => f.term === timeseries.species);
link.count_keywords_and_species = c.count;
}
}
// Count for first all keywords + placename[0]
if (timeseries.locations && timeseries.locations.length > 0) {
let f = r.feed.facets.find(f => f.hasOwnProperty('locations.placename'));
if (f && f['locations.placename']) {
let c = f['locations.placename'].find(f => f.term === timeseries.locations[0].placename);
link.count_keywords_and_placename = c.count;
}
}
$scope.keywords[l] = link;
}, (e) => {
$scope.keywords[l] = link;
});
});
}<|fim▁hole|>
$scope.citation = (t) => {
if (!t) { return; }
      return TimeseriesCitation.citation(t);
};
// Create graph
if ($scope.data && $scope.data.length > 0) {
$timeout(function(){
$scope.sparkline = true;
let sparkline = timeseries.data.map(d => [d.value]);
google.setOnLoadCallback(Sparkline.draw(sparkline));
});
}
// Count number of timeseries belonging to the same collection
if (timeseries.links && timeseries.links.length > 0) {
['en', 'nb'].forEach(l => {
if (!$scope.collection || !$scope.collection[l]) {
let link = ctrl.collection_link(timeseries.links, l);
if (link && link.href) {
let query = {"filter-links.href": link.href, limit: 0 };
Timeseries.feed(query).$promise.then(r => {
if (!$scope.collection) {
$scope.collection = {};
}
$scope.collection[l] = { href: $scope.resource.uiBase+`?filter-links.href=${link.href}`,
title: link.title,
count: r.feed.opensearch.totalResults
};
});
}
}
});
}
});
};
module.exports = TimeseriesShowController;<|fim▁end|> | |
<|file_name|>form_arrayrun.js<|end_file_name|><|fim▁begin|>if (typeof FormTarget === 'undefined') {
FormTarget = {};
}
FormTarget.arrayrun = (function($) {
/*
* Expected config {
* isAdmin: boolean,
* instruments: array
* }
*/
return {
getUserManualUrl: function() {
return Urls.external.userManual('array_runs');
},
getSaveUrl: function(arrayrun) {
return arrayrun.id ? Urls.rest.arrayRuns.update(arrayrun.id) : Urls.rest.arrayRuns.create;
},
getSaveMethod: function(arrayrun) {
return arrayrun.id ? 'PUT' : 'POST';
},
getEditUrl: function(arrayrun) {
return Urls.ui.arrayRuns.edit(arrayrun.id);
},
getSections: function(config, object) {
return [{
title: 'Array Run Information',
fields: [{
title: 'Array Run ID',
data: 'id',
type: 'read-only',
getDisplayValue: function(arrayrun) {
return arrayrun.id || 'Unsaved';
}
}, {
title: 'Instrument',
data: 'instrumentId',
type: 'dropdown',
include: !object.id,
required: true,
nullValue: 'SELECT',
source: config.instruments,
getItemLabel: Utils.array.getName,
getItemValue: Utils.array.getId,
sortSource: Utils.sorting.standardSort('name')
}, {
title: 'Instrument',
data: 'instrumentId',
type: 'read-only',
include: !!object.id,
getDisplayValue: function(arrayrun) {
return arrayrun.instrumentName;
}
}, {
title: 'Alias',
data: 'alias',
type: 'text',
required: true,
maxLength: 255
}, {
title: 'Description',
data: 'description',
type: 'text',
maxLength: 255
}, {
title: 'Run Path',
data: 'filePath',
type: 'text',
maxLength: 255
}, {
title: 'Array',
data: 'arrayId',
type: 'read-only',
getDisplayValue: function(arrayrun) {
return arrayrun.arrayAlias;
},
getLink: function(arrayrun) {
return Urls.ui.arrays.edit(arrayrun.arrayId);
}
}, {
title: 'Change Array',
type: 'special',
makeControls: function(form) {
return [$('<button>').addClass('ui-state-default').attr('type', 'button').text('Search').click(function() {
Utils.showDialog('Array Search', 'Search', [{
label: 'Search',
property: 'query',
type: 'text',
required: true
}], function(formData) {
Utils.ajaxWithDialog('Searching', 'GET', Urls.rest.arrayRuns.arraySearch + '?' + jQuery.param({
q: formData.query
}), null, function(data) {
if (!data || !data.length) {
Utils.showOkDialog('Search Results', ['No matching arrays found']);
return;
} else {
Utils.showWizardDialog('Search Results', data.map(function(array) {
return {
name: array.alias,
handler: function() {
form.updateField('arrayId', {
value: array.id,
label: array.alias,
link: Urls.ui.arrays.edit(array.id)
});
updateSamplesTable(array);
}
};
}));
}<|fim▁hole|> });
});
}), $('<button>').addClass('ui-state-default').attr('type', 'button').text('Remove').click(function() {
if (form.get('arrayId')) {
Utils.showConfirmDialog("Remove Array", "Remove", ["Remove the array from this array run?"], function() {
form.updateField('arrayId', {
value: null,
label: '',
link: null
});
updateSamplesTable(null);
});
} else {
Utils.showOkDialog('Remove Array', ['No array set']);
}
})];
}
}, {
title: 'Status',
data: 'status',
type: 'dropdown',
required: true,
source: Constants.healthTypes,
getItemLabel: function(item) {
return item.label;
},
getItemValue: function(item) {
return item.label;
},
onChange: function(newValue, form) {
var status = getStatus(newValue);
var updates = {
required: status.isDone,
// Editable if run is done and either there's no value set or user is admin
disabled: !status.isDone || (form.get('completionDate') && !config.isAdmin)
};
if (!status.isDone) {
updates.value = null;
}
form.updateField('completionDate', updates);
},
// Only editable by admin if run is done
disabled: !object.status ? false : (getStatus(object.status).isDone && !config.isAdmin)
}, {
title: 'Start Date',
data: 'startDate',
type: 'date',
required: true,
disabled: object.startDate && !config.isAdmin
}, {
title: 'Completion Date',
data: 'completionDate',
type: 'date'
}]
}];
}
}
function getStatus(label) {
return Utils.array.findUniqueOrThrow(function(item) {
return item.label === label;
}, Constants.healthTypes);
}
function updateSamplesTable(array) {
$('#listingSamplesTable').empty();
var data = [];
var lengthOptions = [50, 25, 10];
if (array) {
data = array.samples.map(function(sample) {
return [sample.coordinates, Box.utils.hyperlinkifyBoxable(sample.name, sample.id, sample.name),
Box.utils.hyperlinkifyBoxable(sample.name, sample.id, sample.alias)];
});
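            // Offer a page length that fits the entire array grid on a single page.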
lengthOptions.unshift(array.columns * array.rows);
}
$('#listingSamplesTable')
.dataTable(
{
"aaData": data,
"aoColumns": [{
"sTitle": "Position"
}, {
"sTitle": "Sample Name"
}, {
"sTitle": "Sample Alias"
}],
"bJQueryUI": true,
"bDestroy": true,
"aLengthMenu": [lengthOptions, lengthOptions],
"iDisplayLength": lengthOptions[0],
"sPaginationType": "full_numbers",
"sDom": '<"#toolbar.fg-toolbar ui-widget-header ui-corner-bl ui-corner-br ui-helper-clearfix"lf>r<t><"fg-toolbar ui-widget-header ui-corner-bl ui-corner-br ui-helper-clearfix"ip>',
"aaSorting": [[0, "asc"]]
}).css("width", "100%");
}
})(jQuery);<|fim▁end|> | |
<|file_name|>test_pxe_service_catalogs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import fauxfactory
import pytest
from widgetastic.utils import partial_match
from cfme import test_requirements
from cfme.infrastructure.provider import InfraProvider
from cfme.infrastructure.provider.rhevm import RHEVMProvider
from cfme.infrastructure.pxe import get_pxe_server_from_config, get_template_from_config
from cfme.services.service_catalogs import ServiceCatalogs
from cfme.utils import testgen
from cfme.utils.blockers import BZ
from cfme.utils.conf import cfme_data
from cfme.utils.generators import random_vm_name
from cfme.utils.log import logger
<|fim▁hole|> pytest.mark.usefixtures('uses_infra_providers'),
test_requirements.service,
pytest.mark.tier(2)
]
def pytest_generate_tests(metafunc):
# Filter out providers without provisioning data or hosts defined
argnames, argvalues, idlist = testgen.providers_by_class(
metafunc, [InfraProvider], required_fields=[
['provisioning', 'pxe_server'],
['provisioning', 'pxe_image'],
['provisioning', 'pxe_image_type'],
['provisioning', 'pxe_kickstart'],
['provisioning', 'pxe_template'],
['provisioning', 'datastore'],
['provisioning', 'host'],
['provisioning', 'pxe_root_password'],
['provisioning', 'vlan']
])
pargnames, pargvalues, pidlist = testgen.pxe_servers(metafunc)
pxe_server_names = [pval[0] for pval in pargvalues]
new_idlist = []
new_argvalues = []
for i, argvalue_tuple in enumerate(argvalues):
args = dict(zip(argnames, argvalue_tuple))
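        # Skip SCVMM providers entirely.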
if args['provider'].type == "scvmm":
continue
pxe_server_name = args['provider'].data['provisioning']['pxe_server']
if pxe_server_name not in pxe_server_names:
continue
pxe_cust_template = args['provider'].data['provisioning']['pxe_kickstart']
if pxe_cust_template not in cfme_data.get('customization_templates', {}).keys():
continue
new_idlist.append(idlist[i])
new_argvalues.append(argvalues[i])
testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist, scope="module")
@pytest.fixture(scope='module')
def pxe_server(appliance, provider):
provisioning_data = provider.data['provisioning']
pxe_server_name = provisioning_data['pxe_server']
return get_pxe_server_from_config(pxe_server_name, appliance=appliance)
@pytest.fixture(scope='module')
def pxe_cust_template(appliance, provider):
provisioning_data = provider.data['provisioning']
pxe_cust_template = provisioning_data['pxe_kickstart']
return get_template_from_config(pxe_cust_template, create=True, appliance=appliance)
@pytest.fixture(scope="function")
def setup_pxe_servers_vm_prov(pxe_server, pxe_cust_template, provisioning):
if not pxe_server.exists():
pxe_server.create()
pxe_server.set_pxe_image_type(provisioning['pxe_image'], provisioning['pxe_image_type'])
@pytest.fixture(scope="function")
def catalog_item(appliance, provider, dialog, catalog, provisioning,
setup_pxe_servers_vm_prov):
# generate_tests makes sure these have values
pxe_template, host, datastore, pxe_server, pxe_image, pxe_kickstart, pxe_root_password,\
pxe_image_type, pxe_vlan = map(
provisioning.get, (
'pxe_template', 'host', 'datastore', 'pxe_server', 'pxe_image', 'pxe_kickstart',
'pxe_root_password', 'pxe_image_type', 'vlan'
)
)
provisioning_data = {
'catalog': {'catalog_name': {'name': pxe_template, 'provider': provider.name},
'provision_type': 'PXE',
'pxe_server': pxe_server,
'pxe_image': {'name': pxe_image},
'vm_name': random_vm_name('pxe_service')},
'environment': {'datastore_name': {'name': datastore},
'host_name': {'name': host}},
'customize': {'root_password': pxe_root_password,
'custom_template': {'name': pxe_kickstart}},
'network': {'vlan': partial_match(pxe_vlan)},
}
item_name = fauxfactory.gen_alphanumeric()
return appliance.collections.catalog_items.create(
provider.catalog_item_type,
name=item_name,
description="my catalog", display_in=True, catalog=catalog,
dialog=dialog, prov_data=provisioning_data)
@pytest.mark.rhv1
@pytest.mark.meta(blockers=[BZ(1633540, forced_streams=['5.10'],
unblock=lambda provider: not provider.one_of(RHEVMProvider)),
BZ(1633516, forced_streams=['5.10'],
unblock=lambda provider: not provider.one_of(RHEVMProvider))])
@pytest.mark.usefixtures('setup_pxe_servers_vm_prov')
def test_pxe_servicecatalog(appliance, setup_provider, provider, catalog_item, request):
"""Tests RHEV PXE service catalog
Metadata:
test_flag: pxe, provision
"""
vm_name = catalog_item.prov_data['catalog']["vm_name"]
request.addfinalizer(
lambda: appliance.collections.infra_vms.instantiate(
"{}0001".format(vm_name), provider).cleanup_on_provider()
)
service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name)
service_catalogs.order()
# nav to requests page happens on successful provision
logger.info('Waiting for cfme provision request for service %s', catalog_item.name)
request_description = catalog_item.name
provision_request = appliance.collections.requests.instantiate(request_description,
partial_check=True)
provision_request.wait_for_request(num_sec=3600)
msg = "Provisioning failed with the message {}".format(provision_request.rest.message)
assert provision_request.is_succeeded(), msg<|fim▁end|> | pytestmark = [
pytest.mark.meta(server_roles="+automate"), |
<|file_name|>merkletree.rs<|end_file_name|><|fim▁begin|>use errors::prelude::*;
use services::ledger::merkletree::proof::{Lemma, Proof};
use services::ledger::merkletree::tree::{LeavesIntoIterator, LeavesIterator, Tree, TreeLeafData};
use utils::crypto::hash::{Hash, HASHBYTES};
/// A Merkle tree is a binary tree, with values of type `T` at the leafs,
/// and where every internal node holds the hash of the concatenation of the hashes of its children nodes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MerkleTree {
/// The root of the inner binary tree
pub root: Tree,
/// The height of the tree
pub height: usize,
/// The number of leaf nodes in the tree
pub count: usize,
/// The number of nodes in the tree
pub nodes_count: usize
}
impl MerkleTree {
/// Constructs a Merkle Tree from a vector of data blocks.
/// Returns `None` if `values` is empty.
pub fn from_vec(values: Vec<TreeLeafData>) -> IndyResult<Self> {
if values.is_empty() {
return Ok(MerkleTree {
root: Tree::empty(Hash::hash_empty()?),
height: 0,
count: 0,
nodes_count: 0
});
}
let count = values.len();
let mut nodes_count = 0;
let mut height = 0;
let mut cur = Vec::with_capacity(count);
for v in values {
let leaf = Tree::new_leaf(v)?;
cur.push(leaf);
}
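        // Merge adjacent pairs level by level; a leftover odd node is carried up unchanged.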
while cur.len() > 1 {
let mut next = Vec::new();
while !cur.is_empty() {
if cur.len() == 1 {
next.push(cur.remove(0));
}
else {
let left = cur.remove(0);
let right = cur.remove(0);
let combined_hash = Hash::hash_nodes(
left.hash(),
right.hash()
)?;
let node = Tree::Node {
hash: combined_hash.to_vec(),
left: Box::new(left),
right: Box::new(right)
};
next.push(node);
                    nodes_count += 1;
}
}
height += 1;
cur = next;
}
debug_assert!(cur.len() == 1);
let root = cur.remove(0);
Ok(MerkleTree {
            root,
            height,
            count,
            nodes_count
})
}
<|fim▁hole|> self.root.hash()
}
/// Returns the hex root hash of Merkle tree
pub fn root_hash_hex(&self) -> String {
let rh = self.root.hash();
        let mut ret: String = String::with_capacity(HASHBYTES * 2);
for i in rh {
ret.push_str(&format!("{:02x}", i));
}
        ret
}
/// Returns the height of Merkle tree
pub fn height(&self) -> usize {
self.height
}
/// Returns the number of leaves in the Merkle tree
pub fn count(&self) -> usize {
self.count
}
/// Returns whether the Merkle tree is empty or not
pub fn is_empty(&self) -> bool {
self.count() == 0
}
/// Generate an inclusion proof for the given value.
/// Returns `None` if the given value is not found in the tree.
pub fn gen_proof(&self, value: TreeLeafData) -> IndyResult<Option<Proof>> {
let root_hash = self.root_hash().clone();
let leaf_hash = Hash::hash_leaf(&value)?;
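        // Lemma::new returns None when the leaf hash cannot be found in the tree.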
Ok(Lemma::new(&self.root, leaf_hash.to_vec().as_slice()).map(|lemma|
Proof::new(root_hash, lemma, value)
))
}
/// Creates an `Iterator` over the values contained in this Merkle tree.
pub fn iter(&self) -> LeavesIterator {
self.root.iter()
}
}
impl IntoIterator for MerkleTree {
type Item = TreeLeafData;
type IntoIter = LeavesIntoIterator;
/// Creates a consuming iterator, that is, one that moves each value out of the Merkle tree.
/// The tree cannot be used after calling this.
fn into_iter(self) -> Self::IntoIter {
self.root.into_iter()
}
}
impl <'a> IntoIterator for &'a MerkleTree {
type Item = &'a TreeLeafData;
type IntoIter = LeavesIterator<'a>;
/// Creates a borrowing `Iterator` over the values contained in this Merkle tree.
fn into_iter(self) -> Self::IntoIter {
self.root.iter()
}
}<|fim▁end|> | /// Returns the root hash of Merkle tree
pub fn root_hash(&self) -> &Vec<u8> { |
<|file_name|>read.go<|end_file_name|><|fim▁begin|>// Copyright 2017 ETH Zurich
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hpkt
import (
"github.com/scionproto/scion/go/lib/addr"
"github.com/scionproto/scion/go/lib/common"
"github.com/scionproto/scion/go/lib/l4"
"github.com/scionproto/scion/go/lib/scmp"
"github.com/scionproto/scion/go/lib/spath"
"github.com/scionproto/scion/go/lib/spkt"
"github.com/scionproto/scion/go/lib/util"
)
// ParseScnPkt populates the SCION fields in s with information from b
func ParseScnPkt(s *spkt.ScnPkt, b common.RawBytes) error {
pCtx := newParseCtx(s, b)
return pCtx.parse()
}
// offsets holds start and end offsets for packet sections
type offsets struct {
start, end int
}<|fim▁hole|>
// parseCtx holds the state for the packet parser
type parseCtx struct {
// SCION packet structure we need to fill in
s *spkt.ScnPkt
// Buffer to parse
b common.RawBytes
// Current parse offset
offset int
// Helper container for common header fields; also tracks the next
// protocol we need to parse
cmnHdr *spkt.CmnHdr
// Number of found HBH extensions
hbhCounter int
// Maximum number of allowed HBH extensions
hbhLimit int
// Protocol type of next header (L4, HBH class, E2E class)
nextHdr common.L4ProtocolType
// Memorize section start and end offsets for when we need to jump
cmnHdrOffsets offsets
extHdrOffsets offsets
addrHdrOffsets offsets
fwdPathOffsets offsets
l4HdrOffsets offsets
pldOffsets offsets
// Methods for parsing various packet elements; can be overwritten by extensions
// FIXME(scrye): when the need arises, these should probably be changed to queues
// (e.g., when multiple handlers need to be chained)
HBHExtParser pktParser
E2EExtParser pktParser
AddrHdrParser pktParser
FwdPathParser pktParser
L4Parser pktParser
}
func newParseCtx(s *spkt.ScnPkt, b common.RawBytes) *parseCtx {
pCtx := &parseCtx{
s: s,
b: b,
cmnHdr: &spkt.CmnHdr{},
hbhLimit: common.ExtnMaxHBH,
}
pCtx.E2EExtParser = pCtx.DefaultE2EExtParser
pCtx.HBHExtParser = pCtx.DefaultHBHExtParser
pCtx.AddrHdrParser = pCtx.DefaultAddrHdrParser
pCtx.FwdPathParser = pCtx.DefaultFwdPathParser
pCtx.L4Parser = pCtx.DefaultL4Parser
return pCtx
}
// parse contains the processing flow
func (p *parseCtx) parse() error {
// A SCION header is parsed in the following order:
// 1. Common header
// 2. Extension headers, in the order they are placed in the packet.
// 3. Address headers
// 4. Forwarding path
// 5. L4 header
// 6. Payload
//
// Hop By Hop (HBH) extensions can override 2-6, while End to end (E2E)
// extensions can override 5-6.
if err := p.CmnHdrParser(); err != nil {
return common.NewBasicError("Unable to parse common header", err)
}
p.nextHdr = p.cmnHdr.NextHdr
// We'll advance the end offset for extensions as we parse them
p.extHdrOffsets.start = int(p.cmnHdr.HdrLen * common.LineLen)
p.extHdrOffsets.end = p.extHdrOffsets.start
// Skip after SCION header
p.offset = p.extHdrOffsets.start
if err := p.HBHAllExtsParser(); err != nil {
return common.NewBasicError("Unable to parse HBH extensions", err)
}
if err := p.E2EAllExtsParser(); err != nil {
return common.NewBasicError("Unable to parse E2E extensions", err)
}
// Return to the start of the address header
p.offset = p.cmnHdrOffsets.end
if err := p.AddrHdrParser(); err != nil {
return common.NewBasicError("Unable to parse address header", err)
}
if err := p.FwdPathParser(); err != nil {
return common.NewBasicError("Unable to parse path header", err)
}
// Jump after extensions
p.offset = p.extHdrOffsets.end
if err := p.L4Parser(); err != nil {
return common.NewBasicError("Unable to parse L4 content", err)
}
return nil
}
func (p *parseCtx) CmnHdrParser() error {
p.cmnHdrOffsets.start = p.offset
if err := p.cmnHdr.Parse(p.b[:spkt.CmnHdrLen]); err != nil {
return err
}
p.offset += spkt.CmnHdrLen
p.cmnHdrOffsets.end = p.offset
if int(p.cmnHdr.TotalLen) != len(p.b) {
return common.NewBasicError("Malformed total packet length", nil,
"expected", p.cmnHdr.TotalLen, "actual", len(p.b))
}
return nil
}
func (p *parseCtx) HBHAllExtsParser() error {
// SCION packets can contain at most 3 HBH extensions, which must appear
// immediately after the path header. If an SCMP HBH extension is present,
// it must be the first extension and raises the allowed HBH limit to 4.
// E2E extensions appear after HBH extensions (if any), or after the path
// header.
for p.nextHdr == common.HopByHopClass {
		p.hbhCounter++
if err := p.HBHExtParser(); err != nil {
return common.NewBasicError("Unable to parse HBH extension", err)
}
if p.hbhCounter > p.hbhLimit {
ext := p.s.HBHExt[len(p.s.HBHExt)-1]
return common.NewBasicError("HBH extension limit exceeded", nil,
"type", ext.Class(), "position", p.hbhCounter-1, "limit", p.hbhLimit)
}
}
return nil
}
func (p *parseCtx) E2EAllExtsParser() error {
for p.nextHdr == common.End2EndClass {
if err := p.E2EExtParser(); err != nil {
return common.NewBasicError("Unable to parse E2E extension", err)
}
}
return nil
}
func (p *parseCtx) DefaultHBHExtParser() error {
if len(p.b[p.offset:]) < common.LineLen {
return common.NewBasicError("Truncated extension", nil)
}
// Parse 3-byte extension header first
// We know the type of the next header, so we save it for the protocol loop
p.nextHdr = common.L4ProtocolType(p.b[p.offset])
hdrLen := p.b[p.offset+1]
extnType := p.b[p.offset+2]
// Advance end of extensions headers offset
p.extHdrOffsets.end += int(hdrLen * common.LineLen)
// Parse the rest of the extension header, depending on extension type
switch extnType {
case common.ExtnSCMPType.Type:
if p.hbhCounter != 1 {
// SCMP HBH extensions must come immediately after the path header
return common.NewBasicError("Invalid placement of HBH SCMP extension (must be first)",
nil, "position", p.hbhCounter-1, "offset", p.offset)
}
// SCMP HBH extensions increase the limit of HBH extensions by 1
		p.hbhLimit++
extn, err := scmp.ExtnFromRaw(p.b[p.offset+common.ExtnSubHdrLen : p.extHdrOffsets.end])
if err != nil {
return common.NewBasicError("Unable to parse extension header", err,
"type", extn.Class(), "position", p.hbhCounter-1)
}
p.s.HBHExt = append(p.s.HBHExt, extn)
default:
return common.NewBasicError("Unsupported HBH extension type", nil,
"type", extnType, "position", p.hbhCounter-1)
}
p.offset = p.extHdrOffsets.end
return nil
}
func (p *parseCtx) DefaultE2EExtParser() error {
return common.NewBasicError("Not implemented", nil)
}
func (p *parseCtx) DefaultAddrHdrParser() error {
var err error
p.addrHdrOffsets.start = p.offset
p.s.DstIA.Parse(p.b[p.offset:])
p.offset += addr.IABytes
p.s.SrcIA.Parse(p.b[p.offset:])
p.offset += addr.IABytes
if p.s.DstHost, err = addr.HostFromRaw(p.b[p.offset:], p.cmnHdr.DstType); err != nil {
return common.NewBasicError("Unable to parse destination host address", err)
}
p.offset += p.s.DstHost.Size()
if p.s.SrcHost, err = addr.HostFromRaw(p.b[p.offset:], p.cmnHdr.SrcType); err != nil {
return common.NewBasicError("Unable to parse source host address", err)
}
p.offset += p.s.SrcHost.Size()
// Validate address padding bytes
padBytes := util.CalcPadding(p.offset, common.LineLen)
if pos, ok := isZeroMemory(p.b[p.offset : p.offset+padBytes]); !ok {
return common.NewBasicError("Invalid padding", nil,
"position", pos, "expected", 0, "actual", p.b[p.offset+pos])
}
p.offset += padBytes
p.addrHdrOffsets.end = p.offset
return nil
}
func (p *parseCtx) DefaultFwdPathParser() error {
p.fwdPathOffsets.start = p.offset
pathLen := p.cmnHdr.HdrLenBytes() - p.offset
if pathLen > 0 {
if p.s.Path == nil {
p.s.Path = &spath.Path{}
}
p.s.Path.Raw = p.b[p.offset : p.offset+pathLen]
p.s.Path.InfOff = p.cmnHdr.InfoFOffBytes() - p.offset
p.s.Path.HopOff = p.cmnHdr.HopFOffBytes() - p.offset
p.offset += pathLen
}
p.fwdPathOffsets.end = p.offset
return nil
}
func (p *parseCtx) DefaultL4Parser() error {
var err error
p.l4HdrOffsets.start = p.offset
switch p.nextHdr {
case common.L4UDP:
if p.s.L4, err = l4.UDPFromRaw(p.b[p.offset : p.offset+l4.UDPLen]); err != nil {
return common.NewBasicError("Unable to parse UDP header", err)
}
case common.L4SCMP:
if p.s.L4, err = scmp.HdrFromRaw(p.b[p.offset : p.offset+scmp.HdrLen]); err != nil {
return common.NewBasicError("Unable to parse SCMP header", err)
}
default:
return common.NewBasicError("Unsupported NextHdr value", nil,
"expected", common.L4UDP, "actual", p.nextHdr)
}
p.offset += p.s.L4.L4Len()
p.l4HdrOffsets.end = p.offset
// Parse L4 payload
p.pldOffsets.start = p.offset
pldLen := len(p.b) - p.pldOffsets.start
if err = p.s.L4.Validate(pldLen); err != nil {
return common.NewBasicError("L4 validation failed", err)
}
switch p.nextHdr {
case common.L4UDP:
p.s.Pld = common.RawBytes(p.b[p.offset : p.offset+pldLen])
case common.L4SCMP:
hdr, ok := p.s.L4.(*scmp.Hdr)
if !ok {
return common.NewBasicError(
"Unable to extract SCMP payload, type assertion failed", nil)
}
p.s.Pld, err = scmp.PldFromRaw(p.b[p.offset:p.offset+pldLen],
scmp.ClassType{Class: hdr.Class, Type: hdr.Type})
if err != nil {
return common.NewBasicError("Unable to parse SCMP payload", err)
}
}
p.offset += pldLen
p.pldOffsets.end = p.offset
// Run checksum function
err = l4.CheckCSum(p.s.L4, p.b[p.addrHdrOffsets.start:p.addrHdrOffsets.end],
p.b[p.pldOffsets.start:p.pldOffsets.end])
if err != nil {
return common.NewBasicError("Checksum failed", err)
}
return nil
}<|fim▁end|> |
// Processing/parsing callback type
type pktParser func() error |
<|file_name|>publikacja.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
################################################
## Application that supports building a database of scientific publications, working with Google Scholar
## Copyright (C) 2013 Damian Baran
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################
import wx
import os
import wx.xrc
import modules.baz.cDatabase as cDatabase
import linecache
###########################################################################
## Class PubDialog
###########################################################################
## Documentation for the class
#
# The class contains the publication management view
class PubDialog ( wx.Dialog ):
	## Constructor
def __init__( self ):
wx.Dialog.__init__ ( self, None, id = wx.ID_ANY, title = u"Zarządzanie Publikacjami", pos = wx.DefaultPosition, size = wx.Size( 450,430 ), style = wx.DEFAULT_DIALOG_STYLE )
self.session = cDatabase.connectDatabase()
self.listType = []
self.getType()
ico = wx.Icon('icon/pub.ico', wx.BITMAP_TYPE_ICO)
self.SetIcon(ico)
self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
bSizer1 = wx.BoxSizer( wx.VERTICAL )
bSizer28 = wx.BoxSizer( wx.VERTICAL )
bSizer21 = wx.BoxSizer( wx.VERTICAL )
self.m_staticText1 = wx.StaticText( self, wx.ID_ANY, u"Dodawanie Publikacji", wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_CENTRE|wx.ST_NO_AUTORESIZE )
self.m_staticText1.Wrap( -1 )
bSizer21.Add( self.m_staticText1, 0, wx.EXPAND|wx.ALL, 5 )
bSizer28.Add( bSizer21, 0, wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
bSizer1.Add( bSizer28, 0, wx.EXPAND, 5 )
bSizer26 = wx.BoxSizer( wx.HORIZONTAL )
bSizer15 = wx.BoxSizer( wx.VERTICAL )
bSizer3 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText2 = wx.StaticText( self, wx.ID_ANY, u"Tytuł:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText2.Wrap( -1 )
bSizer3.Add( self.m_staticText2, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl2 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer3.Add( self.m_textCtrl2, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer3, 0, wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
bSizer5 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText4 = wx.StaticText( self, wx.ID_ANY, u"Autorzy:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText4.Wrap( -1 )
bSizer5.Add( self.m_staticText4, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl4 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer5.Add( self.m_textCtrl4, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer5, 0, wx.EXPAND, 5 )
bSizer4 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText3 = wx.StaticText( self, wx.ID_ANY, u"Cytowania:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText3.Wrap( -1 )
bSizer4.Add( self.m_staticText3, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl3 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer4.Add( self.m_textCtrl3, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer4, 0, wx.ALIGN_CENTER_HORIZONTAL|wx.EXPAND, 5 )
bSizer6 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText5 = wx.StaticText( self, wx.ID_ANY, u"Typ:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText5.Wrap( -1 )
bSizer6.Add( self.m_staticText5, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
m_choice1Choices = self.listType
self.m_choice1 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 145,-1 ), m_choice1Choices, 0 )
self.m_choice1.SetSelection( 0 )
bSizer6.Add( self.m_choice1, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer6, 0, wx.EXPAND, 5 )
bSizer7 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText6 = wx.StaticText( self, wx.ID_ANY, u"Rok:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText6.Wrap( -1 )
bSizer7.Add( self.m_staticText6, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl5 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer7.Add( self.m_textCtrl5, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer7, 0, wx.EXPAND, 5 )
bSizer8 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText7 = wx.StaticText( self, wx.ID_ANY, u"DOI:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText7.Wrap( -1 )
bSizer8.Add( self.m_staticText7, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl6 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer8.Add( self.m_textCtrl6, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer8, 0, wx.EXPAND, 5 )
bSizer29 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText9 = wx.StaticText( self, wx.ID_ANY, u"Inny klucz:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText9.Wrap( -1 )
bSizer29.Add( self.m_staticText9, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl7 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer29.Add( self.m_textCtrl7, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer29, 0, wx.EXPAND, 5 )
bSizer9 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText8 = wx.StaticText( self, wx.ID_ANY, u"Wydawca:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText8.Wrap( -1 )
bSizer9.Add( self.m_staticText8, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
m_choice2Choices = cDatabase.getJournalName(self.session)
self.m_choice2 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 145,-1 ), m_choice2Choices, 0 )
bSizer9.Add( self.m_choice2, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer9, 0, wx.EXPAND, 5 )
bSizer17 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText10 = wx.StaticText( self, wx.ID_ANY, u"Źródło:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText10.Wrap( -1 )
bSizer17.Add( self.m_staticText10, 1, wx.ALL, 5 )
self.m_textCtrl71 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer17.Add( self.m_textCtrl71, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer17, 1, wx.EXPAND, 5 )
bSizer18 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText99 = wx.StaticText( self, wx.ID_ANY, u"LMCP:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText99.Wrap( -1 )
bSizer18.Add( self.m_staticText99, 1, wx.ALL, 5 )
self.m_textCtrl99 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
self.m_textCtrl99.SetToolTipString( u"Ilość punktów na liście ministerialnej" )
bSizer18.Add( self.m_textCtrl99, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer18, 1, wx.EXPAND, 5 )
bSizer19 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText98 = wx.StaticText( self, wx.ID_ANY, u"JCR:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText98.Wrap( -1 )
bSizer19.Add( self.m_staticText98, 1, wx.ALL, 5 )
m_choice3Choices = ['True', 'False']
self.m_choice3 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 145,-1 ), m_choice3Choices, 0 )
bSizer19.Add( self.m_choice3, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer19, 1, wx.EXPAND, 5 )
bSizer26.Add( bSizer15, 1, wx.EXPAND, 5 )
bSizer23 = wx.BoxSizer( wx.VERTICAL )
bSizer10 = wx.BoxSizer( wx.VERTICAL )
m_checkList3Choices = cDatabase.getUserName(self.session)
self.m_checkList3 = wx.CheckListBox( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 200,281 ), m_checkList3Choices, 0 )
self.m_checkList3.SetToolTipString( u"Powiąż autorów z publikacją" )
bSizer10.Add( self.m_checkList3, 0, wx.EXPAND|wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer23.Add( bSizer10, 0, wx.EXPAND, 5 )
bSizer26.Add( bSizer23, 1, wx.EXPAND, 5 )
bSizer1.Add( bSizer26, 0, wx.EXPAND, 5 )
bSizer55 = wx.BoxSizer( wx.HORIZONTAL )
self.m_textCtrl55 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( -1,50 ), wx.TE_MULTILINE )
self.m_textCtrl55.SetToolTipString( u"Notatki do publikacji" )
bSizer55.Add( self.m_textCtrl55, 1, wx.ALL|wx.EXPAND, 5 )
bSizer1.Add( bSizer55, 0, wx.EXPAND, 5 )
bSizer11 = wx.BoxSizer( wx.HORIZONTAL )
self.m_button1 = wx.Button( self, wx.ID_ANY, u"Dodaj", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer11.Add( self.m_button1, 0, wx.ALL|wx.EXPAND, 5 )
self.m_button3 = wx.Button( self, wx.ID_ANY, u"Zatwierdź", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer11.Add( self.m_button3, 0, wx.ALL, 5 )
self.m_button4 = wx.Button( self, wx.ID_ANY, u"Zamknij", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer11.Add( self.m_button4, 0, wx.ALL, 5 )
self.m_staticText11 = wx.StaticText( self, wx.ID_ANY, u"", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText11.Wrap( -1 )
bSizer11.Add( self.m_staticText11, 1, wx.ALL, 5 )
self.m_staticText12 = wx.StaticText( self, wx.ID_ANY, u"", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText12.Wrap( -1 )
bSizer11.Add( self.m_staticText12, 1, wx.ALL, 5 )
bSizer1.Add( bSizer11, 0, wx.ALIGN_RIGHT, 5 )
self.SetSizer( bSizer1 )
self.Layout()
self.Centre( wx.BOTH )
self.m_button3.Hide()
self.m_staticText11.Hide()
self.m_staticText12.Hide()
##################################################
## Bind
###################################################
self.m_button1.Bind(wx.EVT_BUTTON, self.addPubValue)
self.m_button4.Bind(wx.EVT_BUTTON, self.close)
self.m_button3.Bind(wx.EVT_BUTTON, self.editPubValue)<|fim▁hole|>###################################################
self.getType()
	## Documentation for getType
	# @param self Object pointer
	#
	# @return void
	# Reads the publication types from a file
def getType(self):
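		# Each line of type.txt holds one publication type.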
count = len(open('type.txt', 'rU').readlines())
for i in range(count):
self.listType.append(linecache.getline('type.txt',i+1))
print self.listType
	## Documentation for editPubValue
	# @param self Object pointer
	# @param event The triggering event
	#
	# @return void
	# Sends a request to edit the selected publication
def editPubValue(self, event):
		# Get the values from the edit controls
tmp = self.m_staticText1.GetLabel()
tmp = tmp.split('. ', 1)
t0 = tmp[1]
t1 = self.m_textCtrl2.GetValue()
t2 = self.m_textCtrl4.GetValue()
t3 = self.m_textCtrl3.GetValue()
t4 = self.m_choice1.GetStringSelection()
t5 = self.m_textCtrl5.GetValue()
t6 = self.m_textCtrl6.GetValue()
t7 = self.m_textCtrl7.GetValue()
t8 = self.m_choice2.GetStringSelection()
t10 = self.m_textCtrl71.GetValue()
		t11 = self.m_textCtrl99.GetValue() # ministerial list points
		t12 = self.m_choice3.GetStringSelection() # whether it is in JCR
		t13 = self.m_textCtrl55.GetValue() # note
		# Uncheck the authors that are already linked
ch = cDatabase.editItemAuthor(self.session, t0)
t9 = self.getCheckUser()
		# Get the ID values of the checked authors
tmp = cDatabase.getJournalNameID(self.session)
print t8
if t8 != u'':
t8 = tmp[t8]
else:
t8 = None
t = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13)
		# Check that the required values are not empty
if t1 != '' and t2 != '' and t3 != '' and t5 != '':
cDatabase.editPubData(self.session, t, t0)
wx.MessageBox(u'Zauktualizowano wartości!', u'Sukces', wx.OK | wx.ICON_INFORMATION)
else:
wx.MessageBox(u'Nie podana nazwy grupy \nlub nie wybrano autorów.', u'Bład', wx.OK | wx.ICON_INFORMATION)
self.Destroy()
	## Documentation for addPubValue
	# @param self Object pointer
	# @param event The triggering event
	#
	# @return void
	# Sends a request to add a new publication
def addPubValue(self, event):
		# Get the values from the edit controls
		tx1 = self.m_textCtrl2.GetValue() # title
		tx2 = self.m_textCtrl4.GetValue() # authors
		tx3 = self.m_textCtrl3.GetValue() # citations
		tx4 = self.m_choice1.GetStringSelection() # type
		tx5 = self.m_textCtrl5.GetValue() # year
		tx6 = self.m_textCtrl6.GetValue() # doi
		tx9 = self.m_textCtrl7.GetValue() # other key
		tx7 = self.m_choice2.GetStringSelection() # publisher ID
		tx8 = self.getCheckUser() # author ids
		tx10 = self.m_textCtrl71.GetValue() # source
		tx11 = self.m_staticText11.GetLabel() # publication URL
		tx12 = self.m_staticText12.GetLabel() # citations URL
		tx13 = self.m_textCtrl99.GetValue() # ministerial list points
		tx14 = self.m_choice3.GetStringSelection() # jcr
		tx15 = self.m_textCtrl55.GetValue() # note
		# Get the ID values of the checked authors
tmp = cDatabase.getJournalNameID(self.session)
if tx7 != u'':
tx7 = tmp[tx7]
else:
tx7 = None
t = (tx1, tx2, tx3, tx4, tx5, tx6, tx9, tx7, tx8, tx11, tx12, tx10, tx13, tx14, tx15)
		# Check that the required values are not empty
if tx1 != '' and tx2 != '' and tx3 != '' and tx5 != '':
cDatabase.addPubData(self.session, t)
else:
wx.MessageBox(u'Pola "Tytuł, Autor, Cytowania, Rok" sa wymagane!', u'Bład', wx.OK | wx.ICON_INFORMATION)
self.Destroy()
	## Documentation for getCheckUser
	# @param self Object pointer
	#
	# @return list List of IDs of the authors linked to the publication
	# Gets the ids of all authors linked to the publication
def getCheckUser(self):
result = []
guser = cDatabase.getUserName(self.session)
t = cDatabase.getUserNameID(self.session)
for i in range(len(guser)):
if self.m_checkList3.IsChecked(i):
id = t[guser[i]]
result.append(id)
return result
	## Documentation for close
	# @param self Object pointer
	# @param event The triggering event
	#
	# @return void
	# Closes the publication management window
def close(self, event):
"""Zamyka okienko publikacji"""
self.Destroy()
if __name__ == "__main__":
app = wx.App(False)
controller = PubDialog()
controller.Show()
app.MainLoop()<|fim▁end|> |
###################################################
	## Methods
<|file_name|>weather.routes.ts<|end_file_name|><|fim▁begin|>import { Routes, RouterModule } from '@angular/router';
import { WeatherComponent } from './weather.component';
const routes: Routes = [
{ path: '', component: WeatherComponent }
];<|fim▁hole|><|fim▁end|> |
export const routing = RouterModule.forChild(routes); |
<|file_name|>board.rs<|end_file_name|><|fim▁begin|>use std::process::Command;
use common::*;
use archive::Step;
fn clear_screen() {
let mut child = Command::new("clear").spawn().unwrap();
child.wait().unwrap();
}
pub struct Board {
points: Vec<Vec<StoneType>>,
}
impl Board {
pub fn new() -> Board {
let v = vec![vec![StoneType::None;BOARD_SIZE];BOARD_SIZE];
Board { points: v }
}
pub fn print(&self) {
clear_screen();
println!("Welcome to Gobang game!");
print!(" ");
for i in 0..BOARD_SIZE {
print!("{:>2} ", ((i+65) as u8) as char);
}
println!();
for i in 0..BOARD_SIZE {
print!("{:02} ", BOARD_SIZE - i);
for j in 0..BOARD_SIZE {
print!(" {} ", self.points[i][j].as_char());
}
println!();
}
}
pub fn move_black(&mut self, x: usize, y: usize) {
self.points[x][y] = StoneType::Black;
}
pub fn move_white(&mut self, x: usize, y: usize) {
self.points[x][y] = StoneType::White;
}
fn clear(&mut self) {
for i in 0..BOARD_SIZE {
for j in 0..BOARD_SIZE {
self.points[i][j] = StoneType::None;
}
}
}
pub fn load_archive(&mut self, steps: &[Step]) {
self.clear();
for step in steps {
match step.color {
StoneType::Black => self.move_black(step.x, step.y),
StoneType::White => self.move_white(step.x, step.y),
StoneType::None => println!("Wrong step"),
}
}
self.print();
println!("Load success!");
}
pub fn win(&self, x: usize, y: usize) -> bool {
self.win_horizontal(x, y)
|| self.win_vertical(x, y)
|| self.win_diagonal_a(x, y)
|| self.win_diagonal_b(x, y)
}
fn win_horizontal(&self, x: usize, y: usize) -> bool {
let mut serial_count: i32 = 1;
let color: StoneType = self.points[x][y];
let mut inc: usize = 0;
let mut east: bool = true;
let mut west: bool = true;
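        // Scan outward from (x, y) in both directions, counting contiguous stones of the same color.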
while east || west {
inc += 1;
if y >= inc && self.points[x][y-inc] == color {
serial_count += 1;
} else {
east = false;
}
if y + inc < BOARD_SIZE && self.points[x][y+inc] == color {
serial_count += 1;
} else {
west = false;
}
}
serial_count == WIN_SERIAL_COUNT
}
fn win_vertical(&self, x: usize, y: usize) -> bool {
let mut serial_count: i32 = 1;
let color: StoneType = self.points[x][y];
let mut inc: usize = 0;
let mut north: bool = true;
let mut south: bool = true;
while north || south {
inc += 1;
if x >= inc && self.points[x-inc][y] == color {
serial_count += 1;
} else {
north = false;
}
<|fim▁hole|> south = false;
}
}
serial_count == WIN_SERIAL_COUNT
}
fn win_diagonal_a(&self, x: usize, y: usize) -> bool {
let mut serial_count: i32 = 1;
let color: StoneType = self.points[x][y];
let mut inc: usize = 0;
let mut northeast: bool = true;
let mut southwest: bool = true;
while northeast || southwest {
inc += 1;
if x >= inc && y + inc < BOARD_SIZE &&
self.points[x-inc][y+inc] == color {
serial_count += 1;
} else {
northeast = false;
}
if y >= inc && x + inc < BOARD_SIZE &&
self.points[x+inc][y-inc] == color {
serial_count += 1;
} else {
southwest = false;
}
}
serial_count == WIN_SERIAL_COUNT
}
fn win_diagonal_b(&self, x: usize, y: usize) -> bool {
let mut serial_count: i32 = 1;
let color: StoneType = self.points[x][y];
let mut inc: usize = 0;
let mut northwest: bool = true;
let mut southeast: bool = true;
while northwest || southeast {
inc += 1;
if x >= inc && y >= inc && self.points[x-inc][y-inc] == color {
serial_count += 1;
} else {
northwest = false;
}
if x + inc < BOARD_SIZE && y + inc < BOARD_SIZE &&
self.points[x+inc][y+inc] == color {
serial_count += 1;
} else {
southeast = false;
}
}
serial_count == WIN_SERIAL_COUNT
}
pub fn has_stone(&self, x: usize, y: usize) -> bool {
self.points[x][y] != StoneType::None
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_win_horizontal() {
let mut board = Board::new();
board.move_black(0, 0);
board.move_black(0, 1);
board.move_black(0, 2);
board.move_black(0, 3);
board.move_black(0, 4);
assert!(board.win_horizontal(0, 4));
}
#[test]
fn test_win_vertical() {
let mut board = Board::new();
board.move_black(0, 0);
board.move_black(1, 0);
board.move_black(2, 0);
board.move_black(3, 0);
board.move_black(4, 0);
assert!(board.win_vertical(4, 0));
}
#[test]
fn test_win_digonal_a() {
let mut board = Board::new();
board.move_black(0, 4);
board.move_black(1, 3);
board.move_black(2, 2);
board.move_black(3, 1);
board.move_black(4, 0);
assert!(board.win_diagonal_a(4, 0));
}
#[test]
fn test_win_digonal_b() {
let mut board = Board::new();
board.move_black(0, 0);
board.move_black(1, 1);
board.move_black(2, 2);
board.move_black(3, 3);
board.move_black(4, 4);
assert!(board.win_diagonal_b(4, 4));
}
#[test]
fn test_move() {
let mut board = Board::new();
board.move_black(0, 0);
assert!(!board.has_stone(1, 1));
assert!(board.has_stone(0, 0));
board.move_white(1, 1);
assert!(board.has_stone(1, 1));
assert!(board.has_stone(0, 0));
}
}<|fim▁end|> | if x + inc < BOARD_SIZE && self.points[x+inc][y] == color {
serial_count += 1;
} else { |
<|file_name|>DeviceStore.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012-2013 eBay Software Foundation and ios-driver committers
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.uiautomation.ios;
import com.google.common.collect.ImmutableList;
import org.libimobiledevice.ios.driver.binding.exceptions.SDKException;
import org.libimobiledevice.ios.driver.binding.model.ApplicationInfo;
import org.libimobiledevice.ios.driver.binding.model.DeviceInfo;
import org.libimobiledevice.ios.driver.binding.services.DeviceCallBack;
import org.libimobiledevice.ios.driver.binding.services.DeviceService;
import org.libimobiledevice.ios.driver.binding.services.IOSDevice;
import org.libimobiledevice.ios.driver.binding.services.ImageMountingService;
import org.libimobiledevice.ios.driver.binding.services.InformationService;
import org.libimobiledevice.ios.driver.binding.services.InstallerService;
import org.openqa.selenium.WebDriverException;
import org.uiautomation.ios.application.IPAShellApplication;
import org.uiautomation.ios.utils.DDILocator;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Logger;
public class DeviceStore extends DeviceCallBack {
private static final Logger log = Logger.getLogger(DeviceStore.class.getName());
private final List<RealDevice> reals = new CopyOnWriteArrayList<RealDevice>();
private final List<SimulatorDevice> sims = new CopyOnWriteArrayList<SimulatorDevice>();
private final ApplicationStore apps;<|fim▁hole|>
public DeviceStore(ApplicationStore apps, Set<String> uuidWhitelist) {
super();
this.apps = apps;
this.uuidWhitelist = uuidWhitelist;
}
/**
* @return immutable copy of the currently available devices.
*/
public List<Device> getDevices() {
List<Device> all = new ArrayList<Device>();
all.addAll(reals);
all.addAll(sims);
return ImmutableList.copyOf(all);
}
public List<RealDevice> getRealDevices() {
return reals;
}
public List<SimulatorDevice> getSimulatorDevices() {
return sims;
}
public void add(SimulatorDevice simulatorDevice) {
sims.add(simulatorDevice);
}
@Override
protected void onDeviceAdded(String uuid) {
if (!uuidWhitelist.isEmpty() && !uuidWhitelist.contains(uuid)) {
log.info("device detected but not whitelisted");
return;
}
RealDevice d = null;
try {
IOSDevice device = DeviceService.get(uuid);
DeviceInfo info = new DeviceInfo(uuid);
d = new RealDevice(info);
log.info("new device detected (" + uuid + ") " + info.getDeviceName());
reals.add(d);
InstallerService s = new InstallerService(device);
String id = "com.apple.mobilesafari";
ApplicationInfo safari = s.getApplication(id);
String v = (String) safari.getProperty("CFBundleVersion");
log.info("device " + info.getDeviceName() + " = safari " + v);
IPAShellApplication ipa = new IPAShellApplication(id, v, safari);
apps.add(ipa);
InformationService i = new InformationService(device);
if (!i.isDevModeEnabled()) {
log.warning(
"The device " + uuid + " is not set to dev mode. It can't be used for testing.");
File ddi = DDILocator.locateDDI(device);
mount(device, ddi);
log.info("DDI mounted.Device now in dev mode.");
}
} catch (SDKException | WebDriverException e) {
if (d != null) {
reals.remove(d);
}
}
}
private void mount(IOSDevice device, File ddi) throws SDKException {
ImageMountingService service = null;
try {
service = new ImageMountingService(device);
service.mount(ddi);
} finally {
if (service != null) {
service.free();
}
}
}
@Override
protected void onDeviceRemoved(String uuid) {
if (!uuidWhitelist.isEmpty() && !uuidWhitelist.contains(uuid)) {
log.info("device removed but not whitelisted");
return;
}
for (RealDevice d : reals) {
if (d.getUuid().equals(uuid)) {
log.info("Removing " + uuid + " for the devices pool");
boolean ok = reals.remove(d);
if (!ok) {
log.warning("device " + uuid + " has been unplugged, but was never there ?");
}
}
}
}
}<|fim▁end|> | private final Set<String> uuidWhitelist; |
<|file_name|>snapshots_client.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import time
import urllib
from tempest.common import rest_client
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
CONF = config.CONF
LOG = logging.getLogger(__name__)
class SnapshotsClientJSON(rest_client.RestClient):
"""Client class to send CRUD Volume API requests."""
def __init__(self, auth_provider):
super(SnapshotsClientJSON, self).__init__(auth_provider)
self.service = CONF.volume.catalog_type
self.build_interval = CONF.volume.build_interval
self.build_timeout = CONF.volume.build_timeout
def list_snapshots(self, params=None):
"""List all the snapshot."""
url = 'snapshots'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
return resp, body['snapshots']
def list_snapshots_with_detail(self, params=None):
"""List the details of all snapshots."""
url = 'snapshots/detail'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
return resp, body['snapshots']
def get_snapshot(self, snapshot_id):
"""Returns the details of a single snapshot."""
url = "snapshots/%s" % str(snapshot_id)
resp, body = self.get(url)
body = json.loads(body)
return resp, body['snapshot']
def create_snapshot(self, volume_id, **kwargs):
"""
Creates a new snapshot.
volume_id(Required): id of the volume.
force: Create a snapshot even if the volume attached (Default=False)
display_name: Optional snapshot Name.
display_description: User friendly snapshot description.
"""
post_body = {'volume_id': volume_id}
post_body.update(kwargs)
post_body = json.dumps({'snapshot': post_body})
resp, body = self.post('snapshots', post_body)
body = json.loads(body)
return resp, body['snapshot']
def update_snapshot(self, snapshot_id, **kwargs):
"""Updates a snapshot."""
put_body = json.dumps({'snapshot': kwargs})
resp, body = self.put('snapshots/%s' % snapshot_id, put_body)
body = json.loads(body)
return resp, body['snapshot']
# NOTE(afazekas): just for the wait function
def _get_snapshot_status(self, snapshot_id):
resp, body = self.get_snapshot(snapshot_id)
status = body['status']
# NOTE(afazekas): snapshot can reach an "error"
# state in a "normal" lifecycle
if (status == 'error'):
raise exceptions.SnapshotBuildErrorException(
snapshot_id=snapshot_id)
return status
    # NOTE(afazekas): Wait reinvented again. It is not in the correct layer
def wait_for_snapshot_status(self, snapshot_id, status):
"""Waits for a Snapshot to reach a given status."""
start_time = time.time()
old_value = value = self._get_snapshot_status(snapshot_id)
while True:
dtime = time.time() - start_time
time.sleep(self.build_interval)
if value != old_value:
LOG.info('Value transition from "%s" to "%s"'
'in %d second(s).', old_value,
value, dtime)
if (value == status):
return value
if dtime > self.build_timeout:
                message = ('Time Limit Exceeded! (%ds) '
                           'while waiting for %s, '
                           'but we got %s.' %
                           (self.build_timeout, status, value))
raise exceptions.TimeoutException(message)
old_value = value
value = self._get_snapshot_status(snapshot_id)
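    # Hedged usage sketch: the loop above polls every build_interval seconds
    # and raises TimeoutException after build_timeout, e.g.
    #   client.wait_for_snapshot_status(snap['id'], 'available')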
def delete_snapshot(self, snapshot_id):
"""Delete Snapshot."""
return self.delete("snapshots/%s" % str(snapshot_id))
def is_resource_deleted(self, id):
try:
self.get_snapshot(id)
except exceptions.NotFound:
return True
return False
def reset_snapshot_status(self, snapshot_id, status):
"""Reset the specified snapshot's status."""
post_body = json.dumps({'os-reset_status': {"status": status}})
resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
return resp, body
def update_snapshot_status(self, snapshot_id, status, progress):
"""Update the specified snapshot's status."""
post_body = {
'status': status,
'progress': progress
}
post_body = json.dumps({'os-update_snapshot_status': post_body})
url = 'snapshots/%s/action' % str(snapshot_id)
resp, body = self.post(url, post_body)
return resp, body
def create_snapshot_metadata(self, snapshot_id, metadata):
"""Create metadata for the snapshot."""
put_body = json.dumps({'metadata': metadata})
url = "snapshots/%s/metadata" % str(snapshot_id)
resp, body = self.post(url, put_body)
body = json.loads(body)
return resp, body['metadata']
def get_snapshot_metadata(self, snapshot_id):
"""Get metadata of the snapshot."""
url = "snapshots/%s/metadata" % str(snapshot_id)
resp, body = self.get(url)
body = json.loads(body)
return resp, body['metadata']
def update_snapshot_metadata(self, snapshot_id, metadata):
"""Update metadata for the snapshot."""
put_body = json.dumps({'metadata': metadata})
url = "snapshots/%s/metadata" % str(snapshot_id)
resp, body = self.put(url, put_body)
body = json.loads(body)
return resp, body['metadata']
def update_snapshot_metadata_item(self, snapshot_id, id, meta_item):
"""Update metadata item for the snapshot."""
put_body = json.dumps({'meta': meta_item})
url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
resp, body = self.put(url, put_body)
body = json.loads(body)
return resp, body['meta']
def delete_snapshot_metadata_item(self, snapshot_id, id):
"""Delete metadata item for the snapshot."""
url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
resp, body = self.delete(url)
return resp, body
def force_delete_snapshot(self, snapshot_id):
"""Force Delete Snapshot."""
post_body = json.dumps({'os-force_delete': {}})
resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
return resp, body<|fim▁end|> | |
<|file_name|>prog13.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# program that asks for a number and prints its 10 successive triples<|fim▁hole|>nombre = int(chaine)
triple = nombre
compteur = 1
while compteur <= 10:
    triple = triple * 3
    print(triple)
compteur=compteur+1<|fim▁end|> | chaine = input("donne un nombre : ") |
<|file_name|>WeaponType.java<|end_file_name|><|fim▁begin|>package jpelc.learning.designpatterns.factorymethod;
public enum WeaponType {
SHORT_SWORD("short sword"), SPEAR("spear"), AXE("axe"), UNDEFINED("");
    private final String title;
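    // Hedged usage sketch: the title doubles as the display name, e.g.
    //   WeaponType.SHORT_SWORD.toString() returns "short sword"
    //   WeaponType.UNDEFINED.toString() returns ""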
WeaponType(String title) {
this.title = title;
}<|fim▁hole|> @Override
public String toString() {
return title;
}
}<|fim▁end|> | |
<|file_name|>save-case.component.ts<|end_file_name|><|fim▁begin|>import { ChangeDetectionStrategy, Component, HostBinding, Input, OnDestroy, OnInit } from '@angular/core';
import { animate, style, transition, trigger } from '@angular/animations';
import { select, Store } from '@ngrx/store';
import { ICasesState, selectCaseById, selectSelectedCase } from '../../reducers/cases.reducer';
import { CloseModalAction, RenameCaseAction, SaveCaseAsAction } from '../../actions/cases.actions';
import { take, tap } from 'rxjs/operators';
import { cloneDeep } from '../../../../core/utils/rxjs/operators/cloneDeep';
import { ICase } from '../../models/case.model';
import { AutoSubscription, AutoSubscriptions } from 'auto-subscriptions';
import * as moment from 'moment';
const animationsDuring = '0.2s';
<|fim▁hole|> trigger('modalContent', [
transition(':enter', [style({
'backgroundColor': '#27b2cf',
transform: 'translate(0, -100%)'
}), animate(animationsDuring, style({ 'backgroundColor': 'white', transform: 'translate(0, 0)' }))]),
transition(':leave', [style({
'backgroundColor': 'white',
transform: 'translate(0, 0)'
}), animate(animationsDuring, style({ 'backgroundColor': '#27b2cf', transform: 'translate(0, -100%)' }))])
])
];
@Component({
selector: 'ansyn-save-case',
templateUrl: './save-case.component.html',
styleUrls: ['./save-case.component.less'],
changeDetection: ChangeDetectionStrategy.OnPush,
animations
})
@AutoSubscriptions()
export class SaveCaseComponent implements OnInit, OnDestroy {
@Input() caseId: string;
caseName: string;
@HostBinding('@modalContent')
get modalContent() {
return true;
};
constructor(
protected store: Store<ICasesState>
) {
}
@AutoSubscription
caseName$ = () => this.store.pipe(
select(selectCaseById(this.caseId)),
tap( (_case) => {
this.caseName = _case ? _case.name : moment(new Date()).format('DD/MM/YYYY HH:mm:ss').toLocaleString();
})
);
private cloneDeepOneTime(selector) {
return this.store.pipe(
select(selector),
take(1),
cloneDeep()
)
}
ngOnDestroy(): void {
}
ngOnInit(): void {}
close(): void {
this.store.dispatch(new CloseModalAction());
}
saveNewCase() {
return this.cloneDeepOneTime(selectSelectedCase).pipe(
tap((selectedCase: ICase) => {
this.store.dispatch(new SaveCaseAsAction({ ...selectedCase, name: this.caseName }));
})
)
}
renameCase() {
return this.cloneDeepOneTime(selectCaseById(this.caseId)).pipe(
tap( (_case: ICase) => {
const oldName = _case.name;
this.store.dispatch(new RenameCaseAction({case: _case, oldName: oldName, newName: this.caseName }));
})
)
}
onSubmitCase() {
const obs = this.caseId ? this.renameCase() : this.saveNewCase();
obs.pipe(
tap(this.close.bind(this))
).subscribe()
}
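    // Hedged usage sketch (template wiring assumed, not part of this file):
    //   <ansyn-save-case [caseId]="selectedCaseId"></ansyn-save-case>
    // An empty caseId drives the save-as flow; a set caseId drives rename.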
}<|fim▁end|> | const animations: any[] = [ |
<|file_name|>email_template_preview.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
class email_template_preview(osv.osv_memory):
_inherit = "email.template"
_name = "email_template.preview"
_description = "Email Template Preview"
def _get_records(self, cr, uid, context=None):
"""
        Return the records of the email template's model.
"""
if context is None:
context = {}
template_id = context.get('template_id', False)
if not template_id:
return []
email_template = self.pool.get('email.template')
template = email_template.browse(cr, uid, int(template_id), context=context)
template_object = template.model_id
model = self.pool[template_object.model]
record_ids = model.search(cr, uid, [], 0, 10, 'id', context=context)
default_id = context.get('default_res_id')
if default_id and default_id not in record_ids:
record_ids.insert(0, default_id)
return model.name_get(cr, uid, record_ids, context)
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
result = super(email_template_preview, self).default_get(cr, uid, fields, context=context)
email_template = self.pool.get('email.template')<|fim▁hole|> template_id = context.get('template_id')
if 'res_id' in fields and not result.get('res_id'):
records = self._get_records(cr, uid, context=context)
result['res_id'] = records and records[0][0] or False # select first record as a Default
if template_id and 'model_id' in fields and not result.get('model_id'):
result['model_id'] = email_template.read(cr, uid, int(template_id), ['model_id'], context).get('model_id', False)
return result
_columns = {
'res_id': fields.selection(_get_records, 'Sample Document'),
'partner_ids': fields.many2many('res.partner', string='Recipients'),
}
def on_change_res_id(self, cr, uid, ids, res_id, context=None):
if context is None:
context = {'value': {}}
if not res_id or not context.get('template_id'):
return {'value': {}}
email_template = self.pool.get('email.template')
template_id = context.get('template_id')
template = email_template.browse(cr, uid, template_id, context=context)
# generate and get template values
mail_values = email_template.generate_email(cr, uid, template_id, res_id, context=context)
vals = dict((field, mail_values.get(field, False)) for field in ('email_from', 'email_to', 'email_cc', 'reply_to', 'subject', 'body_html', 'partner_to', 'partner_ids', 'attachment_ids'))
vals['name'] = template.name
return {'value': vals}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | |
<|file_name|>test_trajectories.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from nose.tools import assert_raises
from nose.tools import assert_dict_equal
from numpy.testing import assert_array_equal
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_almost_equal
import numpy as np
import tempfile
import pandas as pd
from sktracker import data
from sktracker.trajectories import Trajectories
def test_attributes():
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
assert_array_equal(trajs.t_stamps, np.array([0, 1, 2, 3, 4]))
assert_array_equal(trajs.labels, np.array([0, 1, 2, 3, 4]))
segments = {0: [(0, 0), (1, 0), (2, 0), (3, 0), (4, 0)],
1: [(0, 1), (1, 1), (2, 1), (3, 1), (4, 1)],
2: [(0, 2), (1, 2), (2, 2), (3, 2), (4, 2)],
3: [(0, 3), (1, 3), (2, 3), (3, 3), (4, 3)],
4: [(0, 4), (1, 4), (2, 4), (3, 4), (4, 4)]}
assert_dict_equal(trajs.segment_idxs, segments)
traj = np.array([[ -9.25386045, 11.34555088, 22.11820326, 3. , 0. ],
[ 11.05321776, 3.23738477, 2.62790435, 2. , 1. ],
[ 16.6824928 , 14.602054 , -12.1218683 , 4. , 2. ],
[ 17.22410516, 14.8068125 , -11.87642753, 4. , 3. ],
[ 2.80222495, -13.13783042, 8.56406878, 0. , 4. ]])
t_stamp, traj_to_test = list(trajs.iter_segments)[0]
assert_array_almost_equal(traj, traj_to_test)
assert list(trajs.get_segments().keys()) == [0, 1, 2, 3, 4]
def test_structure():
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
assert_raises(ValueError, trajs.check_trajs_df_structure, ['t_idx'])
assert_raises(ValueError, trajs.check_trajs_df_structure, ['t_stamp', 'label'], ['dx'])
trajs.check_trajs_df_structure(['t_stamp', 'label'], ['x', 'y', 'z'])
def test_copy():
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
assert isinstance(trajs.copy(), Trajectories)
def test_empty():
empty = Trajectories.empty_trajs(columns=['x', 'y'])
assert empty.shape == (0, 2)
assert empty.empty is True
def test_reverse():
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
assert trajs.reverse().shape == (25, 5)
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
trajs.reverse(inplace=True)
assert trajs.shape == (25, 5)
def test_write_hdf():
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
tmp_store = tempfile.NamedTemporaryFile(suffix='h5')
with pd.get_store(tmp_store.name) as store:
store['trajs'] = trajs
def test_interpolate():
trajs = Trajectories(data.with_gaps_df())
trajs.set_index('true_label', inplace=True, append=True)
trajs.reset_index(level='label', drop=True, inplace=True)
trajs.index.set_names(['t_stamp', 'label'], inplace=True)
interpolated = trajs.time_interpolate(sampling=3, time_step=0.1, s=1)
# t_stamps_in = interpolated.index.get_level_values('t_stamp')
# indexer = t_stamps_in % 2 == 0
# interpolated.loc[indexer].shape, trajs.shape
# indexer = interpolated.t_stamps % 3 == 0
# assert interpolated.loc[indexer].shape[0] == trajs.shape[0]
dts = interpolated.get_segments()[0].t.diff().dropna()
# All time points should be equaly spaced
assert_almost_equal(dts.min(), dts.max())
def test_get_diff():
trajs = Trajectories(data.brownian_trajs_df())
diffs = trajs.get_diff()
x_diffs = diffs.to_dict()['x']
real_x_diffs = {(1, 2): 3.8452299074207819,
(3, 2): 4.7476193900872765,
(0, 0): np.nan,
(3, 0): 0.54161236467700746,
(0, 4): np.nan,
(1, 4): -5.6929349491048624,
(1, 3): -30.136494087633611,
(2, 3): 23.240228721514185,
(2, 1): -23.9264368052234,
(2, 4): 0.63465512968445115,
(4, 2): -4.5501817884252063,
(1, 0): 20.307078207040306,
(0, 3): np.nan,
(4, 0): -14.421880216023439,
(0, 1): np.nan,
(3, 3): -6.5845079821965991,
(4, 1): -19.329775838349192,
(3, 1): 18.084232469105203,
(4, 4): 24.644945052453025,
(0, 2): np.nan,
(2, 0): 5.6292750381105723,
(4, 3): 13.209596167161628,
(2, 2): -3.7469188310869228,
(3, 4): -17.381636024737336,
(1, 1): 13.827909766138866}
assert_almost_equal(x_diffs, real_x_diffs)
def test_get_speeds():
trajs = Trajectories(data.brownian_trajs_df())
speeds = trajs.get_speeds().tolist()
real_speeds = [np.nan,
np.nan,
np.nan,
np.nan,
np.nan,
857.99153458573994,
1596.9530747771976,
873.15267834726137,
1282.3088174598233,
408.98588960526808,
378.40023709328955,
1809.9895146014187,
917.93227668556324,
592.31881736181106,
0.48325048326444919,
0.39551116881922965,
798.29858694043128,
1085.3214310682606,
405.49164945495221,
550.37555144616226,
1406.707586739079,
1031.9444945962532,
1077.6619763794718,
1445.7789239945778,
739.66839622816326]
assert_almost_equal(speeds, real_speeds)
def test_scale():
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
scaled = trajs.scale(factors=[2., 2., 2.],
coords=['x', 'y', 'z'], inplace=False)
assert_array_almost_equal(scaled[['x', 'y', 'z']] / 2., trajs[['x', 'y', 'z']])
trajs = trajs.scale(factors=[2., 2., 2.],
coords=['x', 'y', 'z'], inplace=True)
assert_array_almost_equal(scaled[['x', 'y', 'z']], trajs[['x', 'y', 'z']])
assert_raises(ValueError, trajs.scale, factors=[2., 2., 2.], coords=['x', 'y'], inplace=False)
def test_project():
trajs = Trajectories(data.directed_motion_trajs_df())
trajs.rename(columns={'true_label': 'new_label'}, inplace=True)
trajs.relabel()
trajs.project([0, 1],
coords=['x', 'y'],
keep_first_time=False,
reference=None,
inplace=True,
progress=False)
excepted = np.array([[ 0.27027431, 0. ],
[-0.27027431, 0. ],
[-0.25306519, 0.69683713],
[ 0.04633664, 0.31722648]])
assert_array_almost_equal(excepted, trajs.loc[:,['x_proj', 'y_proj']].values[:4])
trajs = trajs.project([0, 1],
coords=['x', 'y'],
keep_first_time=False,
reference=None,
inplace=False,
progress=False)
assert_array_almost_equal(excepted, trajs.loc[:,['x_proj', 'y_proj']].values[:4])
assert_raises(ValueError, trajs.project, [0, 1], coords=['x', 'y', 'z', 't'])
def test_get_colors():
"""
"""
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
colors = trajs.get_colors()
assert colors == {0: '#FF0000', 1: '#ADFF00', 2: '#00FFA9', 3: '#0408FF', 4: '#FF00AC'}
colors = trajs.get_colors(alpha=0.5)
assert colors == {0: '#FF000080',
1: '#ADFF0080',
2: '#00FFA980',
3: '#0408FF80',
4: '#FF00AC80'}
colors = trajs.get_colors(rgba=True)
good_colors = {0: (1.0, 0.0, 0.0, 1.0),
1: (0.67977809154279767, 1.0, 0.0, 1.0),
2: (0.0, 1.0, 0.66360181783683614, 1.0),
3: (0.015440535661123769, 0.031618928677752463, 1.0, 1.0),
4: (1.0, 0.0, 0.67279469669175529, 1.0)}
assert colors == good_colors
def test_get_longest_segments():
"""
"""
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
assert trajs.get_longest_segments(1) == [4]
def test_get_shortest_segments():
"""
"""
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
assert trajs.get_shortest_segments(1) == [0]
def test_remove_segments():
"""
"""
trajs = data.brownian_trajs_df()
trajs = Trajectories(trajs)
trajs.remove_segments(1, inplace=True)
assert np.all(trajs.labels == [0, 2, 3, 4])
def test_merge():
"""
"""
trajs1 = Trajectories(data.brownian_trajs_df())
trajs2 = Trajectories(data.brownian_trajs_df())
new = trajs1.merge(trajs2)
assert len(trajs1.labels) + len(trajs2.labels) == len(new.labels)
def test_relabel():
"""
"""
trajs = Trajectories(data.brownian_trajs_df())
trajs.columns = ['x', 'y', 'z', 'new_label', 't']
trajs.relabel(inplace=True)
new_values = [[1.933058243735795, -14.581064591435775, 11.603556633147544, 0.0],
[-12.862215173899491, -2.8611502446443238, -2.2738941196781424, 0.0],
[9.100887851132633, 2.837252570763561, 2.875753940450461, 0.0],<|fim▁hole|> assert trajs.iloc[:4].values.tolist() == new_values
trajs = Trajectories(data.brownian_trajs_df())
trajs.columns = ['x', 'y', 'z', 'new_label', 't']
trajs = trajs.relabel(inplace=False)
new_values = [[1.933058243735795, -14.581064591435775, 11.603556633147544, 0.0],
[-12.862215173899491, -2.8611502446443238, -2.2738941196781424, 0.0],
[9.100887851132633, 2.837252570763561, 2.875753940450461, 0.0],
[-9.253860446235523, 11.345550876585719, 22.118203258275745, 0.0]]
assert trajs.iloc[:4].values.tolist() == new_values
def test_relabel_fromzero():
"""
"""
trajs = Trajectories(data.brownian_trajs_df())
original_labels = trajs.labels
idx = pd.IndexSlice
trajs.loc[idx[:, 1], :] = 55
relabeled = trajs.relabel_fromzero('label', inplace=False)
assert np.all(relabeled.labels == original_labels)
trajs.loc[idx[:, 1], :] = 55
relabeled = trajs.relabel_fromzero('label', inplace=False)
assert np.all(relabeled.labels == original_labels)
def test_remove_spots():
"""
"""
trajs = Trajectories(data.brownian_trajs_df())
new_trajs = trajs.remove_spots([(3, 2), (0, 0)], inplace=False)
new_indexes = [(0, 1), (0, 2), (0, 3), (0, 4), (1, 0), (1, 1), (1, 2),
(1, 3), (1, 4), (2, 0), (2, 1), (2, 2), (2, 3), (2, 4),
(3, 0), (3, 1), (3, 3), (3, 4), (4, 0), (4, 1), (4, 2),
(4, 3), (4, 4)]
assert new_trajs.index.tolist() == new_indexes
new_trajs = trajs.remove_spots((0, 0), inplace=False)
new_indexes = [(0, 1), (0, 2), (0, 3), (0, 4), (1, 0), (1, 1), (1, 2),
(1, 3), (1, 4), (2, 0), (2, 1), (2, 2), (2, 3), (2, 4),
(3, 0), (3, 1), (3, 2), (3, 3), (3, 4), (4, 0), (4, 1),
(4, 2), (4, 3), (4, 4)]
assert new_trajs.index.tolist() == new_indexes
def test_merge_segments():
"""
"""
trajs = Trajectories(data.brownian_trajs_df())
trajs.reset_index(inplace=True)
trajs.loc[15, ['label']] = 88
trajs.loc[20, ['label']] = 88
trajs.set_index(['t_stamp', 'label'], inplace=True)
new_trajs = trajs.merge_segments([0, 88], inplace=False)
assert_array_almost_equal(trajs.values, new_trajs.values)
trajs = Trajectories(data.brownian_trajs_df())
good_trajs = trajs.copy()
trajs.reset_index(inplace=True)
trajs.loc[15, ['label']] = 88
trajs.loc[20, ['label']] = 88
trajs.set_index(['t_stamp', 'label'], inplace=True)
trajs.merge_segments([0, 88], inplace=True)
assert_array_almost_equal(trajs.values, good_trajs.values)
def test_cut_segments():
"""
"""
trajs = Trajectories(data.brownian_trajs_df())
trajs.cut_segments((2, 3), inplace=True)
new_indexes = [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (1, 0), (1, 1),
(1, 2), (1, 3), (1, 4), (2, 0), (2, 1), (2, 2), (2, 3),
(2, 4), (3, 0), (3, 1), (3, 2), (3, 4), (3, 5), (4, 0),
(4, 1), (4, 2), (4, 4), (4, 5)]
assert trajs.index.tolist() == new_indexes
trajs = Trajectories(data.brownian_trajs_df())
trajs = trajs.cut_segments((2, 3), inplace=False)
new_indexes = [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (1, 0), (1, 1),
(1, 2), (1, 3), (1, 4), (2, 0), (2, 1), (2, 2), (2, 3),
(2, 4), (3, 0), (3, 1), (3, 2), (3, 4), (3, 5), (4, 0),
(4, 1), (4, 2), (4, 4), (4, 5)]
assert trajs.index.tolist() == new_indexes
def test_duplicate_segments():
"""
"""
trajs = Trajectories(data.brownian_trajs_df())
trajs = trajs.duplicate_segments(2)
new_indexes = [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (1, 0),
(1, 1), (1, 2), (1, 3), (1, 4), (1, 5), (2, 0), (2, 1),
(2, 2), (2, 3), (2, 4), (2, 5), (3, 0), (3, 1), (3, 2),
(3, 3), (3, 4), (3, 5), (4, 0), (4, 1), (4, 2), (4, 3),
(4, 4), (4, 5)]
assert trajs.index.tolist() == new_indexes
def test_get_bounds():
"""
"""
trajs = Trajectories(data.brownian_trajs_df())
trajs['t'] *= 10
t_stamp_bounds = {0: (0, 4), 1: (0, 4), 2: (0, 4), 3: (0, 4), 4: (0, 4)}
t_bounds = {0: (0.0, 40.0), 1: (0.0, 40.0), 2: (0.0, 40.0), 3: (0.0, 40.0), 4: (0.0, 40.0)}
assert trajs.get_bounds() == t_stamp_bounds
assert trajs.get_bounds(column='t') == t_bounds
def test_get_t_stamps_correspondences():
"""
"""
trajs = Trajectories(data.brownian_trajs_df())
trajs['t'] *= 33
data_values = [132, 33, 99, 66, 33, 33, 99., 99, 132]
t_stamps = trajs.get_t_stamps_correspondences(data_values, column='t')
assert_array_equal(t_stamps, [4, 1, 3, 2, 1, 1, 3, 3, 4])<|fim▁end|> | [-9.253860446235523, 11.345550876585719, 22.118203258275745, 0.0]]
|
<|file_name|>MonteCarlo_ParticleState.cpp<|end_file_name|><|fim▁begin|>//---------------------------------------------------------------------------//
//!
//! \file MonteCarlo_ParticleState.cpp
//! \author Alex Robinson
//! \brief Basic particle state class definition.
//!
//---------------------------------------------------------------------------//
// FRENSIE Includes
#include "MonteCarlo_ParticleState.hpp"
#include "Utility_PhysicalConstants.hpp"
#include "Utility_DirectionHelpers.hpp"
namespace MonteCarlo{
// Default constructor
/*! \details The default constructor should only be called before loading the
* particle state from an archive.
*/
ParticleState::ParticleState()
: d_history_number( 0 ),
d_particle_type(),
d_position(),
d_direction{0.0,0.0,1.0},
d_energy( 0.0 ),
d_time( 0.0 ),
d_collision_number( 0 ),
d_generation_number( 0 ),
d_weight( 1.0 ),
d_cell( Geometry::ModuleTraits::invalid_internal_cell_handle ),
d_lost( false ),
d_gone( false ),
d_ray( d_position, d_direction, false )
{ /* ... */ }
// Constructor
ParticleState::ParticleState(
const ParticleState::historyNumberType history_number,
const ParticleType type )
: d_history_number( history_number ),
d_particle_type( type ),
d_position(),
d_direction(),
d_energy( 0.0 ),
d_time( 0.0 ),
d_collision_number( 0 ),
d_generation_number( 0 ),
d_weight( 1.0 ),
d_cell( Geometry::ModuleTraits::invalid_internal_cell_handle ),
d_lost( false ),
d_gone( false ),
d_ray( d_position, d_direction, false )
{ /* ... */ }
// Copy constructor
/*! \details When copied, the new particle is assumed to not be lost and
* not be gone.
*/
ParticleState::ParticleState( const ParticleState& existing_base_state,
const ParticleType new_type,
const bool increment_generation_number,
const bool reset_collision_number )
: d_history_number( existing_base_state.d_history_number ),
d_particle_type( new_type ),
d_position{existing_base_state.d_position[0],
existing_base_state.d_position[1],
existing_base_state.d_position[2]},
d_direction{existing_base_state.d_direction[0],
existing_base_state.d_direction[1],
existing_base_state.d_direction[2]},
d_energy( existing_base_state.d_energy ),
d_time( existing_base_state.d_time ),
d_collision_number( existing_base_state.d_collision_number ),
d_generation_number( existing_base_state.d_generation_number ),
d_weight( existing_base_state.d_weight ),
d_cell( existing_base_state.d_cell ),
d_lost( false ),
d_gone( false ),
d_ray( d_position, d_direction, false )
{
// Increment the generation number if requested
if( increment_generation_number )
++d_generation_number;
// Reset the collision number if requested
if( reset_collision_number )
d_collision_number = 0u;
}
// Clone the particle state but change the history number
/*! \details This method returns a heap-allocated pointer. It is only safe
* to call this method inside of a smart pointer constructor or reset method.
* The clone will only need a new history number in very rare circumstances
* (e.g. state source).
*/
ParticleState* ParticleState::clone(
const ParticleState::historyNumberType new_history_number ) const
{
ParticleState* clone_state = this->clone();
clone_state->d_history_number = new_history_number;
return clone_state;
}
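// Hedged usage sketch, per the note above: keep the returned raw pointer
// inside a smart pointer constructor, e.g.
//   std::shared_ptr<MonteCarlo::ParticleState> state(
//                                    existing_state.clone( new_history ) );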
// Return the history number
ParticleState::historyNumberType ParticleState::getHistoryNumber() const
{
return d_history_number;
}
// Return the particle type
ParticleType ParticleState::getParticleType() const
{
return d_particle_type;
}
// Return the cell handle for the cell containing the particle
Geometry::ModuleTraits::InternalCellHandle ParticleState::getCell() const
{
return d_cell;
}
// Set the cell containing the particle
void ParticleState::setCell(
const Geometry::ModuleTraits::InternalCellHandle cell )
{
// Make sure the cell handle is valid
testPrecondition( cell !=
Geometry::ModuleTraits::invalid_internal_cell_handle);
d_cell = cell;
}
// Return the x position of the particle
double ParticleState::getXPosition() const
{
return d_position[0];
}
// Return the y position of the particle
double ParticleState::getYPosition() const
{
return d_position[1];
}
// Return the z position of the particle
double ParticleState::getZPosition() const
{
return d_position[2];
}
// Return the position of the particle
const double* ParticleState::getPosition() const
{
return d_position;
}
// Set the position of the particle
void ParticleState::setPosition( const double x_position,
const double y_position,
const double z_position )
{
// Make sure the coordinates are valid
testPrecondition( !ST::isnaninf( x_position ) );
testPrecondition( !ST::isnaninf( y_position ) );
testPrecondition( !ST::isnaninf( z_position ) );
d_position[0] = x_position;
d_position[1] = y_position;
d_position[2] = z_position;
}
// Return the x direction of the particle
double ParticleState::getXDirection() const
{
return d_direction[0];
}
// Return the y direction of the particle
double ParticleState::getYDirection() const
{
return d_direction[1];
}
// Return the z direction of the particle
double ParticleState::getZDirection() const
{
return d_direction[2];
}
// Return the direction of the particle
const double* ParticleState::getDirection() const
{
return d_direction;
}
// Set the direction of the particle
void ParticleState::setDirection( const double x_direction,
const double y_direction,
const double z_direction )
{
// Make sure the direction coordinates are valid
testPrecondition( !ST::isnaninf( x_direction ) );
testPrecondition( !ST::isnaninf( y_direction ) );
testPrecondition( !ST::isnaninf( z_direction ) );
// Make sure the direction is a unit vector
testPrecondition( Utility::validDirection( x_direction,
y_direction,
z_direction ) );
d_direction[0] = x_direction;
d_direction[1] = y_direction;
d_direction[2] = z_direction;
}
// Rotate the direction of the particle using the polar angle cosine and azimuthal angle
/*! \details The polar angle cosine and azimuthal angle are w.r.t. the
* current particle direction and not the global coordinate system. These
* are the variables the commonly occur when sampling a new direction
* for the particle from a scattering distribution. This function is therefore
* meant to avoid duplicate code that would otherwise arise when determining
* the new particle direction
*/
void ParticleState::rotateDirection( const double polar_angle_cosine,
const double azimuthal_angle )
{
// Make sure the current particle direction is valid (initialized)
testPrecondition( Utility::validDirection( this->getDirection() ) );
// Make sure the polar angle cosine is valid
testPrecondition( polar_angle_cosine >= -1.0 );
testPrecondition( polar_angle_cosine <= 1.0 );
// Make sure the azimuthal angle is valid
testPrecondition( azimuthal_angle >= 0.0 );
testPrecondition( azimuthal_angle <= 2*Utility::PhysicalConstants::pi );
double outgoing_direction[3];
Utility::rotateDirectionThroughPolarAndAzimuthalAngle( polar_angle_cosine,
azimuthal_angle,
this->getDirection(),
outgoing_direction );
this->setDirection( outgoing_direction );
}
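// Hedged usage sketch (random_number() is an assumed RNG call): sampling an
// isotropic scattering direction in the particle's local frame
//   double mu = 2.0*random_number() - 1.0;  // polar angle cosine in [-1,1]
//   double phi = 2.0*Utility::PhysicalConstants::pi*random_number();
//   particle.rotateDirection( mu, phi );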
// Advance the particle along its direction by the requested distance
void ParticleState::advance( const double distance )
{
// Make sure the distance is valid
testPrecondition( !ST::isnaninf( distance ) );
d_position[0] += d_direction[0]*distance;
d_position[1] += d_direction[1]*distance;
d_position[2] += d_direction[2]*distance;
// Compute the time to traverse the distance
d_time += calculateTraversalTime( distance );
}
// Set the energy of the particle
/*! The default implementation is only valid for massless particles (It is
* assumed that the speed of the particle does not change with the energy).
*/
void ParticleState::setEnergy( const ParticleState::energyType energy )
{
// Make sure the energy is valid
testPrecondition( !ST::isnaninf( energy ) );
testPrecondition( energy > 0.0 );
d_energy = energy;
}
// Return the time state of the particle
ParticleState::timeType ParticleState::getTime() const
{
return d_time;
}
// Set the time state of the particle
void ParticleState::setTime( const ParticleState::timeType time )
{
d_time = time;
}
// Return the collision number of the particle
ParticleState::collisionNumberType ParticleState::getCollisionNumber() const
{
return d_collision_number;
}
// Increment the collision number
void ParticleState::incrementCollisionNumber()
{
++d_collision_number;
}
// Reset the collision number
/*! \details This should rarely be used - try to rely on the constructor to
* reset the collision number.
*/
void ParticleState::resetCollisionNumber()
{
d_collision_number = 0u;
}
// Return the generation number of the particle
ParticleState::generationNumberType ParticleState::getGenerationNumber() const
{
return d_generation_number;
}
// Increment the generation number
void ParticleState::incrementGenerationNumber()
{
++d_generation_number;
}
// Return the weight of the particle
double ParticleState::getWeight() const
{
return d_weight;
}
// Set the weight of the particle
void ParticleState::setWeight( const double weight )
{
d_weight = weight;
}
// Multiply the weight of the particle by a factor
void ParticleState::multiplyWeight( const double weight_factor )
{
// Make sure that the current weight is valid
testPrecondition( d_weight > 0.0 );
d_weight *= weight_factor;
}
// Return if the particle is lost
bool ParticleState::isLost() const
{
return d_lost;
}
// Set the particle as lost
void ParticleState::setAsLost()
{
d_lost = true;
}<|fim▁hole|>// Return if the particle is gone
bool ParticleState::isGone() const
{
return d_gone;
}
// Set the particle as gone
void ParticleState::setAsGone()
{
d_gone = true;
}
} // end MonteCarlo
//---------------------------------------------------------------------------//
// end MonteCarlo_ParticleState.cpp
//---------------------------------------------------------------------------//<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import logging
logger = logging.getLogger("sikteeri.views")
from django.conf import settings<|fim▁hole|>from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from sikteeri.version import VERSION
def frontpage(request):
    if settings.MAINTENANCE_MESSAGE is None:
if not request.user.is_authenticated():
return redirect('membership.views.new_application')
return render_to_response('frontpage.html',
dict(title=_('Django and the jazz cigarette'),
version=VERSION),
context_instance=RequestContext(request))
else:
return render_to_response('maintenance_message.html',
{"title": _('Under maintenance'),
"maintenance_message": settings.MAINTENANCE_MESSAGE},
context_instance=RequestContext(request))<|fim▁end|> | from django.shortcuts import render_to_response, redirect |
<|file_name|>napalm_get_model.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Ex 1. Construct a script that retrieves NAPALM facts from two IOS routers, two Arista switches, and one Junos device.
pynet-rtr1 (Cisco IOS) 184.105.247.70
pynet-rtr2 (Cisco IOS) 184.105.247.71
pynet-sw1 (Arista EOS) 184.105.247.72
pynet-sw2 (Arista EOS) 184.105.247.73
juniper-srx 184.105.247.76
Retrieve the 'model' number from each device and print the model to standard out.
As part of this exercise define the devices that you use in a Python file (for example my_devices.py) and import
these devices into your program. Optionally, define the devices in a YAML file and read this my_devices.yml file in.
"""
from __future__ import print_function<|fim▁hole|>
from getpass import getpass
from pprint import pprint
from napalm_base import get_network_driver
from pyeapi.eapilib import CommandError
import yaml
import re
YAML_FILE = 'my_devices.yml'
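# Hedged sketch of the expected my_devices.yml layout (assumed from the loop
# below, which pops 'device_type' and passes the remaining keys to the
# napalm driver):
#
#   - device_type: ios
#     hostname: 184.105.247.70
#     username: pyclass
#   - device_type: eos
#     hostname: 184.105.247.72
#     username: pyclass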
def main():
with open(YAML_FILE) as f:
        my_devices = yaml.safe_load(f)
#pprint(my_devices)
pwd = getpass()
print("{:<20} {:<20} {:<20}".format("Device Type", "Hostname", "Model"))
for device_dict in my_devices:
device_dict['password'] = pwd
device_type = device_dict.pop('device_type')
driver = get_network_driver(device_type)
device=driver(**device_dict)
device.open()
facts = device.get_facts()
print('*' * 80)
print("{:<20} {:<20} {:<20}".format(device_type, device_dict['hostname'], facts['model']))
print('*' * 80)
        print()
if __name__ == "__main__":
main()<|fim▁end|> | from __future__ import unicode_literals |
<|file_name|>pwf2.py<|end_file_name|><|fim▁begin|>import numpy as np
from ase import Hartree
from gpaw.aseinterface import GPAW
from gpaw.lcao.overlap import NewTwoCenterIntegrals
from gpaw.utilities import unpack
from gpaw.utilities.tools import tri2full, lowdin
from gpaw.lcao.tools import basis_subset2, get_bfi, get_bfi2
from gpaw.coulomb import get_vxc as get_ks_xc
from gpaw.utilities.blas import r2k, gemm
from gpaw.lcao.projected_wannier import dots, condition_number, eigvals, \
get_bfs, get_lcao_projections_HSP
def get_rot(F_MM, V_oM, L):
eps_M, U_MM = np.linalg.eigh(F_MM)
indices = eps_M.real.argsort()[-L:]
U_Ml = U_MM[:, indices]
U_Ml /= np.sqrt(dots(U_Ml.T.conj(), F_MM, U_Ml).diagonal())
U_ow = V_oM.copy()
U_lw = np.dot(U_Ml.T.conj(), F_MM)
for col1, col2 in zip(U_ow.T, U_lw.T):
norm = np.linalg.norm(np.hstack((col1, col2)))
col1 /= norm
col2 /= norm
return U_ow, U_lw, U_Ml
def get_lcao_xc(calc, P_aqMi, bfs=None, spin=0):
nq = len(calc.wfs.ibzk_qc)
nao = calc.wfs.setups.nao
dtype = calc.wfs.dtype
if bfs is None:
bfs = get_bfs(calc)
if calc.density.nt_sg is None:
calc.density.interpolate()
nt_sg = calc.density.nt_sg
vxct_sg = calc.density.finegd.zeros(calc.wfs.nspins)
calc.hamiltonian.xc.calculate(calc.density.finegd, nt_sg, vxct_sg)
vxct_G = calc.wfs.gd.zeros()
calc.hamiltonian.restrict(vxct_sg[spin], vxct_G)
Vxc_qMM = np.zeros((nq, nao, nao), dtype)
for q, Vxc_MM in enumerate(Vxc_qMM):
bfs.calculate_potential_matrix(vxct_G, Vxc_MM, q)
tri2full(Vxc_MM, 'L')
# Add atomic PAW corrections
for a, P_qMi in P_aqMi.items():
D_sp = calc.density.D_asp[a][:]
H_sp = np.zeros_like(D_sp)
calc.hamiltonian.xc.calculate_paw_correction(calc.wfs.setups[a],
D_sp, H_sp)
H_ii = unpack(H_sp[spin])
for Vxc_MM, P_Mi in zip(Vxc_qMM, P_qMi):
Vxc_MM += dots(P_Mi, H_ii, P_Mi.T.conj())
return Vxc_qMM * Hartree
def get_xc2(calc, w_wG, P_awi, spin=0):
if calc.density.nt_sg is None:
calc.density.interpolate()
nt_g = calc.density.nt_sg[spin]
vxct_g = calc.density.finegd.zeros()
calc.hamiltonian.xc.get_energy_and_potential(nt_g, vxct_g)
vxct_G = calc.wfs.gd.empty()
calc.hamiltonian.restrict(vxct_g, vxct_G)
# Integrate pseudo part
Nw = len(w_wG)
xc_ww = np.empty((Nw, Nw))
r2k(.5 * calc.wfs.gd.dv, w_wG, vxct_G * w_wG, .0, xc_ww)
tri2full(xc_ww, 'L')
# Add atomic PAW corrections
for a, P_wi in P_awi.items():
D_sp = calc.density.D_asp[a][:]
H_sp = np.zeros_like(D_sp)
calc.wfs.setups[a].xc_correction.calculate_energy_and_derivatives(
D_sp, H_sp)
H_ii = unpack(H_sp[spin])
xc_ww += dots(P_wi, H_ii, P_wi.T.conj())
return xc_ww * Hartree
class ProjectedWannierFunctionsFBL:
"""PWF in the finite band limit.
::
--N
|w_w> = > |psi_n> U_nw
--n=1
"""
def __init__(self, V_nM, No, ortho=False):
Nw = V_nM.shape[1]
assert No <= Nw
V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = np.dot(V_uM.T.conj(), V_uM)
U_ow, U_lw, U_Ml = get_rot(F_MM, V_oM, Nw - No)
self.U_nw = np.vstack((U_ow, dots(V_uM, U_Ml, U_lw)))
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1))
if ortho:
lowdin(self.U_nw, self.S_ww)
self.S_ww = np.identity(Nw)
self.norms_n = np.dot(self.U_nw, np.linalg.solve(
self.S_ww, self.U_nw.T.conj())).diagonal()
def rotate_matrix(self, A_nn):
if A_nn.ndim == 1:
return np.dot(self.U_nw.T.conj() * A_nn, self.U_nw)
else:
return dots(self.U_nw.T.conj(), A_nn, self.U_nw)
def rotate_projections(self, P_ani):
P_awi = {}
for a, P_ni in P_ani.items():
P_awi[a] = np.tensordot(self.U_nw, P_ni, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_nG):
return np.tensordot(self.U_nw, psit_nG, axes=[[0], [0]])
class ProjectedWannierFunctionsIBL:
"""PWF in the infinite band limit.
::
--No --Nw
|w_w> = > |psi_o> U_ow + > |f_M> U_Mw
--o=1 --M=1
"""
def __init__(self, V_nM, S_MM, No, lcaoindices=None):
Nw = V_nM.shape[1]
assert No <= Nw
self.V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = S_MM - np.dot(self.V_oM.T.conj(), self.V_oM)
U_ow, U_lw, U_Ml = get_rot(F_MM, self.V_oM, Nw - No)
self.U_Mw = np.dot(U_Ml, U_lw)
self.U_ow = U_ow - np.dot(self.V_oM, self.U_Mw)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
P_uw = np.dot(V_uM, self.U_Mw)
self.norms_n = np.hstack((
np.dot(U_ow, np.linalg.solve(self.S_ww, U_ow.T.conj())).diagonal(),
np.dot(P_uw, np.linalg.solve(self.S_ww, P_uw.T.conj())).diagonal()))
def rotate_matrix(self, A_o, A_MM):
assert A_o.ndim == 1
A_ww = dots(self.U_ow.T.conj() * A_o, self.V_oM, self.U_Mw)
A_ww += np.conj(A_ww.T)
A_ww += np.dot(self.U_ow.T.conj() * A_o, self.U_ow)
A_ww += dots(self.U_Mw.T.conj(), A_MM, self.U_Mw)
return A_ww
def rotate_projections(self, P_aoi, P_aMi, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
P_awi = {}
for a, P_oi in P_aoi.items():
P_awi[a] = np.tensordot(U_Mw, P_aMi[a], axes=[[0], [0]])
if len(U_ow) > 0:
P_awi[a] += np.tensordot(U_ow, P_oi, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_oG, bfs, q=-1, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
w_wG = np.zeros((U_ow.shape[1],) + psit_oG.shape[1:])
if len(U_ow) > 0:
gemm(1., psit_oG, U_ow.T.copy(), 0., w_wG)
bfs.lcao_to_grid(U_Mw.T.copy(), w_wG, q)
return w_wG
class PWFplusLCAO(ProjectedWannierFunctionsIBL):
def __init__(self, V_nM, S_MM, No, pwfmask, lcaoindices=None):
Nw = V_nM.shape[1]
self.V_oM = V_nM[:No]
dtype = V_nM.dtype
# Do PWF optimization for pwfbasis submatrix only!
Npwf = len(pwfmask.nonzero()[0])
pwfmask2 = np.outer(pwfmask, pwfmask)
s_MM = S_MM[pwfmask2].reshape(Npwf, Npwf)
v_oM = self.V_oM[:, pwfmask]
f_MM = s_MM - np.dot(v_oM.T.conj(), v_oM)
nw = len(s_MM)
assert No <= nw
u_ow, u_lw, u_Ml = get_rot(f_MM, v_oM, nw - No)
u_Mw = np.dot(u_Ml, u_lw)
u_ow = u_ow - np.dot(v_oM, u_Mw)
# Determine U for full lcao basis
self.U_ow = np.zeros((No, Nw), dtype)
for U_w, u_w in zip(self.U_ow, u_ow):
np.place(U_w, pwfmask, u_w)
self.U_Mw = np.identity(Nw, dtype)
np.place(self.U_Mw, pwfmask2, u_Mw.flat)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
self.norms_n = None
def set_lcaoatoms(calc, pwf, lcaoatoms):
ind = get_bfi(calc, lcaoatoms)
for i in ind:
pwf.U_ow[:, i] = 0.0
pwf.U_Mw[:, i] = 0.0
        pwf.U_Mw[i, i] = 1.0
class PWF2:
def __init__(self, gpwfilename, fixedenergy=0., spin=0, ibl=True,
basis='sz', zero_fermi=False, pwfbasis=None, lcaoatoms=None,
projection_data=None):
calc = GPAW(gpwfilename, txt=None, basis=basis)
assert calc.wfs.gd.comm.size == 1
assert calc.wfs.kpt_comm.size == 1
assert calc.wfs.band_comm.size == 1
if zero_fermi:
try:
Ef = calc.get_fermi_level()
except NotImplementedError:
Ef = calc.get_homo_lumo().mean()
else:
Ef = 0.0
self.ibzk_kc = calc.get_ibz_k_points()
self.nk = len(self.ibzk_kc)
self.eps_kn = [calc.get_eigenvalues(kpt=q, spin=spin) - Ef
for q in range(self.nk)]
self.M_k = [sum(eps_n <= fixedenergy) for eps_n in self.eps_kn]
print 'Fixed states:', self.M_k
self.calc = calc
self.dtype = self.calc.wfs.dtype
self.spin = spin
self.ibl = ibl
self.pwf_q = []
self.norms_qn = []
self.S_qww = []
self.H_qww = []
if ibl:
if pwfbasis is not None:
pwfmask = basis_subset2(calc.atoms.get_chemical_symbols(),
basis, pwfbasis)
if lcaoatoms is not None:
lcaoindices = get_bfi2(calc.atoms.get_chemical_symbols(),
basis,
lcaoatoms)
else:
lcaoindices = None
self.bfs = get_bfs(calc)
if projection_data is None:
V_qnM, H_qMM, S_qMM, self.P_aqMi = get_lcao_projections_HSP(
calc, bfs=self.bfs, spin=spin, projectionsonly=False)
else:
V_qnM, H_qMM, S_qMM, self.P_aqMi = projection_data
H_qMM -= Ef * S_qMM
for q, M in enumerate(self.M_k):
if pwfbasis is None:
pwf = ProjectedWannierFunctionsIBL(V_qnM[q], S_qMM[q], M,
lcaoindices)
else:
pwf = PWFplusLCAO(V_qnM[q], S_qMM[q], M, pwfmask,
lcaoindices)
self.pwf_q.append(pwf)
self.norms_qn.append(pwf.norms_n)
self.S_qww.append(pwf.S_ww)
self.H_qww.append(pwf.rotate_matrix(self.eps_kn[q][:M],
H_qMM[q]))
else:
if projection_data is None:
V_qnM = get_lcao_projections_HSP(calc, spin=spin)
else:
V_qnM = projection_data
for q, M in enumerate(self.M_k):
pwf = ProjectedWannierFunctionsFBL(V_qnM[q], M, ortho=False)
self.pwf_q.append(pwf)
self.norms_qn.append(pwf.norms_n)
self.S_qww.append(pwf.S_ww)
self.H_qww.append(pwf.rotate_matrix(self.eps_kn[q]))
for S in self.S_qww:
print 'Condition number: %0.1e' % condition_number(S)
def get_hamiltonian(self, q=0, indices=None):
if indices is None:
return self.H_qww[q]
else:
return self.H_qww[q].take(indices, 0).take(indices, 1)
def get_overlap(self, q=0, indices=None):
if indices is None:
return self.S_qww[q]
else:
return self.S_qww[q].take(indices, 0).take(indices, 1)
def get_projections(self, q=0, indices=None):
kpt = self.calc.wfs.kpt_u[self.spin * self.nk + q]
if not hasattr(self, 'P_awi'):
if self.ibl:
M = self.M_k[q]
self.P_awi = self.pwf_q[q].rotate_projections(
dict([(a, P_ni[:M]) for a, P_ni in kpt.P_ani.items()]),
dict([(a, P_qMi[q]) for a, P_qMi in self.P_aqMi.items()]),
indices)
else:
                self.P_awi = self.pwf_q[q].rotate_projections(kpt.P_ani)  # FBL rotation takes no index filter
return self.P_awi
def get_orbitals(self, q=0, indices=None):
self.calc.wfs.initialize_wave_functions_from_restart_file()
kpt = self.calc.wfs.kpt_u[self.spin * self.nk + q]
if not hasattr(self, 'w_wG'):
if self.ibl:
self.w_wG = self.pwf_q[q].rotate_function(
kpt.psit_nG[:self.M_k[q]], self.bfs, q, indices)
else:
self.w_wG = self.pwf_q[q].rotate_function(
kpt.psit_nG, indices)
return self.w_wG
def get_Fcore(self, q=0, indices=None):
if indices is None:
Fcore_ww = np.zeros_like(self.H_qww[q])
else:
Fcore_ww = np.zeros((len(indices), len(indices)))
for a, P_wi in self.get_projections(q, indices).items():
X_ii = unpack(self.calc.wfs.setups[a].X_p)
Fcore_ww -= dots(P_wi, X_ii, P_wi.T.conj())
return Fcore_ww * Hartree
def get_eigs(self, q=0):
        return eigvals(self.H_qww[q], self.S_qww[q])
def get_condition_number(self, q=0):
return condition_number(self.S_qww[q])
def get_xc(self, q=0, indices=None):
#self.calc.density.ghat.set_positions(
# self.calc.atoms.get_scaled_positions() % 1.)
#self.calc.hamiltonian.poisson.initialize()
if self.ibl:
return get_xc2(self.calc, self.get_orbitals(q, indices),
self.get_projections(q, indices), self.spin)
else:
return self.pwf_q[q].rotate_matrix(get_ks_xc(self.calc,
spin=self.spin))
class LCAOwrap:
def __init__(self, calc, spin=0):
assert calc.wfs.gd.comm.size == 1
assert calc.wfs.kpt_comm.size == 1
assert calc.wfs.band_comm.size == 1
from gpaw.lcao.tools import get_lcao_hamiltonian
H_skMM, S_kMM = get_lcao_hamiltonian(calc)
self.calc = calc
self.dtype = calc.wfs.dtype
self.spin = spin
self.H_qww = H_skMM[spin]
self.S_qww = S_kMM<|fim▁hole|> self.Nw = self.S_qww.shape[-1]
for S in self.S_qww:
print 'Condition number: %0.1e' % condition_number(S)
def get_hamiltonian(self, q=0, indices=None):
if indices is None:
return self.H_qww[q]
else:
return self.H_qww[q].take(indices, 0).take(indices, 1)
def get_overlap(self, q=0, indices=None):
if indices is None:
return self.S_qww[q]
else:
return self.S_qww[q].take(indices, 0).take(indices, 1)
def get_projections(self, q=0, indices=None):
if indices is None:
return dict([(a, P_qwi[q]) for a, P_qwi in self.P_aqwi.items()])
else:
return dict([(a, P_qwi[q].take(indices, 0))
for a, P_qwi in self.P_aqwi.items()])
def get_orbitals(self, q=-1, indices=None):
assert q == -1
if indices is None:
indices = range(self.Nw)
Ni = len(indices)
C_wM = np.zeros((Ni, self.Nw), self.dtype)
for i, C_M in zip(indices, C_wM):
C_M[i] = 1.0
w_wG = self.calc.wfs.gd.zeros(Ni, dtype=self.dtype)
self.calc.wfs.basis_functions.lcao_to_grid(C_wM, w_wG, q=-1)
return w_wG
def get_Fcore(self, q=0, indices=None):
if indices is None:
Fcore_ww = np.zeros_like(self.H_qww[q])
else:
Fcore_ww = np.zeros((len(indices), len(indices)))
for a, P_wi in self.get_projections(q, indices).items():
if self.calc.wfs.setups[a].type != 'ghost':
X_ii = unpack(self.calc.wfs.setups[a].X_p)
Fcore_ww -= dots(P_wi, X_ii, P_wi.T.conj())
return Fcore_ww * Hartree
def get_xc(self, q=0, indices=None):
if not hasattr(self, 'Vxc_qww'):
self.Vxc_qww = get_lcao_xc(self.calc, self.P_aqwi,
bfs=self.calc.wfs.basis_functions,
spin=self.spin)
if indices is None:
return self.Vxc_qww[q]
else:
return self.Vxc_qww[q].take(indices, 0).take(indices, 1)<|fim▁end|> | self.P_aqwi = calc.wfs.P_aqMi |
<|file_name|>cookie.py<|end_file_name|><|fim▁begin|>import json
from django.conf import settings
from django.contrib.messages.storage.base import BaseStorage, Message
from django.http import SimpleCookie
from django.utils.crypto import salted_hmac, constant_time_compare
from django.utils.safestring import SafeData, mark_safe
from django.utils import six
class MessageEncoder(json.JSONEncoder):
"""
Compactly serializes instances of the ``Message`` class as JSON.
"""
message_key = '__json_message'
def default(self, obj):
if isinstance(obj, Message):
# Using 0/1 here instead of False/True to produce more compact json
is_safedata = 1 if isinstance(obj.message, SafeData) else 0
message = [self.message_key, is_safedata, obj.level, obj.message]
if obj.extra_tags:
message.append(obj.extra_tags)
return message
return super(MessageEncoder, self).default(obj)
class MessageDecoder(json.JSONDecoder):
"""
Decodes JSON that includes serialized ``Message`` instances.
"""
def process_messages(self, obj):
if isinstance(obj, list) and obj:
if obj[0] == MessageEncoder.message_key:
if len(obj) == 3:
# Compatibility with previously-encoded messages
return Message(*obj[1:])
if obj[1]:
obj[3] = mark_safe(obj[3])
return Message(*obj[2:])
return [self.process_messages(item) for item in obj]
if isinstance(obj, dict):
return {key: self.process_messages(value)
for key, value in six.iteritems(obj)}
return obj
def decode(self, s, **kwargs):
decoded = super(MessageDecoder, self).decode(s, **kwargs)
return self.process_messages(decoded)
class CookieStorage(BaseStorage):
"""
Stores messages in a cookie.
"""
cookie_name = 'messages'
# uwsgi's default configuration enforces a maximum size of 4kb for all the
# HTTP headers. In order to leave some room for other cookies and headers,
# restrict the session cookie to 1/2 of 4kb. See #18781.
max_cookie_size = 2048
not_finished = '__messagesnotfinished__'
def _get(self, *args, **kwargs):
"""
Retrieves a list of messages from the messages cookie. If the
not_finished sentinel value is found at the end of the message list,
remove it and return a result indicating that not all messages were
retrieved by this storage.
"""
data = self.request.COOKIES.get(self.cookie_name)
messages = self._decode(data)
all_retrieved = not (messages and messages[-1] == self.not_finished)
if messages and not all_retrieved:
# remove the sentinel value
messages.pop()
return messages, all_retrieved
def _update_cookie(self, encoded_data, response):
"""
Either sets the cookie with the encoded data if there is any data to
store, or deletes the cookie.
"""
if encoded_data:
response.set_cookie(self.cookie_name, encoded_data,
domain=settings.SESSION_COOKIE_DOMAIN,
secure=settings.SESSION_COOKIE_SECURE or None,
httponly=settings.SESSION_COOKIE_HTTPONLY or None)
else:
response.delete_cookie(self.cookie_name,
domain=settings.SESSION_COOKIE_DOMAIN)
def _store(self, messages, response, remove_oldest=True, *args, **kwargs):
"""
Stores the messages to a cookie, returning a list of any messages which
could not be stored.
If the encoded data is larger than ``max_cookie_size``, removes
messages until the data fits (these are the messages which are
returned), and add the not_finished sentinel value to indicate as much.
"""
unstored_messages = []
encoded_data = self._encode(messages)
if self.max_cookie_size:
# data is going to be stored eventually by SimpleCookie, which
# adds its own overhead, which we must account for.
cookie = SimpleCookie() # create outside the loop
def stored_length(val):
return len(cookie.value_encode(val)[1])
while encoded_data and stored_length(encoded_data) > self.max_cookie_size:
if remove_oldest:
unstored_messages.append(messages.pop(0))
else:
unstored_messages.insert(0, messages.pop())
encoded_data = self._encode(messages + [self.not_finished],
encode_empty=unstored_messages)
self._update_cookie(encoded_data, response)
return unstored_messages
<|fim▁hole|> Creates an HMAC/SHA1 hash based on the value and the project setting's
SECRET_KEY, modified to make it unique for the present purpose.
"""
key_salt = 'django.contrib.messages'
return salted_hmac(key_salt, value).hexdigest()
def _encode(self, messages, encode_empty=False):
"""
Returns an encoded version of the messages list which can be stored as
plain text.
Since the data will be retrieved from the client-side, the encoded data
also contains a hash to ensure that the data was not tampered with.
"""
if messages or encode_empty:
encoder = MessageEncoder(separators=(',', ':'))
value = encoder.encode(messages)
return '%s$%s' % (self._hash(value), value)
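    # Hedged illustration (hash shortened): the encoded payload looks like
    #   '3fd4a1...$[["__json_message",0,25,"Hello world"]]'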
def _decode(self, data):
"""
Safely decodes an encoded text stream back into a list of messages.
If the encoded text stream contained an invalid hash or was in an
invalid format, ``None`` is returned.
"""
if not data:
return None
bits = data.split('$', 1)
if len(bits) == 2:
hash, value = bits
if constant_time_compare(hash, self._hash(value)):
try:
# If we get here (and the JSON decode works), everything is
# good. In any other case, drop back and return None.
return json.loads(value, cls=MessageDecoder)
except ValueError:
pass
# Mark the data as used (so it gets removed) since something was wrong
# with the data.
self.used = True
return None<|fim▁end|> | def _hash(self, value):
""" |
<|file_name|>core.py<|end_file_name|><|fim▁begin|># plugins module for amsn2
"""
Plugins with amsn2 will be a subclass of the aMSNPlugin() class.
When this module is initially imported it should load the plugins from the last session. Done in the init() proc.
Then the GUI should call plugins.loadPlugin(name) or plugins.unLoadPlugin(name) in order to deal with plugins.
"""
# init()
# Called when the plugins module is imported (only for the first time).
# Should find plugins and populate a list ready for getPlugins().
# Should also auto-update all plugins.
def init(): pass<|fim▁hole|>
# loadPlugin(plugin_name)
# Called (by the GUI or from init()) to load a plugin. plugin_name as set in plugin's XML (or from getPlugins()).
# This loads the module for the plugin. The module is then responsible for calling plugins.registerPlugin(instance).
def loadPlugin(plugin_name): pass
# unLoadPlugin(plugin_name)
# Called to unload a plugin. Name is name as set in plugin's XML.
def unLoadPlugin(plugin_name): pass
# registerPlugin(plugin_instance)
# Saves the instance of the plugin, and registers it in the loaded list.
def registerPlugin(plugin_instance): pass
# getPlugins()
# Returns a list of all available plugins, as in ['Plugin 1', 'Plugin 2']
def getPlugins(): pass
# getPluginsWithStatus()
# Returns a list with one entry per plugin, pairing the plugin's name with Loaded or NotLoaded.
# IE: [['Plugin 1', 'Loaded'], ['Plugin 2', 'NotLoaded']]
def getPluginsWithStatus(): pass
# getLoadedPlugins()
# Returns a list of loaded plugins. as in ['Plugin 1', 'Plugin N']
def getLoadedPlugins(): pass
# findPlugin(plugin_name)
# Retruns the running instance of the plugin with name plugin_name, or None if not found.
def findPlugin(plugin_name): pass
# saveConfig(plugin_name, data)
def saveConfig(plugin_name, data): pass
# Calls the init procedure.
# Will only be called on the first import (thanks to python).
init()<|fim▁end|> | |
<|file_name|>oauth_app.py<|end_file_name|><|fim▁begin|>"""
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from rest_framework.response import Response
from django.db import transaction<|fim▁hole|>import rest_framework_custom as rfc
from storageadmin.exceptions import RockStorAPIException
from storageadmin.util import handle_exception
class OauthAppView(rfc.GenericView):
serializer_class = OauthAppSerializer
def get_queryset(self, *args, **kwargs):
if ('name' in self.kwargs):
self.paginate_by = 0
try:
return OauthApp.objects.get(name=self.kwargs['name'])
except:
return []
return OauthApp.objects.all()
@transaction.atomic
def post(self, request):
with self._handle_exception(request):
name = request.data['name']
username = request.user.username
if (OauthApp.objects.filter(name=name).exists()):
e_msg = ('application with name: %s already exists.' % name)
handle_exception(Exception(e_msg), request)
try:
user = User.objects.get(username=username)
except:
e_msg = ('User with name: %s does not exist' % username)
handle_exception(Exception(e_msg), request)
client_type = OauthApplication.CLIENT_CONFIDENTIAL
auth_grant_type = OauthApplication.GRANT_CLIENT_CREDENTIALS
app = OauthApplication(name=name, client_type=client_type,
authorization_grant_type=auth_grant_type,
user=user.user)
app.save()
oauth_app = OauthApp(name=name, application=app, user=user)
oauth_app.save()
return Response(OauthAppSerializer(oauth_app).data)
@transaction.atomic
def delete(self, request, name):
with self._handle_exception(request):
try:
app = OauthApp.objects.get(name=name)
except:
e_msg = ('application with name: %s does not exist' % name)
handle_exception(Exception(e_msg), request)
app.application.delete()
app.delete()
return Response()<|fim▁end|> | from oauth2_provider.models import Application as OauthApplication
from storageadmin.models import (OauthApp, User)
from storageadmin.serializers import OauthAppSerializer |
<|file_name|>testalchemy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from sqlalchemy import create_engine, and_, or_
from sqlalchemy.orm.session import sessionmaker
from rts2.db import Targets,Grb
Session = sessionmaker()
engine = create_engine('postgresql://petr:petr@localhost/stars',echo='debug')<|fim▁hole|>Session.configure(bind=engine)
sess = Session()
targ = sess.query(Targets)
#q = sess.query(ApacheCatalog)
print targ.filter(Targets.tar_id == 1000).all()
print targ.filter(and_(Targets.tar_ra < 20, Targets.tar_dec < 0, Targets.tar_dec > -20)).all()
grb = sess.query(Grb)
print grb.filter(Grb.tar_id == 50001).all()<|fim▁end|> | |
<|file_name|>entities.js<|end_file_name|><|fim▁begin|>game.PlayerEntity = me.Entity.extend ({ //builds the player class
init: function(x, y, settings){
this.setSuper(x, y);
this.setPlayerTimer();
this.setAttributes();
this.type="PlayerEntity";
this.setFlags();
me.game.viewport.follow(this.pos, me.game.viewport.AXIS.BOTH); //locks camera on the character
this.addAnimation();
this.renderable.setCurrentAnimation("idle"); //sets the idle animation
},
setSuper: function(x, y){
this._super(me.Entity, 'init', [x, y, {//._super reaches to the object entity
image: "player",//uses the image player
width: 64, //preserves the height and width for player
height: 64,
spritewidth: "64", //uses height and width for player
spriteheight: "64",
getShape: function(){
                return (new me.Rect(0, 0, 64, 64)).toPolygon(); //creates a little rectangle for what the player can walk into.
}
}]);
},
setPlayerTimer: function(){
this.now = new Date().getTime(); //keeps track of what time it is
this.lastHit = this.now; //same as this.now
this.lastSpear = this.now;
this.lastAttack = new Date().getTime();
},
setAttributes: function(){
this.health = game.data.playerHealth;
this.body.setVelocity(game.data.playerMoveSpeed, 20); //sets velocity to 5
this.attack = game.data.playerAttack;
},
setFlags: function(){
this.facing = "right"; //makes the character face right
this.dead = false;
this.attacking = false;
},
addAnimation: function(){
this.renderable.addAnimation("idle", [78]); //idle animation
this.renderable.addAnimation("walk", [143, 144, 145, 146, 147, 148, 149, 150, 151], 80); //walking animation
this.renderable.addAnimation("attack", [195, 196, 197, 198, 199, 200], 80); //setting the attack animation
},
update: function(delta){
this.now = new Date().getTime(); //everytime we call update it updates the time
this.dead = this.checkIfDead();
this.checkKeyPressesAndMove();
this.checkAbilityKeys();
this.setAnimation();
me.collision.check(this, true, this.collideHandler.bind(this), true);
this.body.update(delta); //delta is the change in time
this._super(me.Entity, "update", [delta]);
return true;
},
checkIfDead: function(){
if (this.health <= 0){
return true;
}
},
checkKeyPressesAndMove: function(){
if(me.input.isKeyPressed("right")){ //checks to see if the right key is pressed
this.moveRight();
}
else if(me.input.isKeyPressed("left")){ //allows the player to move left
this.moveLeft();
}
else{
this.body.vel.x = 0; //stops the movement
}
if(me.input.isKeyPressed("jump") && !this.jumping && !this.falling){ //allows the player to jump without double jumping or falling and jumping
this.jump();
}
this.attacking = me.input.isKeyPressed("attack"); //attack key
},
moveRight: function(){
this.body.vel.x += this.body.accel.x * me.timer.tick; //adds the velocity to the set velocity and mutiplies by the me.timer.tick and makes the movement smooth
this.facing = "right"; //sets the character to face right
this.flipX(false);
},
moveLeft: function(){
this.body.vel.x -= this.body.accel.x * me.timer.tick;
this.facing = "left";
this.flipX(true);
},
jump: function(){
this.body.jumping = true;
this.body.vel.y -= this.body.accel.y * me.timer.tick;
},
checkAbilityKeys: function(){
if(me.input.isKeyPressed("skill1")){
// this.speedBurst();
}else if(me.input.isKeyPressed("skill2")){
// this.eatCreep();
}else if(me.input.isKeyPressed("skill3")){
this.throwSpear();
}
},
throwSpear: function(){
if(this.now-this.lastSpear >= game.data.spearTimer*100 && game.data.ability3 > 0){
this.lastSpear = this.now;
var spear = me.pool.pull("spear", this.pos.x, this.pos.y, {}, this.facing);
me.game.world.addChild(spear, 10);
}
},
setAnimation: function(){
if(this.attacking){
if(!this.renderable.isCurrentAnimation("attack")){
this.renderable.setCurrentAnimation("attack", "idle");
this.renderable.setAnimationFrame();
}
}
else if(this.body.vel.x !== 0 && !this.renderable.isCurrentAnimation("attack")){ //changes the animation from attack to walking
if (!this.renderable.isCurrentAnimation("walk")) { //sets the current animation for walk
this.renderable.setCurrentAnimation("walk");
};
}
else if(!this.renderable.isCurrentAnimation("attack")){ //changes the animation from attack to idle
this.renderable.setCurrentAnimation("idle"); //if the player is not walking it uses idle animation
}
},
loseHealth: function(damage){
this.health = this.health - damage;
},
collideHandler: function(response){
if(response.b.type==='EnemyBaseEntity'){ //sees if the enemy base entitiy is near a player entity and if so it is solid from left and right and top
this.collideWithEnemyBase(response);
}
else if(response.b.type==='EnemyCreep'){
this.collideWithEnemyCreep(response);
}
},
collideWithEnemyBase: function(response){
var ydif = this.pos.y - response.b.pos.y;
var xdif = this.pos.x - response.b.pos.x;
if(ydif<-40 && xdif<70 && xdif>-35){
this.body.falling=false;
this.body.vel.y = -1;
}
if(xdif>-35 && this.facing==='right' && (xdif<0)){
this.body.vel.x = 0;
//this.pos.x = this.pos.x -1;
}
else if(xdif<70 && this.facing==='left' && (xdif>0)){
this.body.vel.x=0;
//this.pos.x = this.pos.x +1;
}
if(this.renderable.isCurrentAnimation("attack") && this.now-this.lastHit >= game.data.playerAttackTimer){ //if the animation is attack it will lose the base health and that it will check when the lasthit was
this.lastHit = this.now;
response.b.loseHealth(game.data.playerAttack);
}
},
collideWithEnemyCreep: function(response){
var xdif = this.pos.x - response.b.pos.x;
var ydif = this.pos.y - response.b.pos.y;
this.stopMovement(xdif);
if(this.checkAttack(xdif, ydif)){
this.hitCreep(response);
};
},
stopMovement: function(xdif){<|fim▁hole|> //this.pos.x = this.pos.x + 1;
if (this.facing === "left"){
this.body.vel.x = 0;
}
}
else{
//this.pos.x = this.pos.x - 1;
if(this.facing === "right"){
this.body.vel.x = 0;
}
}
},
checkAttack: function(xdif, ydif){
if(this.renderable.isCurrentAnimation("attack") && this.now - this.lastHit >= game.data.playerAttackTimer
&& (Math.abs(ydif) <=40) &&
((xdif>0) && this.facing==="left") || (((xdif<0) && this.facing === "right"))){
this.lastHit = this.now;
return true;
}
return false;
},
hitCreep: function(response){
if(response.b.health <= game.data.playerAttack){
game.data.gold += 1;
}
response.b.loseHealth(game.data.playerAttack);
}
});
// intermediate challenge: creating an ally creep
game.MyCreep = me.Entity.extend({
init: function(x, y, settings){
this._super(me.Entity, 'init', [x, y, {
image: "creep2",
width: 100,
height:85,
spritewidth: "100",
spriteheight: "85",
getShape: function(){
return (new me.Rect(0, 0, 52, 100)).toPolygon();
}
}]);
this.health = game.data.allyCreepHealth;
this.alwaysUpdate = true;
// //this.attacking lets us know if the enemy is currently attacking
this.attacking = false;
// //keeps track of when our creep last attacked anyting
this.lastAttacking = new Date().getTime();
this.lastHit = new Date().getTime();
this.now = new Date().getTime();
this.body.setVelocity(game.data.allyCreepMoveSpeed, 20);
this.type = "MyCreep";
this.renderable.addAnimation("walk", [0, 1, 2, 3, 4], 80);
this.renderable.setCurrentAnimation("walk");
},
update: function(delta) {
        this.now = new Date().getTime();
this.body.vel.x += this.body.accel.x * me.timer.tick;
this.flipX(true);
me.collision.check(this, true, this.collideHandler.bind(this), true);
this.body.update(delta);
this._super(me.Entity, "update", [delta]);
return true;
},
collideHandler: function(response) {
if(response.b.type==='EnemyBaseEntity'){
this.attacking = true;
//this.lastAttacking = this.now;
this.body.vel.x = 0;
this.pos.x = this.pos.x +1;
//checks that it has been at least 1 second since this creep hit a base
            if((this.now-this.lastHit >= game.data.allyCreepAttackTimer)){
//updates the last hit timer
this.lastHit = this.now;
//makes the player base call its loseHealth function and passes it a damage of 1
response.b.loseHealth(1);
}
}
}
});<|fim▁end|> | if(xdif > 0){ |
<|file_name|>manager.py<|end_file_name|><|fim▁begin|># Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
import shutil
from pants.backend.jvm.subsystems.scoverage_platform import ScoveragePlatform
from pants.backend.jvm.tasks.coverage.cobertura import Cobertura
from pants.backend.jvm.tasks.coverage.engine import NoCoverage
from pants.backend.jvm.tasks.coverage.jacoco import Jacoco
from pants.backend.jvm.tasks.coverage.scoverage import Scoverage
from pants.subsystem.subsystem import Subsystem
from pants.util.dirutil import safe_mkdir
from pants.util.strutil import safe_shlex_split
logger = logging.getLogger(__name__)
class CodeCoverageSettings:
"""A class containing settings for code coverage tasks."""
def __init__(
self,
options,
context,
workdir,
tool_classpath,
confs,
log,
copy2=shutil.copy2,
copytree=shutil.copytree,
is_file=os.path.isfile,
safe_md=safe_mkdir,
):
self.options = options
self.context = context
self.workdir = workdir
self.tool_classpath = tool_classpath
self.confs = confs
self.log = log
self.coverage_dir = os.path.join(self.workdir, "coverage")
self.coverage_jvm_options = []
for jvm_option in options.coverage_jvm_options:
self.coverage_jvm_options.extend(safe_shlex_split(jvm_option))
self.coverage_open = options.coverage_open
self.coverage_force = options.coverage_force
# Injecting these methods to make unit testing cleaner.
self.copy2 = copy2
self.copytree = copytree
self.is_file = is_file
self.safe_makedir = safe_md
@classmethod
def from_task(cls, task, workdir=None):
return cls(
options=task.get_options(),
context=task.context,
workdir=workdir or task.workdir,
tool_classpath=task.tool_classpath,
confs=task.confs,
log=task.context.log,
)
class CodeCoverage(Subsystem):
"""Manages setup and construction of JVM code coverage engines."""
options_scope = "coverage"
@classmethod
def subsystem_dependencies(cls):
return super().subsystem_dependencies() + (
Cobertura.Factory,
Jacoco.Factory,
Scoverage.Factory,
)
# TODO(jtrobec): move these to subsystem scope after deprecating
@staticmethod
def register_junit_options(register, register_jvm_tool):
register("--coverage", type=bool, fingerprint=True, help="Collect code coverage data.")
register(
"--coverage-processor",
advanced=True,
fingerprint=True,
choices=["cobertura", "jacoco", "scoverage"],
default=None,
help="Which coverage processor to use if --coverage is enabled. If this option is "
"unset but coverage is enabled implicitly or explicitly, defaults to 'cobertura'. "
"If this option is explicitly set, implies --coverage. If this option is set to "
"scoverage, then first scoverage MUST be enabled by passing option "
"--scoverage-enable-scoverage.",
)<|fim▁hole|> register(
"--coverage-open",
type=bool,
fingerprint=True,
help="Open the generated HTML coverage report in a browser. Implies --coverage ",
)
register(
"--coverage-jvm-options",
advanced=True,
type=list,
fingerprint=True,
help="JVM flags to be added when running the coverage processor. For example: "
"{flag}=-Xmx4g {flag}=-Xms2g".format(flag="--coverage-jvm-options"),
)
register(
"--coverage-force",
advanced=True,
type=bool,
help="Attempt to run the reporting phase of coverage even if tests failed "
"(defaults to False, as otherwise the coverage results would be unreliable).",
)
# register options for coverage engines
# TODO(jtrobec): get rid of these calls when engines are dependent subsystems
Cobertura.register_junit_options(register, register_jvm_tool)
class InvalidCoverageEngine(Exception):
"""Indicates an invalid coverage engine type was selected."""
def get_coverage_engine(self, task, output_dir, all_targets, execute_java):
options = task.get_options()
enable_scoverage = ScoveragePlatform.global_instance().get_options().enable_scoverage
processor = options.coverage_processor
if processor == "scoverage" and not enable_scoverage:
raise self.InvalidCoverageEngine(
"Cannot set processor to scoverage without first enabling "
"scoverage (by passing --scoverage-enable-scoverage option)"
)
if enable_scoverage:
if processor not in (None, "scoverage"):
raise self.InvalidCoverageEngine(
f"Scoverage is enabled. "
f"Cannot use {processor} as the engine. Set engine to scoverage "
f"(--test-junit-coverage-processor=scoverage)"
)
processor = "scoverage"
if options.coverage or processor or options.is_flagged("coverage_open"):
settings = CodeCoverageSettings.from_task(task, workdir=output_dir)
if processor in ("cobertura", None):
return Cobertura.Factory.global_instance().create(
settings, all_targets, execute_java
)
elif processor == "jacoco":
return Jacoco.Factory.global_instance().create(settings, all_targets, execute_java)
elif processor == "scoverage":
return Scoverage.Factory.global_instance().create(
settings, all_targets, execute_java
)
else:
# NB: We should never get here since the `--coverage-processor` is restricted by `choices`,
# but for clarity.
raise self.InvalidCoverageEngine(
"Unknown and unexpected coverage processor {!r}!".format(
options.coverage_processor
)
)
else:
            return NoCoverage()<|fim▁end|> | # We need to fingerprint this even though it is nominally a UI-only option, since the
        # presence of this option alone can implicitly flag on `--coverage`.
<|file_name|>event_bridge_test.py<|end_file_name|><|fim▁begin|># Bulletproof Arma Launcher
# Copyright (C) 2016 Sascha Ebert
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import unittest
import time
import os
import shutil
import sys
import json
from multiprocessing import Pipe
from datetime import datetime
from datetime import timedelta
from mock import patch, Mock
from kivy.clock import Clock
from nose.plugins.attrib import attr
from utils.process import Process
def worker_func(con):
con.send('test1')
con.send('test2')
class EventBridgeTest(unittest.TestCase):
def setUp(self):
# To fix the Windows forking system it's necessary to point __main__ to
# the module we want to execute in the forked process
self.old_main = sys.modules["__main__"]
self.old_main_file = sys.modules["__main__"].__file__
sys.modules["__main__"] = sys.modules["tests.utils.event_bridge_test"]
sys.modules["__main__"].__file__ = sys.modules["tests.utils.event_bridge_test"].__file__
def tearDown(self):
sys.modules["__main__"] = self.old_main
sys.modules["__main__"].__file__ = self.old_main_file
def test_connection_can_hold_more_than_one_msg(self):
parent_conn, child_conn = Pipe()
p = Process(target=worker_func, args=(child_conn,))<|fim▁hole|> p.start()
# time.sleep(2)
self.assertEqual(parent_conn.recv(), 'test1')
self.assertEqual(parent_conn.recv(), 'test2')
p.join()<|fim▁end|> | |
<|file_name|>task_03.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Module copies data module Variable DIRECTIONS"""
from data import DIRECTIONS<|fim▁hole|><|fim▁end|> |
DIRECTIONS = DIRECTIONS[0:3]+('West',) |
<|file_name|>float_fast_math.rs<|end_file_name|><|fim▁begin|>#![feature(core_intrinsics)]<|fim▁hole|>#[inline(never)]
pub fn test_operations(a: f64, b: f64) {
// make sure they all map to the correct operation
unsafe {
assert_eq!(fadd_fast(a, b), a + b);
assert_eq!(fsub_fast(a, b), a - b);
assert_eq!(fmul_fast(a, b), a * b);
assert_eq!(fdiv_fast(a, b), a / b);
assert_eq!(frem_fast(a, b), a % b);
}
}
fn main() {
test_operations(1., 2.);
test_operations(10., 5.);
}<|fim▁end|> |
use std::intrinsics::{fadd_fast, fsub_fast, fmul_fast, fdiv_fast, frem_fast};
|
<|file_name|>featured_thumbnail.js<|end_file_name|><|fim▁begin|>function featured_thumb(){
jQuery('ul.thumbnails').each(function(index, element) {
var get_class=jQuery(this).attr('class');
var get_parent=jQuery(this).closest('div');
var wt=jQuery(this).closest('div').width();//width total
var col=jQuery(this).attr('data-columns');//columns
var dt=Math.floor(wt/col);
var mt=6;
var wa=dt-mt;
var mg=3;
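		// Worked example (hypothetical numbers): a 960px-wide container with
		// data-columns=4 gives dt = floor(960/4) = 240, so each thumbnail's
		// usable width is wa = 240 - 6 = 234px, with a 3px margin around it.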
var ft_size=jQuery(this).attr('data-ftsize');
jQuery(this).css('font-size',ft_size+'px');<|fim▁hole|> get_parent.find('ul.thumbnails li p').css({'margin':'0'});
});
}
window.onload = featured_thumb;<|fim▁end|> | get_parent.find('ul.thumbnails li').css({'max-width':wa+'px','margin':mg+'px','padding':'0'});
get_parent.find('ul.thumbnails li h5').css({'font-size':ft_size+'px','font-weight':'bold','padding':'0 0 5px','margin':'0'});
get_parent.find('ul.thumbnails li img').css({'height':wa+'px','margin':'0'}); |
<|file_name|>0007_freebasicscontroller_postgres_db_url.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
<|fim▁hole|>
dependencies = [
('freebasics', '0006_change_site_url_field_type'),
]
operations = [
migrations.AddField(
model_name='freebasicscontroller',
name='postgres_db_url',
field=models.TextField(null=True, blank=True),
),
]<|fim▁end|> |
class Migration(migrations.Migration): |
<|file_name|>angular-paginate.js<|end_file_name|><|fim▁begin|>/**
* Angular Paginate
* @homepage https://github.com/darthwade/angular-paginate
* @author Vadym Petrychenko https://github.com/darthwade
* @license The MIT License (http://www.opensource.org/licenses/mit-license.php)
* @copyright 2014 Vadym Petrychenko
*/
(function (factory) {
if (typeof define === 'function' && define.amd) {
// AMD
define(['angular'], factory);
} else if (typeof exports === 'object') {
// CommonJS
factory(require('angular'));
} else {
// Browser globals
factory(window.angular)
}
}(function (angular) {
'use strict';
angular.module('darthwade.paginate', [])
.provider('$paginate', function () {
var provider = this;
provider.templateUrl = 'angular-paginate.html';
provider.options = {
perPage: 10, // Items count per page.
range: 5, // Number of pages neighbouring the current page which will be displayed.
boundaryLinks: true, // Whether to display First / Last buttons.
directionLinks: true, // Whether to display Previous / Next buttons.
rotate: true, // Whether to keep current page in the middle of the visible ones.
paramName: 'page',
previousText: 'Previous', // Text for previous button
nextText: 'Next', // Text for next button
moreText: '...' // Text for more button
};
<|fim▁hole|> return new Paginator(options);
};
wrapper.getDefaultOptions = function() {
return provider.options;
};
wrapper.getTemplateUrl = function() {
return provider.templateUrl;
};
return wrapper;
};
/**
* Overrides default options
* @param {Object} options
*/
provider.setDefaultOptions = function (options) {
angular.extend(provider.options, options);
};
provider.setTemplateUrl = function (templateUrl) {
provider.templateUrl = templateUrl;
};
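      // A usage sketch (module and controller names are hypothetical):
      //
      //   angular.module('app', ['darthwade.paginate'])
      //     .config(['$paginateProvider', function ($paginateProvider) {
      //       $paginateProvider.setDefaultOptions({ perPage: 25, range: 7 });
      //     }])
      //     .controller('MyCtrl', ['$scope', '$paginate', function ($scope, $paginate) {
      //       $scope.paginator = $paginate({
      //         $totalCount: 100,
      //         onPageChange: function () { /* fetch data for this.$page */ }
      //       });
      //     }]);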
var Paginator = function(options) {
var self = this;
var defaultOptions = {
$page: 1,
$objects: [],
$totalCount: 0,
$startIndex: 0,
$endIndex: 0,
$totalPages: 0,
onPageChange: angular.noop
};
self.page = function (page) {
if (self.$page === page) {
return;
}
self.$page = page;
calculate();
if (self.onPageChange) {
self.onPageChange.call(self);
}
};
self.options = function (options) {
angular.extend(self, options);
};
self.previous = function () {
if (self.hasPrevious()) {
self.page(self.$page - 1);
}
};
self.next = function () {
if (self.hasNext()) {
self.page(self.$page + 1);
}
};
self.hasPrevious = function () {
return self.$page > 1;
};
self.hasNext = function () {
return self.$page < self.$totalPages;
};
// Create page object used in template
var makePage = function (number, text, active) {
return {
number: number,
text: text,
active: active
};
};
var getPages = function () {
var pages = [];
// Default page limits
var startPage = 1, endPage = self.$totalPages;
var isRanged = self.range < self.$totalPages;
// recompute if maxSize
if (isRanged) {
if (self.rotate) {
// Current page is displayed in the middle of the visible ones
startPage = Math.max(self.$page - Math.floor(self.range / 2), 1);
endPage = startPage + self.range - 1;
// Adjust if limit is exceeded
if (endPage > self.$totalPages) {
endPage = self.$totalPages;
startPage = endPage - self.range + 1;
}
} else {
// Visible pages are paginated with maxSize
startPage = ((Math.ceil(self.$page / self.range) - 1) * self.range) + 1;
// Adjust last page if limit is exceeded
endPage = Math.min(startPage + self.range - 1, self.$totalPages);
}
}
// Add page number links
for (var number = startPage; number <= endPage; number++) {
var page = makePage(number, number, number === self.$page);
pages.push(page);
}
// Add links to move between page sets
if (isRanged) { // && !self.rotate
var margin = self.boundaryLinks ? 1 : 0;
if (startPage - margin > 1) {
var previousPageSet = makePage(startPage - 1, self.moreText, false);
pages.unshift(previousPageSet);
}
if (endPage + margin < self.$totalPages) {
var nextPageSet = makePage(endPage + 1, self.moreText, false);
pages.push(nextPageSet);
}
}
// Add boundary links if needed
if (self.boundaryLinks) {
if (startPage > 1) {
var firstPage = makePage(1, 1, false);
pages.unshift(firstPage);
}
if (endPage < self.$totalPages) {
var lastPage = makePage(self.$totalPages, self.$totalPages, false);
pages.push(lastPage);
}
}
return pages;
};
var calculate = function() {
self.$page = parseInt(self.$page) || 1;
self.$objects = self.$objects || [];
self.$totalCount = parseInt(self.$totalCount) || 0;
self.$totalPages = Math.ceil(self.$totalCount / self.perPage);
self.$startIndex = (self.$page - 1) * self.perPage;
self.$endIndex = self.$startIndex + self.$objects.length;
if (self.$endIndex) {
self.$startIndex += 1;
}
self.$pages = getPages();
};
angular.extend(self, provider.options, defaultOptions, options);
calculate();
};
return provider;
})
.directive('dwPaginate', ['$paginate', function ($paginate) {
return {
restrict: 'EA',
scope: {
paginator: '=dwPaginate'
},
replace: true,
templateUrl: $paginate.getTemplateUrl(),
link: function (scope, element, attrs) {
}
};
}]);
}));<|fim▁end|> | provider.$get = function() {
var wrapper = function(options) { |
<|file_name|>chan_track.py<|end_file_name|><|fim▁begin|>"""
Track channel ops for permissions checks
Requires:
server_info.py
"""
import gc
import json
import logging
import weakref
from collections import namedtuple
from collections.abc import Iterable, Mapping
from contextlib import suppress
from numbers import Number
from operator import attrgetter
from irclib.parser import Prefix
import cloudbot.bot
from cloudbot import hook
from cloudbot.clients.irc import IrcClient
from cloudbot.util import web
from cloudbot.util.mapping import KeyFoldDict, KeyFoldMixin
logger = logging.getLogger("cloudbot")
class WeakDict(dict):
"""
A subclass of dict to allow it to be weakly referenced
"""
class MemberNotFoundException(KeyError):
def __init__(self, name, chan):
super().__init__(
"No such member '{}' in channel '{}'".format(
name, chan.name
)
)
self.name = name
self.chan = chan
self.members = list(chan.users.values())
self.nicks = [
memb.user.nick for memb in self.members
]
self.masks = [
memb.user.mask.mask for memb in self.members
]
class ChannelMembersDict(KeyFoldDict):
def __init__(self, chan):
super().__init__()
self.chan = weakref.ref(chan)
def __getitem__(self, item):
try:
return super().__getitem__(item)
except KeyError as e:
raise MemberNotFoundException(item, self.chan()) from e
def __delitem__(self, item):
try:
super().__delitem__(item)
except KeyError as e:
raise MemberNotFoundException(item, self.chan()) from e
def pop(self, key, *args, **kwargs):
try:
return super().pop(key, *args, **kwargs)
except KeyError as e:
raise MemberNotFoundException(key, self.chan()) from e
class KeyFoldWeakValueDict(KeyFoldMixin, weakref.WeakValueDictionary):
"""
KeyFolded WeakValueDictionary
"""
class ChanDict(KeyFoldDict):
"""
Mapping for channels on a network
"""
def __init__(self, conn):
"""
:type conn: cloudbot.client.Client
"""
super().__init__()
self.conn = weakref.ref(conn)
def getchan(self, name):
"""
:type name: str
"""
try:
return self[name]
except KeyError:
self[name] = value = Channel(name, self.conn())
return value
class UsersDict(KeyFoldWeakValueDict):
"""
Mapping for users on a network
"""
def __init__(self, conn):
"""
:type conn: cloudbot.client.Client
"""
super().__init__()
self.conn = weakref.ref(conn)
def getuser(self, nick):<|fim▁hole|> :type nick: str
"""
try:
return self[nick]
except KeyError:
self[nick] = value = User(nick, self.conn())
return value
class MappingAttributeAdapter:
"""
Map item lookups to attribute lookups
"""
def __init__(self):
self.data = {}
def __getitem__(self, item):
try:
return getattr(self, item)
except AttributeError:
return self.data[item]
def __setitem__(self, key, value):
if not hasattr(self, key):
self.data[key] = value
else:
setattr(self, key, value)
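# Example (a sketch): for a Channel instance `chan`, chan['name'] resolves to
# the .name attribute, while an unknown key such as chan['foo'] = 1 falls
# through to the underlying .data dict.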
class Channel(MappingAttributeAdapter):
"""
Represents a channel and relevant data
"""
class Member(MappingAttributeAdapter):
"""
Store a user's membership with the channel
"""
def __init__(self, user, channel):
self.user = user
self.channel = channel
self.conn = user.conn
self.status = []
super().__init__()
def add_status(self, status, sort=True):
"""
Add a status to this membership
:type status: plugins.core.server_info.Status
:type sort: bool
"""
if status in self.status:
logger.warning(
"[%s|chantrack] Attempted to add existing status "
"to channel member: %s %s",
self.conn.name, self, status
)
else:
self.status.append(status)
if sort:
self.sort_status()
def remove_status(self, status):
"""
:type status: plugins.core.server_info.Status
"""
if status not in self.status:
logger.warning(
"[%s|chantrack] Attempted to remove status not set "
"on member: %s %s",
self.conn.name, self, status
)
else:
self.status.remove(status)
def sort_status(self):
"""
Ensure the status list is properly sorted
"""
status = list(set(self.status))
status.sort(key=attrgetter("level"), reverse=True)
self.status = status
def __init__(self, name, conn):
"""
:type name: str
:type conn: cloudbot.client.Client
"""
super().__init__()
self.name = name
self.conn = weakref.proxy(conn)
self.users = ChannelMembersDict(self)
self.receiving_names = False
def get_member(self, user, create=False):
"""
:type user: User
:type create: bool
:rtype: Channel.Member
"""
try:
data = self.users[user.nick]
except KeyError:
if not create:
raise
self.users[user.nick] = data = self.Member(user, self)
return data
class User(MappingAttributeAdapter):
"""
Represent a user on a network
"""
def __init__(self, name, conn):
"""
:type name: str
:type conn: cloudbot.client.Client
"""
self.mask = Prefix(name)
self.conn = weakref.proxy(conn)
self.realname = None
self._account = None
self.server = None
self.is_away = False
self.away_message = None
self.is_oper = False
self.channels = KeyFoldWeakValueDict()
super().__init__()
def join_channel(self, channel):
"""
:type channel: Channel
"""
self.channels[channel.name] = memb = channel.get_member(
self, create=True
)
return memb
@property
def account(self):
"""
The user's nickserv account
"""
return self._account
@account.setter
def account(self, value):
if value == '*':
value = None
self._account = value
@property
def nick(self):
"""
The user's nickname
"""
return self.mask.nick
@nick.setter
def nick(self, value):
self.mask = Prefix(value, self.ident, self.host)
@property
def ident(self):
"""
The user's ident/username
"""
return self.mask.user
@ident.setter
def ident(self, value):
self.mask = Prefix(self.nick, value, self.host)
@property
def host(self):
"""
The user's host/address
"""
return self.mask.host
@host.setter
def host(self, value):
self.mask = Prefix(self.nick, self.ident, value)
# region util functions
def get_users(conn):
"""
:type conn: cloudbot.client.Client
:rtype: UsersDict
"""
return conn.memory.setdefault("users", UsersDict(conn))
def get_chans(conn):
"""
:type conn: cloudbot.client.Client
:rtype: ChanDict
"""
return conn.memory.setdefault("chan_data", ChanDict(conn))
# endregion util functions
def update_chan_data(conn, chan):
# type: (IrcClient, str) -> None
"""
Start the process of updating channel data from /NAMES
:param conn: The current connection
:param chan: The channel to update
"""
chan_data = get_chans(conn).getchan(chan)
chan_data.receiving_names = False
conn.cmd("NAMES", chan)
def update_conn_data(conn):
# type: (IrcClient) -> None
"""
Update all channel data for this connection
:param conn: The connection to update
"""
for chan in set(conn.channels):
update_chan_data(conn, chan)
SUPPORTED_CAPS = frozenset({
"userhost-in-names",
"multi-prefix",
"extended-join",
"account-notify",
"away-notify",
"chghost",
})
@hook.on_cap_available(*SUPPORTED_CAPS)
def do_caps():
"""
Request all available CAPs we support
"""
return True
def is_cap_available(conn, cap):
"""
:type conn: cloudbot.client.Client
:type cap: str
"""
caps = conn.memory.get("server_caps", {})
return bool(caps.get(cap, False))
@hook.on_start
def get_chan_data(bot: cloudbot.bot.CloudBot):
"""
:type bot: cloudbot.bot.CloudBot
"""
for conn in bot.connections.values():
if conn.connected and conn.type == 'irc':
assert isinstance(conn, IrcClient)
init_chan_data(conn, False)
update_conn_data(conn)
def clean_user_data(user):
"""
:type user: User
"""
for memb in user.channels.values():
memb.sort_status()
def clean_chan_data(chan):
"""
:type chan: Channel
"""
with suppress(KeyError):
del chan.data["new_users"]
def clean_conn_data(conn):
"""
:type conn: cloudbot.client.Client
"""
for user in get_users(conn).values():
clean_user_data(user)
for chan in get_chans(conn).values():
clean_chan_data(chan)
def clean_data(bot):
"""
:type bot: cloudbot.bot.CloudBot
"""
for conn in bot.connections.values():
clean_conn_data(conn)
@hook.connect
def init_chan_data(conn, _clear=True):
"""
:type conn: cloudbot.client.Client
:type _clear: bool
"""
chan_data = get_chans(conn)
users = get_users(conn)
if not (isinstance(chan_data, ChanDict) and isinstance(users, UsersDict)):
del conn.memory["chan_data"]
del conn.memory["users"]
return init_chan_data(conn, _clear)
if _clear:
chan_data.clear()
users.clear()
return None
def parse_names_item(item, statuses, has_multi_prefix, has_userhost):
"""
Parse an entry from /NAMES
:param item: The entry to parse
:param statuses: Status prefixes on this network
:param has_multi_prefix: Whether multi-prefix CAP is enabled
:param has_userhost: Whether userhost-in-names CAP is enabled
:return: The parsed data
"""
user_status = []
while item[:1] in statuses:
status, item = item[:1], item[1:]
user_status.append(statuses[status])
if not has_multi_prefix:
# Only remove one status prefix
# if we don't have multi prefix enabled
break
user_status.sort(key=attrgetter('level'), reverse=True)
if has_userhost:
prefix = Prefix.parse(item)
else:
prefix = Prefix(item)
return prefix.nick, prefix.user, prefix.host, user_status
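# Example (a sketch, assuming '@' maps to an op Status object in `statuses`
# and both caps are enabled):
#   parse_names_item('@nick!user@host', statuses, True, True)
#   -> ('nick', 'user', 'host', [<op Status>])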
def replace_user_data(conn, chan_data):
"""
:type conn: cloudbot.client.Client
:type chan_data: Channel
"""
statuses = {
status.prefix: status
for status in set(conn.memory["server_info"]["statuses"].values())
}
new_data = chan_data.data.pop("new_users", [])
has_uh_i_n = is_cap_available(conn, "userhost-in-names")
has_multi_pfx = is_cap_available(conn, "multi-prefix")
old_data = chan_data.data.pop('old_users', {})
new_names = set()
for name in new_data:
nick, ident, host, status = parse_names_item(
name, statuses, has_multi_pfx, has_uh_i_n
)
        new_names.add(nick.casefold())
user_data = get_users(conn).getuser(nick)
user_data.nick = nick
if ident:
user_data.ident = ident
if host:
user_data.host = host
memb_data = user_data.join_channel(chan_data)
memb_data.status = status
for old_nick in old_data:
if old_nick not in new_names:
del chan_data.users[old_nick]
@hook.irc_raw(['353', '366'], singlethread=True)
def on_names(conn, irc_paramlist, irc_command):
"""
:type conn: cloudbot.client.Client
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type irc_command: str
"""
chan = irc_paramlist[2 if irc_command == '353' else 1]
chan_data = get_chans(conn).getchan(chan)
if irc_command == '366':
chan_data.receiving_names = False
replace_user_data(conn, chan_data)
return
users = chan_data.data.setdefault("new_users", [])
if not chan_data.receiving_names:
chan_data.data['old_users'] = old = ChannelMembersDict(chan_data)
old.update(chan_data.users)
chan_data.receiving_names = True
users.clear()
names = irc_paramlist[-1].strip()
users.extend(names.split())
class MappingSerializer:
"""
Serialize generic mappings to json
"""
def __init__(self):
self._seen_objects = []
def _serialize(self, obj):
if isinstance(obj, (str, Number, bool)) or obj is None:
return obj
if isinstance(obj, Mapping):
if id(obj) in self._seen_objects:
return '<{} with id {}>'.format(type(obj).__name__, id(obj))
self._seen_objects.append(id(obj))
return {
self._serialize(k): self._serialize(v)
for k, v in obj.items()
}
if isinstance(obj, Iterable):
if id(obj) in self._seen_objects:
return '<{} with id {}>'.format(type(obj).__name__, id(obj))
self._seen_objects.append(id(obj))
return [
self._serialize(item)
for item in obj
]
return repr(obj)
def serialize(self, mapping, **kwargs):
"""
Serialize mapping to JSON
"""
return json.dumps(self._serialize(mapping), **kwargs)
@hook.permission("chanop")
def perm_check(chan, conn, nick):
"""
:type chan: str
:type conn: cloudbot.client.Client
:type nick: str
"""
if not (chan and conn):
return False
chans = get_chans(conn)
try:
chan_data = chans[chan]
except KeyError:
return False
try:
memb = chan_data.users[nick]
except KeyError:
return False
status = memb.status
if status and status[0].level > 1:
return True
return False
@hook.command(permissions=["botcontrol"], autohelp=False)
def dumpchans(conn):
"""- Dumps all stored channel data for this connection to the console
:type conn: cloudbot.client.Client
"""
data = get_chans(conn)
return web.paste(MappingSerializer().serialize(data, indent=2))
@hook.command(permissions=["botcontrol"], autohelp=False)
def dumpusers(conn):
"""- Dumps all stored user data for this connection to the console
:type conn: cloudbot.client.Client
"""
data = get_users(conn)
return web.paste(MappingSerializer().serialize(data, indent=2))
@hook.command(permissions=["botcontrol"], autohelp=False)
def updateusers(bot):
"""- Forces an update of all /NAMES data for all channels
:type bot: cloudbot.bot.CloudBot
"""
get_chan_data(bot)
return "Updating all channel data"
@hook.command(permissions=["botcontrol"], autohelp=False)
def cleanusers(bot):
"""- Clean user data
:type bot: cloudbot.bot.CloudBot
"""
clean_data(bot)
gc.collect()
return "Data cleaned."
@hook.command(permissions=["botcontrol"], autohelp=False)
def clearusers(bot):
"""- Clear all user data
:type bot: cloudbot.bot.CloudBot
"""
for conn in bot.connections.values():
init_chan_data(conn)
gc.collect()
return "Data cleared."
@hook.command("getdata", permissions=["botcontrol"], autohelp=False)
def getdata_cmd(conn, chan, nick):
"""- Get data for current user"""
chan_data = get_chans(conn).getchan(chan)
user_data = get_users(conn).getuser(nick)
memb = chan_data.get_member(user_data)
return web.paste(MappingSerializer().serialize(memb, indent=2))
@hook.irc_raw('JOIN')
def on_join(nick, user, host, conn, irc_paramlist):
"""
:type nick: str
:type user: str
:type host: str
:type conn: cloudbot.client.Client
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
"""
chan, *other_data = irc_paramlist
users = get_users(conn)
user_data = users.getuser(nick)
user_data.ident = user
user_data.host = host
if is_cap_available(conn, "extended-join") and other_data:
acct, realname = other_data
user_data.account = acct
user_data.realname = realname
chan_data = get_chans(conn).getchan(chan)
user_data.join_channel(chan_data)
ModeChange = namedtuple('ModeChange', 'mode adding param is_status')
def _parse_mode_string(modes, params, status_modes, mode_types):
new_modes = []
adding = True
for c in modes:
if c == '+':
adding = True
elif c == '-':
adding = False
else:
is_status = c in status_modes
mode_type = mode_types.get(c)
if mode_type:
mode_type = mode_type.type
else:
mode_type = 'B' if is_status else None
if mode_type in "AB" or (mode_type == 'C' and adding):
param = params.pop(0)
else:
param = None
new_modes.append(ModeChange(c, adding, param, is_status))
return new_modes
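# Example (a sketch): "+o-v" with params ['alice', 'bob'] and both 'o' and
# 'v' in status_modes yields ModeChange('o', True, 'alice', True) followed
# by ModeChange('v', False, 'bob', True).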
@hook.irc_raw('MODE')
def on_mode(chan, irc_paramlist, conn):
"""
:type chan: str
:type conn: cloudbot.client.Client
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
"""
if irc_paramlist[0].casefold() == conn.nick.casefold():
# this is a user mode line
return
serv_info = conn.memory["server_info"]
statuses = serv_info["statuses"]
status_modes = {status.mode for status in statuses.values()}
mode_types = serv_info["channel_modes"]
chan_data = get_chans(conn).getchan(chan)
modes = irc_paramlist[1]
mode_params = list(irc_paramlist[2:]).copy()
new_modes = _parse_mode_string(modes, mode_params, status_modes, mode_types)
new_statuses = [change for change in new_modes if change.is_status]
to_sort = {}
for change in new_statuses:
status_char = change.mode
nick = change.param
user = get_users(conn).getuser(nick)
memb = chan_data.get_member(user, create=True)
status = statuses[status_char]
if change.adding:
memb.add_status(status, sort=False)
to_sort[user.nick] = memb
else:
memb.remove_status(status)
for member in to_sort.values():
member.sort_status()
@hook.irc_raw('PART')
def on_part(chan, nick, conn):
"""
:type chan: str
:type nick: str
:type conn: cloudbot.client.Client
"""
channels = get_chans(conn)
if nick.casefold() == conn.nick.casefold():
del channels[chan]
else:
chan_data = channels[chan]
del chan_data.users[nick]
@hook.irc_raw('KICK')
def on_kick(chan, target, conn):
"""
:type chan: str
:type target: str
:type conn: cloudbot.client.Client
"""
on_part(chan, target, conn)
@hook.irc_raw('QUIT')
def on_quit(nick, conn):
"""
:type nick: str
:type conn: cloudbot.client.Client
"""
users = get_users(conn)
if nick in users:
user = users.pop(nick)
for memb in user.channels.values():
chan = memb.channel
del chan.users[nick]
@hook.irc_raw('NICK')
def on_nick(nick, irc_paramlist, conn):
"""
:type nick: str
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
users = get_users(conn)
new_nick = irc_paramlist[0]
user = users.pop(nick)
users[new_nick] = user
user.nick = new_nick
for memb in user.channels.values():
chan_users = memb.channel.users
chan_users[new_nick] = chan_users.pop(nick)
@hook.irc_raw('ACCOUNT')
def on_account(conn, nick, irc_paramlist):
"""
:type nick: str
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
get_users(conn).getuser(nick).account = irc_paramlist[0]
@hook.irc_raw('CHGHOST')
def on_chghost(conn, nick, irc_paramlist):
"""
:type nick: str
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
ident, host = irc_paramlist
user = get_users(conn).getuser(nick)
user.ident = ident
user.host = host
@hook.irc_raw('AWAY')
def on_away(conn, nick, irc_paramlist):
"""
:type nick: str
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
if irc_paramlist:
reason = irc_paramlist[0]
else:
reason = None
user = get_users(conn).getuser(nick)
user.is_away = (reason is not None)
user.away_message = reason
@hook.irc_raw('352')
def on_who(conn, irc_paramlist):
"""
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
_, _, ident, host, server, nick, status, realname = irc_paramlist
realname = realname.split(None, 1)[1]
user = get_users(conn).getuser(nick)
status = list(status)
is_away = status.pop(0) == "G"
    is_oper = status[:1] == ["*"]
user.ident = ident
user.host = host
user.server = server
user.realname = realname
user.is_away = is_away
user.is_oper = is_oper
@hook.irc_raw('311')
def on_whois_name(conn, irc_paramlist):
"""
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
_, nick, ident, host, _, realname = irc_paramlist
user = get_users(conn).getuser(nick)
user.ident = ident
user.host = host
user.realname = realname
@hook.irc_raw('330')
def on_whois_acct(conn, irc_paramlist):
"""
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
    _, nick, acct = irc_paramlist[:3]
get_users(conn).getuser(nick).account = acct
@hook.irc_raw('301')
def on_whois_away(conn, irc_paramlist):
"""
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
_, nick, msg = irc_paramlist
user = get_users(conn).getuser(nick)
user.is_away = True
user.away_message = msg
@hook.irc_raw('312')
def on_whois_server(conn, irc_paramlist):
"""
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
_, nick, server, _ = irc_paramlist
get_users(conn).getuser(nick).server = server
@hook.irc_raw('313')
def on_whois_oper(conn, irc_paramlist):
"""
:type irc_paramlist: cloudbot.util.parsers.irc.ParamList
:type conn: cloudbot.client.Client
"""
nick = irc_paramlist[1]
get_users(conn).getuser(nick).is_oper = True<|fim▁end|> | """ |
<|file_name|>MysqlUserDetailsServiceTest.java<|end_file_name|><|fim▁begin|>package com.imrenagi.service_auth.service.security;
import com.imrenagi.service_auth.AuthApplication;
import com.imrenagi.service_auth.domain.User;
import com.imrenagi.service_auth.repository.UserRepository;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;
/**
* Created by imrenagi on 5/14/17.
*/
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = AuthApplication.class)
@WebAppConfiguration
public class MysqlUserDetailsServiceTest {
@InjectMocks
private MysqlUserDetailsService userDetailsService;
@Mock
private UserRepository repository;
@Before
public void setup() {
initMocks(this);
}
@Test
public void shouldReturnUserDetailWhenAUserIsFound() throws Exception {
final User user = new User("imrenagi", "1234", "imre", "nagi");
doReturn(user).when(repository).findByUsername(user.getUsername());
UserDetails found = userDetailsService.loadUserByUsername(user.getUsername());
assertEquals(user.getUsername(), found.getUsername());
assertEquals(user.getPassword(), found.getPassword());
verify(repository, times(1)).findByUsername(user.getUsername());
}
@Test
public void shouldFailWhenUserIsNotFound() throws Exception {
doReturn(null).when(repository).findByUsername(anyString());
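        // loadUserByUsername is expected to throw for an unknown user, so
        // reaching fail() inside the try block would mean no exception was raised.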
try {<|fim▁hole|> } catch (Exception e) {
}
}
}<|fim▁end|> | userDetailsService.loadUserByUsername(anyString());
fail(); |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import logging
from requests_oauthlib import OAuth1Session
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from .models import QuickbooksToken, get_quickbooks_token
from .api import QuickbooksApi, AuthenticationFailure
from .signals import qb_connected
REQUEST_TOKEN_URL = 'https://oauth.intuit.com/oauth/v1/get_request_token'
ACCESS_TOKEN_URL = 'https://oauth.intuit.com/oauth/v1/get_access_token'
AUTHORIZATION_URL = 'https://appcenter.intuit.com/Connect/Begin'
BLUE_DOT_CACHE_KEY = 'quickbooks:blue_dot_menu'
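# OAuth 1.0a flow implemented by the views below (a sketch of the call order):
#   1. request_oauth_token() fetches a request token and redirects the user
#      to AUTHORIZATION_URL to approve access.
#   2. Intuit redirects back to OAUTH_CALLBACK_URL, handled by
#      get_access_token(), which exchanges the verifier for an access token
#      persisted as a QuickbooksToken.
#   3. disconnect() revokes the token at Intuit and deletes our copy.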
<|fim▁hole|> del request.session[BLUE_DOT_CACHE_KEY]
access_token_callback = settings.QUICKBOOKS['OAUTH_CALLBACK_URL']
if callable(access_token_callback):
access_token_callback = access_token_callback(request)
session = OAuth1Session(client_key=settings.QUICKBOOKS['CONSUMER_KEY'],
client_secret=settings.QUICKBOOKS['CONSUMER_SECRET'],
callback_uri=access_token_callback)
response = session.fetch_request_token(REQUEST_TOKEN_URL)
try:
request_token = response['oauth_token']
request_token_secret = response['oauth_token_secret']
request.session['qb_oauth_token'] = request_token
request.session['qb_oauth_token_secret'] = request_token_secret
except:
logger = logging.getLogger('quickbooks.views.request_oauth_token')
logger.exception(("Couldn't extract oAuth parameters from token " +
"request response. Response was '%s'"), response)
raise
return HttpResponseRedirect("%s?oauth_token=%s" % (AUTHORIZATION_URL, request_token))
@login_required
def get_access_token(request):
# [todo] - add doc string for get_access_token
session = OAuth1Session(client_key=settings.QUICKBOOKS['CONSUMER_KEY'],
client_secret=settings.QUICKBOOKS['CONSUMER_SECRET'],
resource_owner_key=request.session['qb_oauth_token'],
resource_owner_secret=request.session['qb_oauth_token_secret'])
remote_response = session.parse_authorization_response('?{}'.format(request.META.get('QUERY_STRING')))
realm_id = remote_response['realmId']
data_source = remote_response['dataSource']
oauth_verifier = remote_response['oauth_verifier']
# [review] - Possible bug? This should be taken care of by session.parse_authorization_response
session.auth.client.verifier = unicode(oauth_verifier)
response = session.fetch_access_token(ACCESS_TOKEN_URL)
# Delete any existing access tokens
request.user.quickbookstoken_set.all().delete()
token = QuickbooksToken.objects.create(
user=request.user,
access_token=response['oauth_token'],
access_token_secret=response['oauth_token_secret'],
realm_id=realm_id,
data_source=data_source)
# Cache blue dot menu
try:
request.session[BLUE_DOT_CACHE_KEY] = None
blue_dot_menu(request)
except AttributeError:
raise Exception('The sessions framework must be installed for this ' +
'application to work.')
    # Let everyone else know we connected
qb_connected.send(None, token=token)
return render_to_response('oauth_callback.html',
{'complete_url': settings.QUICKBOOKS['ACCESS_COMPLETE_URL']})
@login_required
def blue_dot_menu(request):
""" Returns the blue dot menu. If possible a cached copy is returned.
"""
html = request.session.get(BLUE_DOT_CACHE_KEY)
if not html:
html = request.session[BLUE_DOT_CACHE_KEY] = \
HttpResponse(QuickbooksApi(request.user).app_menu())
return html
@login_required
def disconnect(request):
""" Try to disconnect from Intuit, then destroy our tokens."""
token = get_quickbooks_token(request)
try:
QuickbooksApi(token).disconnect()
except AuthenticationFailure:
# If there is an authentication error, then these tokens are bad
# We need to destroy them in any case.
pass
request.user.quickbookstoken_set.all().delete()
return HttpResponseRedirect(settings.QUICKBOOKS['ACCESS_COMPLETE_URL'])<|fim▁end|> | @login_required
def request_oauth_token(request):
# We'll require a refresh in the blue dot cache
if BLUE_DOT_CACHE_KEY in request.session: |
<|file_name|>poz.py<|end_file_name|><|fim▁begin|>"""
POZ Development Application.
"""
import numpy as np
# import cv2
import pozutil as pu
import test_util as tpu
<|fim▁hole|>
def perspective_test(_y, _z, _ele, _azi):
print "--------------------------------------"
print "Perspective Transform tests"
print
cam = pu.CameraHelper()
# some landmarks in a 3x3 grid pattern
p0 = np.float32([-1., _y - 1.0, _z])
p1 = np.float32([0., _y - 1.0, _z])
p2 = np.float32([1., _y - 1.0, _z])
p3 = np.float32([-1., _y + 1.0, _z])
p4 = np.float32([0., _y + 1.0, _z])
p5 = np.float32([1., _y + 1.0, _z])
p6 = np.float32([-1., _y, _z])
p7 = np.float32([0, _y, _z])
p8 = np.float32([1., _y, _z])
# 3x3 grid array
ppp = np.array([p0, p1, p2, p3, p4, p5, p6, p7, p8])
print "Here are some landmarks in world"
print ppp
puv_acc = []
quv_acc = []
for vp in ppp:
# original view of landmarks
u, v = cam.project_xyz_to_uv(vp)
puv_acc.append(np.float32([u, v]))
# rotated view of landmarks
xyz_r = pu.calc_xyz_after_rotation_deg(vp, _ele, _azi, 0)
u, v = cam.project_xyz_to_uv(xyz_r)
quv_acc.append(np.float32([u, v]))
puv = np.array(puv_acc)
quv = np.array(quv_acc)
# 4-pt "diamond" array
quv4 = np.array([quv[1], quv[4], quv[6], quv[8]])
puv4 = np.array([puv[1], puv[4], puv[6], puv[8]])
print
print "Landmark img coords before rotate:"
print puv
print "Landmark img coords after rotate:"
print quv
print quv4
print
# h, _ = cv2.findHomography(puv, quv)
# hh = cv2.getPerspectiveTransform(puv4, quv4)
# print h
# print hh
# perspectiveTransform needs an extra dimension
puv1 = np.expand_dims(puv, axis=0)
# print "Test perspectiveTransform with findHomography matrix:"
# xpersp = cv2.perspectiveTransform(puv1, h)
# print xpersp
# print "Test perspectiveTransform with getPerspectiveTransform matrix:"
# xpersp = cv2.perspectiveTransform(puv1, hh)
# print xpersp
# print
if __name__ == "__main__":
# robot always knows the Y and Elevation of its camera
# (arbitrary assignments for testing)
known_cam_y = -3.
known_cam_el = 0.0
tests = [(1., 1., tpu.lm_vis_1_1),
(7., 6., tpu.lm_vis_7_6)]
print "--------------------------------------"
print "Landmark Test"
print
test_index = 0
vis_map = tests[test_index][2]
# robot does not know its (X, Z) position
# it will have to solve for it
cam_x = tests[test_index][0]
cam_z = tests[test_index][1]
print "Known (X,Z): ", (cam_x, cam_z)
for key in sorted(vis_map.keys()):
cam_azim = vis_map[key].az + 0. # change offset for testing
cam_elev = vis_map[key].el + known_cam_el
print "-----------"
# print "Known Camera Elev =", cam_elev
xyz = [cam_x, known_cam_y, cam_z]
angs = [cam_azim, cam_elev]
print "Landmark {:s}. Camera Azim = {:8.2f}".format(key, cam_azim)
lm1 = tpu.mark1[key]
f, x, z, a = tpu.landmark_test(lm1, tpu.mark2[key], xyz, angs)
print "Robot is at: {:6.3f},{:6.3f},{:20.14f}".format(x, z, a)
f, x, z, a = tpu.landmark_test(lm1, tpu.mark3[key], xyz, angs)
print "Robot is at: {:6.3f},{:6.3f},{:20.14f}".format(x, z, a)
tpu.pnp_test(key, xyz, angs)<|fim▁end|> | |
<|file_name|>url.py<|end_file_name|><|fim▁begin|>import re
from fir_artifacts.artifacts import AbstractArtifact
class URL(AbstractArtifact):
key = 'url'
display_name = 'URLs'
regex = r"""
(?P<search>
((?P<scheme>[\w]{2,9}):\/\/)?
([\S]*\:[\S]*\@)?
(?P<hostname>(<|fim▁hole|> ((([\w\-]+\.)+)
([a-zA-Z]{2,6}))
|([\d+]{1,3}\.[\d+]{1,3}\.[\d+]{1,3}\.[\d+]{1,3})
)
)
(\:[\d]{1,5})?
(?P<path>(\/[\/\~\w\-_%\.\*\#\$&%]*)?
(\?[\~\w\-_%\.&=\*\#\$%]*)?
(\#[\S]*)?)
)
"""
@classmethod
def find(cls, data):
urls = []
_re = re.compile(cls.regex, re.VERBOSE)
for i in re.finditer(_re, data):
url = i.group('search')
if url.find('/') != -1:
urls.append(url)
return urls<|fim▁end|> | |
<|file_name|>ITree.java<|end_file_name|><|fim▁begin|>package com.jwetherell.algorithms.data_structures.interfaces;<|fim▁hole|>/**
* A tree can be defined recursively (locally) as a collection of nodes (starting at a root node),
* where each node is a data structure consisting of a value, together with a list of nodes (the "children"),
* with the constraints that no node is duplicated. A tree can be defined abstractly as a whole (globally)
* as an ordered tree, with a value assigned to each node.
* <p>
* @see <a href="https://en.wikipedia.org/wiki/Tree_(data_structure)">Tree (Wikipedia)</a>
* <br>
* @author Justin Wetherell <[email protected]>
*/
public interface ITree<T> {
/**
* Add value to the tree. Tree can contain multiple equal values.
*
* @param value to add to the tree.
* @return True if successfully added to tree.
*/
public boolean add(T value);
/**
* Remove first occurrence of value in the tree.
*
* @param value to remove from the tree.
* @return T value removed from tree.
*/
public T remove(T value);
/**
     * Clear the entire tree.
*/
public void clear();
/**
* Does the tree contain the value.
*
* @param value to locate in the tree.
* @return True if tree contains value.
*/
public boolean contains(T value);
/**
* Get number of nodes in the tree.
*
* @return Number of nodes in the tree.
*/
public int size();
/**
* Validate the tree according to the invariants.
*
* @return True if the tree is valid.
*/
public boolean validate();
/**
* Get Tree as a Java compatible Collection
*
* @return Java compatible Collection
*/
public java.util.Collection<T> toCollection();
}<|fim▁end|> | |
<|file_name|>test_openvswitch_port.py<|end_file_name|><|fim▁begin|>import pytest
import salt.states.openvswitch_port as openvswitch_port
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {openvswitch_port: {"__opts__": {"test": False}}}
def test_present():
"""
    Test to verify that the named port exists on the bridge, creating it if necessary.
"""
name = "salt"
bridge = "br-salt"
ret = {"name": name, "result": None, "comment": "", "changes": {}}
mock = MagicMock(return_value=True)
mock_l = MagicMock(return_value=["salt"])
mock_n = MagicMock(return_value=[])
with patch.dict(
openvswitch_port.__salt__,
{
"openvswitch.bridge_exists": mock,
"openvswitch.interface_get_type": MagicMock(return_value='""'),
"openvswitch.port_list": mock_l,
},
):
comt = "Port salt already exists."
ret.update({"comment": comt, "result": True})
assert openvswitch_port.present(name, bridge) == ret
with patch.dict(
openvswitch_port.__salt__,
{
"openvswitch.bridge_exists": mock,
"openvswitch.interface_get_type": MagicMock(return_value='""'),
"openvswitch.port_list": mock_n,
"openvswitch.port_add": mock,
},
):
comt = "Port salt created on bridge br-salt."
ret.update(
{
"comment": comt,
"result": True,
"changes": {
"salt": {
"new": "Created port salt on bridge br-salt.",<|fim▁hole|> }
)
assert openvswitch_port.present(name, bridge) == ret
with patch.dict(
openvswitch_port.__salt__,
{
"openvswitch.bridge_exists": mock,
"openvswitch.port_list": mock_n,
"openvswitch.port_add": mock,
"openvswitch.interface_get_options": mock_n,
"openvswitch.interface_get_type": MagicMock(return_value=""),
"openvswitch.port_create_gre": mock,
"dig.check_ip": mock,
},
):
comt = "Port salt created on bridge br-salt."
ret.update(
{
"result": True,
"comment": (
"Created GRE tunnel interface salt with remote ip 10.0.0.1 and key"
" 1 on bridge br-salt."
),
"changes": {
"salt": {
"new": (
"Created GRE tunnel interface salt with remote ip 10.0.0.1"
" and key 1 on bridge br-salt."
),
"old": (
"No GRE tunnel interface salt with remote ip 10.0.0.1 and"
" key 1 on bridge br-salt present."
),
},
},
}
)
assert (
openvswitch_port.present(
name, bridge, tunnel_type="gre", id=1, remote="10.0.0.1"
)
== ret
)<|fim▁end|> | "old": "No port named salt present.",
},
}, |
<|file_name|>gists.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 <Jaume Devesa ([email protected])>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
gists.gists
~~~~~~~~~~~
This single-function module defines the input parameters and the subparsers,
and coordinates the 'handlers'->'actions'->'formatters' execution workflow.
"""
import argparse
from actions import (list_gists, show, get, post, delete, update, authorize,
fork, star, unstar)
from handlers import (handle_list, handle_show, handle_update,
handle_authorize, handle_get, handle_post, handle_delete,
handle_fork, handle_star)
from formatters import (format_list, format_post, format_update,
format_get, format_show, format_delete,
format_authorize, format_star)
from version import VERSION
USER_MSG = ("github username. Use this user instead of the defined one in "
"the configuration file. If action demands authentication, a "
"password request will be prompt")
GIST_ID_MSG = ("identifier of the Gist. Execute 'gists list' to know Gists "
"identifiers")
def run(*args, **kwargs):
# Initialize argument's parser
description = 'Manage Github gists from CLI'
parser = argparse.ArgumentParser(description=description,
epilog="Happy Gisting!")
# Define subparsers to handle each action
subparsers = parser.add_subparsers(help="Available commands.")
# Add the subparsers
__add_list_parser(subparsers)
__add_show_parser(subparsers)
__add_get_parser(subparsers)
__add_create_parser(subparsers)
__add_update_parser(subparsers)
__add_delete_parser(subparsers)
__add_authorize_parser(subparsers)
__add_version_parser(subparsers)
__add_fork_parser(subparsers)
__add_star_parser(subparsers)
__add_unstar_parser(subparsers)
# Parse the arguments
args = parser.parse_args()
# Calling the handle_args function defined, parsing the args and return
# and object with the needed values to execute the function
parameters = args.handle_args(args)
# Passing the 'parameters' object as array of parameters
result = args.func(*parameters)
# Parsing the 'result' object to be output formatted.
# (that must be a single object)
result_formatted = args.formatter(result)
# Print the formatted output
print result_formatted
def __add_list_parser(subparsers):
""" Define the subparser to handle the 'list' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the list of gists
parser_list = subparsers.add_parser("list", help="list a user's Gists")
parser_list.add_argument("-u", "--user", help=USER_MSG)
group1 = parser_list.add_mutually_exclusive_group()
group1.add_argument("-p", "--private", help="""return the private gists
besides the public ones. Needs authentication""",
action="store_true")
group1.add_argument("-s", "--starred", help="""return ONLY the starred
gists. Needs authentication""", action="store_true")
parser_list.set_defaults(handle_args=handle_list,
func=list_gists, formatter=format_list)
def __add_show_parser(subparsers):
""" Define the subparser to handle with the 'show' functionallity.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'show' action
parser_show = subparsers.add_parser("show", help="""show a Gist. Shows
Gist metadata by default.
With '-f' (--filename) option, shows
the content of one of the Gist files
""")
parser_show.add_argument("gist_id", help=GIST_ID_MSG)
parser_show.add_argument("-f", "--filename", help="gist file to show")
parser_show.set_defaults(handle_args=handle_show, func=show,
formatter=format_show)
def __add_get_parser(subparsers):
""" Define the subparser to handle the 'get' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'get' action
parser_get = subparsers.add_parser("get", help="""download a single gist
file. If the gist has just a single
file, argument '-f' (--filename) is not
needed""")
parser_get.add_argument("gist_id", help=GIST_ID_MSG)
parser_get.add_argument("-f", "--filename", help="file to download")
parser_get.add_argument("-o", "--output_dir", help="destination directory",
default=".")
parser_get.set_defaults(handle_args=handle_get, func=get,
formatter=format_get)
def __add_create_parser(subparsers):
""" Define the subparser to handle the 'create' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'create' action
parser_post = subparsers.add_parser("create", help="""create a new gist.
Needs authentication""")
parser_post.add_argument("-u", "--user", help=USER_MSG)
parser_post.add_argument("-f", "--filenames", nargs='+', help="""specify
files to upload with Gist creation""",
required=True)
parser_post.add_argument("-p", "--private", help="""private Gist? ('false'
by default)""", action="store_true")
parser_post.add_argument("-i", "--input_dir", help="""input directory where
the source files are""")
parser_post.add_argument("-d", "--description", help="""description for
the Gist to create""")
parser_post.set_defaults(handle_args=handle_post, func=post,
formatter=format_post)
def __add_update_parser(subparsers):
""" Define the subparser to handle the 'update' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'update' action
parser_update = subparsers.add_parser("update", help="""update a gist.
Needs authentication""")
parser_update.add_argument("gist_id", help=GIST_ID_MSG)
parser_update.add_argument("-u", "--user", help=USER_MSG)
group1 = parser_update.add_argument_group("file options",
"update Gist files")
group1.add_argument("-f", "--filenames", nargs='+',
help="Gist files to update")
group11 = group1.add_mutually_exclusive_group()
group11.add_argument("-n", "--new", action="store_true", help="""files
supplied are new for the Gist. '-f' (--filenames)
argument needed""",
default=False)
group11.add_argument("-r", "--remove", action="store_true",
help="""files supplied will be removed from the Gist.
'-f' (--filenames) argument needed""", default=False)
group1.add_argument("-i", "--input_dir", help="""directory where the files
are. Current directory by default""")
group2 = parser_update.add_argument_group('metadata options',
"update Gist metadata")
group2.add_argument("-d", "--description", help="update Gist description")
parser_update.set_defaults(handle_args=handle_update, func=update,
formatter=format_update)
def __add_delete_parser(subparsers):
""" Define the subparser to handle the 'delete' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'delete' action
parser_delete = subparsers.add_parser("delete", help="""delete a Gist.
Needs authentication""")
parser_delete.add_argument("gist_id", help=GIST_ID_MSG)
parser_delete.add_argument("-u", "--user", help=USER_MSG)
parser_delete.set_defaults(handle_args=handle_delete, func=delete,
formatter=format_delete)
def __add_authorize_parser(subparsers):
""" Define the subparser to handle the 'authorize' functionallity.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'authorize' action.
parser_authorize = subparsers.add_parser("authorize", help="""authorize
this project in github""")
parser_authorize.add_argument("-u", "--user", help="""your github user
. Needed to generate the auth token. """,
required=True)
parser_authorize.set_defaults(handle_args=handle_authorize, func=authorize,
formatter=format_authorize)
def __add_version_parser(subparsers):
""" Define the subparser to handle 'version' functionallity.
:param subparsers: the subparser entity
"""
parser_version = subparsers.add_parser("version", help="""print the version
of the release""")
parser_version.set_defaults(handle_args=lambda x: (None,),
func=lambda x: None,
formatter=lambda x: VERSION)
def __add_fork_parser(subparsers):
""" Define the subparser to handle 'fork' functionallity.
:param subparsers: the subparser entity
"""
parser_fork = subparsers.add_parser("fork", help="""fork another users'
Gists""")<|fim▁hole|> formatter=format_post)
def __add_star_parser(subparsers):
""" Define the subparser to handle 'star' functionallity.
:param subparsers: the subparser entity
"""
parser_star = subparsers.add_parser("star", help="star a Gist")
parser_star.add_argument("gist_id", help=GIST_ID_MSG)
parser_star.add_argument("-u", "--user", help=USER_MSG)
parser_star.set_defaults(handle_args=handle_star, func=star,
formatter=format_star)
def __add_unstar_parser(subparsers):
""" Define the subparser to handle 'unstar' functionallity.
:param subparsers: the subparser entity
"""
parser_unstar = subparsers.add_parser("unstar", help="unstar a Gist")
parser_unstar.add_argument("gist_id", help=GIST_ID_MSG)
parser_unstar.add_argument("-u", "--user", help=USER_MSG)
parser_unstar.set_defaults(handle_args=handle_star, func=unstar,
formatter=format_star)<|fim▁end|> | parser_fork.add_argument("gist_id", help=GIST_ID_MSG)
parser_fork.add_argument("-u", "--user", help=USER_MSG)
parser_fork.set_defaults(handle_args=handle_fork, func=fork, |
<|file_name|>twitterapi.go<|end_file_name|><|fim▁begin|>package main
import (
"bufio"
"bytes"
"encoding/json"
"errors"
"fmt"
oauth "github.com/akrennmair/goauth"
goconf "github.com/akrennmair/goconf"
"io/ioutil"
"log"
"net/http"
"strconv"
"strings"
"time"
)
type Timeline struct {
Tweets []*Tweet
}
type UserList struct {
Users []TwitterUser
}
type UserIdList struct {
Ids []int64
}
type Tweet struct {
Favorited *bool
In_reply_to_status_id *int64
Retweet_count interface{}
In_reply_to_screen_name *string
Place *PlaceDesc
Truncated *bool
User *TwitterUser
Retweeted *bool
In_reply_to_status_id_str *string
In_reply_to_user_id_str *string
In_reply_to_user_id *int64
Source *string
Id *int64
Id_str *string
//Coordinates *TODO
Text *string
Created_at *string
}
type TwitterUser struct {
Protected *bool
Listed_count int
Name *string
Verified *bool
Lang *string
Time_zone *string
Description *string
Location *string
Statuses_count int
Url *string
Screen_name *string
Follow_request_sent *bool
Following *bool
Friends_count *int64
Favourites_count *int64
Followers_count *int64
Id *int64
Id_str *string
}
type PlaceDesc struct {
Name *string
Full_name *string
Url *string
Country_code *string
}
type TwitterEvent struct {
Delete *WhatEvent
}
type WhatEvent struct {
Status *EventDetail
}
type EventDetail struct {
Id *int64
Id_str *string
User_id *int64
User_id_str *string
}
type Configuration struct {
Characters_reserved_per_media *int64
Max_media_per_upload *int64
Short_url_length_https *int64
Short_url_length *int64
}
const (
request_token_url = "https://api.twitter.com/oauth/request_token"
access_token_url = "https://api.twitter.com/oauth/access_token"
authorization_url = "https://api.twitter.com/oauth/authorize"
INITIAL_NETWORK_WAIT time.Duration = 250e6 // 250 milliseconds
INITIAL_HTTP_WAIT time.Duration = 10e9 // 10 seconds
MAX_NETWORK_WAIT time.Duration = 16e9 // 16 seconds
MAX_HTTP_WAIT time.Duration = 240e9 // 240 seconds
)
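// Reconnect policy used by UserStream below (sketch): network errors back off
// linearly (250ms, 500ms, ... capped at MAX_NETWORK_WAIT) while HTTP errors
// back off exponentially (10s, 20s, 40s, ... capped at MAX_HTTP_WAIT); a wait
// resets to its initial value after a long enough quiet period.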
type TwitterAPI struct {
authcon *oauth.OAuthConsumer
config *goconf.ConfigFile
access_token *oauth.AccessToken
request_token *oauth.RequestToken
ratelimit_rem uint
ratelimit_limit uint
ratelimit_reset int64
}
func NewTwitterAPI(consumer_key, consumer_secret string, cfg *goconf.ConfigFile) *TwitterAPI {
tapi := &TwitterAPI{
authcon: &oauth.OAuthConsumer{
Service: "twitter",
RequestTokenURL: request_token_url,
AccessTokenURL: access_token_url,
AuthorizationURL: authorization_url,
ConsumerKey: consumer_key,
ConsumerSecret: consumer_secret,
UserAgent: PROGRAM_NAME + "/" + PROGRAM_VERSION + " (" + PROGRAM_URL + ")",
Timeout: 60e9, // 60 second default timeout
CallBackURL: "oob",
},
config: cfg,
}
if tapi.config != nil {
if timeout, err := tapi.config.GetInt("default", "http_timeout"); err == nil && timeout > 0 {
tapi.authcon.Timeout = int64(timeout) * 1e9
}
}
return tapi
}
func (tapi *TwitterAPI) GetRequestAuthorizationURL() (string, error) {
s, rt, err := tapi.authcon.GetRequestAuthorizationURL()
tapi.request_token = rt
return s, err
}
func (tapi *TwitterAPI) GetRateLimit() (remaining uint, limit uint, reset int64) {
curtime := time.Now().Unix()
return tapi.ratelimit_rem, tapi.ratelimit_limit, tapi.ratelimit_reset - curtime
}
func (tapi *TwitterAPI) SetPIN(pin string) {
tapi.access_token = tapi.authcon.GetAccessToken(tapi.request_token.Token, pin)
}
func (tapi *TwitterAPI) SetAccessToken(at *oauth.AccessToken) {
tapi.access_token = at
}
func (tapi *TwitterAPI) GetAccessToken() *oauth.AccessToken {
return tapi.access_token
}
func (tapi *TwitterAPI) HomeTimeline(count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("home_timeline",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}())
}
func (tapi *TwitterAPI) Mentions(count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("mentions",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}())
}
func (tapi *TwitterAPI) PublicTimeline(count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("public_timeline",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}())
}
func (tapi *TwitterAPI) RetweetedByMe(count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("retweeted_by_me",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}())
}
func (tapi *TwitterAPI) RetweetedToMe(count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("retweeted_to_me",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}())
}
func (tapi *TwitterAPI) RetweetsOfMe(count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("retweets_of_me",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}())
}
func (tapi *TwitterAPI) UserTimeline(screen_name string, count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("user_timeline",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}(),
func() *oauth.Pair {
if screen_name != "" {
return &oauth.Pair{"screen_name", screen_name}
}
return nil
}())
}
func (tapi *TwitterAPI) RetweetedToUser(screen_name string, count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("retweeted_to_user",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}(),
func() *oauth.Pair {
if screen_name != "" {
return &oauth.Pair{"screen_name", screen_name}
}
return nil
}())
}
func (tapi *TwitterAPI) RetweetedByUser(screen_name string, count uint, since_id int64) (*Timeline, error) {
return tapi.get_timeline("retweeted_by_user",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}(),
func() *oauth.Pair {
if since_id != 0 {
return &oauth.Pair{"since_id", strconv.FormatInt(since_id, 10)}
}
return nil
}(),
func() *oauth.Pair {
if screen_name != "" {
return &oauth.Pair{"screen_name", screen_name}
}
return nil
}())
}
func (tapi *TwitterAPI) RetweetedBy(tweet_id int64, count uint) (*UserList, error) {
jsondata, err := tapi.get_statuses(strconv.FormatInt(tweet_id, 10)+"/retweeted_by",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}())
if err != nil {
return nil, err
}
ul := &UserList{}
if jsonerr := json.Unmarshal(jsondata, &ul.Users); jsonerr != nil {
return nil, jsonerr
}
return ul, nil
}
func (tapi *TwitterAPI) RetweetedByIds(tweet_id int64, count uint) (*UserIdList, error) {
jsondata, err := tapi.get_statuses(strconv.FormatInt(tweet_id, 10)+"/retweeted_by/ids",
func() *oauth.Pair {
if count != 0 {
return &oauth.Pair{"count", strconv.FormatUint(uint64(count), 10)}
}
return nil
}())
if err != nil {
return nil, err
}
uidl := &UserIdList{}
if jsonerr := json.Unmarshal(jsondata, &uidl.Ids); jsonerr != nil {
return nil, jsonerr
}
return uidl, nil
}
func (tapi *TwitterAPI) Update(tweet Tweet) (*Tweet, error) {
params := oauth.Params{
&oauth.Pair{
Key: "status",
Value: *tweet.Text,
},
}
if tweet.In_reply_to_status_id != nil && *tweet.In_reply_to_status_id != int64(0) {
params = append(params, &oauth.Pair{"in_reply_to_status_id", strconv.FormatInt(*tweet.In_reply_to_status_id, 10)})
}
resp, err := tapi.authcon.Post("https://api.twitter.com/1.1/statuses/update.json", params, tapi.access_token)
if err != nil {
return nil, err
}
tapi.UpdateRatelimit(resp.Header)
if resp.StatusCode == 403 {
return nil, errors.New(resp.Status)
}
data, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
newtweet := &Tweet{}
if jsonerr := json.Unmarshal(data, newtweet); jsonerr != nil {
return nil, jsonerr
}
return newtweet, nil
}
func (tapi *TwitterAPI) Retweet(tweet Tweet) (*Tweet, error) {
resp, err := tapi.authcon.Post(fmt.Sprintf("https://api.twitter.com/1.1/statuses/retweet/%d.json", *tweet.Id), oauth.Params{}, tapi.access_token)
if err != nil {
return nil, err
}
tapi.UpdateRatelimit(resp.Header)
if resp.StatusCode == 403 {
return nil, errors.New(resp.Status)
}
data, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
newtweet := &Tweet{}
if jsonerr := json.Unmarshal(data, newtweet); jsonerr != nil {
return nil, jsonerr
}
return newtweet, nil
}
func (tapi *TwitterAPI) Favorite(tweet Tweet) error {
resp, err := tapi.authcon.Post(fmt.Sprintf("https://api.twitter.com/1.1/favorites/create/%d.json", *tweet.Id), oauth.Params{}, tapi.access_token)
if err != nil {
return err
}
if resp.StatusCode != 200 {
return errors.New(resp.Status)
}
return nil
}
func (tapi *TwitterAPI) Follow(screen_name string) error {
params := oauth.Params{
&oauth.Pair{
Key: "screen_name",
Value: screen_name,
},
}
resp, err := tapi.authcon.Post("https://api.twitter.com/1.1/friendships/create.json", params, tapi.access_token)
if err != nil {
return err
}
if resp.StatusCode != 200 {
return errors.New(resp.Status)
}
return nil
}
func (tapi *TwitterAPI) Unfollow(user TwitterUser) error {
params := oauth.Params{
&oauth.Pair{
Key: "user_id",
Value: *user.Id_str,
},
&oauth.Pair{
Key: "screen_name",
Value: *user.Screen_name,
},
}
resp, err := tapi.authcon.Post("https://api.twitter.com/1.1/friendships/destroy.json", params, tapi.access_token)
if err != nil {
return err
}
if resp.StatusCode != 200 {
return errors.New(resp.Status)
}
return nil
}
func (tapi *TwitterAPI) DestroyTweet(tweet Tweet) error {
resp, err := tapi.authcon.Post(fmt.Sprintf("https://api.twitter.com/1.1/statuses/destroy/%d.json", *tweet.Id), oauth.Params{}, tapi.access_token)
if err != nil {
return err
}
if resp.StatusCode != 200 {
return errors.New(resp.Status)
}
return nil
}
func (tapi *TwitterAPI) Configuration() (*Configuration, error) {
params := oauth.Params{}
resp, err := tapi.authcon.Get("https://api.twitter.com/1.1/help/configuration.json", params, tapi.access_token)
if err != nil {
return nil, err
}
if resp.StatusCode != 200 {
return nil, errors.New(resp.Status)
}
jsondata, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
config := &Configuration{}
if err := json.Unmarshal(jsondata, config); err != nil {
return nil, err
}
return config, nil
}
func (tapi *TwitterAPI) VerifyCredentials() (*TwitterUser, error) {
params := oauth.Params{
&oauth.Pair{
Key: "skip_status",
Value: "true",
},
}
resp, err := tapi.authcon.Get("https://api.twitter.com/1.1/account/verify_credentials.json", params, tapi.access_token)
if err != nil {
return nil, err
}
if resp.StatusCode != 200 {
return nil, errors.New(resp.Status)
}
jsondata, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
user := &TwitterUser{}
if err := json.Unmarshal(jsondata, user); err != nil {
return nil, err
}
return user, nil
}
func (tapi *TwitterAPI) get_timeline(tl_name string, p ...*oauth.Pair) (*Timeline, error) {
jsondata, err := tapi.get_statuses(tl_name, p...)
if err != nil {
return nil, err
}
tl := &Timeline{}
if jsonerr := json.Unmarshal(jsondata, &tl.Tweets); jsonerr != nil {
return nil, jsonerr
}
return tl, nil
}
func (tapi *TwitterAPI) get_statuses(id string, p ...*oauth.Pair) ([]byte, error) {
var params oauth.Params
for _, x := range p {
if x != nil {
params.Add(x)
}
}
resp, geterr := tapi.authcon.Get("https://api.twitter.com/1.1/statuses/"+id+".json", params, tapi.access_token)
if geterr != nil {
return nil, geterr
}
tapi.UpdateRatelimit(resp.Header)
return ioutil.ReadAll(resp.Body)
}
type HTTPError int
func (e HTTPError) Error() string {
return "HTTP code " + strconv.Itoa(int(e))
}
func (tapi *TwitterAPI) UserStream(tweetchan chan<- []*Tweet, actions chan<- interface{}) {
network_wait := INITIAL_NETWORK_WAIT
http_wait := INITIAL_HTTP_WAIT
last_network_backoff := time.Now()
last_http_backoff := time.Now()
for {
if err := tapi.doUserStream(tweetchan, actions); err != nil {
log.Printf("user stream returned error: %v", err)
if _, ok := err.(HTTPError); ok {
				if time.Now().Sub(last_http_backoff) > 1800*time.Second { // 30 quiet minutes
http_wait = INITIAL_HTTP_WAIT
}
log.Printf("HTTP wait: backing off %d seconds", http_wait/1e9)
time.Sleep(http_wait)
if http_wait < MAX_HTTP_WAIT {
http_wait *= 2
}
last_http_backoff = time.Now()
} else {
				if time.Now().Sub(last_network_backoff) > 1800*time.Second { // 30 quiet minutes
network_wait = INITIAL_NETWORK_WAIT
}
log.Printf("Network wait: backing off %d milliseconds", network_wait/1e6)
time.Sleep(network_wait)
if network_wait < MAX_NETWORK_WAIT {
network_wait += INITIAL_NETWORK_WAIT
}
last_network_backoff = time.Now()
}
}
}
}
func (tapi *TwitterAPI) doUserStream(tweetchan chan<- []*Tweet, actions chan<- interface{}) error {
resolve_urls := false
if tapi.config != nil {
if resolve, err := tapi.config.GetBool("default", "resolve_urls"); err == nil {
resolve_urls = resolve
}
}
resp, err := tapi.authcon.Get("https://userstream.twitter.com/2/user.json", oauth.Params{}, tapi.access_token)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode > 200 {
bodydata, _ := ioutil.ReadAll(resp.Body)
log.Printf("HTTP error: %s", string(bodydata))
return HTTPError(resp.StatusCode)
}
buf := bufio.NewReader(resp.Body)
for {
line, err := getLine(buf)
if err != nil {
log.Printf("getLine error: %v", err)
return err
}
if len(line) == 0 {
continue
}<|fim▁hole|>
if bytes.HasPrefix(line, []byte("{\"delete\":")) {
action := &TwitterEvent{}
if err := json.Unmarshal(line, action); err != nil {
continue
}
if action.Delete != nil && action.Delete.Status != nil && action.Delete.Status.Id != nil {
actions <- ActionDeleteTweet(*action.Delete.Status.Id)
}
} else {
newtweet := &Tweet{}
if err := json.Unmarshal(line, newtweet); err != nil {
log.Printf("couldn't unmarshal tweet: %v\n", err)
continue
}
// TODO: move this to goroutine if resolving turns out to block everything.
if resolve_urls {
newtweet.ResolveURLs()
}
if newtweet.Id != nil && newtweet.Text != nil {
tweetchan <- []*Tweet{newtweet}
}
}
}
// not reached
return nil
}
func getLine(buf *bufio.Reader) ([]byte, error) {
line := []byte{}
for {
data, isprefix, err := buf.ReadLine()
if err != nil {
return line, err
}
line = append(line, data...)
if !isprefix {
break
}
}
return line, nil
}
func (tapi *TwitterAPI) UpdateRatelimit(hdrs http.Header) {
for k, v := range hdrs {
switch strings.ToLower(k) {
case "x-ratelimit-limit":
if limit, err := strconv.ParseUint(v[0], 10, 0); err == nil {
tapi.ratelimit_limit = uint(limit)
}
case "x-ratelimit-remaining":
if rem, err := strconv.ParseUint(v[0], 10, 0); err == nil {
tapi.ratelimit_rem = uint(rem)
}
case "x-ratelimit-reset":
if reset, err := strconv.ParseInt(v[0], 10, 64); err == nil {
tapi.ratelimit_reset = reset
}
}
}
}
func (t *Tweet) RelativeCreatedAt() string {
if t.Created_at == nil {
return ""
}
tt, err := time.Parse(time.RubyDate, *t.Created_at)
if err != nil {
return *t.Created_at
}
delta := time.Now().Unix() - tt.Unix()
switch {
case delta < 60:
return "less than a minute ago"
case delta < 120:
return "about a minute ago"
case delta < 45*60:
return fmt.Sprintf("about %d minutes ago", delta/60)
case delta < 120*60:
return "about an hour ago"
case delta < 24*60*60:
return fmt.Sprintf("about %d hours ago", delta/3600)
case delta < 48*60*60:
return "1 day ago"
}
return fmt.Sprintf("%d days ago", delta/(3600*24))
}
func longify_url(url string) (longurl string) {
defer func() {
if r := recover(); r != nil {
log.Printf("longify_url: %v", r)
longurl = url
}
}()
if resp, err := http.Head(url); err == nil && resp.Request != nil && resp.Request.URL != nil {
longurl = strings.Replace(resp.Request.URL.String(), "%23", "#", -1) // HACK, breaks real %23
} else {
longurl = url
}
return
}
func (t *Tweet) ResolveURLs() {
if t.Text != nil {
*t.Text = FindURLs(*t.Text, longify_url)
}
}<|fim▁end|> | |
<|file_name|>0005_auto_20151119_2224.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
('forum', '0004_topic_update_index_date'),
]
database_operations = [
migrations.AlterModelTable('TopicFollowed', 'notification_topicfollowed')
]
state_operations = [
migrations.DeleteModel('TopicFollowed')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations)
]<|fim▁end|> | |
<|file_name|>ManifestSource.ts<|end_file_name|><|fim▁begin|>export enum ManifestSource {
TEXT = 'text',<|fim▁hole|><|fim▁end|> | ARTIFACT = 'artifact',
} |
<|file_name|>p421.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <vector>
using namespace std;
typedef long long int64;
#define FOR(i, a, b) for (int _end_ = (b), i = (a); i <= _end_; ++i)
template <typename T> T gcd(T x, T y) {
for (T t; x; t = x, x = y % x, y = t);
return y;
}
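// fpm: fast modular exponentiation; computes (b^e) mod m in O(log e) steps.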
int64 fpm(int64 b, int64 e, int64 m) {
int64 t = 1;
for (; e; e >>= 1, b = b * b % m) {
e & 1 ? t = t * b % m : 0;
}
return t;
}
const int M = 100000000;
const int64 N = 1e11;
bool b[M + 1];
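// get_primes: linear (Euler) sieve up to n; every composite is marked exactly
// once, by its smallest prime factor.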
vector<int> get_primes(int n) {
vector<int> P;
FOR (i, 2, n) {
if (!b[i]) P.push_back(i);
int k = n / i;
for (auto x : P) {
if (x > k) break;
b[x * i] = true;
if (i % x == 0) break;
}
}
return P;
}
auto primes = get_primes(M);
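// find_root: smallest primitive root modulo prime p, verified by checking
// that g^((p-1)/q) != 1 (mod p) for every prime factor q of p - 1.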
int find_root(int p) {
int x = p - 1;
vector<int> v;
for (auto &y : primes) {
if (x == 1) break;
if (y * y > x) break;
if (x % y == 0) {
v.push_back((p - 1) / y);
for (; x /= y, x % y == 0; );
}
}
if (x != 1) v.push_back((p - 1) / x);
x = p - 1;
FOR (i, 2, p) {
bool b = true;
for (auto y : v)
if (fpm(i, y, p) == 1) {
b = false;
break;
}
if (b) return i;
}
return -1;
}
int inv[15];
int main() {
int64 ans = 0;
int cnt = 0;
FOR (i, 1, 15) {
int g = gcd(i, 15);
int x = i / g;<|fim▁hole|> for (; y * e % x != 1 % x; ++e);
inv[i % 15] = e;
}
for (auto p : primes) {
int x = find_root(p);
if (++cnt % 100000 == 0) {
cerr << p << " " << x << endl;
}
int g = gcd(p - 1, 15);
int y = (p - 1) / g;
int64 b = fpm(x, (p - 1) / (2 * g) * inv[(p - 1) % 15] % y, p);
int64 m = fpm(x, y, p);
FOR (t, 1, g) {
ans += (N + p - b) / p * p;
b = b * m % p;
}
}
cout << ans << endl;
return 0;
}<|fim▁end|> | int y = 15 / g;
int e = 1; |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from collections import OrderedDict
from django import forms
from django.conf import settings
from django.db import transaction
from django.utils.translation import ugettext_lazy as _
from oioioi.base.utils.input_with_generate import TextInputWithGenerate
from oioioi.base.utils.inputs import narrow_input_field
from oioioi.contests.models import ProblemStatementConfig, RankingVisibilityConfig
from oioioi.problems.models import OriginInfoValue, Problem, ProblemSite
class ProblemUploadForm(forms.Form):
contest_id = forms.CharField(widget=forms.HiddenInput, required=False)
def __init__(self, contest, existing_problem, *args, **kwargs):
user = kwargs.pop('user', None)
super(ProblemUploadForm, self).__init__(*args, **kwargs)
self.round_id = None
self.visibility = None
if contest and not existing_problem:
choices = [(r.id, r.name) for r in contest.round_set.all()]
if len(choices) >= 2:
fields = list(self.fields.items())
fields[0:0] = [
('round_id', forms.ChoiceField(choices=choices, label=_("Round")))
]
self.fields = OrderedDict(fields)
elif len(choices) == 1:
self.round_id = choices[0][0]
if 'oioioi.problemsharing' in settings.INSTALLED_APPS and not existing_problem:
if user and user.has_perm('teachers.teacher'):
choices = [
(Problem.VISIBILITY_FRIENDS, 'Friends'),
(Problem.VISIBILITY_PRIVATE, 'Private'),
(Problem.VISIBILITY_PUBLIC, 'Public'),
]
default_visibility = Problem.VISIBILITY_FRIENDS
if contest:
last_problem = (
Problem.objects.filter(contest=contest, author=user)
.order_by('-id')
.first()
)
if (
last_problem
and last_problem.visibility == Problem.VISIBILITY_PRIVATE
):
default_visibility = Problem.VISIBILITY_PRIVATE
self.initial.update({'visibility': default_visibility})
self.fields.update(
{
'visibility': forms.ChoiceField(
choices=choices,
label=_("Visibility"),
required=True,
initial=default_visibility,
)
}
)
def clean(self):
cleaned_data = super(ProblemUploadForm, self).clean()
if self.round_id:
cleaned_data['round_id'] = self.round_id
if self.visibility:
cleaned_data['visibility'] = self.visibility
return cleaned_data
class PackageUploadForm(ProblemUploadForm):
package_file = forms.FileField(label=_("Package file"))
class ProblemStatementConfigForm(forms.ModelForm):
class Meta(object):
fields = '__all__'
model = ProblemStatementConfig
widgets = {'visible': forms.RadioSelect()}
class RankingVisibilityConfigForm(forms.ModelForm):
class Meta(object):
fields = '__all__'
model = RankingVisibilityConfig
widgets = {'visible': forms.RadioSelect()}
class ProblemSiteForm(forms.ModelForm):
class Meta(object):
fields = ['url_key']
model = ProblemSite
widgets = {'url_key': TextInputWithGenerate()}
class ProblemsetSourceForm(forms.Form):
url_key = forms.CharField(label=_("Enter problem's secret key"), required=True)
def __init__(self, url_key, *args, **kwargs):
super(ProblemsetSourceForm, self).__init__(*args, **kwargs)
if url_key:
self.initial = {'url_key': url_key}
class ProblemStatementReplaceForm(forms.Form):
file_name = forms.ChoiceField(label=_("Statement filename"))
file_replacement = forms.FileField(label=_("Replacement file"), required=True)
def __init__(self, file_names, *args, **kwargs):
super(ProblemStatementReplaceForm, self).__init__(*args, **kwargs)
upload_file_field = self.fields['file_replacement']
file_name_field = self.fields['file_name']
file_name_field.choices = [('', '')] + [(name, name) for name in file_names]
self._set_field_show_always('file_name')
narrow_input_field(file_name_field)
narrow_input_field(upload_file_field)
self.initial.update({'file_name': ''})
def _set_field_show_always(self, field_name):
self.fields[field_name].widget.attrs['data-submit'] = 'always'
class PackageFileReuploadForm(forms.Form):
file_name = forms.ChoiceField(label=_("File name"))
file_replacement = forms.FileField(label=_("Replacement file"), required=False)
def __init__(self, file_names, *args, **kwargs):
super(PackageFileReuploadForm, self).__init__(*args, **kwargs)
upload_file_field = self.fields['file_replacement']
file_name_field = self.fields['file_name']
file_name_field.choices = [('', '')] + [(name, name) for name in file_names]
self._set_field_show_always('file_name')
narrow_input_field(file_name_field)
narrow_input_field(upload_file_field)<|fim▁hole|> def _set_field_show_always(self, field_name):
self.fields[field_name].widget.attrs['data-submit'] = 'always'
def _localized_formset_get_initial(localized_objects):
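    # Illustrative example (hypothetical settings): with LANGUAGES equal to
    # [('en', 'English'), ('pl', 'Polish')] and a localization existing only
    # for 'en', this returns [{'language': 'pl'}] -- one initial entry per
    # missing translation.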
return [
{'language': lang[0]}
for lang in settings.LANGUAGES
if not localized_objects.filter(language=lang[0]).exists()
]
class ProblemNameInlineFormSet(forms.models.BaseInlineFormSet):
def __init__(self, *args, **kwargs):
kwargs['initial'] = _localized_formset_get_initial(kwargs['instance'].names)
super(ProblemNameInlineFormSet, self).__init__(*args, **kwargs)
self.max_num = len(settings.LANGUAGES)
class LocalizationFormset(forms.models.BaseInlineFormSet):
def __init__(self, *args, **kwargs):
kwargs['initial'] = _localized_formset_get_initial(
kwargs['instance'].localizations
)
super(LocalizationFormset, self).__init__(*args, **kwargs)
self.min_num = self.max_num = len(settings.LANGUAGES)
for form in self.forms:
form.empty_permitted = False
class OriginInfoValueForm(forms.ModelForm):
@transaction.atomic
def save(self, commit=True):
instance = super(OriginInfoValueForm, self).save(commit=False)
# Ensure parent_tag exists on problems
category = self.cleaned_data['category']
parent_tag = category.parent_tag
instance.parent_tag = parent_tag
problems = self.cleaned_data.get('problems').prefetch_related('origintag_set')
for problem in problems:
if parent_tag not in problem.origintag_set.all():
parent_tag.problems.add(problem)
if commit:
instance.save()
return instance
class Meta(object):
model = OriginInfoValue
fields = ('category', 'value', 'order', 'problems')
exclude = ('parent_tag',)
def _label_from_instance(obj):
return obj.full_name
class OriginTagThroughForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(OriginTagThroughForm, self).__init__(*args, **kwargs)
self.fields['origintag'].label_from_instance = _label_from_instance
class Meta(object):
labels = {'origintag': _("Origin Tag")}
help_texts = {
'origintag': _(
"Origin tags inform about the problem's general origin "
"- e.g. a specific competition, olympiad, or programming camp."
)
}
class OriginInfoValueThroughForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(OriginInfoValueThroughForm, self).__init__(*args, **kwargs)
self.fields['origininfovalue'].label_from_instance = _label_from_instance
class Meta(object):
labels = {'origininfovalue': _("Origin Information")}
help_texts = {
'origininfovalue': _(
"Origin information values inform about the problem's specific origin"
"- a year, round, day, etc."
)
}
class DifficultyTagThroughForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(DifficultyTagThroughForm, self).__init__(*args, **kwargs)
self.fields['tag'].label_from_instance = _label_from_instance
class Meta(object):
labels = {'tag': _("Difficulty Tag")}
help_texts = {
'tag': _(
"Most problems fall into the 'Easy' and 'Medium' category. "
"However, there are problems that are meant for learning "
"the basics of programming (these are 'Very easy') and those "
"that are 'Hard' and exceptionally hard - the latter fall "
"into the 'Very hard' category."
)
}
class AlgorithmTagThroughForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(AlgorithmTagThroughForm, self).__init__(*args, **kwargs)
self.fields['tag'].label_from_instance = _label_from_instance
class Meta(object):
labels = {'tag': _("Algorithm Tag")}
help_texts = {
'tag': _(
"Algorithm tags inform about the algorithms, theorems "
"and data structures needed to solve a problem. "
"Algorithm tags can also inform about the type of a "
"problem, e.g. if a problem is a quiz."
)
}<|fim▁end|> | self.initial.update({'file_name': ''})
|
<|file_name|>Communicator.cpp<|end_file_name|><|fim▁begin|>#include "synchronization/Communicator.hpp"
namespace Synchronization
{
<|fim▁hole|> _numManagedFmus(0)
{
}
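    // addFmu registers the FMU's connections: a start tag seen for the first
    // time gets a fresh local id and its packing recorded; an already known
    // tag reuses its id and is packed again only if the connection is shared.
    // The return value is the number of packings appended to valuePacking.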
size_type Synchronization::Communicator::addFmu(FMI::AbstractFmu* in, vector<FMI::InputMapping> & valuePacking)
{
vector<ConnectionSPtr>& connList = in->getConnections();
size_type numNewCons = 0;
for (auto & con : connList)
{
con->initialize(in->getFmuName());
size_type conId;
auto it = _knownConIds.find(con->getStartTag());
if (it == _knownConIds.end())
{
valuePacking.push_back(con->getPacking());
conId = _numManagedCons++;
_knownConIds[con->getStartTag()] = conId;
++numNewCons;
}
else
{
conId = it->second;
if (con->isShared())
{
++numNewCons;
valuePacking.push_back(con->getPacking());
}
}
con->setLocalId(conId);
}
in->setSharedId(_numManagedFmus++);
for (auto & con : connList)
{
if (con->getLocalId() + 1 > _connections.size())
{
_connections.resize(con->getLocalId() + 1);
_connections[con->getLocalId()] = con;
}
}
if (_outConnectionIds.size() < in->getSharedId() + 1)
{
_outConnectionIds.resize(in->getSharedId() + 1);
}
if (_inConnectionIds.size() < in->getSharedId() + 1)
{
_inConnectionIds.resize(in->getSharedId() + 1);
}
for (auto & i : connList)
{
if (i->isOutgoing(in->getFmuName()))
{
_outConnectionIds[in->getSharedId()].push_back(i->getLocalId());
}
else
{
_inConnectionIds[in->getSharedId()].push_back(i->getLocalId());
}
}
return numNewCons;
}
bool_type Communicator::send(HistoryEntry const & in, size_type communicationId)
{
return static_cast<bool_type>(_connections[communicationId]->send(in));
}
HistoryEntry Communicator::recv(size_type communicationId)
{
return _connections[communicationId]->recv();
}
int_type Communicator::connectionIsFree(size_type communicationId)
{
return _connections[communicationId]->hasFreeBuffer();
}
const vector<size_type> & Communicator::getInConnectionIds(const FMI::AbstractFmu * in) const
{
return _inConnectionIds[in->getSharedId()];
}
const vector<size_type> & Communicator::getOutConnectionIds(const FMI::AbstractFmu * in) const
{
return _outConnectionIds[in->getSharedId()];
}
size_type Communicator::getNumInConnections() const
{
size_type sum = 0;
for (const auto & _inConnectionId : _inConnectionIds)
{
sum += _inConnectionId.size();
}
return sum;
}
size_type Communicator::getNumOutConnections() const
{
size_type sum = 0;
for (const auto & _outConnectionId : _outConnectionIds)
{
sum += _outConnectionId.size();
}
return sum;
}
} /* namespace Synchronization */<|fim▁end|> | Communicator::Communicator()
: _numManagedCons(0), |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponse
def hello_world(request):<|fim▁hole|><|fim▁end|> | return HttpResponse("Hello, world.") |
<|file_name|>urdf_remove_pedestal.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2014-2015, Dataspeed Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Dataspeed Inc. nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL<|fim▁hole|># CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import rospy
import re
match_link = "(.*)<link[^>]*name\s*=\s*\"pedestal\"[^>]*>.*?[^<]<\/link>(.*)"
match_joint = "(.*)<joint[^>]*name\s*=\s*\"pedestal_fixed\"[^>]*>.*?[^<]<\/joint>(.*)"
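# Illustrative intent of the two patterns above: group(1) and group(2) capture
# the URDF text before and after the matched element, so joining the groups
# below splices out e.g. '<link name="pedestal">...</link>' (re.S in the
# re.match calls lets '.' span newlines).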
if __name__ == '__main__':
try:
rospy.init_node('urdf_remove_pedestal', anonymous=True)
param_src = rospy.get_param('~param_src', "/robot_description")
param_dest = rospy.get_param('~param_dest', "/robot_description_mod")
urdf = rospy.get_param(param_src, "")
changed = False
if urdf:
obj = re.match(match_link, urdf, re.S)
if obj:
urdf = obj.group(1) + obj.group(2)
changed = True
rospy.loginfo("Removed link 'pedestal'")
else:
rospy.logwarn("Failed to find link 'pedestal'")
obj = re.match(match_joint, urdf, re.S)
if obj:
urdf = obj.group(1) + obj.group(2)
changed = True
rospy.loginfo("Removed joint 'pedestal_fixed'")
else:
rospy.logwarn("Failed to find joint 'pedestal_fixed'")
rospy.set_param(param_dest, urdf)
if changed:
rospy.loginfo("Updated parameter '%s'", param_dest)
else:
rospy.loginfo("Copied parameter '%s' to '%s'", param_src, param_dest)
else:
rospy.logwarn("Parameter '%s' not found", param_src)
except rospy.ROSInterruptException: pass<|fim▁end|> | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER |
<|file_name|>netaddress.go<|end_file_name|><|fim▁begin|>package lnwire
import (
"fmt"
"net"
"github.com/btcsuite/btcd/btcec"
"github.com/btcsuite/btcd/wire"
)
// NetAddress represents information pertaining to the identity and network
// reachability of a peer. Information stored includes the node's identity
// public key for establishing a confidential+authenticated connection, the
// service bits it supports, and a TCP address the node is reachable at.
//
// TODO(roasbeef): merge with LinkNode in some fashion
type NetAddress struct {
// IdentityKey is the long-term static public key for a node. This node is
// used throughout the network as a node's identity key. It is used to
// authenticate any data sent to the network on behalf of the node, and
// additionally to establish a confidential+authenticated connection with
// the node.
IdentityKey *btcec.PublicKey
<|fim▁hole|> // general so that multiple implementations can be used.
Address net.Addr
// ChainNet is the Bitcoin network this node is associated with.
// TODO(roasbeef): make a slice in the future for multi-chain
ChainNet wire.BitcoinNet
}
// A compile time assertion to ensure that NetAddress meets the net.Addr
// interface.
var _ net.Addr = (*NetAddress)(nil)
// String returns a human readable string describing the target NetAddress. The
// current string format is: <pubkey>@host.
//
// This is part of the net.Addr interface.
func (n *NetAddress) String() string {
// TODO(roasbeef): use base58?
pubkey := n.IdentityKey.SerializeCompressed()
return fmt.Sprintf("%x@%v", pubkey, n.Address)
}
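// Example String() output with hypothetical values (public key shortened):
//
//	02b463...ec7f@10.0.0.2:9735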
// Network returns the name of the network this address is bound to.
//
// This is part of the net.Addr interface.
func (n *NetAddress) Network() string {
return n.Address.Network()
}<|fim▁end|> | // Address is the IP address and port of the node. This is left |
<|file_name|>sortedLastIndex.js<|end_file_name|><|fim▁begin|>var baseSortedIndex = require('./_baseSortedIndex');
/**<|fim▁hole|> * @static
* @memberOf _
* @category Array
* @param {Array} array The sorted array to inspect.
* @param {*} value The value to evaluate.
* @returns {number} Returns the index at which `value` should be inserted into `array`.
 * @example
*
* _.sortedLastIndex([4, 5], 4);
* // => 1
*/
function sortedLastIndex(array, value) {
return baseSortedIndex(array, value, true);
}
module.exports = sortedLastIndex;<|fim▁end|> | * This method is like `_.sortedIndex` except that it returns the highest
* index at which `value` should be inserted into `array` in order to
* maintain its sort order.
* |
<|file_name|>string-lit.rs<|end_file_name|><|fim▁begin|>// rustfmt-force_format_strings: true
// Long string literals
fn main() -> &'static str {
let str = "AAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAaAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAa";
let str = "AAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAa";
let str = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
let too_many_lines = "H\
e\
l\
l\
o";
// Make sure we don't break after an escape character.
let odd_length_name = "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n";
let even_length_name = "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n";
let really_long_variable_name = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
let raw_string = r#"Do
not
remove
formatting"#;
filename.replace(" ", "\\" );
let xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx =<|fim▁hole|> let unicode2 = "Löwe 老虎 Léopard";
let unicode3 = "中华Việt Nam";
let unicode4 = "☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃";
"stuffin'"
}
fn issue682() {
let a = "hello \\ o/";
let b = a.replace("\\ ", "\\");
}
fn issue716() {
println!("forall x. mult(e(), x) = x /\\
forall x. mult(x, x) = e()");
}<|fim▁end|> | funktion("yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy");
let unicode = "a̐éö̲\r\n"; |
<|file_name|>base.js<|end_file_name|><|fim▁begin|>var doNothing = function () {}
/**
* The `Base` log defines methods that transports will share.
*/
var Base = module.exports = function (config, defaults) {
var cedar = require('../../cedar')
// A log is a shorthand for `log.log`, among other things.
var log = function () {
log.log.apply(log, arguments)
}
// Don't run `setMethods` until all config properties are set.
var setMethods = doNothing
// Define properties that trigger `setMethods`.
Base.resetters.forEach(function (property) {
var value
Object.defineProperty(log, property, {
get: function () {
return value
},
set: function (newValue) {
value = newValue
setMethods.apply(log)
}
})
})
// Copy `config` properties to the `log`.
Base.decorate(log, config, true)
// Apply default properties.
Base.decorate(log, defaults || Base.defaults)
// Set up logging methods.
Base.setMethods.apply(log)
// Re-run `setMethods` if `resetters` change.
setMethods = Base.setMethods
// Return the fully-decorated log function.
return log
}
/**
* Some properties will reset methods if changed.
*/
Base.resetters = ['level', 'prefixes', 'format', 'showTrace']
/**
* Cedar supports 7 levels of logging.
*/
Base.levels = ['trace', 'debug', 'log', 'info', 'warn', 'error', 'fatal']
/**
* Share defaults between log objects.
*/
Base.defaults = {
// Show all log messages by default.
level: 'trace',
// Stream to `stdout` (using `write`).
stream: process.stdout,
// Don't add any space to JSON.
space: '',
// Stringify with `JSON.stringify`.
stringify: JSON.stringify,
// Join arguments together as an array.
join: function (args) {
var list = []
for (var index = 0, length = args.length; index < length; index++) {
var arg = args[index]
if (arg instanceof Error) {
arg = '"' + (arg.stack || arg.toString()).replace(/\n/, '\\n') + '"'
} else {
arg = JSON.stringify(arg, null, this.space)
}
list.push(arg)
}
return '[' + list.join(',') + ']'
},
// Start messages with a prefix for each log method.
prefixes: {
trace: 'TRACE ',
debug: 'DEBUG ',
log: 'LOG ',
info: 'INFO ',
warn: 'WARN ',
error: 'ERROR ',
fatal: 'FATAL '
},
// Format a log message.
format: function (message, type, prefix) {
return prefix + message + '\n'
}
}
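/**
 * Usage sketch (hypothetical config; `Base.defaults` supplies the rest):
 *
 *   var log = Base({level: 'info'})
 *   log.info('ready')   // writes something like 'INFO  ["ready"]\n'
 *   log.debug('noise')  // no-op: 'debug' is below the configured level
 */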
/**
* Decorate an object with the properties of another.
*/
Base.decorate = function (object, defaults, shouldOverwrite) {
object = object || {}
for (var key in defaults) {
if (shouldOverwrite || (typeof object[key] === 'undefined')) {
object[key] = defaults[key]
}
}
return object
}
/**
* Create logging methods based on the configured `level`.
*/<|fim▁hole|> var self = this
var found = false
if ((Base.levels.indexOf(self.level) < 0) && self.level !== 'nothing') {
self.error('Unknown log level: "' + self.level + '".')
} else {
Base.levels.forEach(function (methodName, index) {
if (methodName === self.level) {
found = true
}
var prefix = self.prefixes[methodName] || ''
var format = self.format
// If this log is an Emitter, we can catch and emit errors.
if (self.emit) {
self[methodName] = found ? function () {
var message = self.join(arguments)
message = format.call(self, message, methodName, prefix)
try {
self.stream.write(message)
} catch (e) {
self.emit('error', e)
}
} : doNothing
// Otherwise, they'll just throw.
} else {
self[methodName] = found ? function () {
var message = self.join(arguments)
message = format.call(self, message, methodName, prefix)
self.stream.write(message)
} : doNothing
}
})
// Wrap the trace method with a stack tracer.
if (self.trace !== doNothing) {
var traceMethod = self.trace
self.trace = function () {
var e = new Error('')
Error.captureStackTrace(e, self.trace)
var l = arguments.length
arguments[l] = e.stack.split('\n').splice(2).join('\n')
arguments.length = ++l
traceMethod.apply(self, arguments)
}
}
}
}<|fim▁end|> | Base.setMethods = function () { |
<|file_name|>context.go<|end_file_name|><|fim▁begin|>package models
import (
"koding/db/mongodb/modelhelper"
"net"
"socialapi/config"<|fim▁hole|>
"github.com/koding/logging"
)
// Client holds the contextual requester/client info
type Client struct {
// Account holds the requester info
Account *Account
// IP is remote IP of the requester
IP net.IP
// SessionID is session cookie id
SessionID string
}
// Context holds contextual info regarding a REST query
type Context struct {
GroupName string
Client *Client
log logging.Logger
}
// NewContext creates a new context
func NewContext(log logging.Logger) *Context {
return &Context{
log: log,
}
}
// OverrideQuery overrides Query with context info
func (c *Context) OverrideQuery(q *request.Query) *request.Query {
// get group name from context
q.GroupName = c.GroupName
if c.IsLoggedIn() {
q.AccountId = c.Client.Account.Id
} else {
q.AccountId = 0
}
return q
}
// IsLoggedIn checks if the request is an authenticated one
func (c *Context) IsLoggedIn() bool {
if c.Client == nil {
return false
}
if c.Client.Account == nil {
return false
}
if c.Client.Account.Id == 0 {
return false
}
return true
}
// IsAdmin checks if the current requester is an admin or not. This is just a
// stub and a temporary solution for moderation security; when we implement
// the permission system fully, this should be the first function to remove.
func (c *Context) IsAdmin() bool {
if !c.IsLoggedIn() {
return false
}
superAdmins := config.MustGet().DummyAdmins
return IsIn(c.Client.Account.Nick, superAdmins...)
}
// CanManage checks if the current context is the admin of the context's
// group.
// mongo connection is required.
func (c *Context) CanManage() error {
if !c.IsLoggedIn() {
return ErrNotLoggedIn
}
canManage, err := modelhelper.CanManage(c.Client.Account.Nick, c.GroupName)
if err != nil {
return err
}
if !canManage {
return ErrCannotManageGroup
}
return nil
}
// MustGetLogger gets the logger from context, otherwise panics
func (c *Context) MustGetLogger() logging.Logger {
if c.log == nil {
panic(ErrLoggerNotExist)
}
return c.log
}<|fim▁end|> | "socialapi/request" |
<|file_name|>UCMmap.java<|end_file_name|><|fim▁begin|>/**
* <copyright>
* </copyright>
*
* $Id$
*/
package ucm.map;
import core.COREModel;
import org.eclipse.emf.common.util.EList;
import urncore.IURNDiagram;
import urncore.UCMmodelElement;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>UC Mmap</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link ucm.map.UCMmap#isSingleton <em>Singleton</em>}</li>
* <li>{@link ucm.map.UCMmap#getParentStub <em>Parent Stub</em>}</li>
* </ul><|fim▁hole|> * </p>
*
* @see ucm.map.MapPackage#getUCMmap()
* @model
* @generated
*/
public interface UCMmap extends UCMmodelElement, IURNDiagram, COREModel {
/**
* Returns the value of the '<em><b>Singleton</b></em>' attribute.
* The default value is <code>"true"</code>.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Singleton</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Singleton</em>' attribute.
* @see #setSingleton(boolean)
* @see ucm.map.MapPackage#getUCMmap_Singleton()
* @model default="true"
* @generated
*/
boolean isSingleton();
/**
* Sets the value of the '{@link ucm.map.UCMmap#isSingleton <em>Singleton</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Singleton</em>' attribute.
* @see #isSingleton()
* @generated
*/
void setSingleton(boolean value);
/**
* Returns the value of the '<em><b>Parent Stub</b></em>' reference list.
* The list contents are of type {@link ucm.map.PluginBinding}.
* It is bidirectional and its opposite is '{@link ucm.map.PluginBinding#getPlugin <em>Plugin</em>}'.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Parent Stub</em>' reference list isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Parent Stub</em>' reference list.
* @see ucm.map.MapPackage#getUCMmap_ParentStub()
* @see ucm.map.PluginBinding#getPlugin
* @model type="ucm.map.PluginBinding" opposite="plugin"
* @generated
*/
EList getParentStub();
} // UCMmap<|fim▁end|> | |
<|file_name|>mainForCommandline.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
<|fim▁hole|>import argparse
import math<|fim▁end|> | import os, sygnal, sys |
<|file_name|>trait-path-type-error-once-implemented.rs<|end_file_name|><|fim▁begin|>#![feature(generic_associated_types)]<|fim▁hole|>trait X {
type Y<'a>;
}
const _: () = {
fn f2<'a>(arg : Box<dyn X<Y<1> = &'a ()>>) {}
//~^ ERROR this associated type takes 1 lifetime argument but 0 lifetime arguments
//~| ERROR this associated type takes 0 generic arguments but 1 generic argument
};
fn main() {}<|fim▁end|> | |
<|file_name|>configuration.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/configuration.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import copy
import json
import os
try:
import yaml
except ImportError:
has_yaml = False
"""Whether the :py:mod:`yaml` module is available."""
else:
has_yaml = True
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
SERIALIZER_DRIVERS = {}
"""A dictionary containing a mapping of driver names to serialization function data."""
SERIALIZER_DRIVERS['json'] = {'load': json.load, 'dumps': lambda obj: json.dumps(obj, sort_keys=True, indent=4)}
SERIALIZER_DRIVERS['jsn'] = {'load': json.load, 'dumps': lambda obj: json.dumps(obj, sort_keys=True, indent=4)}
SERIALIZER_DRIVERS['yaml'] = {'load': lambda file_obj: yaml.load(file_obj, Loader=Loader), 'dumps': lambda obj: yaml.dumps(obj, default_flow_style=False, Dumper=Dumper)}
SERIALIZER_DRIVERS['yml'] = {'load': lambda file_obj: yaml.load(file_obj, Loader=Loader), 'dumps': lambda obj: yaml.dumps(obj, default_flow_style=False, Dumper=Dumper)}
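# The configuration file's extension selects the driver, e.g. (assumed usage,
# mirroring the Configuration class below):
#   SERIALIZER_DRIVERS['json']['load'](file_h)  # parse an open file
#   SERIALIZER_DRIVERS['yml']['dumps'](obj)     # serialize back to text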
class Configuration(object):
"""
This class provides a generic object for parsing configuration files
in multiple formats.<|fim▁hole|> :param str configuration_file: The configuration file to parse.
:param str prefix: String to be prefixed to all option names.
"""
self.prefix = prefix
self.seperator = '.'
self.configuration_file = configuration_file
file_h = open(self.configuration_file, 'r')
self._storage = dict(self._serializer('load', file_h))
file_h.close()
@property
def configuration_file_ext(self):
"""
The extension of the current configuration file.
"""
return os.path.splitext(self.configuration_file)[1][1:]
def _serializer(self, operation, *args):
if not self.configuration_file_ext in SERIALIZER_DRIVERS:
raise ValueError('unknown file type \'' + self.configuration_file_ext + '\'')
function = SERIALIZER_DRIVERS[self.configuration_file_ext][operation]
return function(*args)
def get(self, item_name):
"""
Retrieve the value of an option.
:param str item_name: The name of the option to retrieve.
:return: The value of *item_name* in the configuration.
"""
if self.prefix:
item_name = self.prefix + self.seperator + item_name
item_names = item_name.split(self.seperator)
node = self._storage
for item_name in item_names:
node = node[item_name]
return node
def get_if_exists(self, item_name, default_value=None):
"""
Retrieve the value of an option if it exists, otherwise
		return *default_value* instead of raising an error.
:param str item_name: The name of the option to retrieve.
:param default_value: The value to return if *item_name* does not exist.
:return: The value of *item_name* in the configuration.
"""
if self.has_option(item_name):
return self.get(item_name)
return default_value
def get_storage(self):
"""
Get a copy of the internal configuration. Changes made to the returned
copy will not affect this object.
:return: A copy of the internal storage object.
:rtype: dict
"""
return copy.deepcopy(self._storage)
def get_missing(self, verify_file):
"""
Use a verification configuration which has a list of required options
and their respective types. This information is used to identify missing
		and incompatible options in the loaded configuration.
:param str verify_file: The file to load for verification data.
:return: A dictionary of missing and incompatible settings.
:rtype: dict
"""
vconf = Configuration(verify_file)
missing = {}
for setting, setting_type in vconf.get('settings').items():
if not self.has_option(setting):
				missing['missing'] = missing.get('missing', [])
missing['missing'].append(setting)
elif not type(self.get(setting)).__name__ == setting_type:
missing['incompatible'] = missing.get('incompatible', [])
missing['incompatible'].append((setting, setting_type))
return missing
def has_option(self, option_name):
"""
Check that an option exists.
:param str option_name: The name of the option to check.
		:return: True if the option exists in the configuration.
:rtype: bool
"""
if self.prefix:
option_name = self.prefix + self.seperator + option_name
item_names = option_name.split(self.seperator)
node = self._storage
for item_name in item_names:
			if item_name not in node:
return False
node = node[item_name]
return True
def has_section(self, section_name):
"""
Checks that an option exists and that it contains sub options.
:param str section_name: The name of the section to check.
:return: True if the section exists.
		:rtype: bool
"""
if not self.has_option(section_name):
return False
return isinstance(self.get(section_name), dict)
def set(self, item_name, item_value):
"""
Sets the value of an option in the configuration.
:param str item_name: The name of the option to set.
:param item_value: The value of the option to set.
"""
if self.prefix:
item_name = self.prefix + self.seperator + item_name
item_names = item_name.split(self.seperator)
item_last = item_names.pop()
node = self._storage
for item_name in item_names:
			if item_name not in node:
node[item_name] = {}
node = node[item_name]
node[item_last] = item_value
return
def save(self):
"""
Save the current configuration to disk.
"""
		file_h = open(self.configuration_file, 'w')
file_h.write(self._serializer('dumps', self._storage))
file_h.close()<|fim▁end|> | """
def __init__(self, configuration_file, prefix=''):
""" |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>//
// George 'papanikge' Papanikolaou 2014-2018
// tail + head = tead
//
// This was originally written in an ancient form of rust.
// Rewritten for rust 1.22.
//
extern crate getopts;
use getopts::Options; // use extra::getopts::{optopt, optflag, getopts};
// renamed to BufReader, but we probably do not want it // use std::io::buffered::BufferedReader;
use std::env;
static VERSION: &'static str = "1.1.0";
static DEFAULT_LINE_NUMBER: &'static str = "5";
fn usage(program_name: &str) {
println!("Usage: {} [options]", program_name);
println!("\t-n <number of lines to print>");
println!("\t-h --help");
println!("\t-v --version");
}
fn main() {
let args: Vec<String> = env::args().collect();
let program_name = args[0].clone();
let mut available = Options::new();
available.optopt("n", "lines-number", "number of lines to print [default: 5]", "");
available.optflag("v", "version", "print tead's version");
available.optflag("h", "help", "print this help menu");
let given = match available.parse(&args[1..]) {
Ok(m) => { m }
Err(f) => { panic!(f.to_string()) }
};
if given.opt_present("h") {
usage(&program_name);
return;
}
if given.opt_present("v") {
println!("tead -- Version {}", VERSION);
return;
}
let lines = given.opt_str("n").or(Some(String::from(DEFAULT_LINE_NUMBER))).unwrap();
let files = given.free;
if files.is_empty() {
usage(&program_name);
return;
// No files provided. stdin() is a reader so we can do:
// let mut buffer = BufferedReader::new(std::io::stdin());<|fim▁hole|> files[0].clone();
// call tead here
}
}<|fim▁end|> | // call tead here
} else {
println!("temp - lines: {}", lines); |
<|file_name|>generic-derived-type.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
fn g<X>(x: X) -> X { return x; }
#[derive(Clone)]
struct Pair<T> {
a: T,
b: T<|fim▁hole|>fn f<T:Clone>(t: T) -> Pair<T> {
let x: Pair<T> = Pair {a: t.clone(), b: t};
return g::<Pair<T>>(x);
}
pub fn main() {
let b = f::<isize>(10);
println!("{}" ,b.a);
println!("{}", b.b);
assert_eq!(b.a, 10);
assert_eq!(b.b, 10);
}<|fim▁end|> | }
|
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var fs = require('fs');
var path = require('path');
var gulp = require('gulp');
var sass = require('gulp-sass');
// Load all gulp plugins automatically
// and attach them to the `plugins` object
var plugins = require('gulp-load-plugins')();
// Temporary solution until gulp 4
// https://github.com/gulpjs/gulp/issues/355
var runSequence = require('run-sequence');
var pkg = require('./package.json');
var dirs = pkg['h5bp-configs'].directories;
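// The 'h5bp-configs' block in package.json is assumed to look roughly like
// this (directory names are illustrative):
//   "h5bp-configs": {
//       "directories": { "archive": "archive", "dist": "dist", "src": "src", "test": "test" }
//   }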
// ---------------------------------------------------------------------
// | Helper tasks |
// ---------------------------------------------------------------------
gulp.task('archive:create_archive_dir', function () {
fs.mkdirSync(path.resolve(dirs.archive), '0755');
});
gulp.task('archive:zip', function (done) {
var archiveName = path.resolve(dirs.archive, pkg.name + '_v' + pkg.version + '.zip');
var archiver = require('archiver')('zip');
var files = require('glob').sync('**/*.*', {
'cwd': dirs.dist,
'dot': true // include hidden files
});
var output = fs.createWriteStream(archiveName);
archiver.on('error', function (error) {
done();
throw error;
});
output.on('close', done);
files.forEach(function (file) {
var filePath = path.resolve(dirs.dist, file);
// `archiver.bulk` does not maintain the file
// permissions, so we need to add files individually
archiver.append(fs.createReadStream(filePath), {
'name': file,
            'mode': fs.statSync(filePath).mode
});
});
archiver.pipe(output);
archiver.finalize();
});
gulp.task('clean', function (done) {
require('del')([
dirs.archive,
dirs.dist
], done);
});
gulp.task('copy', [
'copy:.htaccess',
'copy:index.html',
'copy:jquery',
'copy:license',
'copy:main.css',
'copy:misc',
'copy:normalize'
]);
gulp.task('copy:.htaccess', function () {
return gulp.src('node_modules/apache-server-configs/dist/.htaccess')
.pipe(plugins.replace(/# ErrorDocument/g, 'ErrorDocument'))
.pipe(gulp.dest(dirs.dist));
});
gulp.task('copy:index.html', function () {
return gulp.src(dirs.src + '/index.html')
.pipe(plugins.replace(/{{JQUERY_VERSION}}/g, pkg.devDependencies.jquery))
.pipe(gulp.dest(dirs.dist));
});
gulp.task('copy:jquery', function () {
return gulp.src(['node_modules/jquery/dist/jquery.min.js'])
.pipe(plugins.rename('jquery-' + pkg.devDependencies.jquery + '.min.js'))
.pipe(gulp.dest(dirs.dist + '/js/vendor'));
});
gulp.task('copy:license', function () {
return gulp.src('LICENSE.txt')
.pipe(gulp.dest(dirs.dist));
});
gulp.task('copy:main.css', function () {
var banner = '/*! HTML5 Boilerplate v' + pkg.version +
' | ' + pkg.license.type + ' License' +
' | ' + pkg.homepage + ' */\n\n';
return gulp.src(dirs.src + '/css/main.css')
.pipe(plugins.header(banner))
.pipe(plugins.autoprefixer({
browsers: ['last 2 versions', 'ie >= 8', '> 1%'],
cascade: false
}))
.pipe(gulp.dest(dirs.dist + '/css'));
});
gulp.task('copy:misc', function () {
return gulp.src([
// Copy all files
dirs.src + '/**/*',
// Exclude the following files
// (other tasks will handle the copying of these files)
'!' + dirs.src + '/css/main.css',
'!' + dirs.src + '/css/*.css.map',
'!' + dirs.src + '/index.html',
'!' + dirs.src + '/styles/**/*',
'!' + dirs.src + '/js/util/**/*',
'!' + dirs.src + '/js/.DS_Store',
'!' + dirs.src + '/js/util',
'!' + dirs.src + '/styles'
], {
// Include hidden files by default<|fim▁hole|> }).pipe(gulp.dest(dirs.dist));
});
gulp.task('copy:normalize', function () {
return gulp.src('node_modules/normalize.css/normalize.css')
.pipe(gulp.dest(dirs.dist + '/css'));
});
gulp.task('lint:js', function () {
return gulp.src([
'gulpfile.js',
dirs.src + '/js/*.js',
dirs.test + '/*.js'
    ])//.pipe(plugins.jscs())
    .pipe(plugins.jshint()) // the reporters below need jshint to have run first
.pipe(plugins.jshint.reporter('jshint-stylish'))
.pipe(plugins.jshint.reporter('fail'));
});
gulp.task('sass', function () {
return gulp.src('./styles/scss/*.scss')
.pipe(sass())
.pipe(gulp.dest('./css'));
});
/*gulp.task('scripts', function(){
gulp.src('./js/!*.js')
.pipe(concat('all.js'))
.pipe(gulp.dest('./dist'))
.pipe(rename('all.min.js'))
.pipe(uglify())
.pipe(gulp.dest('./dist'));
});*/
// ---------------------------------------------------------------------
// | Main tasks |
// ---------------------------------------------------------------------
gulp.task('archive', function (done) {
runSequence(
'build',
'archive:create_archive_dir',
'archive:zip',
done);
});
gulp.task('build', function (done) {
runSequence(
['clean', 'lint:js'],
'copy',
done);
});
// 'default' builds the archive, then watches the Sass sources for changes.
gulp.task('default', function () {
    gulp.run('archive');
    gulp.watch(['./styles/scss/*.scss'], function () {
        gulp.run('sass');
    });
});<|fim▁end|> | dot: true
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | __author__ = 'teresah' |
<|file_name|>test.js<|end_file_name|><|fim▁begin|>'use strict'
var test = require('tap').test
var strip = require('./')
test('stripFalsy', function(t) {
t.plan(5)
t.deepEqual(strip(null), {})
t.deepEqual(strip('test'), {})
t.deepEqual(strip(13), {})
t.deepEqual(strip(), {})
<|fim▁hole|> a: false
, b: 0
, c: null
, d: undefined
, e: ''
, f: 'biscuits'
, g: '0'
}
var exp = {
f: 'biscuits'
, g: '0'
}
t.deepEqual(strip(input), exp)
})<|fim▁end|> | var input = { |
<|file_name|>fpsmeter.min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|> | oid sha256:e1af4eb3952e50a1690c1d45f20c988b688e49f11938afc9f62e5384f71aaebb
size 7470 |
<|file_name|>TemplatesModel.js<|end_file_name|><|fim▁begin|>Class('TemplatesModel', {
views: {
empty: {
name: 'empty',
label: 'Empty'
},
ListView: {
name: 'ListView',
label: 'List View',
subviews: ['ListItemView']
}
}<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>entities_to_cascades.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import logging
from django.core.management.base import BaseCommand
from optparse import make_option
from py3compat import PY2
from snisi_core.models.Entities import AdministrativeEntity as AEntity
if PY2:
import unicodecsv as csv
else:
import csv
logger = logging.getLogger(__name__)
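# Management command: walk the region > cercle > commune > village/quartier
# hierarchy and emit one cascade row per entity into the requested CSV file.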
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-f',
help='CSV file',
action='store',
dest='filename'),
)
def handle(self, *args, **options):
headers = ['name', 'region', 'cercle_commune', 'commune_quartier']
f = open(options.get('filename'), 'w')
csv_writer = csv.DictWriter(f, fieldnames=headers)
csv_writer.writeheader()
csv_writer.writerow({
'name': "label",
'region': "Région",
'cercle_commune': "Cercle",
'commune_quartier': "Commune",
})
for region in AEntity.objects.filter(type__slug='region'):
logger.info(region)
is_bko = region.name == 'BAMAKO'
for cercle in AEntity.objects.filter(parent=region):
logger.info(cercle)
for commune in AEntity.objects.filter(parent=cercle):
logger.info(commune)
if not is_bko:
csv_writer.writerow({
'name': "choice_label",
'region': region.name,
'cercle_commune': cercle.name,
'commune_quartier': commune.name
})
continue
for vfq in AEntity.objects.filter(parent=commune):<|fim▁hole|> for v in (region, cercle, commune, vfq):
if not len(v.name.strip()):
continue
csv_writer.writerow({
'name': "choice_label",
'region': region.name,
'cercle_commune': commune.name,
'commune_quartier': vfq.name
})
f.close()<|fim▁end|> | |
<|file_name|>scan-stills2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from sklearn.cluster import KMeans
from sklearn import datasets
from PIL import Image, ImageChops
from scipy.spatial.distance import cdist
import matplotlib.pyplot as plt
from random import randint
import time
import ephem
from PIL import Image
import cv2
import glob
import sys
import os
import numpy as np
import datetime
from pathlib import Path
import subprocess
from amscommon import read_config
import math
import time
from sklearn.cluster import Birch
from collections import deque
video_dir = "/mnt/ams2/SD/"
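# Lighten-blend two frames (per-pixel maximum) so bright meteor trails
# accumulate across a stack of stills.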
def stack_stack(pic1, pic2):
frame_pil = Image.fromarray(pic1)
stacked_image = pic2
if stacked_image is None:
stacked_image = frame_pil
else:
stacked_image=ImageChops.lighter(stacked_image,frame_pil)
return(stacked_image)
def compute_straight_line(x1,y1,x2,y2,x3,y3):
print ("COMP STRAIGHT", x1,y1,x2,y2,x3,y3)
if x2 - x1 != 0:
a = (y2 - y1) / (x2 - x1)
else:
a = 0
if x3 - x1 != 0:
b = (y3 - y1) / (x3 - x1)
else:
b = 0<|fim▁hole|> straight = "Y"
else:
straight = "N"
return(straight_line)
def crop_center(img,cropx,cropy):
y,x = img.shape
startx = x//2-(cropx//2) +12
starty = y//2-(cropy//2) + 4
return img[starty:starty+cropy,startx:startx+cropx]
def fig2data ( fig ):
"""
    @brief Convert a Matplotlib figure to a 3D numpy array with RGBA channels and return it
@param fig a matplotlib figure
@return a numpy 3D array of RGBA values
"""
# draw the renderer
fig.canvas.draw ( )
# Get the RGBA buffer from the figure
w,h = fig.canvas.get_width_height()
buf = np.fromstring ( fig.canvas.tostring_argb(), dtype=np.uint8 )
    buf.shape = ( h, w, 4 )
# canvas.tostring_argb give pixmap in ARGB mode. Roll the ALPHA channel to have it in RGBA mode
buf = np.roll ( buf, 3, axis = 2 )
return buf
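# Cluster detected points with k-means, draw a best-fit line through any
# cluster with more than three members, and save the scatter plot to /tmp.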
def kmeans_cluster(points, num_clusters):
points = np.array(points)
print(points)
clusters = []
cluster_points = []
colors = ('r', 'g', 'b')
est = KMeans(n_clusters=num_clusters)
est.fit(points)
print (est.labels_)
print (len(points))
({i: np.where(est.labels_ == i)[0] for i in range(est.n_clusters)})
for i in set(est.labels_):
index = est.labels_ == i
cluster_idx = np.where(est.labels_ == i)
for idxg in cluster_idx:
for idx in idxg:
idx = int(idx)
point = points[idx]
#print ("IDX:",i, idx, point)
cluster_points.append(point)
clusters.append(cluster_points)
cluster_points = []
#print(points[:,0])
#print(points[:,1])
int_lb = est.labels_.astype(float)
#fig = gcf()
fig = Figure()
canvas = FigureCanvas(fig)
plot = fig.add_subplot(1,1,1)
plot.scatter(points[:,0], points[:,1], c=[plt.cm.Spectral(float(i) / 10) for i in est.labels_])
for cluster in clusters:
cxs = []
cys = []
for cp in cluster:
x,y,w,h = cp
cxs.append(x)
cys.append(y)
if len(cxs) > 3:
plot.plot(np.unique(cxs), np.poly1d(np.polyfit(cxs, cys, 1))(np.unique(cxs)))
plt.xlim(0,640)
plt.ylim(0,480)
plot.invert_yaxis()
fig.canvas.draw()
fig.savefig("/tmp/plot.png", dpi=fig.dpi)
#plt.show()
return(clusters)
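# Euclidean distance between two pixel coordinates.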
def calc_dist(x1,y1,x2,y2):
dist = math.sqrt((x2 - x1)**2 + (y2 - y1)**2)
return dist
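# Angle, in degrees, of the line from (x1,y1) to (x2,y2); falls back to 0
# when the run is zero (vertical segments).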
def find_angle(x1,y1,x2,y2):
if x2 - x1 != 0:
a1 = (y2 - y1) / (x2 - x1)
else:
a1 = 0
angle = math.atan(a1)
angle = math.degrees(angle)
return(angle)
def closest_node(node, nodes):
return nodes[cdist([node], nodes).argmin()]
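# Pair nearby detections into line segments, group segments with similar
# angles into candidate tracks, and split off isolated points as stars.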
def find_objects(index, points):
apoints = []
unused_points = []
cl_sort = []
sorted_points = []
last_angle = None
objects = []
group_pts = []
line_segments = []
stars = []
obj_points = []
big_cnts = []
count = 0
x1,y1,w1,h1 = points[index]
print ("Total Points found in image: ", len(points))
used_pts = {}
for i in range(0,len(points)-1):
x1,y1,w1,h1 = points[i]
        for j in range(0,len(points)-1):
            x2,y2,w2,h2 = points[j]
key = str(x1)+"."+str(y1)+"."+str(x2)+"."+str(y2)
used_pts[key] = 0
key2 = str(x2)+"."+str(y2)+"."+str(x1)+"."+str(y1)
used_pts[key2] = 0
possible_stars = []
for i in range(0,len(points)-1):
closest = []
x1,y1,w1,h1 = points[i]
for j in range(0,len(points)-1):
x2,y2,w2,h2 = points[j]
key = str(x1)+"."+str(y1)+"."+str(x2)+"."+str(y2)
key2 = str(x2)+"."+str(y2)+"."+str(x1)+"."+str(y1)
dist = calc_dist(x1,y1,x2,y2)
angle = find_angle(x1,y1,x2,y2)
if x1 != x2 and y1 != y2:
if used_pts[key] == 0 and used_pts[key2] == 0 :
#print("Closest Point:", (int(dist),int(angle),int(x1),int(y1),int(x2),int(y2)))
closest.append((int(dist),int(angle),int(x1),int(y1),int(x2),int(y2)))
used_pts[key] = 1
used_pts[key2] = 1
#print("Key has been used:", key, key2)
#else:
# print("Key already used try another one:", key, key2)
#else:
# print ("this point has already been used")
count = count + 1
# of all the close points, make sure that at least 2 points < 25 px dist exist.
conf_closest = []
for cls in closest:
if cls[0] < 100:
conf_closest.append(cls)
if len(closest) > 0:
distsort = np.unique(closest, axis=0)
dist,angle,x1,y1,x2,y2 = distsort[0]
if dist < 50 and len(conf_closest) > 1:
line_segments.append((int(dist),int(angle),int(x1),int(y1),int(x2),int(y2)))
obj_points.append((int(x1),int(y1), int(w1), int(h1)))
else:
possible_stars.append((int(x1),int(y1),int(w1),int(h1)))
#print("CLOSEST LINE SEGMENT FOR PT: ", distsort[0])
#else:
#print("ERROR! no close points to this one!", x1,y1)
if w1 > 15 or h1 > 15:
# print ("BIG!!! We have a big object here likely containing many line segments.")
big_cnts.append((int(x1),int(y1),int(w1),int(h1)))
for star in possible_stars:
close = 0
for line in line_segments:
dist,angle,x1,y1,x2,y2 = line
star_dist = calc_dist(star[0], star[1], x1,y1)
#print ("STARDIST: ", star_dist, star[0], star[1], x1,y1)
if star_dist < 60:
close = 1
if close == 1:
obj_points.append(star)
else:
stars.append(star)
#print ("OBJECT POINTS")
if len(line_segments) > 0:
sorted_lines = sorted(line_segments, key=lambda x: x[2])
else:
sorted_lines = []
#print ("LINE SEGMENTS:")
#for line in sorted_lines:
# print (line)
last_ang = 0
last_dist = 0
line_groups = []
line_group = []
orphan_lines = []
if len(sorted_lines) > 0:
for segment in sorted_lines:
dist,angle,x1,y1,x2,y2 = segment
if last_ang != 0 and (angle -5 < last_ang < angle + 5) and dist < 100:
#print ("Line Segment Part of Existing Group: ", segment)
line_group.append((dist,angle,x1,y1,x2,y2))
else:
#print ("New Group Started!", last_ang, angle )
# print ("Line Segment Part of New Group: ", segment)
if len(line_group) >= 3:
line_groups.append(line_group)
else:
#print("Last line segment was too small to be part of a group! These are random points or stars. Skip for now.")
for line in line_group:
orphan_lines.append(line)
line_group = []
line_group.append((dist,angle,x1,y1,x2,y2))
last_ang = angle
if len(line_group) >= 2:
line_groups.append(line_group)
else:
for line in line_group:
orphan_lines.append(line)
# now make sure all of the line segments in the line group can connect to at least one of the other segments
#print ("Total Line Groups as of now:", len(line_groups))
#print ("Total Orphan Lines as of now:", len(orphan_lines))
#print ("Confirm the line segments are all part of the same group", len(line_groups))
#print ("TOTAL POINTS: ", len(points))
#print ("TOTAL LINE GROUPS: ", len(line_groups))
#print ("ORPHAN GROUPS: ", len(orphan_lines))
#for point in points:
#print ("POINT: ", point)
gc = 1
if len(line_groups) > 0:
for line_group in line_groups:
lc = 1
for line in line_group:
#print("LINE:", line)
dist,ang,x1,y1,x2,y2 = line
#confirm_angle = find_angle(x1,y1,x2,y2)
#print ("GROUP", gc, lc, line, ang, confirm_angle)
lc = lc + 1
gc = gc + 1
#else:
#make sure the obj points are not false positives, if so move to stars.
(line_groups, orphan_lines, stars, obj_points, big_cnts) = conf_objs(line_groups, orphan_lines, stars, obj_points, big_cnts)
return(line_groups, orphan_lines, stars, obj_points, big_cnts)
def conf_objs(line_groups, orphan_lines, stars, obj_points, big_cnts):
print ("CONF OBJS")
print ("LINE GROUPS", len(line_groups))
print ("OBJ POINTS", len(obj_points))
conf_line_groups = []
mx = []
my = []
mw = []
mh = []
#first lets check the line groups and make sure at least 3 points are straight
for line_group in line_groups:
mx = []
my = []
mw = []
mh = []
lgc = 0
for dist,ang,x1,y1,x2,y2 in line_group:
mx.append(x1)
my.append(y1)
print (dist, ang, x1,y1,x2,y2)
print (lgc, "adding MX", x1, mx)
print (lgc, "adding MYs", y1, my)
#mx.append(x2)
#my.append(y2)
lgc = lgc + 1
if len(mx) > 2:
print ("MXs", mx)
print ("MYs", my)
st = compute_straight_line(mx[0],my[0],mx[1],my[1],mx[2],my[2])
else:
st = 100
if st <= 1:
print ("This group is straight")
conf_line_groups.append(line_group)
else:
print ("This group is NOT straight")
            orphan_lines.extend(line_group)
cc = 0
mx = []
my = []
mw = []
mh = []
for x,y,h,w in obj_points:
mx.append(x)
my.append(y)
mw.append(w)
mh.append(h)
cc = cc + 1
if len(mx) > 2:
st = compute_straight_line(mx[0],my[0],mx[1],my[1],mx[2],my[2])
else:
st = 100
if st <= 1:
print ("At least 3 of these are straight, we can continue.", st)
else:
print ("These 3 objects are not straight, and thus false!", st)
for x,y,h,w in obj_points:
stars.append((x,y,h,w))
obj_points = []
return(line_groups, orphan_lines, stars, obj_points, big_cnts)
def clean_line_groups(line_groups, orphan_lines):
cleaned_line_groups = []
cleaned_line_group = []
for line_group in line_groups:
if len(line_group) == 2:
# make sure these two groups are close enough to each other to be grouped.
(dist,angle,x1,y1,x2,y2) = line_group[0]
(xdist,xangle,xx1,xy1,xx2,xy2) = line_group[1]
group_dist = calc_dist(x1,y1,xx1,xy1)
            if group_dist > 50 or not (angle -5 < xangle < angle + 5):
orphan_lines.append(line_group[0])
orphan_lines.append(line_group[1])
else:
                cleaned_line_group = [line_group[0], line_group[1]]
                cleaned_line_groups.append(cleaned_line_group)
else:
cleaned_line_groups.append(line_group)
line_groups = cleaned_line_groups
print("CLG:", line_groups)
return(cleaned_line_groups, orphan_lines)
def confirm_cnts(crop):
crop = cv2.GaussianBlur(crop, (5, 5), 0)
avg_flux = np.average(crop)
max_flux = np.amax(crop)
thresh_limit = avg_flux / 2
_, crop_thresh = cv2.threshold(crop, thresh_limit, 255, cv2.THRESH_BINARY)
#(_, cnts, xx) = cv2.findContours(crop_thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
#if np.sum(crop_thresh) > (255 * 2):
#print ("CONFIRM:", max_flux, avg_flux, thresh_limit, np.sum(crop_thresh))
#cv2.imshow('pepe', crop_thresh)
#else:
# print ("FAILED:", max_flux, avg_flux, thresh_limit, np.sum(crop_thresh))
#cv2.imshow('pepe', crop)
#cv2.waitKey(100)
return(np.sum(crop_thresh))
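# Raise the binary threshold until the contour count drops to a workable level
# (and, for type 0, until no contour spans the whole frame or is oversized).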
def find_best_thresh(image, thresh_limit, type):
go = 1
while go == 1:
_, thresh = cv2.threshold(image, thresh_limit, 255, cv2.THRESH_BINARY)
(_, cnts, xx) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
if type == 0:
cap = 80
else:
cap = 100
if len(cnts) > cap:
thresh_limit = thresh_limit + 1
else:
bad = 0
for (i,c) in enumerate(cnts):
x,y,w,h = cv2.boundingRect(cnts[i])
if w == image.shape[1]:
bad = 1
if type == 0 and (w >= 10 or h > 10):
bad = 1
if bad == 0:
go = 0
else:
thresh_limit = thresh_limit + 1
#print ("CNTs, BEST THRESH:", str(len(cnts)), thresh_limit)
return(thresh_limit)
def find_objects2(timage, tag, current_image, filename):
stars = []
big_cnts = []
obj_points = []
image = timage
thresh_limit = 10
thresh_limit = find_best_thresh(image, thresh_limit, 0)
# find best thresh limit code here!
line_objects = []
points = []
orphan_lines = []
_, thresh = cv2.threshold(image, thresh_limit, 255, cv2.THRESH_BINARY)
(_, cnts, xx) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
#print ("CNTS:", len(cnts))
hit = 0
objects = []
if len(cnts) < 500:
for (i,c) in enumerate(cnts):
x,y,w,h = cv2.boundingRect(cnts[i])
if w > 1 and h > 1:
if (w < 10 and h <10):
nothing = 0
# cv2.rectangle(image, (x,y), (x+w+5, y+h+5), (255),1)
#cv2.circle(image, (x,y), 20, (120), 1)
#if w != h:
# cv2.rectangle(image, (x,y), (x+w+5, y+h+5), (255),1)
else:
#cv2.rectangle(image, (x,y), (x+w+5, y+h+5), (255),1)
# Convert big object into points and add each one to the points array.
crop = timage[y:y+h,x:x+w]
points.append((x,y,w,h))
if w < 600 and h < 400:
crop_points = find_points_in_crop(crop,x,y,w,h)
for x,y,w,h in crop_points:
print("adding some points",x,y,w,h)
                            points.append((x,y,w,h))
#objects.append((x,y,w,h))
else:
image[y:y+h,x:x+w] = [0]
else:
print ("WAY TO MANY CNTS:", len(cnts))
thresh_limit = thresh_limit + 5
return(points)
# find line objects
if (len(objects) + len(points)) > 0:
        line_groups, orphan_lines, stars, obj_points, big_cnts = find_objects(0, points)
else:
line_groups = []
final_group = []
final_groups = []
reject_group = []
reject_groups = []
line_segments = flatten_line_groups(line_groups)
line_segments = sorted(line_segments, key = lambda x: (x[0],x[1]))
if len(line_segments) > 0:
final_group, reject_group = regroup_lines(line_segments)
print ("MIKE!:", len(final_group))
if len(final_group) > 1:
final_groups.append(final_group)
else:
for line in final_group:
orphan_lines.append(line)
if len(reject_group) > 3:
print (len(reject_group), "rejects left. do it again.")
reject_group = sorted(reject_group, key = lambda x: (x[1],x[0]))
final_group, reject_group = regroup_lines(reject_group)
if len(final_group) > 1:
final_groups.append(final_group)
else:
for line in final_group:
orphan_lines.append(line)
print (len(reject_group), "rejects left after 2nd try")
if len(reject_group) > 3:
print (len(reject_group), "rejects left. do it again.")
final_group, reject_group = regroup_lines(reject_group)
if len(final_group) > 1:
final_groups.append(final_group)
else:
for line in final_group:
orphan_lines.append(line)
print (len(reject_group), "rejects left after 3rd try")
# try to adopt the orphans!
if len(orphan_lines) >= 1:
print (orphan_lines)
final_group, reject_group = regroup_lines(orphan_lines)
if len(final_group) > 1:
final_groups.append(final_group)
if len(final_group) > 0:
print ("Adopted! : ", final_group)
orphan_lines = reject_group
if len(orphan_lines) >= 1:
final_group, reject_group = regroup_lines(reject_group)
if len(final_group) > 1:
final_groups.append(final_group)
if len(final_group) > 0:
print ("Adopted! : ", final_group)
orphan_lines = reject_group
if len(orphan_lines) >= 1:
final_group, reject_group = regroup_lines(reject_group)
if len(final_group) > 1:
final_groups.append(final_group)
if len(final_group) > 0:
print ("Adopted! : ", final_group)
orphan_lines = reject_group
final_groups, orphan_lines = clean_line_groups(final_groups, orphan_lines)
clusters= []
clusters_ab= []
last_x = None
last_y = None
last_ang = None
ang = None
if len(points) > 3:
num_clusters = int(len(points)/3)
clusters = kmeans_cluster(points, num_clusters)
#print ("MIKE CLUSTERS", len(clusters))
for cluster in clusters:
cxs = []
cys = []
for cp in cluster:
x,y,w,h = cp
cxs.append(x)
cys.append(y)
if last_x is not None:
ang = find_angle(x,y,last_x,last_y)
print ("CLUSTER ANGLE:", x,y,last_x,last_y,ang)
if last_ang is not None:
if ang - 5 < last_ang < ang + 5:
cv2.line(image, (x,y), (last_x,last_y), (200), 4)
last_x = x
last_y = y
last_ang = ang
a, b = best_fit (cxs,cys)
mnx = min(cxs)
mny = min(cys)
mmx = max(cxs)
mmy = max(cys)
cv2.rectangle(image, (mnx,mny), (mmx, mmy), (255),1)
#print ("MIKE MIKE XS,", cxs)
#print ("MIKE MIKE YS,", cys)
clusters_ab.append((a,b))
print ("MIKE AB,", a,b)
print ("FINAL ANALYSIS")
print (final_groups)
print ("--------------")
print ("File Name: ", filename)
print ("Total Points:", len(points))
print ("Total Line Segments:", len(line_segments))
print ("Total Final Line Groups:", len(final_groups))
print ("Total Clusters:", len(clusters))
cl =0
for a,b in clusters_ab:
print ("Cluster " + str(cl + 1) + " " + str(len(clusters[cl])) + " points")
print ("LINE AB " + str(a) + " " + str(b))
cl = cl + 1
#print (final_groups)
print ("Total Rejected Lines:", len(reject_group))
gc = 1
    xs, ys = [], []
for line_group in final_groups:
lc = 1
for line in line_group:
dist,angle,x1,y1,x2,y2 = line
xs.append(x1)
xs.append(x2)
ys.append(y1)
ys.append(y2)
#print (gc, lc, line)
lc = lc + 1
gc = gc + 1
if len(xs) > 0 and len(ys) > 0:
mnx = min(xs)
mxx = max(xs)
mny = min(ys)
mxy = max(ys)
cv2.rectangle(image, (mnx,mny), (mxx, mxy), (255),1)
print ("Total Orphaned Lines:", len(orphan_lines))
if len(line_groups) > 0:
line_segments = flatten_line_groups(line_groups)
find_line_nodes(line_segments)
gc = 1
for line_group in line_groups:
lc = 1
line_group = sorted(line_group, key = lambda x: (x[2],x[3]))
dist,angle,sx1,sy1,sx2,sy2 = line_group[0]
for line in line_group:
dist,angle,x1,y1,x2,y2 = line
#s_ang = find_angle(sx1,sy1,x1,y1)
#if angle - 5 < s_ang < angle + 5:
# print("FINAL GROUP:", gc,lc,line, angle, s_ang)
# final_group.append((dist,angle,x1,y1,x2,y2))
#else:
# print("REJECT GROUP:", gc,lc,line, angle, s_ang)
# reject_group.append((dist,angle,x1,y1,x2,y2))
#seg_dist = find_closest_segment(line, line_group)
cv2.line(image, (x1,y1), (x2,y2), (255), 2)
cv2.putText(image, "L " + str(lc), (x1+25,y1+10), cv2.FONT_HERSHEY_SIMPLEX, .4, (255), 1)
lc = lc + 1
if len(line_group) > 0:
cv2.putText(image, "LG " + str(gc), (x1+25,y1), cv2.FONT_HERSHEY_SIMPLEX, .4, (255), 1)
gc = gc + 1
for line in orphan_lines:
#print("ORPHAN:", line)
dist,angle,x1,y1,x2,y2 = line
cv2.line(image, (x1,y1), (x2,y2), (255), 1)
cv2.putText(image, "Orph" , (x1+25,y1), cv2.FONT_HERSHEY_SIMPLEX, .4, (255), 1)
#cv2.ellipse(image,(ax,ay),(dist_x,dist_y),elp_ang,elp_ang,180,255,-1)
#a,b = best_fit(lxs, lys)
#plt.scatter(lxs,lys)
#plt.xlim(0,640)
#plt.ylim(0,480)
#yfit = [a + b * xi for xi in lxs]
#plt.plot(lxs,yfit)
#cv2.imshow('pepe', image)
#cv2.waitKey(1)
#plt.gca().invert_yaxis()
#plt.show()
#for x,y,w,h in points:
# if w > 25 or h > 25:
# cv2.rectangle(image, (x,y), (x+w+5, y+h+5), (255),1)
# else:
# cv2.circle(image, (x,y), 20, (120), 1)
edges = cv2.Canny(image.copy(),thresh_limit,255)
el = filename.split("/");
fn = el[-1]
cv2.putText(current_image, "File Name: " + fn, (10,440), cv2.FONT_HERSHEY_SIMPLEX, .4, (255), 1)
cv2.putText(current_image, str(tag), (10,450), cv2.FONT_HERSHEY_SIMPLEX, .4, (255), 1)
cv2.putText(current_image, "Points: " + str(len(points)), (10,460), cv2.FONT_HERSHEY_SIMPLEX, .4, (255), 1)
cv2.putText(current_image, "Line Groups: " + str(len(final_groups)), (10,470), cv2.FONT_HERSHEY_SIMPLEX, .4, (255), 1)
blend = cv2.addWeighted(image, .2, current_image, .8,0)
np_plt = cv2.imread("/tmp/plot.png")
np_plt = cv2.cvtColor(np_plt, cv2.COLOR_BGR2GRAY)
hh, ww = np_plt.shape
crop = cv2.resize(np_plt, (0,0), fx=1.1, fy=1.1)
crop = crop_center(crop, 640,480)
#blend = cv2.addWeighted(blend, .5, crop, .5,0)
#for x,y in stars:
# cv2.circle(blend, (x,y), 5, (255), 1)
#exit()
return(line_groups, points, clusters)
def regroup_lines(line_segments):
final_group = []
reject_group = []
sangles = []
dist,angle,sx1,sy1,sx2,sy2 = line_segments[0]
for line in line_segments:
dist,angle,x1,y1,x2,y2 = line
s_ang = find_angle(sx1,sy1,x1,y1)
sangles.append(s_ang)
mean_angle = np.median(np.array(sangles))
if len(line_segments ) > 0:
dist,angle,sx1,sy1,sx2,sy2 = line_segments[0]
for line in line_segments:
dist,angle,x1,y1,x2,y2 = line
s_ang = find_angle(sx1,sy1,x1,y1)
if mean_angle - 10 <= s_ang <= mean_angle + 10:
#print("FINAL GROUP:", line, angle, s_ang, mean_angle)
found = 0
for (dd,aa,ax1,ay1,ax2,ay2) in final_group:
if ax1 == x1 and ay1 == y1:
found = 1
if found == 0:
final_group.append((dist,angle,x1,y1,x2,y2))
else:
#print("REJECT GROUP:",line, angle, s_ang, mean_angle)
reject_group.append((dist,angle,x1,y1,x2,y2))
if len(line_segments ) > 0:
sdist,sangle,sx1,sy1,sx2,sy2 = line_segments[0]
for line in line_segments:
dist,angle,x1,y1,x2,y2 = line
s_ang = find_angle(sx1,sy1,x1,y1)
tdist = calc_dist(x1,y1,sx1,sy1)
if sangle - 10 <= angle <= sangle + 10 and tdist < 20:
found = 0
for (dd,aa,ax1,ay1,ax2,ay2) in final_group:
if ax1 == x1 and ay1 == y1:
found = 1
if found == 0:
print("FINAL GROUP:", line, angle, s_ang, mean_angle)
final_group.append((dist,angle,x1,y1,x2,y2))
else:
#print("REJECT GROUP:",line, angle, s_ang, mean_angle)
reject_group.append((dist,angle,x1,y1,x2,y2))
return(final_group, reject_group)
def flatten_line_groups(line_groups):
line_segments = []
for line_group in line_groups:
for line in line_group:
dist,angle,x1,y1,x2,y2 = line
line_segments.append((dist,angle,x1,y1,x2,y2))
return(line_segments)
def log_node(nodes, line, closest):
if len(nodes) == 0:
nodes.append((line,closest))
return(nodes)
def find_line_nodes(line_segments):
nodes = []
seg_list = []
rest = line_segments
for line in line_segments:
#print("LENLINE", len(line))
#print(line)
dist,angle,x1,y1,x2,y2 = line
closest, rest = sort_segs(x1,y1,rest)
#nodes = log_node(nodes, line, closest)
def sort_segs(x,y,seg_dist):
sorted_lines = sorted(seg_dist, key=lambda x: x[0])
#for line in sorted_lines:
# print ("SORTED LINE", line)
closest = []
rest = []
already_found = 0
for line in sorted_lines:
if len(line) == 6:
dist,angle,x1,y1,x2,y2 = line
else:
print("WTF!:", line)
seg_dist = calc_dist(x,y,x1,y1)
if seg_dist != 0 and already_found != 1:
closest.append((dist,angle,x1,y1,x2,y2))
else:
rest.append((dist,angle,x1,y1,x2,y2))
return(closest, rest)
def find_closest_segment(this_line,line_group):
seg_dist = []
dist, angle, x1,y1,x2,y2 = this_line
cx = (x1 + x2) / 2
cy = (y1 + y2) / 2
for line in line_group:
xdist, xangle, xx1,xy1,xx2,xy2 = line
xcx = (xx1 + xx2) / 2
xcy = (xy1 + xy2) / 2
dist = calc_dist(cx,cy,xcx,xcy)
if dist > 0:
seg_dist.append((dist, x1,y1,x2,y2))
sorted_lines = sorted(seg_dist, key=lambda x: x[0])
#for line in sorted_lines:
# print("CLOSEST SEGMENTS:", line)
def find_points_in_crop(crop,x,y,w,h):
print ("cropping")
go = 1
cnt_pts = []
thresh_limit = 250
canvas = np.zeros([480,640], dtype=crop.dtype)
canvas[y:y+h,x:x+w] = crop
for i in range(x,x+w):
for j in range(y,y+w):
if i % 5 == 0:
canvas[0:480,i:i+3] = 0
if j % 5 == 0:
canvas[j:j+3,0:640] = 0
#print ("CROP", crop.shape[0])
#if crop.shape[0] > 25:
#cv2.imshow('pepe', canvas)
#cv2.waitKey(1000)
last_cnts = []
while go == 1:
_, thresh = cv2.threshold(canvas, thresh_limit, 255, cv2.THRESH_BINARY)
(_, cnts, xx) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cnt_limit = int((w + h) / 20)
if cnt_limit < 5:
cnt_limit = 5
if cnt_limit > 25:
cnt_limit = 25
#print ("CNTS at thresh:", len(cnts), thresh_limit)
thresh_limit = thresh_limit - 2
if len(cnts) >= cnt_limit:
for (i,c) in enumerate(cnts):
x,y,w,h = cv2.boundingRect(cnts[i])
if w > 1 and h > 1:
cnt_pts.append((x,y,w,h))
if len(last_cnts) >= len(cnt_pts) and len(last_cnts) > cnt_limit:
#cnt_pts = last_cnts
go = 0
if thresh_limit < 5:
cnt_pts = last_cnts
go = 0
if len(cnts) > 70:
go = 0
#print ("CNTS: ", len(cnts))
#print ("LAST CNTS: ", len(last_cnts))
#print ("THRESH LIMIT: ", thresh_limit)
#cv2.imshow('pepe', thresh)
#cv2.waitKey(100)
last_cnts = cnt_pts
return(cnt_pts)
def best_fit(X, Y):
xbar = sum(X)/len(X)
ybar = sum(Y)/len(Y)
n = len(X) # or len(Y)
numer = sum([xi*yi for xi,yi in zip(X, Y)]) - n * xbar * ybar
denum = sum([xi**2 for xi in X]) - n * xbar**2
b = numer / denum
a = ybar - b * xbar
print('best fit line:\ny = {:.2f} + {:.2f}x'.format(a, b))
return a, b
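# Mask persistently bright regions found in the median frame, then difference
# the blurred current frame against the median so only transients remain.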
def diff_all(med_stack_all, background, median, before_image, current_image, after_image,filename ):
before_diff = cv2.absdiff(current_image.astype(current_image.dtype), before_image,)
after_diff = cv2.absdiff(current_image.astype(current_image.dtype), after_image,)
before_after_diff = cv2.absdiff(before_image.astype(current_image.dtype), after_image,)
median_three = np.median(np.array((before_image, after_image, current_image)), axis=0)
median = np.uint8(median)
median_sum = np.sum(median)
median_diff = cv2.absdiff(median_three.astype(current_image.dtype), median,)
blur_med = cv2.GaussianBlur(median, (5, 5), 0)
# find bright areas in median and mask them out of the current image
tm = find_best_thresh(blur_med, 30, 1)
_, median_thresh = cv2.threshold(blur_med, tm, 255, cv2.THRESH_BINARY)
#cv2.imshow('pepe', median_thresh)
#cv2.waitKey(1000)
(_, cnts, xx) = cv2.findContours(median_thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
hit = 0
real_cnts = []
print ("CNTS: ", len(cnts))
if len(cnts) < 1000:
for (i,c) in enumerate(cnts):
x,y,w,h = cv2.boundingRect(cnts[i])
if True:
w = w + 20
h = h + 20
x = x - 20
y = y - 20
if x < 0:
x = 0
if y < 0:
y = 0
if x+w > current_image.shape[1]:
x = current_image.shape[1]-1
if y+h > current_image.shape[0]:
y = current_image.shape[0]-1
if w > 0 and h > 0:
mask = current_image[y:y+h, x:x+w]
#cv2.rectangle(current_image, (x,y), (x+w+5, y+h+5), (255),1)
for xx in range(0, mask.shape[1]):
for yy in range(0, mask.shape[0]):
mask[yy,xx] = randint(0,6)
blur_mask = cv2.GaussianBlur(mask, (5, 5), 0)
current_image[y:y+h,x:x+w] = blur_mask
median[y:y+h,x:x+w] =blur_mask
# find the diff between the masked median and the masked current image
blur_cur = cv2.GaussianBlur(current_image, (5, 5), 0)
blur_med = cv2.GaussianBlur(median, (5, 5), 0)
cur_med_diff = cv2.absdiff(blur_cur.astype(blur_cur.dtype), blur_med,)
blend = cv2.addWeighted(current_image, .5, cur_med_diff, .5,0)
    cur_med_diff -= median
#line_groups, points, clusters = find_objects2(blend, "Current Median Diff Blend", current_image, filename)
return(blend, current_image, filename)
#return(line_groups, points)
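# Per-frame pipeline: background-subtract, diff against the median stack,
# extract candidate points, then classify them into tracks, stars and big contours.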
def inspect_image(med_stack_all, background, median, before_image, current_image, after_image, avg_cnt,avg_tot,avg_pts,filename):
rois = []
big_cnts = []
line_groups = []
orphan_lines = []
obj_points = []
stars = []
image_diff = cv2.absdiff(current_image.astype(current_image.dtype), background,)
orig_image = current_image
current_image = image_diff
blend, current_image, filename = diff_all(med_stack_all, background, median, before_image, current_image, after_image,filename)
    line_groups, points, clusters = find_objects2(blend, "Current Median Diff Blend", current_image, filename)
if len(points) > 2:
line_groups, orphan_lines, stars, obj_points, big_cnts = find_objects(0, points)
if len(obj_points) > 2:
line_groups, orphan_lines, stars2, obj_points, big_cnts = find_objects(0, obj_points)
stars = stars + stars2
print ("---FINAL ANALYSIS---")
print ("File: ", filename)
print ("Total Points: ", len(points))
print ("Line Groups: ", len(line_groups))
lg_points = 0
lg = 1
for line in line_groups:
print (" Group " + str(lg) + ": " + str(len(line)))
lg = lg + 1
lg_points = lg_points + len(line)
print ("Total Line Group Points: ", lg_points)
print ("Orphan Lines: ", len(line_groups))
print ("Stars: ", len(stars))
print ("Obj Points: ", len(obj_points))
print ("Big CNTS: ", len(big_cnts))
for x,y,w,h in big_cnts:
cv2.rectangle(blend, (x,y), (x+w+5, y+h+5), (255),1)
#for x,y,w,h in obj_points:
# if w > 25 or h > 25:
# cv2.rectangle(blend, (x,y), (x+w+5, y+h+5), (255),1)
# else:
# cv2.circle(blend, (x,y), 20, (120), 1)
#for x,y,w,h in stars:
# if w > 25 or h > 25:
# cv2.rectangle(blend, (x,y), (x+w+5, y+h+5), (255),1)
# else:
# cv2.circle(blend, (x,y), 5, (120), 1)
return(blend, points, line_groups, stars, obj_points, big_cnts)
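# Recover the capture timestamp that is encoded in the still/video file name.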
def parse_file_date(orig_video_file):
#print(orig_video_file)
if ".mp4" in orig_video_file:
stacked_image_fn = orig_video_file.replace(".mp4", "-stack.jpg")
star_image_fn = orig_video_file.replace(".mp4", "-stars.jpg")
report_fn = orig_video_file.replace(".mp4", "-stack-report.txt")
video_report = orig_video_file.replace(".mp4", "-report.txt")
trim_file = orig_video_file.replace(".mp4", "-trim.mp4")
else:
stacked_image_fn = orig_video_file.replace(".avi", "-stack.jpg")
trim_file = orig_video_file.replace(".avi", "-trim.avi")
star_image_fn = orig_video_file.replace(".avi", "-stars.jpg")
report_fn = orig_video_file.replace(".avi", "-stack-report.txt")
el = orig_video_file.split("/")
file_name = el[-1]
file_name = file_name.replace("_", "-")
file_name = file_name.replace(".", "-")
#print ("FN", file_name)
xyear, xmonth, xday, xhour, xmin, xsec, xcam_num, ftype, xext = file_name.split("-")
cam_num = xcam_num.replace("cam", "")
date_str = xyear + "-" + xmonth + "-" + xday + " " + xhour + ":" + xmin + ":" + xsec
capture_date = date_str
return(capture_date)
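# Decide day vs. night for the capture time at the configured site using
# pyephem; "night" means the sun sits below about -1 degree of altitude.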
def day_or_night(config, capture_date):
obs = ephem.Observer()
obs.pressure = 0
obs.horizon = '-0:34'
obs.lat = config['device_lat']
obs.lon = config['device_lng']
obs.date = capture_date
sun = ephem.Sun()
sun.compute(obs)
(sun_alt, x,y) = str(sun.alt).split(":")
saz = str(sun.az)
(sun_az, x,y) = saz.split(":")
#print ("SUN", sun_alt)
if int(sun_alt) < -1:
sun_status = "night"
else:
sun_status = "day"
return(sun_status, sun_alt)
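# Top-level driver: load one night of stacked stills for a camera, run the
# detection pipeline frame by frame, and write per-image and nightly reports.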
def diff_stills(sdate, cam_num):
med_last_objects = []
last_objects = deque(maxlen=5)
diffed_files = []
config = read_config("conf/config-1.txt")
video_dir = "/mnt/ams2/SD/"
images = []
images_orig = []
images_blend = []
images_info = []
count = 0
last_image = None
last_thresh_sum = 0
hits = 0
avg_cnt = 0
avg_tot = 0
avg_pts = 0
count = 0
glob_dir = video_dir + "proc/" + sdate + "/" + "*cam" + str(cam_num) + "-stacked.jpg"
report_file = video_dir + "proc/" + sdate + "/" + sdate + "-cam" + str(cam_num) + "-report.txt"
master_stack_file = video_dir + "proc/" + sdate + "/" + sdate + "-cam" + str(cam_num) + "-master_stack.jpg"
#cv2.namedWindow('pepe')
mask_file = "conf/mask-" + str(cam_num) + ".txt"
file_exists = Path(mask_file)
mask_exists = 0
still_mask = [0,0,0,0]
if (file_exists.is_file()):
print("File found.")
ms = open(mask_file)
for lines in ms:
line, jk = lines.split("\n")
exec(line)
ms.close()
mask_exists = 1
(sm_min_x, sm_max_x, sm_min_y, sm_max_y) = still_mask
diffs = 0
image_list = []
file_list = []
sorted_list = []
print ("Loading still images from ", glob_dir)
fp = open(report_file, "w")
for filename in (glob.glob(glob_dir)):
capture_date = parse_file_date(filename)
sun_status, sun_alt = day_or_night(config, capture_date)
if sun_status != 'day' and int(sun_alt) <= -5:
#print("NIGHTTIME", capture_date, filename, sun_status)
file_list.append(filename)
else:
print ("This is a daytime or dusk file")
sorted_list = sorted(file_list)
for filename in sorted_list:
open_cv_image = cv2.imread(filename,0)
orig_image = open_cv_image
images_orig.append(orig_image)
print(filename)
open_cv_image[440:480, 0:640] = [0]
if mask_exists == 1:
open_cv_image[sm_min_y:sm_max_y, sm_min_x:sm_max_x] = [0]
images.append(open_cv_image)
#exit()
#time.sleep(5)
height , width = open_cv_image.shape
master_stack = None
# Define the codec and create VideoWriter object
#fourcc = cv2.VideoWriter_fourcc(*'H264')
#out = cv2.VideoWriter(outfile,fourcc, 5, (width,height),1)
#med_stack_all = np.median(np.array(images[50:150]), axis=0)
med_stack_all = np.median(np.array(images), axis=0)
#cv2.imshow('pepe', cv2.convertScaleAbs(med_stack_all))
#cv2.waitKey(1000)
objects = None
last_line_groups = []
last_points = []
for filename in sorted_list:
hit = 0
detect = 0
el = filename.split("/")
fn = el[-1]
#this_image = cv2.imread(filename,1)
this_image = images[count]
if count >= 1:
before_image = images[count-1]
else:
before_image = images[count+2]
if count >= len(file_list)-1:
after_image = images[count-2]
else:
after_image = images[count+1]
if count < 25:
median = np.median(np.array(images[0:count+25]), axis=0)
elif len(images) - count < 25:
median = np.median(np.array(images[count-25:count]), axis=0)
else:
median = np.median(np.array(images[count-25:count]), axis=0)
if count < 10:
background = images[count+1]
for i in range (0,10):
background = cv2.addWeighted(background, .8, images[count+i], .2,0)
else:
background = images[count-1]
for i in range (0,10):
background = cv2.addWeighted(background, .8, images[count-i], .2,0)
img_rpt_file = filename.replace("-stacked.jpg", "-stack-report.txt")
img_report = open(img_rpt_file, "w")
(blend, points, line_groups, stars, obj_points, big_cnts) = inspect_image(med_stack_all, background, median, before_image, this_image, after_image, avg_cnt,avg_tot,avg_pts, filename)
master_stack = stack_stack(blend, master_stack)
img_report.write("points=" + str(points) + "\n")
img_report.write("line_groups=" + str(line_groups) + "\n")
img_report.write("stars=" + str(stars) + "\n")
img_report.write("obj_points=" + str(obj_points) + "\n")
img_report.write("big_cnts=" + str(big_cnts) + "\n")
img_report.close()
images_blend.append(blend)
images_info.append((points, line_groups, stars, obj_points, big_cnts))
# block out the detections in the master image to remove it from the running mask
last_line_group = line_groups
last_points = points
for x,y,w,h in last_points:
images[count][y:y+h,x:x+w] = 5
count = count + 1
if len(big_cnts) > 0 or len(obj_points) >= 3:
hits = hits + 1
#cv2.imshow('pepe', blend)
#if len(line_groups) >= 1 or len(obj_points) > 3 or len(big_cnts) > 0:
#cv2.waitKey(1)
# while(1):
# k = cv2.waitKey(33)
# if k == 32:
# break
# if k == 27:
# exit()
#else:
#cv2.waitKey(1)
data = filename + "," + str(len(line_groups)) + "," + str(len(obj_points)) + "," + str(len(big_cnts)) + "\n"
fp.write(data)
print ("TOTAL: ", len(file_list))
print ("HITS: ", hits)
fp.close()
if master_stack is not None:
print("saving", master_stack_file)
master_stack.save(master_stack_file, "JPEG")
else:
print("Failed.")
hits = 1
for count in range(0, len(sorted_list) - 1):
file = sorted_list[count]
el = file.split("/")
st = el[-1]
report_str = st.replace("-stacked.jpg", "-report.txt")
video_str = st.replace("-stacked.jpg", ".mp4")
video_file = file.replace("-stacked.jpg", ".mp4")
(points, line_groups, stars, obj_points, big_cnts) = images_info[count]
if len(obj_points) > 3 or len(big_cnts) > 0:
for bc in big_cnts:
(x,y,w,h) = bc
obj_points.append((x,y,5,5))
obj_points.append((x+w,y+h,5,5))
np_obj_points = np.array(obj_points)
max_x = np.max(np_obj_points[:,0])
max_y = np.max(np_obj_points[:,1])
min_x = np.min(np_obj_points[:,0])
min_y = np.min(np_obj_points[:,1])
myimg = cv2.imread(sorted_list[count],0)
cv2.rectangle(myimg, (min_x,min_y), (max_x, max_y), (255),1)
#cv2.imshow('pepe', myimg)
#cv2.waitKey(1)
print ("-------")
print ("Count:", count)
print ("Hit:", hits)
print ("File:", sorted_list[count])
print ("Points:", str(len(points)))
print ("Line Groups:", str(len(line_groups)))
gc = 1
for line_group in line_groups:
for dist, ang, x1,y1,w1,h1 in line_group:
print ("GROUP: ", gc, dist, ang, x1,y1,w1,h1)
gc = gc + 1
print ("Stars:", str(len(stars)))
print ("Obj Points:", str(len(obj_points)))
print ("Big Cnts:", str(len(big_cnts)))
print ("Min/Max X/Y:", str(min_x), str(min_y), str(max_x), str(max_y))
print ("-------")
hits = hits + 1
video_report = video_file.replace(".mp4", "-report.txt")
file_exists = Path(video_report)
if (file_exists.is_file()):
print ("Already processed the video.")
#else:
# print("./PV.py " + video_file + " " + cam_num)
# os.system("./PV.py " + video_file + " " + cam_num)
else :
min_x = min_y = max_x = max_y = 0
#cmd = "grep \"Motion Frames:\" `find /mnt/ams2/SD/" + str(cam_num) + " |grep " + report_str + "`"
#output = subprocess.check_output(cmd, shell=True).decode("utf-8")
#output = output.replace("Motion Frames:", "motion_frames=")
#print (output)
#exec(output)
#if len(motion_frames) > 14:
# cmd = "find /mnt/ams2/SD/" + str(cam_num) + " |grep " + video_str
# video_file = subprocess.check_output(cmd, shell=True).decode("utf-8")
# print("This is probably a real event?")
# print(video_file)
sdate = sys.argv[1]
cam_num = sys.argv[2]
diff_stills(sdate, cam_num)<|fim▁end|> | straight_line = a - b
if (straight_line < 1): |
<|file_name|>trait-alias.rs<|end_file_name|><|fim▁begin|>// run-pass
#![feature(trait_alias)]
pub trait Foo {}<|fim▁hole|><|fim▁end|> | pub trait FooAlias = Foo;
fn main() {} |
<|file_name|>0003_auto_20160717_1943.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
<|fim▁hole|> dependencies = [
('basicviz', '0002_auto_20160717_1939'),
]
operations = [
migrations.AlterField(
model_name='document',
name='name',
field=models.CharField(unique=True, max_length=32),
),
]<|fim▁end|> | class Migration(migrations.Migration):
|
<|file_name|>html-spec-reporter.ts<|end_file_name|><|fim▁begin|>import { Configuration } from "./configuration";
import { ConfigurationParser } from "./configuration-parser";
import { CustomReporterResult } from "./custom-reporter-result";
import { ExecutionDisplay } from "./execution-display";
import { ExecutionMetrics } from "./execution-metrics";
import CustomReporter = jasmine.CustomReporter;
import SuiteInfo = jasmine.SuiteInfo;
import RunDetails = jasmine.RunDetails;
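// Jasmine custom reporter that renders spec results as HTML, delegating
// presentation to ExecutionDisplay and timing/counting to ExecutionMetrics.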
export class HtmlSpecReporter implements CustomReporter {
private started: boolean = false;
private finished: boolean = false;
private display: ExecutionDisplay;
private metrics: ExecutionMetrics;
private configuration: Configuration;
constructor(configuration?: Configuration) {
this.configuration = ConfigurationParser.parse(configuration);
this.display = new ExecutionDisplay(this.configuration);
this.metrics = new ExecutionMetrics();
}<|fim▁hole|>
public jasmineStarted(suiteInfo: SuiteInfo): void {
this.started = true;
this.metrics.start(suiteInfo);
this.display.jasmineStarted(suiteInfo);
}
public jasmineDone(runDetails: RunDetails): void {
this.metrics.stop(runDetails);
this.display.summary(runDetails, this.metrics);
this.finished = true;
}
public suiteStarted(result: CustomReporterResult): void {
this.display.suiteStarted(result);
}
public suiteDone(result: CustomReporterResult): void {
this.display.suiteDone();
}
public specStarted(result: CustomReporterResult): void {
this.metrics.startSpec();
this.display.specStarted(result);
}
public specDone(result: CustomReporterResult): void {
this.metrics.stopSpec(result);
if (result.status === "pending") {
this.metrics.pendingSpecs++;
this.display.pending(result);
} else if (result.status === "passed") {
this.metrics.successfulSpecs++;
this.display.successful(result);
} else if (result.status === "failed") {
this.metrics.failedSpecs++;
this.display.failed(result);
}
this.display.specDone(result);
}
}<|fim▁end|> | |
<|file_name|>workerStatistics.d.ts<|end_file_name|><|fim▁begin|>/**
* This code was generated by
* \ / _ _ _| _ _
* | (_)\/(_)(_|\/| |(/_ v1.0.0
* / /
*/
import Page = require('../../../../../base/Page');
import Response = require('../../../../../http/response');
import V1 = require('../../../V1');
import { SerializableClass } from '../../../../../interfaces';
/**
* Initialize the WorkerStatisticsList
*
* @param version - Version of the resource
* @param workspaceSid - The SID of the Workspace that contains the WorkerChannel
* @param workerSid - The SID of the Worker that contains the WorkerChannel
*/
declare function WorkerStatisticsList(version: V1, workspaceSid: string, workerSid: string): WorkerStatisticsListInstance;
/**
* Options to pass to fetch
*
* @property endDate - Only include usage that occurred on or before this date
* @property minutes - Only calculate statistics since this many minutes in the past
* @property startDate - Only calculate statistics from on or after this date
* @property taskChannel - Only calculate statistics on this TaskChannel
*/
interface WorkerStatisticsInstanceFetchOptions {
endDate?: Date;
minutes?: number;
startDate?: Date;
taskChannel?: string;
}
interface WorkerStatisticsListInstance {
/**
* @param sid - sid of instance
*/
(sid: string): WorkerStatisticsContext;
/**
* Constructs a worker_statistics
*/
get(): WorkerStatisticsContext;
/**
* Provide a user-friendly representation
*/
toJSON(): any;
}
interface WorkerStatisticsPayload extends WorkerStatisticsResource, Page.TwilioResponsePayload {
}
interface WorkerStatisticsResource {
account_sid: string;
cumulative: object;
url: string;
worker_sid: string;
workspace_sid: string;
}
interface WorkerStatisticsSolution {
workerSid?: string;
workspaceSid?: string;
}
declare class WorkerStatisticsContext {
/**
* Initialize the WorkerStatisticsContext
*
* @param version - Version of the resource
* @param workspaceSid - The SID of the Workspace with the WorkerChannel to fetch
* @param workerSid - The SID of the Worker with the WorkerChannel to fetch
*/
constructor(version: V1, workspaceSid: string, workerSid: string);
/**
* fetch a WorkerStatisticsInstance
*
* @param callback - Callback to handle processed record
*/
fetch(callback?: (error: Error | null, items: WorkerStatisticsInstance) => any): Promise<WorkerStatisticsInstance>;
/**
* fetch a WorkerStatisticsInstance
*
* @param opts - Options for request
* @param callback - Callback to handle processed record
*/
fetch(opts?: WorkerStatisticsInstanceFetchOptions, callback?: (error: Error | null, items: WorkerStatisticsInstance) => any): Promise<WorkerStatisticsInstance>;
/**
* Provide a user-friendly representation<|fim▁hole|>
declare class WorkerStatisticsInstance extends SerializableClass {
/**
* Initialize the WorkerStatisticsContext
*
* @param version - Version of the resource
* @param payload - The instance payload
* @param workspaceSid - The SID of the Workspace that contains the WorkerChannel
* @param workerSid - The SID of the Worker that contains the WorkerChannel
*/
constructor(version: V1, payload: WorkerStatisticsPayload, workspaceSid: string, workerSid: string);
private _proxy: WorkerStatisticsContext;
accountSid: string;
cumulative: any;
/**
* fetch a WorkerStatisticsInstance
*
* @param callback - Callback to handle processed record
*/
fetch(callback?: (error: Error | null, items: WorkerStatisticsInstance) => any): Promise<WorkerStatisticsInstance>;
/**
* fetch a WorkerStatisticsInstance
*
* @param opts - Options for request
* @param callback - Callback to handle processed record
*/
fetch(opts?: WorkerStatisticsInstanceFetchOptions, callback?: (error: Error | null, items: WorkerStatisticsInstance) => any): Promise<WorkerStatisticsInstance>;
/**
* Provide a user-friendly representation
*/
toJSON(): any;
url: string;
workerSid: string;
workspaceSid: string;
}
declare class WorkerStatisticsPage extends Page<V1, WorkerStatisticsPayload, WorkerStatisticsResource, WorkerStatisticsInstance> {
/**
* Initialize the WorkerStatisticsPage
*
* @param version - Version of the resource
* @param response - Response from the API
* @param solution - Path solution
*/
constructor(version: V1, response: Response<string>, solution: WorkerStatisticsSolution);
/**
* Build an instance of WorkerStatisticsInstance
*
* @param payload - Payload response from the API
*/
getInstance(payload: WorkerStatisticsPayload): WorkerStatisticsInstance;
/**
* Provide a user-friendly representation
*/
toJSON(): any;
}
export { WorkerStatisticsContext, WorkerStatisticsInstance, WorkerStatisticsInstanceFetchOptions, WorkerStatisticsList, WorkerStatisticsListInstance, WorkerStatisticsPage, WorkerStatisticsPayload, WorkerStatisticsResource, WorkerStatisticsSolution }<|fim▁end|> | */
toJSON(): any;
} |