file_name (string, 3–137 chars) | prefix (string, 0–918k chars) | suffix (string, 0–962k chars) | middle (string, 0–812k chars)
---|---|---|---
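Each row splits one source file into a `prefix`, a `suffix`, and the held-out `middle` (fill-in-the-middle format); the original file is recovered as prefix + middle + suffix. A minimal reassembly sketch, assuming rows are provided as dicts keyed by the column names above:

```python
def reassemble(row: dict) -> str:
    # The middle is the span that was cut out between prefix and suffix.
    return row["prefix"] + row["middle"] + row["suffix"]

row = {"file_name": "hello.py", "prefix": "print(", "middle": "'hi'", "suffix": ")\n"}
assert reassemble(row) == "print('hi')\n"
```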
apsaracloudreport.go | // Copyright 2019 Yunion
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package apsaramon
import (
"yunion.io/x/jsonutils"
"yunion.io/x/onecloud/pkg/apis/compute"
"yunion.io/x/onecloud/pkg/cloudmon/collectors/common"
"yunion.io/x/onecloud/pkg/mcclient"
"yunion.io/x/onecloud/pkg/mcclient/modules"
)
func init() {
factory := SApsaraCloudReportFactory{}
common.RegisterFactory(&factory)
}
type SApsaraCloudReportFactory struct {
}
func (self *SApsaraCloudReportFactory) NewCloudReport(provider *common.SProvider, session *mcclient.ClientSession,
args *common.ReportOptions, operatorType string) common.ICloudReport {
return &SApsaraCloudReport{
common.CloudReportBase{
SProvider: provider,
Session: session,
Args: args,
Operator: operatorType,
},
}
}
func (self *SApsaraCloudReportFactory) GetId() string {
return compute.CLOUD_PROVIDER_APSARA
}
type SApsaraCloudReport struct {
common.CloudReportBase
}
func (self *SApsaraCloudReport) Report() error {
var servers []jsonutils.JSONObject
var err error
switch self.Operator {
case "redis":
servers, err = self.GetAllserverOfThisProvider(&modules.ElasticCache)
case "rds":
servers, err = self.GetAllserverOfThisProvider(&modules.DBInstance)
case "oss":
servers, err = self.GetAllserverOfThisProvider(&modules.Buckets)
case "elb":
servers, err = self.GetAllserverOfThisProvider(&modules.Loadbalancers)
default:
servers, err = self.GetAllserverOfThisProvider(&modules.Servers)
}
if err != nil {
return err
}
providerInstance, err := self.InitProviderInstance()
if err != nil {
return err
}
regionList, regionServerMap, err := self.GetAllRegionOfServers(servers, providerInstance)
if err != nil {
return err
}
for _, region := range regionList {
servers := regionServerMap[region.GetGlobalId()]
switch self.Operator {
case "server":
err = self.collectRegionMetricOfHost(region, servers)
case "redis":
err = self.collectRegionMetricOfRedis(region, servers)
case "rds":
err = self.collectRegionMetricOfRds(region, servers)
case "oss":
err = self.collectRegionMetricOfOss(region, servers)
case "elb":
err = self.collectRegionMetricOfElb(region, servers)
}
if err != nil {
return err
}
}
return nil
}
token-error-correct-2.rs | // Test that we do some basic error correction in the tokeniser (and don't ICE).
fn main() {
if foo {
//~^ ERROR: cannot find value `foo`
) //~ ERROR: mismatched closing delimiter: `)`
}
departure.rs | // Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use btest;
use habitat_butterfly::member::Health;
use habitat_butterfly::rumor::departure::Departure;
use habitat_butterfly::client::Client;
#[test]
fn two_members_share_departures() {
let mut net = btest::SwimNet::new(2);
net.mesh();
net.add_departure(0);
net.wait_for_gossip_rounds(1);
net[1].departure_store.with_rumor(
"departure",
net[0].member_id(),
|u| assert!(u.is_some()),
);
}
#[test]
fn departure_via_client() {
let mut net = btest::SwimNet::new(3);
net.mesh();
net.wait_for_gossip_rounds(1);
let mut client =
Client::new(net[0].gossip_addr(), None).expect("Cannot create Butterfly Client");
client
.send_departure(String::from(net[1].member_id()))
.expect("Cannot send the departure");
net.wait_for_gossip_rounds(1);
net[2].departure_store.with_rumor(
"departure",
net[1].member_id(),
|u| assert!(u.is_some()),
);
assert_wait_for_health_of!(net, 1, Health::Departed);
}
chrome.go | package chrome
import (
"context"
"os"
"os/exec"
"strings"
"syscall"
"time"
"github.com/mafredri/cdp/devtool"
"github.com/thecodingmachine/gotenberg/internal/pkg/xerror"
"github.com/thecodingmachine/gotenberg/internal/pkg/xexec"
"github.com/thecodingmachine/gotenberg/internal/pkg/xlog"
"github.com/thecodingmachine/gotenberg/internal/pkg/xtime"
)
// Start starts Google Chrome headless in background.
func Start(logger xlog.Logger) error {
const op string = "chrome.Start"
logger.DebugOp(op, "starting new Google Chrome headless process on port 9222...")
resolver := func() error {
cmd, err := cmd(logger)
if err != nil {
return err
}
// we try to start the process.
xexec.LogBeforeExecute(logger, cmd)
if err := cmd.Start(); err != nil {
return err
}
// if the process failed to start correctly,
// we have to restart it.
if !isViable(logger) {
return restart(logger, cmd.Process)
}
return nil
}
if err := resolver(); err != nil {
return xerror.New(op, err)
}
return nil
}
func cmd(logger xlog.Logger) (*exec.Cmd, error) {
const op string = "chrome.cmd"
binary := "google-chrome-stable"
args := []string{
"--no-sandbox",
"--headless",
// see https://github.com/GoogleChrome/puppeteer/issues/2410.
"--font-render-hinting=medium",
"--remote-debugging-port=9222",
"--disable-gpu",
"--disable-translate",
"--disable-extensions",
"--disable-background-networking",
"--safebrowsing-disable-auto-update",
"--disable-sync",
"--disable-default-apps",
"--hide-scrollbars",
"--metrics-recording-only",
"--mute-audio",
"--no-first-run",
}
cmd, err := xexec.Command(logger, binary, args...)
if err != nil {
return nil, xerror.New(op, err)
}
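// run Chrome in its own process group so that kill() can signal the whole group via the negative pid.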
cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
return cmd, nil
}
func kill(logger xlog.Logger, proc *os.Process) error {
const op string = "chrome.kill"
logger.DebugOp(op, "killing Google Chrome headless process using port 9222...")
resolver := func() error {
err := syscall.Kill(-proc.Pid, syscall.SIGKILL)
if err == nil {
return nil
}
if strings.Contains(err.Error(), "no such process") {
return nil
}
return err
}
if err := resolver(); err != nil {
return xerror.New(op, err)
}
return nil
}
func restart(logger xlog.Logger, proc *os.Process) error {
const op string = "chrome.restart"
logger.DebugOp(op, "restarting Google Chrome headless process using port 9222...")
resolver := func() error {
// kill the existing process first.
if err := kill(logger, proc); err != nil {
return err
}
cmd, err := cmd(logger)
if err != nil {
return err
}
// we try to restart the process.
xexec.LogBeforeExecute(logger, cmd)
if err := cmd.Start(); err != nil {
return err
}
// if the process failed to restart correctly,
// we have to restart it again.
if !isViable(logger) {
return restart(logger, cmd.Process)
}
return nil
}
if err := resolver(); err != nil {
return xerror.New(op, err)
}
return nil
}
func isViable(logger xlog.Logger) bool {
const (
op string = "chrome.isViable"
maxViabilityTests int = 20
)
viable := func() bool {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
endpoint := "http://localhost:9222"
logger.DebugfOp(
op,
"checking Google Chrome headless process viability via endpoint '%s/json/version'",
endpoint,
)
v, err := devtool.New(endpoint).Version(ctx)
if err != nil {
logger.DebugfOp(
op,
"Google Chrome headless is not viable as endpoint returned '%v'",
err.Error(),
)
return false
}
logger.DebugfOp(
op,
"Google Chrome headless is viable as endpoint returned '%v'",
v,
)
return true
}
result := false
for i := 0; i < maxViabilityTests && !result; i++ {
warmup(logger)
result = viable()
}
return result
}
func warmup(logger xlog.Logger) {
const op string = "chrome.warmup"
warmupTime := xtime.Duration(0.5)
logger.DebugfOp(
op,
"waiting '%v' to allow Google Chrome to warm up",
warmupTime,
)
time.Sleep(warmupTime)
}
maxbody.go | package middleware
import "github.com/gofiber/fiber/v2"
func MaxBody(size int) fiber.Handler {
return func(c *fiber.Ctx) error {
if len(c.Body()) >= size {
return fiber.ErrRequestEntityTooLarge
}
return c.Next()
}
}
netease.py | #!/usr/bin/env python
__all__ = ['netease_download']
from ..common import *
from ..common import print_more_compatible as print
from ..util import fs
from json import loads
import hashlib
import base64
import os
def netease_hymn():
return """
player's Game Over,
u can abandon.
u get pissed,
get pissed,
Hallelujah my King!
errr oh! fuck ohhh!!!!
"""
def netease_cloud_music_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
rid = match1(url, r'\Wid=(.*)')
if rid is None:
rid = match1(url, r'/(\d+)/?')
if "album" in url:
j = loads(get_content("http://music.163.com/api/album/%s?id=%s&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
artist_name = j['album']['artists'][0]['name']
album_name = j['album']['name'].strip()
new_dir = output_dir + '/' + fs.legitimize("%s - %s" % (artist_name, album_name))
if not info_only:
if not os.path.exists(new_dir):
os.mkdir(new_dir)
cover_url = j['album']['picUrl']
download_urls([cover_url], "cover", "jpg", 0, new_dir)
for i in j['album']['songs']:
netease_song_download(i, output_dir=new_dir, info_only=info_only)
try: # download lyrics
assert kwargs['caption']
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % i['id'], headers={"Referer": "http://music.163.com/"}))
netease_lyric_download(i, l["lrc"]["lyric"], output_dir=new_dir, info_only=info_only)
except: pass
elif "playlist" in url:
j = loads(get_content("http://music.163.com/api/playlist/detail?id=%s&csrf_token=" % rid, headers={"Referer": "http://music.163.com/"}))
new_dir = output_dir + '/' + fs.legitimize(j['result']['name'])
if not info_only:
if not os.path.exists(new_dir):
os.mkdir(new_dir)
cover_url = j['result']['coverImgUrl']
download_urls([cover_url], "cover", "jpg", 0, new_dir)
prefix_width = len(str(len(j['result']['tracks'])))
for n, i in enumerate(j['result']['tracks']):
playlist_prefix = '%%.%dd_' % prefix_width % n
netease_song_download(i, output_dir=new_dir, info_only=info_only, playlist_prefix=playlist_prefix)
try: # download lyrics
assert kwargs['caption']
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % i['id'], headers={"Referer": "http://music.163.com/"}))
netease_lyric_download(i, l["lrc"]["lyric"], output_dir=new_dir, info_only=info_only, playlist_prefix=playlist_prefix)
except: pass
elif "song" in url:
j = loads(get_content("http://music.163.com/api/song/detail/?id=%s&ids=[%s]&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
netease_song_download(j["songs"][0], output_dir=output_dir, info_only=info_only)
try: # download lyrics
assert kwargs['caption']
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % rid, headers={"Referer": "http://music.163.com/"}))
netease_lyric_download(j["songs"][0], l["lrc"]["lyric"], output_dir=output_dir, info_only=info_only)
except: pass
elif "program" in url:
j = loads(get_content("http://music.163.com/api/dj/program/detail/?id=%s&ids=[%s]&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
netease_song_download(j["program"]["mainSong"], output_dir=output_dir, info_only=info_only)
elif "radio" in url:
j = loads(get_content("http://music.163.com/api/dj/program/byradio/?radioId=%s&ids=[%s]&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
for i in j['programs']:
netease_song_download(i["mainSong"],output_dir=output_dir, info_only=info_only)
elif "mv" in url:
j = loads(get_content("http://music.163.com/api/mv/detail/?id=%s&ids=[%s]&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
netease_video_download(j['data'], output_dir=output_dir, info_only=info_only)
def netease_lyric_download(song, lyric, output_dir='.', info_only=False, playlist_prefix=""):
if info_only: return
title = "%s%s. %s" % (playlist_prefix, song['position'], song['name'])
filename = '%s.lrc' % get_filename(title)
print('Saving %s ...' % filename, end="", flush=True)
with open(os.path.join(output_dir, filename),
'w', encoding='utf-8') as x:
x.write(lyric)
print('Done.')
def netease_video_download(vinfo, output_dir='.', info_only=False):
title = "%s - %s" % (vinfo['name'], vinfo['artistName'])
url_best = sorted(vinfo["brs"].items(), reverse=True,
key=lambda x: int(x[0]))[0][1]
netease_download_common(title, url_best,
output_dir=output_dir, info_only=info_only)
def netease_song_download(song, output_dir='.', info_only=False, playlist_prefix=""):
title = "%s%s. %s" % (playlist_prefix, song['position'], song['name'])
songNet = 'p' + song['mp3Url'].split('/')[2][1:]
if 'hMusic' in song and song['hMusic'] != None:
url_best = make_url(songNet, song['hMusic']['dfsId'])
elif 'mp3Url' in song:
url_best = song['mp3Url']
elif 'bMusic' in song:
url_best = make_url(songNet, song['bMusic']['dfsId'])
netease_download_common(title, url_best,
output_dir=output_dir, info_only=info_only)
def netease_download_common(title, url_best, output_dir, info_only):
songtype, ext, size = url_info(url_best)
print_info(site_info, title, songtype, size)
if not info_only:
download_urls([url_best], title, ext, size, output_dir)
def netease_download(url, output_dir = '.', merge = True, info_only = False, **kwargs):
if "163.fm" in url:
url = get_location(url)
if "music.163.com" in url:
netease_cloud_music_download(url, output_dir, merge, info_only, **kwargs)
else:
html = get_decoded_html(url)
title = r1('movieDescription=\'([^\']+)\'', html) or r1('<title>(.+)</title>', html)
if title[0] == ' ':
title = title[1:]
src = r1(r'<source src="([^"]+)"', html) or r1(r'<source type="[^"]+" src="([^"]+)"', html)
if src:
url = src
_, ext, size = url_info(src)
#sd_url = r1(r'(.+)-mobile.mp4', src) + ".flv"
#hd_url = re.sub('/SD/', '/HD/', sd_url)
else:
url = (r1(r'["\'](.+)-list.m3u8["\']', html) or r1(r'["\'](.+).m3u8["\']', html)) + ".mp4"
_, _, size = url_info(url)
ext = 'mp4'
print_info(site_info, title, ext, size)
if not info_only:
download_urls([url], title, ext, size, output_dir = output_dir, merge = merge)
def encrypted_id(dfsId):
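# Derive a fixed XOR key from the first character of each word of the "hymn"
# above, XOR it cyclically into the decimal digits of dfsId, then return the
# URL-safe base64 of the MD5 digest.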
x = [ord(i[0]) for i in netease_hymn().split()]
y = ''.join([chr(i - 61) if i > 96 else chr(i + 32) for i in x])
byte1 = bytearray(y, encoding='ascii')
byte2 = bytearray(str(dfsId), encoding='ascii')
for i in range(len(byte2)):
byte2[i] ^= byte1[i % len(byte1)]
m = hashlib.md5()
m.update(byte2)
result = base64.b64encode(m.digest()).decode('ascii')
result = result.replace('/', '_')
result = result.replace('+', '-')
return result
def make_url(songNet, dfsId):
encId = encrypted_id(dfsId)
mp3_url = "http://%s/%s/%s.mp3" % (songNet, encId, dfsId)
return mp3_url
site_info = "163.com"
download = netease_download
download_playlist = playlist_not_supported('netease')
test_Dump.py | #!/usr/bin/env python
"""
.. py:currentmodule:: FileFormat.Results.test_Dump
.. moduleauthor:: Hendrix Demers <[email protected]>
Tests for module `Dump`
"""
# Script information for the file.
__author__ = "Hendrix Demers ([email protected])"
__version__ = ""
__date__ = ""
__copyright__ = "Copyright (c) 2012 Hendrix Demers"
__license__ = ""
# Subversion information for the file.
__svnRevision__ = "$Revision$"
__svnDate__ = "$Date$"
__svnId__ = "$Id$"
# Standard library modules.
import unittest
import logging
import os
# Third party modules.
# Local modules.
# Project modules
import pymcxray.FileFormat.Results.Dump as Dump
# Globals and constants variables.
class TestDump(unittest.TestCase):
"""
TestCase class for the module `Dump`.
"""
def setUp(self):
"""
Setup method.
"""
unittest.TestCase.setUp(self)
self.testDataPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../test_data"))
def tearDown(self):
"""
Teardown method.
"""
unittest.TestCase.tearDown(self)
def testSkeleton(self):
"""
First test to check if the testcase is working with the testing framework.
"""
#self.fail("Test if the testcase is working.")
self.assertTrue(True)
def test_read(self):
"""
Tests for method `read`.
"""
filepath = os.path.join(self.testDataPath, "autoSavedFiles", "McXRayDUMP.txt")
dumpFile = Dump.Dump()
dumpFile.read(filepath)
#self.fail("Test if the testcase is working.")
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
from pymcxray.Testings import runTestModuleWithCoverage
runTestModuleWithCoverage(__file__)
abstractFactory_test.go | package abstractFactory
func getMainAndDetail(factory DAOFactory) {
factory.CreateOrderMainDAO().SaveOrderMain()
factory.CreateOrderDetailDAO().SaveOrderDetail()
}
func ExampleRdbFactory() {
var factory DAOFactory
factory = &RDBDAOFactory{}
getMainAndDetail(factory)
// Output:
// rdb main save
// rdb detail save
}
func ExampleXmlFactory() {
var factory DAOFactory
factory = &XMLDAOFactory{}
getMainAndDetail(factory)
// Output:
// xml main save
// xml detail save
}
admin.py | from django.contrib import admin
from .models import Action
@admin.register(Action)
class ActionAdmin(admin.ModelAdmin):
list_display = ('user', 'verb', 'target', 'created')
list_filter = ('created',)
search_fields = ('verb',)
fd1a0ef7.0115dc8c.js | (window.webpackJsonp=window.webpackJsonp||[]).push([[267],{323:function(e,r,t){"use strict";t.r(r),t.d(r,"frontMatter",(function(){return c})),t.d(r,"metadata",(function(){return i})),t.d(r,"rightToc",(function(){return l})),t.d(r,"default",(function(){return s}));var n=t(2),o=t(6),a=(t(0),t(333)),c={id:"error",title:"This should be the Error UI"},i={unversionedId:"fallback/error",id:"fallback/error",isDocsHomePage:!1,title:"This should be the Error UI",description:"You ended up here because you did not set the following configuration value:",source:"@site/docs/fallback/error.mdx",slug:"/fallback/error",permalink:"/kratos/docs/next/fallback/error",editUrl:"https://github.com/ory/kratos/edit/master/docs/docs/fallback/error.mdx",version:"current",lastUpdatedBy:"hackerman",lastUpdatedAt:1593616768},l=[],u={rightToc:l};function s(e){var r=e.components,t=Object(o.a)(e,["components"]);return Object(a.b)("wrapper",Object(n.a)({},u,t,{components:r,mdxType:"MDXLayout"}),Object(a.b)("p",null,"You ended up here because you did not set the following configuration value:"),Object(a.b)("pre",null,Object(a.b)("code",Object(n.a)({parentName:"pre"},{className:"language-yaml",metastring:'title="path/to/kratos/config.yml',title:'"path/to/kratos/config.yml'}),"selfservice:\n flows:\n error:\n ui_url: http://my-app.com/error\n")),Object(a.b)("p",null,"You can set this configuration value using environment variable\n",Object(a.b)("inlineCode",{parentName:"p"},"SELFSERVICE_FLOWS_ERROR_UI_URL")," as well!"),Object(a.b)("p",null,"If you don't know what that means, head over to\n",Object(a.b)("a",Object(n.a)({parentName:"p"},{href:"/kratos/docs/next/self-service/flows/user-facing-errors"}),"User-Facing Error"),"!"))}s.isMDXComponent=!0},333:function(e,r,t){"use strict";t.d(r,"a",(function(){return p})),t.d(r,"b",(function(){return d}));var n=t(0),o=t.n(n);function a(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function c(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);r&&(n=n.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,n)}return t}function i(e){for(var r=1;r<arguments.length;r++){var t=null!=arguments[r]?arguments[r]:{};r%2?c(Object(t),!0).forEach((function(r){a(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):c(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function l(e,r){if(null==e)return{};var t,n,o=function(e,r){if(null==e)return{};var t,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)t=a[n],r.indexOf(t)>=0||(o[t]=e[t]);return o}(e,r);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)t=a[n],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var u=o.a.createContext({}),s=function(e){var r=o.a.useContext(u),t=r;return e&&(t="function"==typeof e?e(r):i(i({},r),e)),t},p=function(e){var r=s(e.components);return o.a.createElement(u.Provider,{value:r},e.children)},f={inlineCode:"code",wrapper:function(e){var r=e.children;return o.a.createElement(o.a.Fragment,{},r)}},b=o.a.forwardRef((function(e,r){var t=e.components,n=e.mdxType,a=e.originalType,c=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),p=s(t),b=n,d=p["".concat(c,".").concat(b)]||p[b]||f[b]||a;return 
t?o.a.createElement(d,i(i({ref:r},u),{},{components:t})):o.a.createElement(d,i({ref:r},u))}));function d(e,r){var t=arguments,n=r&&r.mdxType;if("string"==typeof e||n){var a=t.length,c=new Array(a);c[0]=b;var i={};for(var l in r)hasOwnProperty.call(r,l)&&(i[l]=r[l]);i.originalType=e,i.mdxType="string"==typeof e?e:n,c[1]=i;for(var u=2;u<a;u++)c[u]=t[u];return o.a.createElement.apply(null,c)}return o.a.createElement.apply(null,t)}b.displayName="MDXCreateElement"}}]); |
index.js | import { app, BrowserWindow, globalShortcut } from 'electron' // eslint-disable-line
/**
* Set `__static` path to static files in production
* https://simulatedgreg.gitbooks.io/electron-vue/content/en/using-static-assets.html
*/
if (process.env.NODE_ENV !== 'development') {
global.__static = require('path').join(__dirname, '/static').replace(/\\/g, '\\\\') // eslint-disable-line
}
let mainWindow
const winURL = process.env.NODE_ENV === 'development'
? `http://localhost:9080`
: `file://${__dirname}/index.html`
function createWindow () {
/**
* Initial window options
*/
mainWindow = new BrowserWindow({
height: 563,
useContentSize: true,
width: 465
})
mainWindow.loadURL(winURL)
mainWindow.on('closed', () => {
mainWindow = null
})
globalShortcut.register('VolumeUp', () => {
mainWindow.webContents.send('mp-volume-up');
})
globalShortcut.register('VolumeDown', () => {
mainWindow.webContents.send('mp-volume-down');
})
globalShortcut.register('VolumeMute', () => {
mainWindow.webContents.send('mp-volume-mute');
})
globalShortcut.register('MediaNextTrack', () => {
mainWindow.webContents.send('mp-media-next');
})
globalShortcut.register('MediaPreviousTrack', () => {
mainWindow.webContents.send('mp-media-prev');
})
globalShortcut.register('MediaPlayPause', () => {
mainWindow.webContents.send('mp-play-pause');
})
}
app.on('ready', createWindow)
app.on('window-all-closed', () => {
if (process.platform !== 'darwin') {
app.quit()
}
})
app.on('activate', () => {
if (mainWindow === null) {
createWindow()
}
})
/**
* Auto Updater
*
* Uncomment the following code below and install `electron-updater` to
* support auto updating. Code Signing with a valid certificate is required.
* https://simulatedgreg.gitbooks.io/electron-vue/content/en/using-electron-builder.html#auto-updating
*/
/*
import { autoUpdater } from 'electron-updater'
autoUpdater.on('update-downloaded', () => {
autoUpdater.quitAndInstall()
})
app.on('ready', () => {
if (process.env.NODE_ENV === 'production') autoUpdater.checkForUpdates()
})
*/
evaluate_sql.py | # coding=utf8
import os
import re
import json
import argparse
from sql.evaluator import compare_sqls
def evaluate(path, timeout=120):
with open(path, 'r') as f:
predictions = json.load(f)
total = len(predictions)
correct = 0
for pidx, p in enumerate(predictions):
truth = p['truth_logical_form']
pred = p['predicted_logical_form']
if compare_sqls(truth, pred):
correct += 1
print("Total: %d, Correct: %d, Accuracy: %f" %
(total, correct, float(correct / total)))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--predictions', help='file that stores the prediction results', required=True)
args = parser.parse_args()
evaluate(args.predictions)
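Inferred from the field accesses in `evaluate()` above, the predictions file is a JSON list of objects carrying the gold and predicted SQL under two keys; the SQL strings here are illustrative:

```python
import json

# Illustrative predictions file; evaluate() reads only these two keys.
predictions = [
    {"truth_logical_form": "SELECT name FROM city;",
     "predicted_logical_form": "SELECT name FROM city;"},
]
with open("predictions.json", "w") as f:
    json.dump(predictions, f)
```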
clipboardadmin.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.conf.urls import url
from django.contrib import admin
from django.forms.models import modelform_factory
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from .. import settings as filer_settings
from ..models import Clipboard, ClipboardItem, Folder
from ..utils.files import (
UploadException, handle_request_files_upload, handle_upload,
)
from ..utils.loader import load_model
from . import views
NO_FOLDER_ERROR = "Can't find folder to upload. Please refresh and try again"
NO_PERMISSIONS_FOR_FOLDER = (
"Can't use this folder, Permission Denied. Please select another folder."
)
Image = load_model(filer_settings.FILER_IMAGE_MODEL)
# ModelAdmins
class ClipboardItemInline(admin.TabularInline):
model = ClipboardItem
class ClipboardAdmin(admin.ModelAdmin):
model = Clipboard
inlines = [ClipboardItemInline]
filter_horizontal = ('files',)
raw_id_fields = ('user',)
verbose_name = "DEBUG Clipboard"
verbose_name_plural = "DEBUG Clipboards"
def get_urls(self):
return [
url(r'^operations/paste_clipboard_to_folder/$',
self.admin_site.admin_view(views.paste_clipboard_to_folder),
name='filer-paste_clipboard_to_folder'),
url(r'^operations/discard_clipboard/$',
self.admin_site.admin_view(views.discard_clipboard),
name='filer-discard_clipboard'),
url(r'^operations/delete_clipboard/$',
self.admin_site.admin_view(views.delete_clipboard),
name='filer-delete_clipboard'),
url(r'^operations/upload/(?P<folder_id>[0-9]+)/$',
ajax_upload,
name='filer-ajax_upload'),
url(r'^operations/upload/no_folder/$',
ajax_upload,
name='filer-ajax_upload'),
] + super(ClipboardAdmin, self).get_urls()
def get_model_perms(self, *args, **kwargs):
"""
It seems this is only used for the list view. NICE :-)
"""
return {
'add': False,
'change': False,
'delete': False,
}
@csrf_exempt
def ajax_upload(request, folder_id=None):
"""
Receives an upload from the uploader. Receives only one file at a time.
"""
folder = None
if folder_id:
try:
# Get folder
folder = Folder.objects.get(pk=folder_id)
except Folder.DoesNotExist:
return JsonResponse({'error': NO_FOLDER_ERROR})
# check permissions
if folder and not folder.has_add_children_permission(request):
return JsonResponse({'error': NO_PERMISSIONS_FOR_FOLDER})
try:
if len(request.FILES) == 1:
# dont check if request is ajax or not, just grab the file
upload, filename, is_raw = handle_request_files_upload(request)
else:
# else process the request as usual
upload, filename, is_raw = handle_upload(request)
# TODO: Deprecated/refactor
# Get clipboard
# clipboard = Clipboard.objects.get_or_create(user=request.user)[0]
# find the file type
for filer_class in filer_settings.FILER_FILE_MODELS:
FileSubClass = load_model(filer_class)
# TODO: What if there are more than one that qualify?
if FileSubClass.matches_file_type(filename, upload, request):
FileForm = modelform_factory(
model=FileSubClass,
fields=('original_filename', 'owner', 'file')
)
break
uploadform = FileForm({'original_filename': filename,
'owner': request.user.pk},
{'file': upload})
if uploadform.is_valid():
file_obj = uploadform.save(commit=False)
# Enforce the FILER_IS_PUBLIC_DEFAULT
file_obj.is_public = filer_settings.FILER_IS_PUBLIC_DEFAULT
file_obj.folder = folder
file_with_thumbs = None
data = {}
file_obj.save()
# TODO: Deprecated/refactor
# clipboard_item = ClipboardItem(
# clipboard=clipboard, file=file_obj)
# clipboard_item.save()
# Try to generate thumbnails.
if not file_obj.icons:
if file_obj.extension not in filer_settings.FILER_FILE_EXTENSION_NOTHUMBS:
# There is no point to continue, as we can't generate
# thumbnails for this file. Usual reasons: bad format or
# filename.
file_obj.delete()
# This would be logged in BaseImage._generate_thumbnails()
# if FILER_ENABLE_LOGGING is on.
file_with_thumbs = True
return JsonResponse(
{'error': 'failed to generate icons for file'},
status=500,
)
else:
file_with_thumbs = True
if file_with_thumbs:
# Backwards compatibility: try to get specific icon size (32px)
# first. Then try medium icon size (they are already sorted),
# fallback to the first (smallest) configured icon.
thumbnail = None
for size in (['32']
+ filer_settings.FILER_ADMIN_ICON_SIZES[1::-1]):
try:
thumbnail = file_obj.icons[size]
break
except KeyError:
continue
# prepare preview thumbnail
if type(file_obj) == Image:
thumbnail_180_options = {
'size': (180, 180),
'crop': True,
'upscale': True,
}
thumbnail_180 = file_obj.file.get_thumbnail(
thumbnail_180_options)
data_thumbs = {
'thumbnail': thumbnail,
'thumbnail_180': thumbnail_180.url
}
data.update(data_thumbs)
data_common = {
'alt_text': '',
'label': str(file_obj),
'file_id': file_obj.pk,
'original_image': file_obj.url
}
data.update(data_common)
return JsonResponse(data)
else:
form_errors = '; '.join(['%s: %s' % (
field,
', '.join(errors)) for field, errors in list(
uploadform.errors.items())
])
raise UploadException(
"AJAX request not valid: form invalid '%s'" % (
form_errors,))
except UploadException as e:
return JsonResponse({'error': str(e)}, status=500)
enum.py | import sys
from types import MappingProxyType, DynamicClassAttribute
__all__ = [
'EnumMeta',
'Enum', 'IntEnum', 'Flag', 'IntFlag',
'auto', 'unique',
]
def _is_descriptor(obj):
"""Returns True if obj is a descriptor, False otherwise."""
return (
hasattr(obj, '__get__') or
hasattr(obj, '__set__') or
hasattr(obj, '__delete__'))
def _is_dunder(name):
"""Returns True if a __dunder__ name, False otherwise."""
return (name[:2] == name[-2:] == '__' and
name[2:3] != '_' and
name[-3:-2] != '_' and
len(name) > 4)
def _is_sunder(name):
"""Returns True if a _sunder_ name, False otherwise."""
return (name[0] == name[-1] == '_' and
name[1:2] != '_' and
name[-2:-1] != '_' and
len(name) > 2)
def _make_class_unpicklable(cls):
"""Make the given class un-picklable."""
def _break_on_call_reduce(self, proto):
raise TypeError('%r cannot be pickled' % self)
cls.__reduce_ex__ = _break_on_call_reduce
cls.__module__ = '<unknown>'
_auto_null = object()
class auto:
"""
Instances are replaced with an appropriate value in Enum class suites.
"""
value = _auto_null
class _EnumDict(dict):
"""Track enum member order and ensure member names are not reused.
EnumMeta will use the names found in self._member_names as the
enumeration member names.
"""
def __init__(self):
super().__init__()
self._member_names = []
self._last_values = []
self._ignore = []
def __setitem__(self, key, value):
"""Changes anything not dundered or not a descriptor.
If an enum member name is used twice, an error is raised; duplicate
values are not checked for.
Single underscore (sunder) names are reserved.
"""
if _is_sunder(key):
if key not in (
'_order_', '_create_pseudo_member_',
'_generate_next_value_', '_missing_', '_ignore_',
):
raise ValueError('_names_ are reserved for future Enum use')
if key == '_generate_next_value_':
setattr(self, '_generate_next_value', value)
elif key == '_ignore_':
if isinstance(value, str):
value = value.replace(',',' ').split()
else:
value = list(value)
self._ignore = value
already = set(value) & set(self._member_names)
if already:
raise ValueError('_ignore_ cannot specify already set names: %r' % (already, ))
elif _is_dunder(key):
if key == '__order__':
key = '_order_'
elif key in self._member_names:
# descriptor overwriting an enum?
raise TypeError('Attempted to reuse key: %r' % key)
elif key in self._ignore:
pass
elif not _is_descriptor(value):
if key in self:
# enum overwriting a descriptor?
raise TypeError('%r already defined as: %r' % (key, self[key]))
if isinstance(value, auto):
if value.value == _auto_null:
value.value = self._generate_next_value(key, 1, len(self._member_names), self._last_values[:])
value = value.value
self._member_names.append(key)
self._last_values.append(value)
super().__setitem__(key, value)
# Dummy value for Enum as EnumMeta explicitly checks for it, but of course
# until EnumMeta finishes running the first time the Enum class doesn't exist.
# This is also why there are checks in EnumMeta like `if Enum is not None`
Enum = None
class EnumMeta(type):
"""Metaclass for Enum"""
@classmethod
def __prepare__(metacls, cls, bases):
# create the namespace dict
enum_dict = _EnumDict()
# inherit previous flags and _generate_next_value_ function
member_type, first_enum = metacls._get_mixins_(bases)
if first_enum is not None:
enum_dict['_generate_next_value_'] = getattr(first_enum, '_generate_next_value_', None)
return enum_dict
def __new__(metacls, cls, bases, classdict):
# an Enum class is final once enumeration items have been defined; it
# cannot be mixed with other types (int, float, etc.) if it has an
# inherited __new__ unless a new __new__ is defined (or the resulting
# class will fail).
#
# remove any keys listed in _ignore_
classdict.setdefault('_ignore_', []).append('_ignore_')
ignore = classdict['_ignore_']
for key in ignore:
classdict.pop(key, None)
member_type, first_enum = metacls._get_mixins_(bases)
__new__, save_new, use_args = metacls._find_new_(classdict, member_type,
first_enum)
# save enum items into separate mapping so they don't get baked into
# the new class
enum_members = {k: classdict[k] for k in classdict._member_names}
for name in classdict._member_names:
del classdict[name]
# adjust the sunders
_order_ = classdict.pop('_order_', None)
# check for illegal enum names (any others?)
invalid_names = set(enum_members) & {'mro', }
if invalid_names:
raise ValueError('Invalid enum member name: {0}'.format(
','.join(invalid_names)))
# create a default docstring if one has not been provided
if '__doc__' not in classdict:
classdict['__doc__'] = 'An enumeration.'
# create our new Enum type
enum_class = super().__new__(metacls, cls, bases, classdict)
enum_class._member_names_ = [] # names in definition order
enum_class._member_map_ = {} # name->value map
enum_class._member_type_ = member_type
# save DynamicClassAttribute attributes from super classes so we know
# if we can take the shortcut of storing members in the class dict
dynamic_attributes = {k for c in enum_class.mro()
for k, v in c.__dict__.items()
if isinstance(v, DynamicClassAttribute)}
# Reverse value->name map for hashable values.
enum_class._value2member_map_ = {}
# If a custom type is mixed into the Enum, and it does not know how
# to pickle itself, pickle.dumps will succeed but pickle.loads will
# fail. Rather than have the error show up later and possibly far
# from the source, sabotage the pickle protocol for this class so
# that pickle.dumps also fails.
#
# However, if the new class implements its own __reduce_ex__, do not
# sabotage -- it's on them to make sure it works correctly. We use
# __reduce_ex__ instead of any of the others as it is preferred by
# pickle over __reduce__, and it handles all pickle protocols.
if '__reduce_ex__' not in classdict:
if member_type is not object:
methods = ('__getnewargs_ex__', '__getnewargs__',
'__reduce_ex__', '__reduce__')
if not any(m in member_type.__dict__ for m in methods):
_make_class_unpicklable(enum_class)
# instantiate them, checking for duplicates as we go
# we instantiate first instead of checking for duplicates first in case
# a custom __new__ is doing something funky with the values -- such as
# auto-numbering ;)
for member_name in classdict._member_names:
value = enum_members[member_name]
if not isinstance(value, tuple):
args = (value, )
else:
args = value
if member_type is tuple: # special case for tuple enums
args = (args, ) # wrap it one more time
if not use_args:
enum_member = __new__(enum_class)
if not hasattr(enum_member, '_value_'):
enum_member._value_ = value
else:
enum_member = __new__(enum_class, *args)
if not hasattr(enum_member, '_value_'):
if member_type is object:
enum_member._value_ = value
else:
enum_member._value_ = member_type(*args)
value = enum_member._value_
enum_member._name_ = member_name
enum_member.__objclass__ = enum_class
enum_member.__init__(*args)
# If another member with the same value was already defined, the
# new member becomes an alias to the existing one.
for name, canonical_member in enum_class._member_map_.items():
if canonical_member._value_ == enum_member._value_:
enum_member = canonical_member
break
else:
# Aliases don't appear in member names (only in __members__).
enum_class._member_names_.append(member_name)
# performance boost for any member that would not shadow
# a DynamicClassAttribute
if member_name not in dynamic_attributes:
setattr(enum_class, member_name, enum_member)
# now add to _member_map_
enum_class._member_map_[member_name] = enum_member
try:
# This may fail if value is not hashable. We can't add the value
# to the map, and by-value lookups for this value will be
# linear.
enum_class._value2member_map_[value] = enum_member
except TypeError:
pass
# double check that repr and friends are not the mixin's or various
# things break (such as pickle)
for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
class_method = getattr(enum_class, name)
obj_method = getattr(member_type, name, None)
enum_method = getattr(first_enum, name, None)
if obj_method is not None and obj_method is class_method:
setattr(enum_class, name, enum_method)
# replace any other __new__ with our own (as long as Enum is not None,
# anyway) -- again, this is to support pickle
if Enum is not None:
# if the user defined their own __new__, save it before it gets
# clobbered in case they subclass later
if save_new:
enum_class.__new_member__ = __new__
enum_class.__new__ = Enum.__new__
# py3 support for definition order (helps keep py2/py3 code in sync)
if _order_ is not None:
if isinstance(_order_, str):
_order_ = _order_.replace(',', ' ').split()
if _order_ != enum_class._member_names_:
raise TypeError('member order does not match _order_')
return enum_class
def __bool__(self):
"""
classes/types should always be True.
"""
return True
def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1):
"""Either returns an existing member, or creates a new enum class.
This method is used both when an enum class is given a value to match
to an enumeration member (i.e. Color(3)) and for the functional API
(i.e. Color = Enum('Color', names='RED GREEN BLUE')).
When used for the functional API:
`value` will be the name of the new class.
`names` should be either a string of white-space/comma delimited names
(values will start at `start`), or an iterator/mapping of name, value pairs.
`module` should be set to the module this class is being created in;
if it is not set, an attempt to find that module will be made, but if
it fails the class will not be picklable.
`qualname` should be set to the actual location this class can be found
at in its module; by default it is set to the global scope. If this is
not correct, unpickling will fail in some circumstances.
`type`, if set, will be mixed in as the first base class.
"""
if names is None: # simple value lookup
return cls.__new__(cls, value)
# otherwise, functional API: we're creating a new Enum type
return cls._create_(value, names, module=module, qualname=qualname, type=type, start=start)
def __contains__(cls, member):
if not isinstance(member, Enum):
raise TypeError(
"unsupported operand type(s) for 'in': '%s' and '%s'" % (
type(member).__qualname__, cls.__class__.__qualname__))
return isinstance(member, cls) and member._name_ in cls._member_map_
def __delattr__(cls, attr):
# nicer error message when someone tries to delete an attribute
# (see issue19025).
if attr in cls._member_map_:
raise AttributeError(
"%s: cannot delete Enum member." % cls.__name__)
super().__delattr__(attr)
def __dir__(self):
return (['__class__', '__doc__', '__members__', '__module__'] +
self._member_names_)
def __getattr__(cls, name):
"""Return the enum member matching `name`
We use __getattr__ instead of descriptors or inserting into the enum
class' __dict__ in order to support `name` and `value` being both
properties for enum members (which live in the class' __dict__) and
enum members themselves.
"""
if _is_dunder(name):
raise AttributeError(name)
try:
return cls._member_map_[name]
except KeyError:
raise AttributeError(name) from None
def __getitem__(cls, name):
return cls._member_map_[name]
def __iter__(cls):
return (cls._member_map_[name] for name in cls._member_names_)
def __len__(cls):
return len(cls._member_names_)
@property
def __members__(cls):
"""Returns a mapping of member name->value.
This mapping lists all enum members, including aliases. Note that this
is a read-only view of the internal mapping.
"""
return MappingProxyType(cls._member_map_)
def __repr__(cls):
return "<enum %r>" % cls.__name__
def __reversed__(cls):
return (cls._member_map_[name] for name in reversed(cls._member_names_))
def __setattr__(cls, name, value):
"""Block attempts to reassign Enum members.
A simple assignment to the class namespace only changes one of the
several possible ways to get an Enum member from the Enum class,
resulting in an inconsistent Enumeration.
"""
member_map = cls.__dict__.get('_member_map_', {})
if name in member_map:
raise AttributeError('Cannot reassign members.')
super().__setattr__(name, value)
def _create_(cls, class_name, names, *, module=None, qualname=None, type=None, start=1):
"""Convenience method to create a new Enum class.
`names` can be:
* A string containing member names, separated either with spaces or
commas. Values are incremented by 1 from `start`.
* An iterable of member names. Values are incremented by 1 from `start`.
* An iterable of (member name, value) pairs.
* A mapping of member name -> value pairs.
"""
metacls = cls.__class__
bases = (cls, ) if type is None else (type, cls)
_, first_enum = cls._get_mixins_(bases)
classdict = metacls.__prepare__(class_name, bases)
# special processing needed for names?
if isinstance(names, str):
names = names.replace(',', ' ').split()
if isinstance(names, (tuple, list)) and names and isinstance(names[0], str):
original_names, names = names, []
last_values = []
for count, name in enumerate(original_names):
value = first_enum._generate_next_value_(name, start, count, last_values[:])
last_values.append(value)
names.append((name, value))
# Here, names is either an iterable of (name, value) or a mapping.
for item in names:
if isinstance(item, str):
member_name, member_value = item, names[item]
else:
member_name, member_value = item
classdict[member_name] = member_value
enum_class = metacls.__new__(metacls, class_name, bases, classdict)
# TODO: replace the frame hack if a blessed way to know the calling
# module is ever developed
if module is None:
try:
module = sys._getframe(2).f_globals['__name__']
except (AttributeError, ValueError) as exc:
pass
if module is None:
_make_class_unpicklable(enum_class)
else:
enum_class.__module__ = module
if qualname is not None:
enum_class.__qualname__ = qualname
return enum_class
def _convert_(cls, name, module, filter, source=None):
"""
Create a new Enum subclass that replaces a collection of global constants
"""
# convert all constants from source (or module) that pass filter() to
# a new Enum called name, and export the enum and its members back to
# module;
# also, replace the __reduce_ex__ method so unpickling works in
# previous Python versions
module_globals = vars(sys.modules[module])
if source:
source = vars(source)
else:
source = module_globals
# _value2member_map_ is populated in the same order every time
# for a consistent reverse mapping of number to name when there
# are multiple names for the same number.
members = [
(name, value)
for name, value in source.items()
if filter(name)]
try:
# sort by value
members.sort(key=lambda t: (t[1], t[0]))
except TypeError:
# unless some values aren't comparable, in which case sort by name
members.sort(key=lambda t: t[0])
cls = cls(name, members, module=module)
cls.__reduce_ex__ = _reduce_ex_by_name
module_globals.update(cls.__members__)
module_globals[name] = cls
return cls
def _convert(cls, *args, **kwargs):
import warnings
warnings.warn("_convert is deprecated and will be removed in 3.9, use "
"_convert_ instead.", DeprecationWarning, stacklevel=2)
return cls._convert_(*args, **kwargs)
@staticmethod
def _get_mixins_(bases):
"""Returns the type for creating enum members, and the first inherited
enum class.
bases: the tuple of bases that was given to __new__
"""
if not bases:
return object, Enum
def _find_data_type(bases):
for chain in bases:
for base in chain.__mro__:
if base is object:
continue
elif '__new__' in base.__dict__:
if issubclass(base, Enum):
continue
return base
# ensure final parent class is an Enum derivative, find any concrete
# data type, and check that Enum has no members
first_enum = bases[-1]
if not issubclass(first_enum, Enum):
raise TypeError("new enumerations should be created as "
"`EnumName([mixin_type, ...] [data_type,] enum_type)`")
member_type = _find_data_type(bases) or object
if first_enum._member_names_:
raise TypeError("Cannot extend enumerations")
return member_type, first_enum
@staticmethod
def _find_new_(classdict, member_type, first_enum):
"""Returns the __new__ to be used for creating the enum members.
classdict: the class dictionary given to __new__
member_type: the data type whose __new__ will be used by default
first_enum: enumeration to check for an overriding __new__
"""
# now find the correct __new__, checking to see of one was defined
# by the user; also check earlier enum classes in case a __new__ was
# saved as __new_member__
__new__ = classdict.get('__new__', None)
# should __new__ be saved as __new_member__ later?
save_new = __new__ is not None
if __new__ is None:
# check all possibles for __new_member__ before falling back to
# __new__
for method in ('__new_member__', '__new__'):
for possible in (member_type, first_enum):
target = getattr(possible, method, None)
if target not in {
None,
None.__new__,
object.__new__,
Enum.__new__,
}:
__new__ = target
break
if __new__ is not None:
break
else:
__new__ = object.__new__
# if a non-object.__new__ is used then whatever value/tuple was
# assigned to the enum member name will be passed to __new__ and to the
# new enum member's __init__
if __new__ is object.__new__:
use_args = False
else:
use_args = True
return __new__, save_new, use_args
class Enum(metaclass=EnumMeta):
"""Generic enumeration.
Derive from this class to define new enumerations.
"""
def __new__(cls, value):
# all enum instances are actually created during class construction
# without calling this method; this method is called by the metaclass'
# __call__ (i.e. Color(3) ), and by pickle
if type(value) is cls:
# For lookups like Color(Color.RED)
return value
# by-value search for a matching enum member
# see if it's in the reverse mapping (for hashable values)
try:
return cls._value2member_map_[value]
except KeyError:
# Not found, no need to do long O(n) search
pass
except TypeError:
# not there, now do long search -- O(n) behavior
for member in cls._member_map_.values():
if member._value_ == value:
return member
# still not found -- try _missing_ hook
try:
exc = None
result = cls._missing_(value)
except Exception as e:
exc = e
result = None
if isinstance(result, cls):
return result
else:
ve_exc = ValueError("%r is not a valid %s" % (value, cls.__name__))
if result is None and exc is None:
raise ve_exc
elif exc is None:
exc = TypeError(
'error in %s._missing_: returned %r instead of None or a valid member'
% (cls.__name__, result)
)
exc.__context__ = ve_exc
raise exc
def _generate_next_value_(name, start, count, last_values):
for last_value in reversed(last_values):
try:
return last_value + 1
except TypeError:
pass
else:
return start
@classmethod
def _missing_(cls, value):
raise ValueError("%r is not a valid %s" % (value, cls.__name__))
def __repr__(self):
return "<%s.%s: %r>" % (
self.__class__.__name__, self._name_, self._value_)
def __str__(self):
return "%s.%s" % (self.__class__.__name__, self._name_)
def __dir__(self):
added_behavior = [
m
for cls in self.__class__.mro()
for m in cls.__dict__
if m[0] != '_' and m not in self._member_map_
]
return (['__class__', '__doc__', '__module__'] + added_behavior)
def __format__(self, format_spec):
# mixed-in Enums should use the mixed-in type's __format__, otherwise
# we can get strange results with the Enum name showing up instead of
# the value
# pure Enum branch
if self._member_type_ is object:
cls = str
val = str(self)
# mix-in branch
else:
cls = self._member_type_
val = self._value_
return cls.__format__(val, format_spec)
def __hash__(self):
return hash(self._name_)
def __reduce_ex__(self, proto):
return self.__class__, (self._value_, )
# DynamicClassAttribute is used to provide access to the `name` and
# `value` properties of enum members while keeping some measure of
# protection from modification, while still allowing for an enumeration
# to have members named `name` and `value`. This works because enumeration
# members are not set directly on the enum class -- __getattr__ is
# used to look them up.
@DynamicClassAttribute
def name(self):
"""The name of the Enum member."""
return self._name_
@DynamicClassAttribute
def value(self):
"""The value of the Enum member."""
return self._value_
class IntEnum(int, Enum):
"""Enum where members are also (and must be) ints"""
def _reduce_ex_by_name(self, proto):
return self.name
class Flag(Enum):
"""Support for flags"""
def _generate_next_value_(name, start, count, last_values):
"""
Generate the next value when not given.
name: the name of the member
start: the initial start value or None
count: the number of existing members
last_values: the list of values assigned so far
"""
if not count:
return start if start is not None else 1
for last_value in reversed(last_values):
try:
high_bit = _high_bit(last_value)
break
except Exception:
raise TypeError('Invalid Flag value: %r' % last_value) from None
return 2 ** (high_bit+1)
@classmethod
def _missing_(cls, value):
original_value = value
if value < 0:
value = ~value
possible_member = cls._create_pseudo_member_(value)
if original_value < 0:
possible_member = ~possible_member
return possible_member
@classmethod
def _create_pseudo_member_(cls, value):
"""
Create a composite member iff value contains only members.
"""
pseudo_member = cls._value2member_map_.get(value, None)
if pseudo_member is None:
# verify all bits are accounted for
_, extra_flags = _decompose(cls, value)
if extra_flags:
raise ValueError("%r is not a valid %s" % (value, cls.__name__))
# construct a singleton enum pseudo-member
pseudo_member = object.__new__(cls)
pseudo_member._name_ = None
pseudo_member._value_ = value
# use setdefault in case another thread already created a composite
# with this value
pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
return pseudo_member
def __contains__(self, other):
if not isinstance(other, self.__class__):
raise TypeError(
"unsupported operand type(s) for 'in': '%s' and '%s'" % (
type(other).__qualname__, self.__class__.__qualname__))
return other._value_ & self._value_ == other._value_
def __repr__(self):
cls = self.__class__
if self._name_ is not None:
return '<%s.%s: %r>' % (cls.__name__, self._name_, self._value_)
members, uncovered = _decompose(cls, self._value_)
return '<%s.%s: %r>' % (
cls.__name__,
'|'.join([str(m._name_ or m._value_) for m in members]),
self._value_,
)
def __str__(self):
cls = self.__class__
if self._name_ is not None:
return '%s.%s' % (cls.__name__, self._name_)
members, uncovered = _decompose(cls, self._value_)
if len(members) == 1 and members[0]._name_ is None:
return '%s.%r' % (cls.__name__, members[0]._value_)
else:
return '%s.%s' % (
cls.__name__,
'|'.join([str(m._name_ or m._value_) for m in members]),
)
def __bool__(self):
return bool(self._value_)
def __or__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self.__class__(self._value_ | other._value_)
def __and__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self.__class__(self._value_ & other._value_)
def __xor__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self.__class__(self._value_ ^ other._value_)
def __invert__(self):
members, uncovered = _decompose(self.__class__, self._value_)
inverted = self.__class__(0)
for m in self.__class__:
if m not in members and not (m._value_ & self._value_):
inverted = inverted | m
return self.__class__(inverted)
class IntFlag(int, Flag):
"""Support for integer-based Flags"""
@classmethod
def _missing_(cls, value):
if not isinstance(value, int):
raise ValueError("%r is not a valid %s" % (value, cls.__name__))
new_member = cls._create_pseudo_member_(value)
return new_member
@classmethod
def _create_pseudo_member_(cls, value):
pseudo_member = cls._value2member_map_.get(value, None)
if pseudo_member is None:
need_to_create = [value]
# get unaccounted for bits
_, extra_flags = _decompose(cls, value)
# timer = 10
while extra_flags:
# timer -= 1
bit = _high_bit(extra_flags)
flag_value = 2 ** bit
if (flag_value not in cls._value2member_map_ and
flag_value not in need_to_create
):
need_to_create.append(flag_value)
if extra_flags == -flag_value:
extra_flags = 0
else:
extra_flags ^= flag_value
for value in reversed(need_to_create):
# construct singleton pseudo-members
pseudo_member = int.__new__(cls, value)
pseudo_member._name_ = None
pseudo_member._value_ = value
# use setdefault in case another thread already created a composite
# with this value
pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
return pseudo_member
def __or__(self, other):
if not isinstance(other, (self.__class__, int)):
return NotImplemented
result = self.__class__(self._value_ | self.__class__(other)._value_)
return result
def __and__(self, other):
if not isinstance(other, (self.__class__, int)):
return NotImplemented
return self.__class__(self._value_ & self.__class__(other)._value_)
def __xor__(self, other):
if not isinstance(other, (self.__class__, int)):
return NotImplemented
return self.__class__(self._value_ ^ self.__class__(other)._value_)
__ror__ = __or__
__rand__ = __and__
__rxor__ = __xor__
def __invert__(self):
result = self.__class__(~self._value_)
return result
def _high_bit(value):
"""returns index of highest bit, or -1 if value is zero or negative"""
return value.bit_length() - 1
def unique(enumeration):
"""Class decorator for enumerations ensuring unique member values."""
duplicates = []
for name, member in enumeration.__members__.items():
if name != member.name:
duplicates.append((name, member.name))
if duplicates:
alias_details = ', '.join(
["%s -> %s" % (alias, name) for (alias, name) in duplicates])
raise ValueError('duplicate values found in %r: %s' %
(enumeration, alias_details))
return enumeration
def _decompose(flag, value):
"""Extract all members from the value."""
# _decompose is only called if the value is not named
not_covered = value
negative = value < 0
# issue29167: wrap accesses to _value2member_map_ in a list to avoid race
# conditions between iterating over it and having more pseudo-
# members added to it
if negative:
# only check for named flags
flags_to_check = [
(m, v)
for v, m in list(flag._value2member_map_.items())
if m.name is not None
]
else:
# check for named flags and powers-of-two flags
flags_to_check = [
(m, v)
for v, m in list(flag._value2member_map_.items())
if m.name is not None or _power_of_two(v)
]
members = []
for member, member_value in flags_to_check:
if member_value and member_value & value == member_value:
members.append(member)
not_covered &= ~member_value
if not members and value in flag._value2member_map_:
members.append(flag._value2member_map_[value])
members.sort(key=lambda m: m._value_, reverse=True)
if len(members) > 1 and members[0].value == value:
# we have the breakdown, don't need the value member itself
members.pop(0)
return members, not_covered
def _power_of_two(value):
if value < 1:
return False
return value == 2 ** _high_bit(value)
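As a quick illustration of the functional API and flag composition described in the docstrings above (standard `enum` behavior; the names are illustrative):

```python
from enum import Enum, Flag, auto

# Functional API: equivalent to a class definition with three members.
Color = Enum('Color', 'RED GREEN BLUE')
assert Color.RED.value == 1            # values auto-start at 1
assert Color(2) is Color.GREEN         # by-value lookup via EnumMeta.__call__
assert Color['BLUE'] is Color.BLUE     # by-name lookup via __getitem__

class Perm(Flag):
    R = auto()                         # 1
    W = auto()                         # 2
    X = auto()                         # 4

assert (Perm.R | Perm.W).value == 3    # composite; _decompose() drives its repr
```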
aws_credentials.py | """
Copyright (c) Contributors to the Open 3D Engine Project. For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
import boto3
import configparser
import logging
import os
import pytest
import typing
logger = logging.getLogger(__name__)
logging.getLogger('boto').setLevel(logging.CRITICAL)
class AwsCredentials:
def __init__(self, profile_name: str):
self._profile_name = profile_name
self._credentials_path = os.environ.get('AWS_SHARED_CREDENTIALS_FILE')
if not self._credentials_path:
# Home directory location varies based on the operating system, but is referred to using the environment
# variables %UserProfile% in Windows and $HOME or ~ (tilde) in Unix-based systems.
self._credentials_path = os.path.join(os.environ.get('UserProfile', os.path.expanduser('~')),
'.aws', 'credentials')
self._credentials_file_exists = os.path.exists(self._credentials_path)
self._credentials = configparser.ConfigParser()
self._credentials.read(self._credentials_path)
def get_aws_credentials(self) -> typing.Tuple[str, str, str]:
"""
        Get AWS credentials stored in the specific named profile.
:return AWS credentials.
"""
access_key_id = self._get_aws_credential_attribute_value('aws_access_key_id')
secret_access_key = self._get_aws_credential_attribute_value('aws_secret_access_key')
session_token = self._get_aws_credential_attribute_value('aws_session_token')
return access_key_id, secret_access_key, session_token
def set_aws_credentials_by_session(self, session: boto3.Session) -> None:
"""
Set AWS credentials stored in the specific named profile using an assumed role session.
:param session: assumed role session.
"""
credentials = session.get_credentials().get_frozen_credentials()
self.set_aws_credentials(credentials.access_key, credentials.secret_key, credentials.token)
def set_aws_credentials(self, aws_access_key_id: str, aws_secret_access_key: str,
aws_session_token: str) -> None:
|
def _get_aws_credential_attribute_value(self, attribute_name: str) -> str:
"""
Get the value of an AWS credential attribute stored in the specific named profile.
:param attribute_name: Name of the AWS credential attribute.
:return Value of the AWS credential attribute.
"""
try:
value = self._credentials.get(self._profile_name, attribute_name)
except configparser.NoSectionError:
# Named profile or key doesn't exist
value = None
except configparser.NoOptionError:
# Named profile doesn't have the specified attribute
value = None
return value
def _set_aws_credential_attribute_value(self, attribute_name: str, attribute_value: str) -> None:
"""
Set the value of an AWS credential attribute stored in the specific named profile.
:param attribute_name: Name of the AWS credential attribute.
:param attribute_value: Value of the AWS credential attribute.
"""
if self._profile_name not in self._credentials:
self._credentials[self._profile_name] = {}
if attribute_value is None:
self._credentials.remove_option(self._profile_name, attribute_name)
# Remove the named profile if it doesn't have any AWS credential attribute.
if len(self._credentials[self._profile_name]) == 0:
self._credentials.remove_section(self._profile_name)
else:
self._credentials[self._profile_name][attribute_name] = attribute_value
| """
Set AWS credentials stored in the specific named profile.
:param aws_access_key_id: AWS access key id.
        :param aws_secret_access_key: AWS secret access key.
        :param aws_session_token: AWS session token from the assumed role session.
"""
self._set_aws_credential_attribute_value('aws_access_key_id', aws_access_key_id)
self._set_aws_credential_attribute_value('aws_secret_access_key', aws_secret_access_key)
self._set_aws_credential_attribute_value('aws_session_token', aws_session_token)
if (len(self._credentials.sections()) == 0) and (not self._credentials_file_exists):
os.remove(self._credentials_path)
return
with open(self._credentials_path, 'w+') as credential_file:
self._credentials.write(credential_file) |
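
# A minimal usage sketch (profile name and fixture shape are hypothetical; the
# real suites wire this up through their own pytest fixtures): swap in
# credentials from a boto3 session for the duration of a test, then restore
# whatever the profile held before.
@pytest.fixture
def temporary_profile_credentials():
    credentials = AwsCredentials('default')
    original = credentials.get_aws_credentials()
    # Assumes the ambient boto3 session can resolve credentials, e.g. from the
    # environment or an assumed role.
    credentials.set_aws_credentials_by_session(boto3.Session())
    yield credentials
    credentials.set_aws_credentials(*original)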
ha_group_delete_parameters_test.go | package ha_group
import (
"context"
"github.com/go-openapi/runtime/client"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"net/http"
"testing"
"time"
)
| p := NewHaGroupDeleteParams()
require.NotNil(t, p.timeout)
assert.Equal(t, client.DefaultTimeout, p.timeout)
}
func TestNewHaGroupDeleteParamsWithTimeout(t *testing.T) {
p := NewHaGroupDeleteParamsWithTimeout(time.Minute * 5)
require.NotNil(t, p.timeout)
assert.Equal(t, time.Minute*5, p.timeout)
}
func TestNewHaGroupDeleteParamsWithContext(t *testing.T) {
p := NewHaGroupDeleteParamsWithContext(context.TODO())
require.NotNil(t, p.Context)
assert.Equal(t, context.TODO(), p.Context)
}
func TestNewHaGroupDeleteParamsWithHTTPClient(t *testing.T) {
cli := &http.Client{}
p := NewHaGroupDeleteParamsWithHTTPClient(cli)
require.NotNil(t, p.HTTPClient)
assert.Equal(t, cli, p.HTTPClient)
}
func TestHaGroupDeleteParams_WithDefaults(t *testing.T) {
p := NewHaGroupDeleteParams()
p = p.WithDefaults()
}
func TestHaGroupDeleteParams_WithTimeout(t *testing.T) {
p := NewHaGroupDeleteParams()
p = p.WithTimeout(time.Minute * 5)
require.NotNil(t, p.timeout)
assert.Equal(t, time.Minute*5, p.timeout)
}
func TestHaGroupDeleteParams_WithContext(t *testing.T) {
p := NewHaGroupDeleteParams()
p = p.WithContext(context.TODO())
require.NotNil(t, p.Context)
assert.Equal(t, context.TODO(), p.Context)
}
func TestHaGroupDeleteParams_WithHTTPClient(t *testing.T) {
p := NewHaGroupDeleteParams()
cli := &http.Client{}
p = p.WithHTTPClient(cli)
require.NotNil(t, p.HTTPClient)
assert.Equal(t, cli, p.HTTPClient)
}
func TestHaGroupDeleteParams_WithID(t *testing.T) {
p := NewHaGroupDeleteParams()
p = p.WithID("test-id")
require.NotNil(t, p.ID)
assert.Equal(t, "test-id", p.ID)
} | func TestNewHaGroupDeleteParams(t *testing.T) { |
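
// A sketch (not part of the generated suite) chaining the fluent setters the
// way callers typically build these params; the assertions mirror the
// single-setter tests above.
func TestHaGroupDeleteParams_ChainedSetters(t *testing.T) {
	cli := &http.Client{}
	p := NewHaGroupDeleteParams().
		WithTimeout(time.Minute).
		WithContext(context.TODO()).
		WithHTTPClient(cli).
		WithID("test-id")
	assert.Equal(t, time.Minute, p.timeout)
	assert.Equal(t, context.TODO(), p.Context)
	assert.Equal(t, cli, p.HTTPClient)
	assert.Equal(t, "test-id", p.ID)
}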
schema.go | //go:generate stringer -type=RelType -output=./gen_string.go
package sdata
import (
"fmt"
"strings"
"github.com/gobuffalo/flect"
)
type aliasKey struct {
name string
parent string
}
type DBSchema struct {
ver int
typ string
t map[string]DBTableInfo
at map[aliasKey]string
rm map[string][]DBRel
vt map[string]VirtualTable
fm map[string]DBFunction
}
type DBTableInfo struct {
Name string
Type string
IsSingular bool
IsAlias bool
Columns []DBColumn
PrimaryCol DBColumn
TSVCol DBColumn
Singular string
Plural string
Blocked bool
Schema *DBSchema
colMap map[string]int
}
type RelType int
const (
RelNone RelType = iota
RelOneToOne
RelOneToMany
RelOneToManyThrough
RelPolymorphic
RelRecursive
RelEmbedded
RelRemote
RelSkip
)
type DBRel struct {
Type RelType
Through struct {
ColL DBColumn
ColR DBColumn
}
Left struct {
Ti DBTableInfo
Col DBColumn
}
Right struct {
VTable string
Ti DBTableInfo
Col DBColumn
}
}
func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
schema := &DBSchema{
ver: info.Version,
typ: info.Type,
t: make(map[string]DBTableInfo),
at: make(map[aliasKey]string),
rm: make(map[string][]DBRel),
vt: make(map[string]VirtualTable),
fm: make(map[string]DBFunction),
}
for i, t := range info.Tables {
err := schema.addTableInfo(t, info.Columns[i], aliases)
if err != nil {
return nil, err
}
}
if err := schema.virtualRels(info.VTables); err != nil {
return nil, err
}
for i, t := range info.Tables {
err := schema.firstDegreeRels(t, info.Columns[i])
if err != nil {
return nil, err
}
}
for i, t := range info.Tables {
err := schema.secondDegreeRels(t, info.Columns[i])
if err != nil {
return nil, err
}
}
for k, f := range info.Functions {
if len(f.Params) == 1 {
schema.fm[strings.ToLower(f.Name)] = info.Functions[k]
}
}
return schema, nil
}
func (s *DBSchema) addTableInfo(
t DBTable, cols []DBColumn, aliases map[string][]string) error {
colmap := make(map[string]int, len(cols))
singular := flect.Singularize(t.Key)
plural := flect.Pluralize(t.Key)
ti := DBTableInfo{
Name: t.Name,
Type: t.Type,
Columns: cols,
Singular: singular,
Plural: plural,
Blocked: t.Blocked,
Schema: s,
colMap: colmap,
}
for i := range cols {
c := &cols[i]
c.Table = t.Name
switch {
case c.Type == "tsvector":
ti.TSVCol = cols[i]
case c.PrimaryKey:
ti.PrimaryCol = cols[i]
}
colmap[c.Key] = i
}
ti.IsSingular = true
s.t[singular] = ti
ti.IsSingular = false
s.t[plural] = ti
| if al, ok := aliases[t.Key]; ok {
for i := range al {
ti1 := ti
ti1.Singular = flect.Singularize(al[i])
ti1.Plural = flect.Pluralize(al[i])
ti1.IsSingular = true
s.t[ti1.Singular] = ti1
ti1.IsSingular = false
s.t[ti1.Plural] = ti1
}
}
return nil
}
func (s *DBSchema) virtualRels(vts []VirtualTable) error {
for _, vt := range vts {
s.vt[vt.Name] = vt
for _, t := range s.t {
idCol, ok := t.getColumn(vt.IDColumn)
if !ok {
continue
}
if _, ok := t.getColumn(vt.TypeColumn); !ok {
continue
}
nt := DBTable{
ID: -1,
Name: vt.Name,
Key: strings.ToLower(vt.Name),
Type: "virtual",
}
if err := s.addTableInfo(nt, nil, nil); err != nil {
return err
}
rel := DBRel{Type: RelPolymorphic}
rel.Left.Ti = t
rel.Left.Col = idCol
rcol := DBColumn{
Name: vt.FKeyColumn,
Key: strings.ToLower(vt.FKeyColumn),
Type: idCol.Type,
}
rel.Right.VTable = vt.TypeColumn
rel.Right.Ti = t
rel.Right.Col = rcol
if err := s.SetRel(vt.Name, t.Name, rel, false); err != nil {
return err
}
}
}
return nil
}
func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
cti, ok := s.t[t.Name]
if !ok {
return fmt.Errorf("invalid foreign key table '%s'", t.Name)
}
for i := range cols {
c := cols[i]
if c.FKeyTable == "" {
continue
}
fti, ok := s.t[c.FKeyTable]
if !ok {
return fmt.Errorf("invalid foreign key table '%s'", c.FKeyTable)
}
pn1 := c.FKeyTable
pn2 := getRelName(c.Name)
// This is an embedded relationship like when a json/jsonb column
// is exposed as a table
if c.Name == c.FKeyTable && c.FKeyCol == "" {
rel := DBRel{Type: RelEmbedded}
rel.Left.Col = cti.PrimaryCol
rel.Right.Col = c
if err := s.SetRel(pn2, cti.Name, rel, true); err != nil {
return err
}
continue
}
if c.FKeyCol == "" {
continue
}
fc, ok := fti.getColumn(c.FKeyCol)
if !ok {
return fmt.Errorf("invalid foreign key column '%s.%s'",
c.FKeyTable, c.FKeyCol)
}
rel1 := DBRel{}
// One-to-many relation between current table and the
// table in the foreign key
switch {
case cti.Name == c.FKeyTable:
rel1.Type = RelRecursive
rel1.Right.VTable = "_rcte_" + t.Name
case fc.UniqueKey:
rel1.Type = RelOneToOne
default:
rel1.Type = RelOneToMany
}
rel1.Left.Ti = cti
rel1.Left.Col = c
rel1.Right.Ti = fti
rel1.Right.Col = fc
if err := s.SetRel(cti.Name, pn1, rel1, false); err != nil {
return err
}
if cti.Name == c.FKeyTable {
continue
}
rel2 := DBRel{}
		// One-to-many reverse relation between the foreign key table and
		// the current table
if c.UniqueKey {
rel2.Type = RelOneToOne
} else {
rel2.Type = RelOneToMany
}
rel2.Left.Ti = fti
rel2.Left.Col = fc
rel2.Right.Ti = cti
rel2.Right.Col = c
if err := s.SetRel(pn1, cti.Name, rel2, false); err != nil {
return err
}
if err := s.SetRel(pn2, cti.Name, rel2, true); err != nil {
return err
}
}
return nil
}
func (s *DBSchema) secondDegreeRels(t DBTable, cols []DBColumn) error {
jcols := make([]DBColumn, 0, len(cols))
ct := t.Key
cti, ok := s.t[ct]
if !ok {
return fmt.Errorf("invalid foreign key table '%s'", ct)
}
for _, c := range cols {
if c.FKeyTable == "" {
continue
}
fti, ok := s.t[c.FKeyTable]
if !ok {
return fmt.Errorf("invalid foreign key table '%s'", c.FKeyTable)
}
// This is an embedded relationship like when a json/jsonb column
		// is exposed as a table, so skip it
if c.Name == c.FKeyTable && c.FKeyCol == "" {
continue
}
if c.FKeyCol == "" {
continue
}
if _, ok := fti.getColumn(c.FKeyCol); !ok {
return fmt.Errorf("invalid foreign key column '%s.%s'",
c.FKeyTable, c.FKeyCol)
}
jcols = append(jcols, c)
}
	// If the table contains multiple foreign key columns it's a possible
	// join table for many-to-many relationships or multiple one-to-many
	// relations.
	// The one-to-many relations below use the current table as the
	// join table, aka the through table.
if len(jcols) > 1 {
for i := range jcols {
for n := range jcols {
if n == i {
continue
}
err := s.updateSchemaOTMT(cti, jcols[i], jcols[n])
if err != nil {
return err
}
}
}
}
return nil
}
func (s *DBSchema) updateSchemaOTMT(
ti DBTableInfo, col1, col2 DBColumn) error {
ft1 := strings.ToLower(col1.FKeyTable)
ft2 := strings.ToLower(col2.FKeyTable)
if ft1 == ft2 {
return nil
}
cn1 := getRelName(col1.Name)
cn2 := getRelName(col2.Name)
fti1, ok := s.t[ft1]
if !ok {
return fmt.Errorf("invalid foreign key table '%s'", ft1)
}
fc1, ok := fti1.getColumn(col1.FKeyCol)
if !ok {
return fmt.Errorf("invalid foreign key column '%s.%s'",
ft1, col1.FKeyCol)
}
fti2, ok := s.t[ft2]
if !ok {
return fmt.Errorf("invalid foreign key table '%s'", ft2)
}
fc2, ok := fti2.getColumn(col2.FKeyCol)
if !ok {
return fmt.Errorf("invalid foreign key column id '%s.%s'",
ft2, col2.FKeyCol)
}
	// One-to-many-through relation between the 1st foreign key table and the
	// 2nd foreign key table
rel1 := DBRel{Type: RelOneToManyThrough}
rel1.Through.ColL = col1
rel1.Through.ColR = col2
rel1.Left.Ti = fti1
rel1.Left.Col = fc1
rel1.Right.Ti = fti2
rel1.Right.Col = fc2
if err := s.SetRel(ft1, ft2, rel1, false); err != nil {
return err
}
if err := s.SetRel(cn1, cn2, rel1, true); err != nil {
return err
}
	// One-to-many-through relation between the 2nd foreign key table and the
	// 1st foreign key table
rel2 := DBRel{Type: RelOneToManyThrough}
rel2.Through.ColL = col2
rel2.Through.ColR = col1
rel2.Left.Ti = fti2
rel2.Left.Col = fc2
rel2.Right.Ti = fti1
rel2.Right.Col = fc1
if err := s.SetRel(ft2, ft1, rel2, false); err != nil {
return err
}
if err := s.SetRel(cn2, cn1, rel2, true); err != nil {
return err
}
return nil
}
// func (s *DBSchema) addAlias(name, parent string, ti DBTableInfo) {
// if name == ti.Plural || name == ti.Singular {
// return
// }
// ns := strings.ToLower(flect.Singularize(name))
// np := strings.ToLower(flect.Pluralize(name))
// if ns != np {
// s.at[aliasKey{ns, parent}] = ti.Singular
// s.at[aliasKey{np, parent}] = ti.Plural
// } else {
// s.at[aliasKey{np, parent}] = ti.Plural
// }
// }
func (s *DBSchema) GetTableNames() []string {
var names []string
for name := range s.t {
names = append(names, name)
}
return names
}
func (s *DBSchema) GetAliases(parent string) []string {
var names []string
for ak := range s.at {
if ak.parent == parent {
names = append(names, ak.name)
}
}
return names
}
func (s *DBSchema) GetAliasTable(name, parent string) (string, bool) {
v, ok := s.at[aliasKey{name, parent}]
return v, ok
}
func (s *DBSchema) getTableInfo(name, parent string, blocking bool) (DBTableInfo, error) {
t, ok := s.t[name]
if ok {
if blocking && t.Blocked {
return t, fmt.Errorf("table: '%s' (%s) blocked", t.Name, name)
}
return t, nil
}
if parent != "" {
at, ok := s.at[aliasKey{name, parent}]
if ok {
t, ok := s.t[at]
if ok {
if blocking && t.Blocked {
return t, fmt.Errorf("table: '%s' blocked (%s, %s)", t.Name, name, parent)
}
t.IsAlias = true
return t, nil
}
}
}
return t, fmt.Errorf("table: '%s' not found (%s)", name, parent)
}
func (s *DBSchema) GetTableInfo(name, parent string) (DBTableInfo, error) {
return s.getTableInfo(name, parent, false)
}
func (s *DBSchema) GetTableInfoB(name, parent string) (DBTableInfo, error) {
return s.getTableInfo(name, parent, true)
}
func (s *DBSchema) SetRel(child, parent string, rel DBRel, alias bool) error {
// if ok, err := s.relExists(child, parent); ok {
// return nil
// } else if err != nil {
// return err
// }
sp := strings.ToLower(flect.Singularize(parent))
pp := strings.ToLower(flect.Pluralize(parent))
sc := strings.ToLower(flect.Singularize(child))
pc := strings.ToLower(flect.Pluralize(child))
s.rm[(sc + sp)] = append(s.rm[(sc+sp)], rel)
s.rm[(sc + pp)] = append(s.rm[(sc+pp)], rel)
s.rm[(pc + sp)] = append(s.rm[(pc+sp)], rel)
s.rm[(pc + pp)] = append(s.rm[(pc+pp)], rel)
	// TODO: Maybe a graph data structure would be better
// s.rm[(sp + sc)] = append(s.rm[(sp+sc)], rel)
// s.rm[(pp + sc)] = append(s.rm[(pp+sc)], rel)
// s.rm[(sp + pc)] = append(s.rm[(sp+pc)], rel)
// s.rm[(pp + pc)] = append(s.rm[(pp+pc)], rel)
if alias && (sc != rel.Left.Ti.Singular || pc != rel.Left.Ti.Plural) {
s.at[aliasKey{sc, sp}] = rel.Left.Ti.Singular
s.at[aliasKey{sc, pp}] = rel.Left.Ti.Singular
s.at[aliasKey{pc, sp}] = rel.Left.Ti.Plural
s.at[aliasKey{pc, pp}] = rel.Left.Ti.Plural
}
return nil
}
func (s *DBSchema) GetRel(child, parent, through string) (DBRel, error) {
var rel DBRel
rels, ok := s.rm[(child + parent)]
if !ok || len(rels) == 0 {
return rel, fmt.Errorf("relationship: '%s' -> '%s' not found",
child, parent)
}
if len(through) != 0 {
for _, v := range rels {
if v.Through.ColL.Table == through {
return v, nil
}
}
}
return rels[0], nil
}
func (ti *DBTableInfo) ColumnExists(name string) bool {
_, ok := ti.colMap[name]
return ok
}
func (ti *DBTableInfo) getColumn(name string) (DBColumn, bool) {
var c DBColumn
if i, ok := ti.colMap[name]; ok {
return ti.Columns[i], true
}
return c, false
}
func (ti *DBTableInfo) GetColumn(name string) (DBColumn, error) {
c, ok := ti.getColumn(name)
if ok {
return c, nil
}
return c, fmt.Errorf("column: '%s.%s' not found", ti.Name, name)
}
func (ti *DBTableInfo) GetColumnB(name string) (DBColumn, error) {
c, err := ti.GetColumn(name)
if err != nil {
return c, err
}
if c.Blocked {
return c, fmt.Errorf("column: '%s.%s' blocked", ti.Name, name)
}
return c, nil
}
func (s *DBSchema) GetFunctions() map[string]DBFunction {
return s.fm
}
func getRelName(colName string) string {
cn := strings.ToLower(colName)
if strings.HasSuffix(cn, "_id") {
return colName[:len(colName)-3]
}
if strings.HasSuffix(cn, "_ids") {
return colName[:len(colName)-4]
}
if strings.HasPrefix(cn, "id_") {
return colName[3:]
}
if strings.HasPrefix(cn, "ids_") {
return colName[4:]
}
return colName
}
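
// getRelNameExamples is a hedged illustration (unused by the package) of the
// trimming convention implemented by getRelName above: id markers are
// stripped from either end, and unmarked names pass through untouched.
func getRelNameExamples() map[string]string {
	return map[string]string{
		"customer_id": getRelName("customer_id"), // "customer": "_id" suffix trimmed
		"tag_ids":     getRelName("tag_ids"),     // "tag": "_ids" suffix trimmed
		"id_owner":    getRelName("id_owner"),    // "owner": "id_" prefix trimmed
		"email":       getRelName("email"),       // "email": no marker, unchanged
	}
}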
func (s *DBSchema) Type() string {
return s.typ
}
func (s *DBSchema) DBVersion() int {
return s.ver
} | |
transfer.rs | use lazy_static::lazy_static;
use contract_ffi::value::U512;
use engine_core::engine_state::CONV_RATE;
use engine_shared::motes::Motes;
use crate::{
support::test_support::{self, ExecuteRequestBuilder, InMemoryWasmTestBuilder},
test::{
DEFAULT_ACCOUNT_ADDR, DEFAULT_ACCOUNT_INITIAL_BALANCE, DEFAULT_GENESIS_CONFIG,
DEFAULT_PAYMENT,
},
};
const CONTRACT_TRANSFER_PURSE_TO_ACCOUNT: &str = "transfer_purse_to_account.wasm";
const CONTRACT_TRANSFER_TO_ACCOUNT_01: &str = "transfer_to_account_01.wasm";
const CONTRACT_TRANSFER_TO_ACCOUNT_02: &str = "transfer_to_account_02.wasm";
lazy_static! {
static ref TRANSFER_1_AMOUNT: U512 = U512::from(250_000_000) + 1000;
static ref TRANSFER_2_AMOUNT: U512 = U512::from(750);
static ref TRANSFER_2_AMOUNT_WITH_ADV: U512 = *DEFAULT_PAYMENT + *TRANSFER_2_AMOUNT;
static ref TRANSFER_TOO_MUCH: U512 = U512::from(u64::max_value());
static ref ACCOUNT_1_INITIAL_BALANCE: U512 = *DEFAULT_PAYMENT;
}
const ACCOUNT_1_ADDR: [u8; 32] = [1u8; 32];
const ACCOUNT_2_ADDR: [u8; 32] = [2u8; 32];
#[ignore]
#[test]
fn should_transfer_to_account() {
let initial_genesis_amount: U512 = U512::from(DEFAULT_ACCOUNT_INITIAL_BALANCE);
let transfer_amount: U512 = *TRANSFER_1_AMOUNT;
// Run genesis
let mut builder = InMemoryWasmTestBuilder::default();
let builder = builder.run_genesis(&DEFAULT_GENESIS_CONFIG);
let default_account = builder
.get_account(DEFAULT_ACCOUNT_ADDR)
.expect("should get account");
let default_account_purse_id = default_account.purse_id();
// Check genesis account balance
let genesis_balance = builder.get_purse_balance(default_account_purse_id);
assert_eq!(genesis_balance, initial_genesis_amount,);
// Exec transfer contract
let exec_request_1 = ExecuteRequestBuilder::standard(
DEFAULT_ACCOUNT_ADDR,
CONTRACT_TRANSFER_TO_ACCOUNT_01,
(ACCOUNT_1_ADDR,),
)
.build();
builder.exec(exec_request_1).expect_success().commit();
let account = builder
.get_account(ACCOUNT_1_ADDR)
.expect("should get account");
let account_purse_id = account.purse_id();
// Check genesis account balance
let genesis_balance = builder.get_purse_balance(default_account_purse_id);
let gas_cost =
Motes::from_gas(builder.exec_costs(0)[0], CONV_RATE).expect("should convert gas to motes");
assert_eq!(
genesis_balance,
initial_genesis_amount - gas_cost.value() - transfer_amount
);
// Check account 1 balance
let account_1_balance = builder.get_purse_balance(account_purse_id);
assert_eq!(account_1_balance, transfer_amount,);
}
#[ignore]
#[test]
fn should_transfer_from_account_to_account() {
let initial_genesis_amount: U512 = U512::from(DEFAULT_ACCOUNT_INITIAL_BALANCE);
let transfer_1_amount: U512 = *TRANSFER_1_AMOUNT;
let transfer_2_amount: U512 = *TRANSFER_2_AMOUNT;
// Run genesis
let mut builder = InMemoryWasmTestBuilder::default();
let builder = builder.run_genesis(&DEFAULT_GENESIS_CONFIG);
let default_account = builder
.get_account(DEFAULT_ACCOUNT_ADDR)
.expect("should get account");
let default_account_purse_id = default_account.purse_id();
// Check genesis account balance
let genesis_balance = builder.get_purse_balance(default_account_purse_id);
assert_eq!(genesis_balance, initial_genesis_amount,);
// Exec transfer 1 contract
let exec_request_1 = ExecuteRequestBuilder::standard(
DEFAULT_ACCOUNT_ADDR,
CONTRACT_TRANSFER_TO_ACCOUNT_01,
(ACCOUNT_1_ADDR,),
)
.build();
builder.exec(exec_request_1).expect_success().commit();
let exec_1_response = builder
.get_exec_response(0)
.expect("should have exec response");
let genesis_balance = builder.get_purse_balance(default_account_purse_id);
let gas_cost = Motes::from_gas(test_support::get_exec_costs(&exec_1_response)[0], CONV_RATE)
.expect("should convert");
assert_eq!(
genesis_balance,
initial_genesis_amount - gas_cost.value() - transfer_1_amount
);
// Check account 1 balance
let account_1 = builder
.get_account(ACCOUNT_1_ADDR)
.expect("should have account 1");
let account_1_purse_id = account_1.purse_id();
let account_1_balance = builder.get_purse_balance(account_1_purse_id);
assert_eq!(account_1_balance, transfer_1_amount,);
// Exec transfer 2 contract
let exec_request_2 = ExecuteRequestBuilder::standard(
ACCOUNT_1_ADDR,
CONTRACT_TRANSFER_TO_ACCOUNT_02,
(*TRANSFER_2_AMOUNT,),
)
.build();
builder.exec(exec_request_2).expect_success().commit();
let exec_2_response = builder
.get_exec_response(1)
.expect("should have exec response");
let account_2 = builder
.get_account(ACCOUNT_2_ADDR)
.expect("should have account 2");
let account_2_purse_id = account_2.purse_id();
// Check account 1 balance
let account_1_balance = builder.get_purse_balance(account_1_purse_id);
let gas_cost = Motes::from_gas(test_support::get_exec_costs(&exec_2_response)[0], CONV_RATE)
.expect("should convert");
assert_eq!(
account_1_balance,
transfer_1_amount - gas_cost.value() - transfer_2_amount
);
let account_2_balance = builder.get_purse_balance(account_2_purse_id);
assert_eq!(account_2_balance, transfer_2_amount,);
}
#[ignore]
#[test]
fn should_transfer_to_existing_account() |
#[ignore]
#[test]
fn should_fail_when_insufficient_funds() {
// Run genesis
let exec_request_1 = ExecuteRequestBuilder::standard(
DEFAULT_ACCOUNT_ADDR,
CONTRACT_TRANSFER_TO_ACCOUNT_01,
(ACCOUNT_1_ADDR,),
)
.build();
let exec_request_2 = ExecuteRequestBuilder::standard(
ACCOUNT_1_ADDR,
CONTRACT_TRANSFER_TO_ACCOUNT_02,
(*TRANSFER_2_AMOUNT_WITH_ADV,),
)
.build();
let exec_request_3 = ExecuteRequestBuilder::standard(
ACCOUNT_1_ADDR,
CONTRACT_TRANSFER_TO_ACCOUNT_02,
(*TRANSFER_TOO_MUCH,),
)
.build();
let result = InMemoryWasmTestBuilder::default()
.run_genesis(&DEFAULT_GENESIS_CONFIG)
// Exec transfer contract
.exec(exec_request_1)
.expect_success()
.commit()
// Exec transfer contract
.exec(exec_request_2)
.expect_success()
.commit()
// // Exec transfer contract
.exec(exec_request_3)
// .expect_success()
.commit()
.finish();
assert_eq!(
"Trap(Trap { kind: Unreachable })",
result
.builder()
.exec_error_message(2)
.expect("should have error message"),
)
}
#[ignore]
#[test]
fn should_transfer_total_amount() {
let mut builder = test_support::InMemoryWasmTestBuilder::default();
let exec_request_1 = ExecuteRequestBuilder::standard(
DEFAULT_ACCOUNT_ADDR,
CONTRACT_TRANSFER_PURSE_TO_ACCOUNT,
(ACCOUNT_1_ADDR, *ACCOUNT_1_INITIAL_BALANCE),
)
.build();
let exec_request_2 = ExecuteRequestBuilder::standard(
ACCOUNT_1_ADDR,
CONTRACT_TRANSFER_PURSE_TO_ACCOUNT,
(ACCOUNT_2_ADDR, *ACCOUNT_1_INITIAL_BALANCE),
)
.build();
builder
.run_genesis(&DEFAULT_GENESIS_CONFIG)
.exec(exec_request_1)
.expect_success()
.commit()
.exec(exec_request_2)
.commit()
.expect_success()
.finish();
}
| {
let initial_genesis_amount: U512 = U512::from(DEFAULT_ACCOUNT_INITIAL_BALANCE);
let transfer_1_amount: U512 = *TRANSFER_1_AMOUNT;
let transfer_2_amount: U512 = *TRANSFER_2_AMOUNT;
// Run genesis
let mut builder = InMemoryWasmTestBuilder::default();
let builder = builder.run_genesis(&DEFAULT_GENESIS_CONFIG);
let default_account = builder
.get_account(DEFAULT_ACCOUNT_ADDR)
.expect("should get account");
let default_account_purse_id = default_account.purse_id();
// Check genesis account balance
let genesis_balance = builder.get_purse_balance(default_account_purse_id);
assert_eq!(genesis_balance, initial_genesis_amount,);
// Exec transfer 1 contract
let exec_request_1 = ExecuteRequestBuilder::standard(
DEFAULT_ACCOUNT_ADDR,
CONTRACT_TRANSFER_TO_ACCOUNT_01,
(ACCOUNT_1_ADDR,),
)
.build();
builder.exec(exec_request_1).expect_success().commit();
// Exec transfer contract
let account_1 = builder
.get_account(ACCOUNT_1_ADDR)
.expect("should get account");
let account_1_purse_id = account_1.purse_id();
// Check genesis account balance
let genesis_balance = builder.get_purse_balance(default_account_purse_id);
let gas_cost =
Motes::from_gas(builder.exec_costs(0)[0], CONV_RATE).expect("should convert gas to motes");
assert_eq!(
genesis_balance,
initial_genesis_amount - gas_cost.value() - transfer_1_amount
);
// Check account 1 balance
let account_1_balance = builder.get_purse_balance(account_1_purse_id);
assert_eq!(account_1_balance, transfer_1_amount,);
// Exec transfer contract
let exec_request_2 = ExecuteRequestBuilder::standard(
ACCOUNT_1_ADDR,
CONTRACT_TRANSFER_TO_ACCOUNT_02,
(*TRANSFER_2_AMOUNT,),
)
.build();
builder.exec(exec_request_2).expect_success().commit();
let account_2 = builder
.get_account(ACCOUNT_2_ADDR)
.expect("should get account");
let account_2_purse_id = account_2.purse_id();
// Check account 1 balance
let account_1_balance = builder.get_purse_balance(account_1_purse_id);
let gas_cost =
Motes::from_gas(builder.exec_costs(1)[0], CONV_RATE).expect("should convert gas to motes");
assert_eq!(
account_1_balance,
transfer_1_amount - gas_cost.value() - transfer_2_amount,
);
// Check account 2 balance
let account_2_balance_transform = builder.get_purse_balance(account_2_purse_id);
assert_eq!(account_2_balance_transform, transfer_2_amount);
} |
index.js | const http = require('http'); | const LOG = require('./utils/logger');
const server = http.createServer(app);
server.listen(config.PORT, () => {
LOG.info(`🌐Server running on port ${config.PORT}`);
});
process.on('SIGTERM', () => {
LOG.info('SIGTERM signal received: closing HTTP server');
server.close(() => {
LOG.info('HTTP server closed');
});
});
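
// A hedged extension of the handler above (signal choice and grace period are
// illustrative): treat Ctrl+C the same way and force-exit if close() hangs.
process.on('SIGINT', () => {
  LOG.info('SIGINT signal received: closing HTTP server');
  server.close(() => process.exit(0));
  setTimeout(() => process.exit(1), 10000).unref();
});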
module.exports = server; | const app = require('./app');
const config = require('./utils/config');
|
value.rs | use Type;
use util;
use std;
/// A register.
/// Stores the zero-based index of the node in the
/// DAG that is referred to.
#[derive(Clone,Debug,PartialEq,Eq)]
pub struct RegisterRef {
/// The number of the node that is referred to.
/// Zero based.
pub register_id: util::Id,
/// The number of the result from the node.
pub result_number: u32,
/// The type.
pub ty: Type,
}
/// A constant integer.
#[derive(Clone,Debug,PartialEq,Eq)]
pub struct ConstantInteger {
pub bit_width: u32,
pub value: i64,
}
#[derive(Clone,PartialEq,Eq)]
pub enum Value
{
/// An argument to the function.
ArgumentRef {
id: util::Id,
ty: Type,
},
/// A constant integer.
ConstantInteger(ConstantInteger),
/// A register.
RegisterRef(RegisterRef),
}
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum ValueInfo
{
Input,
Output,
InputOutput,
}
impl Value
{
/// Gets the type of the value.
pub fn ty(&self) -> Type {
match *self { | }
/// Creates a new n-bit constant integer.
pub fn i(width: u32, value: i64) -> Self {
Value::ConstantInteger(ConstantInteger {
bit_width: width,
value: value,
})
}
/// Creates a new register reference.
pub fn register_ref(register_id: util::Id, result_number: u32, ty: Type) -> Self {
Value::RegisterRef(RegisterRef {
register_id: register_id,
result_number: result_number,
ty: ty,
})
}
pub fn is_register_ref(&self) -> bool {
        if let Value::RegisterRef(..) = *self { true } else { false }
}
pub fn expect_constant_integer(&self) -> &ConstantInteger {
if let Value::ConstantInteger(ref c) = *self {
c
} else {
panic!("expected a constant integer");
}
}
pub fn expect_register_ref(&self) -> &RegisterRef {
if let Value::RegisterRef(ref r) = *self {
r
} else {
panic!("expected a register reference");
}
}
}
impl std::fmt::Debug for Value
{
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
Value::ConstantInteger(ref c) => {
write!(fmt, "i{} {}", c.bit_width, c.value)
},
Value::RegisterRef(ref reg) => write!(fmt, "%<reg:{}>", reg.register_id),
Value::ArgumentRef { id, .. } => write!(fmt, "%<arg:{}>", id),
}
}
} | Value::ArgumentRef { ref ty, .. } => ty.clone(),
Value::ConstantInteger(ref c) => Type::Integer { bit_width: c.bit_width },
Value::RegisterRef(ref reg) => reg.ty.clone(),
} |
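
// A minimal sketch (hypothetical, not exercised elsewhere in this module) of
// the constructor/accessor pairing above: `Value::i` builds a constant and
// `expect_constant_integer` recovers it.
#[allow(dead_code)]
fn value_usage_sketch() {
    let c = Value::i(32, 42);
    assert!(!c.is_register_ref());
    let int = c.expect_constant_integer();
    assert_eq!(int.bit_width, 32);
    assert_eq!(int.value, 42);
}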
one.py | import numpy as np
import matplotlib.pyplot as plt
from one_a import one_a
from one_b import one_b
from one_c import one_c
from one_d import one_d
from one_e import one_e
def random_generator(seed, m=2 ** 64 - 1, a=2349543, c=913842, a1=21, a2=35, a3=4, a4=4294957665):
|
def all_one(rand_gen):
one_a(rand_gen)
plt.cla()
one_b(rand_gen)
plt.cla()
one_c(rand_gen)
plt.cla()
one_d(rand_gen)
plt.cla()
one_e(rand_gen)
plt.cla()
| """
    Generates pseudorandom numbers with a combination of (M)LCC, 64-bit XOR shift, and MWC
:param seed: Seed to use
:param m: Determines period of the MLCC
:param a: For the MLCC
:param c: For the MLCC
:param a1: For the first bit shift
:param a2: For the second bit shift
:param a3: For the third bit shift
:param a4: For the MWC
    :return: Yields pseudorandom floats in [0, 1)
"""
# First linear congruential generator
# While true, so the generator never stops making new numbers
    # This is used to make sure the XOR shift is 64 bit
bit_64 = 0xffffffffffffffff
while True:
# This is MLCC part
generated_number = (a * seed + c) % m
# Now bit shift
generated_number = generated_number ^ (generated_number >> a1) & bit_64
generated_number = generated_number ^ (generated_number << a2) & bit_64
generated_number = generated_number ^ (generated_number >> a3) & bit_64
# Now MWC part
mwc_out = a4 * (generated_number & (2 ** 32 - 1)) + (generated_number >> 32)
        seed = mwc_out  # set the seed to a new number, so a different number is generated next time
mwc_out = mwc_out / m
if mwc_out > 1.:
            # Have to make it between 0 and 1, so mod 1. makes sure it's between 0 and 1 now
close_to_final = mwc_out % 1.
else:
close_to_final = mwc_out
yield close_to_final |
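
# A small consumption sketch (seed and sample count are arbitrary): the
# function above is an infinite generator, so it is sampled with next().
def sample_uniform(seed=42, n=5):
    rand_gen = random_generator(seed)
    return [next(rand_gen) for _ in range(n)]  # floats in [0, 1)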
create_o_auth2_client_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
package admin
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
"github.com/ory/hydra/internal/httpclient/models"
)
// NewCreateOAuth2ClientParams creates a new CreateOAuth2ClientParams object,
// with the default timeout for this client.
//
// Default values are not hydrated, since defaults are normally applied by the API server side.
//
// To enforce default values in parameter, use SetDefaults or WithDefaults.
func NewCreateOAuth2ClientParams() *CreateOAuth2ClientParams {
return &CreateOAuth2ClientParams{
timeout: cr.DefaultTimeout,
}
}
// NewCreateOAuth2ClientParamsWithTimeout creates a new CreateOAuth2ClientParams object
// with the ability to set a timeout on a request.
func NewCreateOAuth2ClientParamsWithTimeout(timeout time.Duration) *CreateOAuth2ClientParams {
return &CreateOAuth2ClientParams{
timeout: timeout,
}
}
// NewCreateOAuth2ClientParamsWithContext creates a new CreateOAuth2ClientParams object
// with the ability to set a context for a request.
func NewCreateOAuth2ClientParamsWithContext(ctx context.Context) *CreateOAuth2ClientParams |
// NewCreateOAuth2ClientParamsWithHTTPClient creates a new CreateOAuth2ClientParams object
// with the ability to set a custom HTTPClient for a request.
func NewCreateOAuth2ClientParamsWithHTTPClient(client *http.Client) *CreateOAuth2ClientParams {
return &CreateOAuth2ClientParams{
HTTPClient: client,
}
}
/* CreateOAuth2ClientParams contains all the parameters to send to the API endpoint
for the create o auth2 client operation.
Typically these are written to a http.Request.
*/
type CreateOAuth2ClientParams struct {
// Body.
Body *models.OAuth2Client
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithDefaults hydrates default values in the create o auth2 client params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *CreateOAuth2ClientParams) WithDefaults() *CreateOAuth2ClientParams {
o.SetDefaults()
return o
}
// SetDefaults hydrates default values in the create o auth2 client params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *CreateOAuth2ClientParams) SetDefaults() {
// no default values defined for this parameter
}
// WithTimeout adds the timeout to the create o auth2 client params
func (o *CreateOAuth2ClientParams) WithTimeout(timeout time.Duration) *CreateOAuth2ClientParams {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the create o auth2 client params
func (o *CreateOAuth2ClientParams) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the create o auth2 client params
func (o *CreateOAuth2ClientParams) WithContext(ctx context.Context) *CreateOAuth2ClientParams {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the create o auth2 client params
func (o *CreateOAuth2ClientParams) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the create o auth2 client params
func (o *CreateOAuth2ClientParams) WithHTTPClient(client *http.Client) *CreateOAuth2ClientParams {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the create o auth2 client params
func (o *CreateOAuth2ClientParams) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithBody adds the body to the create o auth2 client params
func (o *CreateOAuth2ClientParams) WithBody(body *models.OAuth2Client) *CreateOAuth2ClientParams {
o.SetBody(body)
return o
}
// SetBody adds the body to the create o auth2 client params
func (o *CreateOAuth2ClientParams) SetBody(body *models.OAuth2Client) {
o.Body = body
}
// WriteToRequest writes these params to a swagger request
func (o *CreateOAuth2ClientParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
if o.Body != nil {
if err := r.SetBodyParam(o.Body); err != nil {
return err
}
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
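
// A hedged usage sketch (not generated; the admin client normally constructs
// these internally): build the params with the fluent setters, then hand them
// to the generated CreateOAuth2Client operation, which calls WriteToRequest.
func createOAuth2ClientParamsSketch() *CreateOAuth2ClientParams {
	return NewCreateOAuth2ClientParams().
		WithTimeout(30 * time.Second).
		WithBody(&models.OAuth2Client{})
}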
| {
return &CreateOAuth2ClientParams{
Context: ctx,
}
} |
DeviceDescriptorMediumType.go | //
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
package model
import (
"github.com/apache/plc4x/plc4go/internal/plc4go/spi/utils"
"github.com/pkg/errors"
)
// Code generated by code-generation. DO NOT EDIT.
type DeviceDescriptorMediumType uint8
type IDeviceDescriptorMediumType interface {
Serialize(writeBuffer utils.WriteBuffer) error
}
const (
DeviceDescriptorMediumType_TP1 DeviceDescriptorMediumType = 0x0
DeviceDescriptorMediumType_PL110 DeviceDescriptorMediumType = 0x1
DeviceDescriptorMediumType_RF DeviceDescriptorMediumType = 0x2
DeviceDescriptorMediumType_TP0 DeviceDescriptorMediumType = 0x3
DeviceDescriptorMediumType_PL132 DeviceDescriptorMediumType = 0x4
DeviceDescriptorMediumType_KNX_IP DeviceDescriptorMediumType = 0x5
)
var DeviceDescriptorMediumTypeValues []DeviceDescriptorMediumType
func init() {
_ = errors.New
DeviceDescriptorMediumTypeValues = []DeviceDescriptorMediumType{
DeviceDescriptorMediumType_TP1,
DeviceDescriptorMediumType_PL110,
DeviceDescriptorMediumType_RF,
DeviceDescriptorMediumType_TP0,
DeviceDescriptorMediumType_PL132,
DeviceDescriptorMediumType_KNX_IP,
}
}
func DeviceDescriptorMediumTypeByValue(value uint8) DeviceDescriptorMediumType {
switch value {
case 0x0:
return DeviceDescriptorMediumType_TP1
case 0x1:
return DeviceDescriptorMediumType_PL110
case 0x2:
return DeviceDescriptorMediumType_RF
case 0x3:
return DeviceDescriptorMediumType_TP0
case 0x4:
return DeviceDescriptorMediumType_PL132
case 0x5:
return DeviceDescriptorMediumType_KNX_IP
}
return 0
}
func DeviceDescriptorMediumTypeByName(value string) DeviceDescriptorMediumType {
switch value {
case "TP1":
return DeviceDescriptorMediumType_TP1
case "PL110":
return DeviceDescriptorMediumType_PL110
case "RF":
return DeviceDescriptorMediumType_RF
case "TP0":
return DeviceDescriptorMediumType_TP0
case "PL132":
return DeviceDescriptorMediumType_PL132
case "KNX_IP":
return DeviceDescriptorMediumType_KNX_IP
} | func CastDeviceDescriptorMediumType(structType interface{}) DeviceDescriptorMediumType {
castFunc := func(typ interface{}) DeviceDescriptorMediumType {
if sDeviceDescriptorMediumType, ok := typ.(DeviceDescriptorMediumType); ok {
return sDeviceDescriptorMediumType
}
return 0
}
return castFunc(structType)
}
func (m DeviceDescriptorMediumType) LengthInBits() uint16 {
return 4
}
func (m DeviceDescriptorMediumType) LengthInBytes() uint16 {
return m.LengthInBits() / 8
}
func DeviceDescriptorMediumTypeParse(readBuffer utils.ReadBuffer) (DeviceDescriptorMediumType, error) {
val, err := readBuffer.ReadUint8("DeviceDescriptorMediumType", 4)
if err != nil {
		return 0, err
}
return DeviceDescriptorMediumTypeByValue(val), nil
}
func (e DeviceDescriptorMediumType) Serialize(writeBuffer utils.WriteBuffer) error {
return writeBuffer.WriteUint8("DeviceDescriptorMediumType", 4, uint8(e), utils.WithAdditionalStringRepresentation(e.name()))
}
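
// A hedged sketch (value and name chosen for illustration; nothing calls
// this) of the lookup helpers above: raw wire values resolve through ByValue
// and symbolic names through ByName.
func deviceDescriptorMediumTypeSketch() bool {
	byValue := DeviceDescriptorMediumTypeByValue(0x2) // DeviceDescriptorMediumType_RF
	byName := DeviceDescriptorMediumTypeByName("KNX_IP")
	return byValue == DeviceDescriptorMediumType_RF && byName == DeviceDescriptorMediumType_KNX_IP
}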
func (e DeviceDescriptorMediumType) name() string {
switch e {
case DeviceDescriptorMediumType_TP1:
return "TP1"
case DeviceDescriptorMediumType_PL110:
return "PL110"
case DeviceDescriptorMediumType_RF:
return "RF"
case DeviceDescriptorMediumType_TP0:
return "TP0"
case DeviceDescriptorMediumType_PL132:
return "PL132"
case DeviceDescriptorMediumType_KNX_IP:
return "KNX_IP"
}
return ""
}
func (e DeviceDescriptorMediumType) String() string {
return e.name()
} | return 0
}
|
index.tsx | import '@storybook/addon-actions'
import { storiesOf } from '@storybook/react'
import React from 'react'
import { FaArrowLeft, FaInfoCircle } from 'react-icons/fa'
import { Flex, Text } from 'rebass'
import styled from 'styled-components'
import Fetch from './Fetch'
import { FormComponent, FormContainer, AcceptHosted } from '../src'
const AuthorizeNetAuthInfo = {
clientKey: process.env.AUTHORIZENET_CLIENTKEY,
apiLoginId: process.env.AUTHORIZENET_LOGINID
}
storiesOf('FormComponent', module)
.add('with default style', () => {
return (
<FormContainer
{...AuthorizeNetAuthInfo}
environment="sandbox"
disclaimer="This is a disclaimer"
component={FormComponent}
amount={25}
/>
)
})
.add('with custom style (using style objects)', () => {
return (
<FormContainer
{...AuthorizeNetAuthInfo}
environment="sandbox"
component={props => (
<FormComponent
{...props}
style={{
form: { backgroundColor: 'white' },
input: {
backgroundColor: 'white',
fontFamily: 'monospace',
color: 'black',
border: '1px solid black'
},
button: {
backgroundColor: 'white',
border: '1px solid black',
boxShadow: 'none',
color: 'black'
}
}}
/>
)}
amount={25}
/>
)
})
.add('with custom style (using styled-components)', () => {
const StyledForm = styled(FormComponent)`
font-family: monospace;
button {
background-color: white;
color: black;
border: 1px solid black;
}
`
return (
<FormContainer
{...AuthorizeNetAuthInfo}
amount={25}
environment={'sandbox'}
component={StyledForm}
/>
)
})
.add('as a payment step', () => {
return (
<div
style={{ height: '100vh', backgroundColor: '#6772e5', padding: '20px' }}
>
<div
style={{
alignItems: 'center',
borderRadius: '4px',
cursor: 'pointer',
display: 'inline-flex', | margin: '1em 0em',
padding: '16px'
}}
>
<FaArrowLeft color="white" fontSize={'1.5em'} />
<Text color="white" fontFamily="roboto" pl={2}>
back
</Text>
</div>
<Flex alignItems="center" px={[3, 5] as any} py={[3, 0] as any}>
<FaInfoCircle color="white" fontSize="2em" />
<Text
color="white"
fontFamily="roboto"
fontSize={4}
fontWeight={'500'}
textAlign="center"
p={3}
>
Payment details
</Text>
</Flex>
<FormContainer
{...AuthorizeNetAuthInfo}
environment={'sandbox'}
amount={25}
/>
</div>
)
})
storiesOf('Accept Hosted', module).add('with embedded iframe', () => {
return (
<Fetch
url={
'https://us-central1-react-authorize-net.cloudfunctions.net/get-form-token'
}
>
{response => (
<AcceptHosted
type={'iframe'}
mode={'sandbox'}
formToken={response.token}
onCancel={() => console.log(`New cancel message from Accept Hosted`)}
onResize={(width, height) =>
console.log(
`New resize message from Accept Hosted\nwidth: ${width} height: ${height}`
)
}
/>
)}
</Fetch>
)
}) | |
leetcode69_Sqrt_x.py | # Implement int sqrt(int x).
# Compute and return the square root of x, where x is guaranteed to be a non-negative integer.
# Since the return type is an integer, the decimal digits are truncated and only the integer part of the result is returned.
# Example 1:
| # Example 2:
# Input: 8
# Output: 2
# Explanation: The square root of 8 is 2.82842..., and since
# the decimal part is truncated, 2 is returned.
class Solution:
def mySqrt(self, x):
"""
:type x: int
:rtype: int
"""
if x == 0:
return 0
elif x < 4:
return 1
elif x < 9:
return 2
res = self.helper(x, 0, x//2)
return res
def helper(self, x, left, right):
mid = (left + right)//2
        if mid**2 <= x and (mid+1)**2 > x:  # mid is the floor of sqrt(x) exactly when this holds
return mid
elif mid**2 > x:
right = mid
elif mid**2 < x:
left = mid
return self.helper(x, left, right)
# Time: O(log(n))
# Space: O(1)
# Difficulty: easy | # Input: 4
# Output: 2 |
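
# A quick self-check sketch (not part of the original solution; needs
# Python 3.8+ for math.isqrt): compare the binary search against the
# standard library over a small range.
if __name__ == "__main__":
    import math
    s = Solution()
    for n in range(2000):
        assert s.mySqrt(n) == math.isqrt(n), n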
lookups.py | from selectable.base import ModelLookup
from selectable.registry import registry
from .models import Taxonomy
class TaxonomyLookup(ModelLookup):
|
registry.register(TaxonomyLookup)
| model = Taxonomy
search_fields = ('name__icontains', ) |
ex_binding_textures.py | # coding=utf-8
"""Using 2 different textures in the same Fragment Shader"""
import glfw
from OpenGL.GL import *
import OpenGL.GL.shaders
import numpy as np
import sys
import os.path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import grafica.transformations as tr
import grafica.basic_shapes as bs
import grafica.easy_shaders as es
from grafica.assets_path import getAssetPath
from grafica.gpu_shape import GPUShape, SIZE_IN_BYTES
__author__ = "Sebastián Olmos"
__license__ = "MIT"
# We extend the functionality of a GPUShape with an additional texture.
class TexGPUShape(GPUShape):
def __init__(self):
"""VAO, VBO, EBO and texture handlers to GPU memory"""
super().__init__()
self.texture2 = None
def __str__(self):
return super().__str__() + " tex=" + str(self.texture2)
def c | self):
"""Freeing GPU memory"""
super().clear()
if self.texture2 != None:
glDeleteTextures(1, [self.texture2])
# Shader that handles two textures
class DoubleTextureTransformShaderProgram:
def __init__(self):
vertex_shader = """
#version 330
uniform mat4 transform;
in vec3 position;
in vec2 texCoords;
out vec2 outTexCoords;
void main()
{
gl_Position = transform * vec4(position, 1.0f);
outTexCoords = texCoords;
}
"""
fragment_shader = """
#version 330
// gl_FragCoord contains the (x,y) fragment coordinates of the window.
// We also set the origin to the upper left corner
layout(origin_upper_left) in vec4 gl_FragCoord;
in vec2 outTexCoords;
out vec4 outColor;
uniform sampler2D upTexture;
uniform sampler2D downTexture;
uniform float mousePosY;
void main()
{
vec4 finalColor;
if ( gl_FragCoord.y > mousePosY){
finalColor = texture(downTexture, outTexCoords);
}
else {
finalColor = texture(upTexture, outTexCoords);
}
outColor = finalColor;
}
"""
# Binding artificial vertex array object for validation
VAO = glGenVertexArrays(1)
glBindVertexArray(VAO)
# Compiling our shader program
self.shaderProgram = OpenGL.GL.shaders.compileProgram(
OpenGL.GL.shaders.compileShader(vertex_shader, GL_VERTEX_SHADER),
OpenGL.GL.shaders.compileShader(fragment_shader, GL_FRAGMENT_SHADER))
def setupVAO(self, gpuShape):
glBindVertexArray(gpuShape.vao)
glBindBuffer(GL_ARRAY_BUFFER, gpuShape.vbo)
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, gpuShape.ebo)
# 3d vertices + 2d texture coordinates => 3*4 + 2*4 = 20 bytes
position = glGetAttribLocation(self.shaderProgram, "position")
glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, 20, ctypes.c_void_p(0))
glEnableVertexAttribArray(position)
texCoords = glGetAttribLocation(self.shaderProgram, "texCoords")
glVertexAttribPointer(texCoords, 2, GL_FLOAT, GL_FALSE, 20, ctypes.c_void_p(3 * SIZE_IN_BYTES))
glEnableVertexAttribArray(texCoords)
# Unbinding current vao
glBindVertexArray(0)
def drawCall(self, gpuShape, mode=GL_TRIANGLES):
assert isinstance(gpuShape, TexGPUShape)
glBindVertexArray(gpuShape.vao)
# Binding the first texture
glActiveTexture(GL_TEXTURE0 + 0)
glBindTexture(GL_TEXTURE_2D, gpuShape.texture)
# Binding the second texture
glActiveTexture(GL_TEXTURE0 + 1)
glBindTexture(GL_TEXTURE_2D, gpuShape.texture2)
glDrawElements(mode, gpuShape.size, GL_UNSIGNED_INT, None)
# Unbind the current VAO
glBindVertexArray(0)
# A class to store the application control
class Controller:
def __init__(self):
self.fillPolygon = True
self.mousePos = (0.0, 0.0)
# global controller as communication with the callback function
controller = Controller()
def on_key(window, key, scancode, action, mods):
if action != glfw.PRESS:
return
global controller
if key == glfw.KEY_SPACE:
controller.fillPolygon = not controller.fillPolygon
elif key == glfw.KEY_ESCAPE:
glfw.set_window_should_close(window, True)
else:
print('Unknown key')
def cursor_pos_callback(window, x, y):
global controller
controller.mousePos = (x,y)
if __name__ == "__main__":
# Initialize glfw
if not glfw.init():
sys.exit(1)
width = 600
height = 600
window = glfw.create_window(width, height, "Double binding", None, None)
if not window:
glfw.terminate()
glfw.set_window_should_close(window, True)
glfw.make_context_current(window)
# Connecting the callback function 'on_key' to handle keyboard events
glfw.set_key_callback(window, on_key)
glfw.set_cursor_pos_callback(window, cursor_pos_callback)
# A simple shader program with position and texture coordinates as inputs.
pipeline = DoubleTextureTransformShaderProgram()
    # glUseProgram is issued inside the render loop below, once per frame
# Setting up the clear screen color
glClearColor(0.25, 0.25, 0.25, 1.0)
# Creating shapes on GPU memory
shape = bs.createTextureQuad(1, 1)
gpuShape = TexGPUShape().initBuffers()
pipeline.setupVAO(gpuShape)
gpuShape.fillBuffers(shape.vertices, shape.indices, GL_STATIC_DRAW)
gpuShape.texture = es.textureSimpleSetup(
getAssetPath("torres-del-paine-sq.jpg"), GL_CLAMP_TO_EDGE, GL_CLAMP_TO_EDGE, GL_LINEAR, GL_LINEAR)
gpuShape.texture2 = es.textureSimpleSetup(
getAssetPath("red_woodpecker.jpg"), GL_CLAMP_TO_EDGE, GL_CLAMP_TO_EDGE, GL_LINEAR, GL_LINEAR)
currentMousePos = [width/2, height/2]
while not glfw.window_should_close(window):
# Using GLFW to check for input events
glfw.poll_events()
if (controller.fillPolygon):
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
else:
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
theta = 0.3 * np.sin(glfw.get_time())
# Clearing the screen in both, color and depth
glClear(GL_COLOR_BUFFER_BIT)
glUseProgram(pipeline.shaderProgram)
# Drawing the shapes
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "transform"), 1, GL_TRUE,
np.matmul(
tr.shearing(0,theta,0,0,0,0),
tr.uniformScale(1.5)
)
)
# Binding samplers to both texture units
glUniform1i(glGetUniformLocation(pipeline.shaderProgram, "upTexture"), 0)
glUniform1i(glGetUniformLocation(pipeline.shaderProgram, "downTexture"), 1)
# Sending the mouse vertical location to our shader
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "mousePosY"), controller.mousePos[1])
pipeline.drawCall(gpuShape)
# Once the render is done, buffers are swapped, showing only the complete scene.
glfw.swap_buffers(window)
# freeing GPU memory
gpuShape.clear()
glfw.terminate()
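
# A hedged generalization sketch (unit count and uniform names illustrative),
# kept as comments since the script has already terminated at this point: the
# bind-then-point-the-sampler pattern above scales to any number of textures,
# one texture unit per sampler uniform.
#
#   for unit, (tex, uniform) in enumerate(zip(textures, sampler_names)):
#       glActiveTexture(GL_TEXTURE0 + unit)
#       glBindTexture(GL_TEXTURE_2D, tex)
#       glUniform1i(glGetUniformLocation(shader, uniform), unit)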
| lear( |
TaskStatusChip.tsx | import React from 'react';
import Avatar from '@material-ui/core/Avatar';
import Chip from '@material-ui/core/Chip';
import Icon from '@material-ui/core/Icon';
import { taskStatusColor } from '../../utils/colors';
import { taskStatusIconName, taskStatusMessage } from '../../utils/status';
import { cirrusColors } from '../../cirrusTheme';
import { createFragmentContainer } from 'react-relay';
import { graphql } from 'babel-plugin-relay/macro';
import { Tooltip } from '@material-ui/core';
function TaskStatusChip(props) {
let { task } = props;
let chip = (
<Chip | avatar={
<Avatar style={{ backgroundColor: taskStatusColor(task.status) }}>
<Icon style={{ color: cirrusColors.cirrusWhite }}>{taskStatusIconName(task.status)}</Icon>
</Avatar>
}
/>
);
if (task.executingTimestamp && task.executingTimestamp > 0) {
return (
<Tooltip title={`Execution started at ${new Date(task.executingTimestamp).toLocaleTimeString()}`}>{chip}</Tooltip>
);
}
return chip;
}
export default createFragmentContainer(TaskStatusChip, {
task: graphql`
fragment TaskStatusChip_task on Task {
status
durationInSeconds
executingTimestamp
}
`,
}); | className={props.className}
label={taskStatusMessage(task)} |
broker.pb.go | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: broker.proto
| /*
Package brokerpb is a generated protocol buffer package.
It is generated from these files:
broker.proto
It has these top-level messages:
Participant
Version
Pact
PactVersion
Tag
PublishPactRequest
PublishPactResponse
GetAllProviderPactsRequest
ConsumerInfo
Links
GetAllProviderPactsResponse
GetProviderConsumerVersionPactRequest
GetProviderConsumerVersionPactResponse
Verification
VerificationSummary
VerificationDetail
VerificationDetails
VerificationResult
PublishVerificationRequest
PublishVerificationResponse
RetrieveVerificationRequest
RetrieveVerificationResponse
BaseBrokerRequest
BrokerAPIInfoEntry
BrokerHomeResponse
*/
package brokerpb
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import proto1 "github.com/apache/servicecomb-service-center/server/core/proto"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
type Participant struct {
Id int32 `protobuf:"varint,1,opt,name=id" json:"id,omitempty"`
AppId string `protobuf:"bytes,2,opt,name=appId" json:"appId,omitempty"`
ServiceName string `protobuf:"bytes,3,opt,name=serviceName" json:"serviceName,omitempty"`
}
func (m *Participant) Reset() { *m = Participant{} }
func (m *Participant) String() string { return proto.CompactTextString(m) }
func (*Participant) ProtoMessage() {}
func (*Participant) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
func (m *Participant) GetId() int32 {
if m != nil {
return m.Id
}
return 0
}
func (m *Participant) GetAppId() string {
if m != nil {
return m.AppId
}
return ""
}
func (m *Participant) GetServiceName() string {
if m != nil {
return m.ServiceName
}
return ""
}
type Version struct {
Id int32 `protobuf:"varint,1,opt,name=id" json:"id,omitempty"`
Number string `protobuf:"bytes,2,opt,name=number" json:"number,omitempty"`
ParticipantId int32 `protobuf:"varint,3,opt,name=participantId" json:"participantId,omitempty"`
Order int32 `protobuf:"varint,4,opt,name=order" json:"order,omitempty"`
}
func (m *Version) Reset() { *m = Version{} }
func (m *Version) String() string { return proto.CompactTextString(m) }
func (*Version) ProtoMessage() {}
func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
func (m *Version) GetId() int32 {
if m != nil {
return m.Id
}
return 0
}
func (m *Version) GetNumber() string {
if m != nil {
return m.Number
}
return ""
}
func (m *Version) GetParticipantId() int32 {
if m != nil {
return m.ParticipantId
}
return 0
}
func (m *Version) GetOrder() int32 {
if m != nil {
return m.Order
}
return 0
}
type Pact struct {
Id int32 `protobuf:"varint,1,opt,name=id" json:"id,omitempty"`
ConsumerParticipantId int32 `protobuf:"varint,2,opt,name=consumerParticipantId" json:"consumerParticipantId,omitempty"`
ProviderParticipantId int32 `protobuf:"varint,3,opt,name=providerParticipantId" json:"providerParticipantId,omitempty"`
Sha []byte `protobuf:"bytes,4,opt,name=sha,proto3" json:"sha,omitempty"`
Content []byte `protobuf:"bytes,5,opt,name=content,proto3" json:"content,omitempty"`
}
func (m *Pact) Reset() { *m = Pact{} }
func (m *Pact) String() string { return proto.CompactTextString(m) }
func (*Pact) ProtoMessage() {}
func (*Pact) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
func (m *Pact) GetId() int32 {
if m != nil {
return m.Id
}
return 0
}
func (m *Pact) GetConsumerParticipantId() int32 {
if m != nil {
return m.ConsumerParticipantId
}
return 0
}
func (m *Pact) GetProviderParticipantId() int32 {
if m != nil {
return m.ProviderParticipantId
}
return 0
}
func (m *Pact) GetSha() []byte {
if m != nil {
return m.Sha
}
return nil
}
func (m *Pact) GetContent() []byte {
if m != nil {
return m.Content
}
return nil
}
type PactVersion struct {
Id int32 `protobuf:"varint,1,opt,name=id" json:"id,omitempty"`
VersionId int32 `protobuf:"varint,2,opt,name=versionId" json:"versionId,omitempty"`
PactId int32 `protobuf:"varint,3,opt,name=pactId" json:"pactId,omitempty"`
ProviderParticipantId int32 `protobuf:"varint,4,opt,name=providerParticipantId" json:"providerParticipantId,omitempty"`
}
func (m *PactVersion) Reset() { *m = PactVersion{} }
func (m *PactVersion) String() string { return proto.CompactTextString(m) }
func (*PactVersion) ProtoMessage() {}
func (*PactVersion) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
func (m *PactVersion) GetId() int32 {
if m != nil {
return m.Id
}
return 0
}
func (m *PactVersion) GetVersionId() int32 {
if m != nil {
return m.VersionId
}
return 0
}
func (m *PactVersion) GetPactId() int32 {
if m != nil {
return m.PactId
}
return 0
}
func (m *PactVersion) GetProviderParticipantId() int32 {
if m != nil {
return m.ProviderParticipantId
}
return 0
}
type Tag struct {
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
VersionId int32 `protobuf:"varint,2,opt,name=versionId" json:"versionId,omitempty"`
}
func (m *Tag) Reset() { *m = Tag{} }
func (m *Tag) String() string { return proto.CompactTextString(m) }
func (*Tag) ProtoMessage() {}
func (*Tag) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
func (m *Tag) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Tag) GetVersionId() int32 {
if m != nil {
return m.VersionId
}
return 0
}
type PublishPactRequest struct {
ProviderId string `protobuf:"bytes,1,opt,name=providerId" json:"providerId,omitempty"`
ConsumerId string `protobuf:"bytes,2,opt,name=consumerId" json:"consumerId,omitempty"`
Version string `protobuf:"bytes,3,opt,name=version" json:"version,omitempty"`
Pact []byte `protobuf:"bytes,4,opt,name=pact,proto3" json:"pact,omitempty"`
}
func (m *PublishPactRequest) Reset() { *m = PublishPactRequest{} }
func (m *PublishPactRequest) String() string { return proto.CompactTextString(m) }
func (*PublishPactRequest) ProtoMessage() {}
func (*PublishPactRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
func (m *PublishPactRequest) GetProviderId() string {
if m != nil {
return m.ProviderId
}
return ""
}
func (m *PublishPactRequest) GetConsumerId() string {
if m != nil {
return m.ConsumerId
}
return ""
}
func (m *PublishPactRequest) GetVersion() string {
if m != nil {
return m.Version
}
return ""
}
func (m *PublishPactRequest) GetPact() []byte {
if m != nil {
return m.Pact
}
return nil
}
type PublishPactResponse struct {
Response *proto1.Response `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
}
func (m *PublishPactResponse) Reset() { *m = PublishPactResponse{} }
func (m *PublishPactResponse) String() string { return proto.CompactTextString(m) }
func (*PublishPactResponse) ProtoMessage() {}
func (*PublishPactResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
func (m *PublishPactResponse) GetResponse() *proto1.Response {
if m != nil {
return m.Response
}
return nil
}
type GetAllProviderPactsRequest struct {
ProviderId string `protobuf:"bytes,1,opt,name=providerId" json:"providerId,omitempty"`
BaseUrl *BaseBrokerRequest `protobuf:"bytes,2,opt,name=baseUrl" json:"baseUrl,omitempty"`
}
func (m *GetAllProviderPactsRequest) Reset() { *m = GetAllProviderPactsRequest{} }
func (m *GetAllProviderPactsRequest) String() string { return proto.CompactTextString(m) }
func (*GetAllProviderPactsRequest) ProtoMessage() {}
func (*GetAllProviderPactsRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
func (m *GetAllProviderPactsRequest) GetProviderId() string {
if m != nil {
return m.ProviderId
}
return ""
}
func (m *GetAllProviderPactsRequest) GetBaseUrl() *BaseBrokerRequest {
if m != nil {
return m.BaseUrl
}
return nil
}
type ConsumerInfo struct {
Href string `protobuf:"bytes,1,opt,name=href" json:"href,omitempty"`
Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"`
}
func (m *ConsumerInfo) Reset() { *m = ConsumerInfo{} }
func (m *ConsumerInfo) String() string { return proto.CompactTextString(m) }
func (*ConsumerInfo) ProtoMessage() {}
func (*ConsumerInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
func (m *ConsumerInfo) GetHref() string {
if m != nil {
return m.Href
}
return ""
}
func (m *ConsumerInfo) GetName() string {
if m != nil {
return m.Name
}
return ""
}
type Links struct {
Pacts []*ConsumerInfo `protobuf:"bytes,1,rep,name=pacts" json:"pacts,omitempty"`
}
func (m *Links) Reset() { *m = Links{} }
func (m *Links) String() string { return proto.CompactTextString(m) }
func (*Links) ProtoMessage() {}
func (*Links) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }
func (m *Links) GetPacts() []*ConsumerInfo {
if m != nil {
return m.Pacts
}
return nil
}
type GetAllProviderPactsResponse struct {
Response *proto1.Response `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
XLinks *Links `protobuf:"bytes,2,opt,name=_links,json=Links" json:"_links,omitempty"`
}
func (m *GetAllProviderPactsResponse) Reset() { *m = GetAllProviderPactsResponse{} }
func (m *GetAllProviderPactsResponse) String() string { return proto.CompactTextString(m) }
func (*GetAllProviderPactsResponse) ProtoMessage() {}
func (*GetAllProviderPactsResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} }
func (m *GetAllProviderPactsResponse) GetResponse() *proto1.Response {
if m != nil {
return m.Response
}
return nil
}
func (m *GetAllProviderPactsResponse) GetXLinks() *Links {
if m != nil {
return m.XLinks
}
return nil
}
type GetProviderConsumerVersionPactRequest struct {
ProviderId string `protobuf:"bytes,1,opt,name=providerId" json:"providerId,omitempty"`
ConsumerId string `protobuf:"bytes,2,opt,name=consumerId" json:"consumerId,omitempty"`
Version string `protobuf:"bytes,3,opt,name=version" json:"version,omitempty"`
BaseUrl *BaseBrokerRequest `protobuf:"bytes,4,opt,name=baseUrl" json:"baseUrl,omitempty"`
}
func (m *GetProviderConsumerVersionPactRequest) Reset() { *m = GetProviderConsumerVersionPactRequest{} }
func (m *GetProviderConsumerVersionPactRequest) String() string { return proto.CompactTextString(m) }
func (*GetProviderConsumerVersionPactRequest) ProtoMessage() {}
func (*GetProviderConsumerVersionPactRequest) Descriptor() ([]byte, []int) {
return fileDescriptor0, []int{11}
}
func (m *GetProviderConsumerVersionPactRequest) GetProviderId() string {
if m != nil {
return m.ProviderId
}
return ""
}
func (m *GetProviderConsumerVersionPactRequest) GetConsumerId() string {
if m != nil {
return m.ConsumerId
}
return ""
}
func (m *GetProviderConsumerVersionPactRequest) GetVersion() string {
if m != nil {
return m.Version
}
return ""
}
func (m *GetProviderConsumerVersionPactRequest) GetBaseUrl() *BaseBrokerRequest {
if m != nil {
return m.BaseUrl
}
return nil
}
type GetProviderConsumerVersionPactResponse struct {
Response *proto1.Response `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
Pact []byte `protobuf:"bytes,2,opt,name=pact,proto3" json:"pact,omitempty"`
}
func (m *GetProviderConsumerVersionPactResponse) Reset() {
*m = GetProviderConsumerVersionPactResponse{}
}
func (m *GetProviderConsumerVersionPactResponse) String() string { return proto.CompactTextString(m) }
func (*GetProviderConsumerVersionPactResponse) ProtoMessage() {}
func (*GetProviderConsumerVersionPactResponse) Descriptor() ([]byte, []int) {
return fileDescriptor0, []int{12}
}
func (m *GetProviderConsumerVersionPactResponse) GetResponse() *proto1.Response {
if m != nil {
return m.Response
}
return nil
}
func (m *GetProviderConsumerVersionPactResponse) GetPact() []byte {
if m != nil {
return m.Pact
}
return nil
}
type Verification struct {
Id int32 `protobuf:"varint,1,opt,name=id" json:"id,omitempty"`
Number int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"`
PactVersionId int32 `protobuf:"varint,3,opt,name=pactVersionId" json:"pactVersionId,omitempty"`
Success bool `protobuf:"varint,4,opt,name=success" json:"success,omitempty"`
ProviderVersion string `protobuf:"bytes,5,opt,name=providerVersion" json:"providerVersion,omitempty"`
BuildUrl string `protobuf:"bytes,6,opt,name=buildUrl" json:"buildUrl,omitempty"`
VerificationDate string `protobuf:"bytes,7,opt,name=verificationDate" json:"verificationDate,omitempty"`
}
func (m *Verification) Reset() { *m = Verification{} }
func (m *Verification) String() string { return proto.CompactTextString(m) }
func (*Verification) ProtoMessage() {}
func (*Verification) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} }
func (m *Verification) GetId() int32 {
if m != nil {
return m.Id
}
return 0
}
func (m *Verification) GetNumber() int32 {
if m != nil {
return m.Number
}
return 0
}
func (m *Verification) GetPactVersionId() int32 {
if m != nil {
return m.PactVersionId
}
return 0
}
func (m *Verification) GetSuccess() bool {
if m != nil {
return m.Success
}
return false
}
func (m *Verification) GetProviderVersion() string {
if m != nil {
return m.ProviderVersion
}
return ""
}
func (m *Verification) GetBuildUrl() string {
if m != nil {
return m.BuildUrl
}
return ""
}
func (m *Verification) GetVerificationDate() string {
if m != nil {
return m.VerificationDate
}
return ""
}
type VerificationSummary struct {
Successful []string `protobuf:"bytes,1,rep,name=successful" json:"successful,omitempty"`
Failed []string `protobuf:"bytes,2,rep,name=failed" json:"failed,omitempty"`
Unknown []string `protobuf:"bytes,3,rep,name=unknown" json:"unknown,omitempty"`
}
func (m *VerificationSummary) Reset() { *m = VerificationSummary{} }
func (m *VerificationSummary) String() string { return proto.CompactTextString(m) }
func (*VerificationSummary) ProtoMessage() {}
func (*VerificationSummary) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
func (m *VerificationSummary) GetSuccessful() []string {
if m != nil {
return m.Successful
}
return nil
}
func (m *VerificationSummary) GetFailed() []string {
if m != nil {
return m.Failed
}
return nil
}
func (m *VerificationSummary) GetUnknown() []string {
if m != nil {
return m.Unknown
}
return nil
}
type VerificationDetail struct {
ProviderName string `protobuf:"bytes,1,opt,name=providerName" json:"providerName,omitempty"`
ProviderApplicationVersion string `protobuf:"bytes,2,opt,name=providerApplicationVersion" json:"providerApplicationVersion,omitempty"`
Success bool `protobuf:"varint,3,opt,name=success" json:"success,omitempty"`
VerificationDate string `protobuf:"bytes,4,opt,name=verificationDate" json:"verificationDate,omitempty"`
}
func (m *VerificationDetail) Reset() { *m = VerificationDetail{} }
func (m *VerificationDetail) String() string { return proto.CompactTextString(m) }
func (*VerificationDetail) ProtoMessage() {}
func (*VerificationDetail) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} }
func (m *VerificationDetail) GetProviderName() string {
if m != nil {
return m.ProviderName
}
return ""
}
func (m *VerificationDetail) GetProviderApplicationVersion() string {
if m != nil {
return m.ProviderApplicationVersion
}
return ""
}
func (m *VerificationDetail) GetSuccess() bool {
if m != nil {
return m.Success
}
return false
}
func (m *VerificationDetail) GetVerificationDate() string {
if m != nil {
return m.VerificationDate
}
return ""
}
type VerificationDetails struct {
VerificationResults []*VerificationDetail `protobuf:"bytes,1,rep,name=verificationResults" json:"verificationResults,omitempty"`
}
func (m *VerificationDetails) Reset() { *m = VerificationDetails{} }
func (m *VerificationDetails) String() string { return proto.CompactTextString(m) }
func (*VerificationDetails) ProtoMessage() {}
func (*VerificationDetails) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} }
func (m *VerificationDetails) GetVerificationResults() []*VerificationDetail {
if m != nil {
return m.VerificationResults
}
return nil
}
type VerificationResult struct {
Success bool `protobuf:"varint,1,opt,name=success" json:"success,omitempty"`
ProviderSummary *VerificationSummary `protobuf:"bytes,2,opt,name=providerSummary" json:"providerSummary,omitempty"`
XEmbedded *VerificationDetails `protobuf:"bytes,3,opt,name=_embedded,json=Embedded" json:"_embedded,omitempty"`
}
func (m *VerificationResult) Reset() { *m = VerificationResult{} }
func (m *VerificationResult) String() string { return proto.CompactTextString(m) }
func (*VerificationResult) ProtoMessage() {}
func (*VerificationResult) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} }
func (m *VerificationResult) GetSuccess() bool {
if m != nil {
return m.Success
}
return false
}
func (m *VerificationResult) GetProviderSummary() *VerificationSummary {
if m != nil {
return m.ProviderSummary
}
return nil
}
func (m *VerificationResult) GetXEmbedded() *VerificationDetails {
if m != nil {
return m.XEmbedded
}
return nil
}
type PublishVerificationRequest struct {
ProviderId string `protobuf:"bytes,1,opt,name=providerId" json:"providerId,omitempty"`
ConsumerId string `protobuf:"bytes,2,opt,name=consumerId" json:"consumerId,omitempty"`
PactId int32 `protobuf:"varint,3,opt,name=pactId" json:"pactId,omitempty"`
Success bool `protobuf:"varint,4,opt,name=success" json:"success,omitempty"`
ProviderApplicationVersion string `protobuf:"bytes,5,opt,name=providerApplicationVersion" json:"providerApplicationVersion,omitempty"`
}
func (m *PublishVerificationRequest) Reset() { *m = PublishVerificationRequest{} }
func (m *PublishVerificationRequest) String() string { return proto.CompactTextString(m) }
func (*PublishVerificationRequest) ProtoMessage() {}
func (*PublishVerificationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} }
func (m *PublishVerificationRequest) GetProviderId() string {
if m != nil {
return m.ProviderId
}
return ""
}
func (m *PublishVerificationRequest) GetConsumerId() string {
if m != nil {
return m.ConsumerId
}
return ""
}
func (m *PublishVerificationRequest) GetPactId() int32 {
if m != nil {
return m.PactId
}
return 0
}
func (m *PublishVerificationRequest) GetSuccess() bool {
if m != nil {
return m.Success
}
return false
}
func (m *PublishVerificationRequest) GetProviderApplicationVersion() string {
if m != nil {
return m.ProviderApplicationVersion
}
return ""
}
type PublishVerificationResponse struct {
Response *proto1.Response `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
Confirmation *VerificationDetail `protobuf:"bytes,2,opt,name=confirmation" json:"confirmation,omitempty"`
}
func (m *PublishVerificationResponse) Reset() { *m = PublishVerificationResponse{} }
func (m *PublishVerificationResponse) String() string { return proto.CompactTextString(m) }
func (*PublishVerificationResponse) ProtoMessage() {}
func (*PublishVerificationResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} }
func (m *PublishVerificationResponse) GetResponse() *proto1.Response {
if m != nil {
return m.Response
}
return nil
}
func (m *PublishVerificationResponse) GetConfirmation() *VerificationDetail {
if m != nil {
return m.Confirmation
}
return nil
}
type RetrieveVerificationRequest struct {
ConsumerId string `protobuf:"bytes,1,opt,name=consumerId" json:"consumerId,omitempty"`
ConsumerVersion string `protobuf:"bytes,2,opt,name=consumerVersion" json:"consumerVersion,omitempty"`
}
func (m *RetrieveVerificationRequest) Reset() { *m = RetrieveVerificationRequest{} }
func (m *RetrieveVerificationRequest) String() string { return proto.CompactTextString(m) }
func (*RetrieveVerificationRequest) ProtoMessage() {}
func (*RetrieveVerificationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} }
func (m *RetrieveVerificationRequest) GetConsumerId() string {
if m != nil {
return m.ConsumerId
}
return ""
}
func (m *RetrieveVerificationRequest) GetConsumerVersion() string {
if m != nil {
return m.ConsumerVersion
}
return ""
}
type RetrieveVerificationResponse struct {
Response *proto1.Response `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
Result *VerificationResult `protobuf:"bytes,2,opt,name=result" json:"result,omitempty"`
}
func (m *RetrieveVerificationResponse) Reset() { *m = RetrieveVerificationResponse{} }
func (m *RetrieveVerificationResponse) String() string { return proto.CompactTextString(m) }
func (*RetrieveVerificationResponse) ProtoMessage() {}
func (*RetrieveVerificationResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} }
func (m *RetrieveVerificationResponse) GetResponse() *proto1.Response {
if m != nil {
return m.Response
}
return nil
}
func (m *RetrieveVerificationResponse) GetResult() *VerificationResult {
if m != nil {
return m.Result
}
return nil
}
type BaseBrokerRequest struct {
HostAddress string `protobuf:"bytes,1,opt,name=hostAddress" json:"hostAddress,omitempty"`
Scheme string `protobuf:"bytes,2,opt,name=scheme" json:"scheme,omitempty"`
}
func (m *BaseBrokerRequest) Reset() { *m = BaseBrokerRequest{} }
func (m *BaseBrokerRequest) String() string { return proto.CompactTextString(m) }
func (*BaseBrokerRequest) ProtoMessage() {}
func (*BaseBrokerRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} }
func (m *BaseBrokerRequest) GetHostAddress() string {
if m != nil {
return m.HostAddress
}
return ""
}
func (m *BaseBrokerRequest) GetScheme() string {
if m != nil {
return m.Scheme
}
return ""
}
type BrokerAPIInfoEntry struct {
Href string `protobuf:"bytes,1,opt,name=href" json:"href,omitempty"`
Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"`
Title string `protobuf:"bytes,3,opt,name=title" json:"title,omitempty"`
Templated bool `protobuf:"varint,4,opt,name=templated" json:"templated,omitempty"`
}
func (m *BrokerAPIInfoEntry) Reset() { *m = BrokerAPIInfoEntry{} }
func (m *BrokerAPIInfoEntry) String() string { return proto.CompactTextString(m) }
func (*BrokerAPIInfoEntry) ProtoMessage() {}
func (*BrokerAPIInfoEntry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} }
func (m *BrokerAPIInfoEntry) GetHref() string {
if m != nil {
return m.Href
}
return ""
}
func (m *BrokerAPIInfoEntry) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BrokerAPIInfoEntry) GetTitle() string {
if m != nil {
return m.Title
}
return ""
}
func (m *BrokerAPIInfoEntry) GetTemplated() bool {
if m != nil {
return m.Templated
}
return false
}
type BrokerHomeResponse struct {
Response *proto1.Response `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
XLinks map[string]*BrokerAPIInfoEntry `protobuf:"bytes,2,rep,name=_links,json=Links" json:"_links,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
Curies []*BrokerAPIInfoEntry `protobuf:"bytes,3,rep,name=curies" json:"curies,omitempty"`
}
func (m *BrokerHomeResponse) Reset() { *m = BrokerHomeResponse{} }
func (m *BrokerHomeResponse) String() string { return proto.CompactTextString(m) }
func (*BrokerHomeResponse) ProtoMessage() {}
func (*BrokerHomeResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} }
func (m *BrokerHomeResponse) GetResponse() *proto1.Response {
if m != nil {
return m.Response
}
return nil
}
func (m *BrokerHomeResponse) GetXLinks() map[string]*BrokerAPIInfoEntry {
if m != nil {
return m.XLinks
}
return nil
}
func (m *BrokerHomeResponse) GetCuries() []*BrokerAPIInfoEntry {
if m != nil {
return m.Curies
}
return nil
}
func init() {
proto.RegisterType((*Participant)(nil), "brokerpb.Participant")
proto.RegisterType((*Version)(nil), "brokerpb.Version")
proto.RegisterType((*Pact)(nil), "brokerpb.Pact")
proto.RegisterType((*PactVersion)(nil), "brokerpb.PactVersion")
proto.RegisterType((*Tag)(nil), "brokerpb.Tag")
proto.RegisterType((*PublishPactRequest)(nil), "brokerpb.PublishPactRequest")
proto.RegisterType((*PublishPactResponse)(nil), "brokerpb.PublishPactResponse")
proto.RegisterType((*GetAllProviderPactsRequest)(nil), "brokerpb.GetAllProviderPactsRequest")
proto.RegisterType((*ConsumerInfo)(nil), "brokerpb.ConsumerInfo")
proto.RegisterType((*Links)(nil), "brokerpb.Links")
proto.RegisterType((*GetAllProviderPactsResponse)(nil), "brokerpb.GetAllProviderPactsResponse")
proto.RegisterType((*GetProviderConsumerVersionPactRequest)(nil), "brokerpb.GetProviderConsumerVersionPactRequest")
proto.RegisterType((*GetProviderConsumerVersionPactResponse)(nil), "brokerpb.GetProviderConsumerVersionPactResponse")
proto.RegisterType((*Verification)(nil), "brokerpb.Verification")
proto.RegisterType((*VerificationSummary)(nil), "brokerpb.VerificationSummary")
proto.RegisterType((*VerificationDetail)(nil), "brokerpb.VerificationDetail")
proto.RegisterType((*VerificationDetails)(nil), "brokerpb.VerificationDetails")
proto.RegisterType((*VerificationResult)(nil), "brokerpb.VerificationResult")
proto.RegisterType((*PublishVerificationRequest)(nil), "brokerpb.PublishVerificationRequest")
proto.RegisterType((*PublishVerificationResponse)(nil), "brokerpb.PublishVerificationResponse")
proto.RegisterType((*RetrieveVerificationRequest)(nil), "brokerpb.RetrieveVerificationRequest")
proto.RegisterType((*RetrieveVerificationResponse)(nil), "brokerpb.RetrieveVerificationResponse")
proto.RegisterType((*BaseBrokerRequest)(nil), "brokerpb.BaseBrokerRequest")
proto.RegisterType((*BrokerAPIInfoEntry)(nil), "brokerpb.BrokerAPIInfoEntry")
proto.RegisterType((*BrokerHomeResponse)(nil), "brokerpb.BrokerHomeResponse")
}
func init() { proto.RegisterFile("broker.proto", fileDescriptor0) }
var fileDescriptor0 = []byte{
// 1077 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x57, 0x4d, 0x6f, 0x23, 0x45,
0x13, 0xd6, 0x38, 0x76, 0x12, 0x57, 0xfc, 0xbe, 0x59, 0x3a, 0xb0, 0xb2, 0x9c, 0x80, 0xa2, 0x16,
0x2c, 0x11, 0xb0, 0x89, 0x14, 0x76, 0x01, 0xed, 0x61, 0x45, 0xc2, 0xae, 0x42, 0x24, 0x36, 0xb2,
0x06, 0x36, 0x07, 0x2e, 0xab, 0x71, 0x4f, 0x39, 0x6e, 0x65, 0xbe, 0xe8, 0xee, 0x31, 0xca, 0x11,
0x6e, 0x48, 0x48, 0xfc, 0x13, 0x2e, 0x5c, 0x38, 0xf1, 0x13, 0xf8, 0x1d, 0xfc, 0x0c, 0xd4, 0x3d,
0xdd, 0xf3, 0x11, 0x7f, 0x64, 0xb1, 0x10, 0xb7, 0xae, 0xea, 0xea, 0xea, 0xa7, 0x9e, 0x7a, 0x5c,
0x3d, 0x86, 0xde, 0x48, 0xa4, 0xd7, 0x28, 0x0e, 0x33, 0x91, 0xaa, 0x94, 0x6c, 0x16, 0x56, 0x36,
0x1a, 0xbc, 0xb8, 0xe2, 0x6a, 0x92, 0x8f, 0x0e, 0x59, 0x1a, 0x1f, 0x05, 0x59, 0xc0, 0x26, 0x78,
0x24, 0x51, 0x4c, 0x39, 0x43, 0x96, 0xc6, 0xa3, 0x87, 0x76, 0xfd, 0x90, 0x61, 0xa2, 0x50, 0x98,
0x2d, 0x14, 0x47, 0x2c, 0x15, 0x78, 0x64, 0x32, 0xb9, 0x60, 0x59, 0x24, 0xa6, 0x2f, 0x61, 0x6b,
0x18, 0x08, 0xc5, 0x19, 0xcf, 0x82, 0x44, 0x91, 0xff, 0x43, 0x8b, 0x87, 0x7d, 0x6f, 0xdf, 0x3b,
0xe8, 0xf8, 0x2d, 0x1e, 0x92, 0x37, 0xa1, 0x13, 0x64, 0xd9, 0x79, 0xd8, 0x6f, 0xed, 0x7b, 0x07,
0x5d, 0xbf, 0x30, 0xc8, 0x3e, 0x6c, 0xd9, 0x34, 0x17, 0x41, 0x8c, 0xfd, 0x35, 0xb3, 0x57, 0x77,
0xd1, 0x18, 0x36, 0x2e, 0x51, 0x48, 0x9e, 0x26, 0x33, 0x29, 0xef, 0xc3, 0x7a, 0x92, 0xc7, 0x23,
0x14, 0x36, 0xa7, 0xb5, 0xc8, 0xbb, 0xf0, 0xbf, 0xac, 0x42, 0x72, 0x1e, 0x9a, 0xb4, 0x1d, 0xbf,
0xe9, 0xd4, 0x80, 0x52, 0x11, 0xa2, 0xe8, 0xb7, 0xcd, 0x6e, 0x61, 0xd0, 0x5f, 0x3d, 0x68, 0x0f,
0x03, 0x36, 0x8b, 0xff, 0x11, 0xbc, 0xc5, 0xd2, 0x44, 0xe6, 0x31, 0x8a, 0x61, 0x23, 0x79, 0xcb,
0x84, 0xcc, 0xdf, 0xd4, 0xa7, 0x32, 0x91, 0x4e, 0x79, 0x78, 0xfb, 0x54, 0x01, 0x69, 0xfe, 0x26,
0xb9, 0x07, 0x6b, 0x72, 0x12, 0x18, 0x60, 0x3d, 0x5f, 0x2f, 0x49, 0x1f, 0x36, 0x58, 0x9a, 0x28,
0x4c, 0x54, 0xbf, 0x63, 0xbc, 0xce, 0xa4, 0x3f, 0x79, 0x9a, 0x77, 0xa6, 0x16, 0x91, 0xb4, 0x07,
0xdd, 0x69, 0xb1, 0x55, 0x62, 0xad, 0x1c, 0x9a, 0xc2, 0x2c, 0x60, 0x15, 0x20, 0x6b, 0x2d, 0xc6,
0xdd, 0x5e, 0x82, 0x9b, 0x7e, 0x0a, 0x6b, 0xdf, 0x04, 0x57, 0x84, 0x40, 0x3b, 0xd1, 0xdd, 0xf4,
0x4c, 0x57, 0xcc, 0x7a, 0x39, 0x0c, 0xfa, 0xa3, 0x07, 0x64, 0x98, 0x8f, 0x22, 0x2e, 0x27, 0xba,
0x16, 0x1f, 0xbf, 0xcb, 0x51, 0x2a, 0xf2, 0x0e, 0x80, 0xbb, 0xe8, 0x3c, 0xb4, 0xe9, 0x6a, 0x1e,
0xbd, 0xef, 0x68, 0x2f, 0x85, 0x55, 0xf3, 0x68, 0xd6, 0xec, 0x1d, 0x56, 0x59, 0xce, 0xd4, 0x10,
0x75, 0xa5, 0x96, 0x62, 0xb3, 0xa6, 0xa7, 0xb0, 0xd3, 0xc0, 0x20, 0xb3, 0x34, 0x91, 0x48, 0x3e,
0x84, 0x4d, 0x61, 0xd7, 0x06, 0xc2, 0xd6, 0xf1, 0x76, 0xa1, 0xf8, 0x43, 0x17, 0xe2, 0x97, 0x01,
0x54, 0xc2, 0xe0, 0x0c, 0xd5, 0x49, 0x14, 0x0d, 0x4b, 0x82, 0x98, 0x92, 0xaf, 0x5b, 0xcf, 0x63,
0xd8, 0x18, 0x05, 0x12, 0x5f, 0x8a, 0xc8, 0x14, 0xb3, 0x75, 0xbc, 0x7b, 0xe8, 0x7e, 0xad, 0x87,
0xa7, 0x81, 0xc4, 0x53, 0x63, 0xd8, 0x6c, 0xbe, 0x8b, 0xa5, 0x9f, 0x40, 0xef, 0x0b, 0x57, 0x74,
0x32, 0x4e, 0x75, 0x71, 0x13, 0x81, 0x63, 0xc7, 0xbf, 0x5e, 0x97, 0x3d, 0x69, 0x55, 0x3d, 0xa1,
0x8f, 0xa1, 0xf3, 0x15, 0x4f, 0xae, 0x25, 0xf9, 0x08, 0x3a, 0x9a, 0x01, 0xd9, 0xf7, 0xf6, 0xd7,
0x0e, 0xb6, 0x8e, 0xef, 0x57, 0xb7, 0xd6, 0xf3, 0xfa, 0x45, 0x10, 0x15, 0xb0, 0x3b, 0xb7, 0xc6,
0x15, 0xf8, 0x22, 0x0f, 0x60, 0xfd, 0x55, 0xa4, 0x31, 0xd8, 0x82, 0xb7, 0xab, 0xab, 0x0d, 0x34,
0xbf, 0x40, 0x48, 0x7f, 0xf7, 0xe0, 0xbd, 0x33, 0x54, 0xee, 0x46, 0x07, 0xcb, 0x8a, 0xfe, 0xbf,
0xd1, 0x4c, 0xad, 0x3b, 0xed, 0x7f, 0xd0, 0x1d, 0x0e, 0x0f, 0xee, 0x42, 0xbe, 0x0a, 0x73, 0x4e,
0xc1, 0xad, 0x9a, 0x82, 0xff, 0xf2, 0xa0, 0x77, 0x89, 0x82, 0x8f, 0x39, 0x0b, 0xd4, 0xdd, 0x13,
0xb3, 0xd3, 0x9c, 0x98, 0xe5, 0x0c, 0xa9, 0x4f, 0xcc, 0x9a, 0x53, 0x53, 0x23, 0x73, 0xc6, 0x50,
0x4a, 0x43, 0xc0, 0xa6, 0xef, 0x4c, 0x72, 0x00, 0xdb, 0x8e, 0x62, 0x1b, 0x6e, 0xc6, 0x54, 0xd7,
0xbf, 0xed, 0x26, 0x03, 0xd8, 0x1c, 0xe5, 0x3c, 0x0a, 0x35, 0x8b, 0xeb, 0x26, 0xa4, 0xb4, 0xc9,
0x07, 0x70, 0x6f, 0x5a, 0x43, 0xff, 0x2c, 0x50, 0xd8, 0xdf, 0x30, 0x31, 0x33, 0x7e, 0x7a, 0x05,
0x3b, 0xf5, 0x4a, 0xbf, 0xce, 0xe3, 0x38, 0x10, 0x37, 0xba, 0xbb, 0x16, 0xd3, 0x38, 0x8f, 0x8c,
0x9c, 0xbb, 0x7e, 0xcd, 0xa3, 0x09, 0x18, 0x07, 0x3c, 0x42, 0xdd, 0x79, 0xbd, 0x67, 0x2d, 0x5d,
0x5a, 0x9e, 0x5c, 0x27, 0xe9, 0xf7, 0xba, 0xeb, 0x7a, 0xc3, 0x99, 0xf4, 0x0f, 0x0f, 0x48, 0xfd,
0xa6, 0x67, 0xa8, 0x02, 0x1e, 0x11, 0x0a, 0x3d, 0x57, 0xda, 0x45, 0x35, 0xeb, 0x1a, 0x3e, 0xf2,
0x14, 0x06, 0xce, 0x3e, 0xc9, 0xb2, 0xc8, 0x26, 0x70, 0x04, 0x15, 0xd2, 0x5b, 0x12, 0x51, 0xe7,
0x7b, 0xad, 0xc9, 0xf7, 0x3c, 0xa6, 0xda, 0x0b, 0x98, 0xc2, 0x26, 0x53, 0x05, 0x7e, 0x49, 0x2e,
0x60, 0xa7, 0x1e, 0xea, 0xa3, 0xcc, 0xa3, 0x72, 0x02, 0xec, 0x55, 0xca, 0x9e, 0x3d, 0xeb, 0xcf,
0x3b, 0x48, 0x7f, 0xbb, 0xc5, 0x53, 0xe1, 0xaf, 0xd7, 0xe0, 0x35, 0x6b, 0x38, 0xab, 0x34, 0x63,
0xbb, 0x67, 0x67, 0xc0, 0xdb, 0xf3, 0x2f, 0xb7, 0x41, 0xfe, 0xed, 0x53, 0xe4, 0x09, 0x74, 0x5f,
0x61, 0x3c, 0xc2, 0x30, 0xc4, 0x42, 0xb8, 0x0b, 0x53, 0xd8, 0xda, 0xfd, 0xcd, 0xe7, 0x36, 0x9c,
0xfe, 0xe9, 0xc1, 0xc0, 0x0e, 0xfd, 0x26, 0xf8, 0x7f, 0x67, 0x98, 0x2c, 0x7a, 0x5e, 0x17, 0xff,
0x92, 0x96, 0x6b, 0xa6, 0x73, 0x97, 0x66, 0xe8, 0xcf, 0x1e, 0xec, 0xce, 0x2d, 0x68, 0x95, 0x19,
0xf3, 0x39, 0xf4, 0x58, 0x9a, 0x8c, 0xb9, 0x88, 0x4d, 0x12, 0xdb, 0x9f, 0xe5, 0xe2, 0x68, 0x9c,
0xa0, 0x57, 0xb0, 0xeb, 0xa3, 0x12, 0x1c, 0xa7, 0xb8, 0x80, 0xdf, 0x1a, 0x7f, 0xde, 0x0c, 0x7f,
0x07, 0xb0, 0xcd, 0x9a, 0x03, 0xd3, 0x92, 0x7c, 0xdb, 0x4d, 0x7f, 0xf0, 0x60, 0x6f, 0xfe, 0x4d,
0xab, 0x14, 0xfe, 0x08, 0xd6, 0x85, 0xd1, 0xef, 0xf2, 0x92, 0x0b, 0x8d, 0xfb, 0x36, 0x96, 0xbe,
0x80, 0x37, 0x66, 0xde, 0x01, 0xfd, 0x85, 0x3b, 0x49, 0xa5, 0x3a, 0x09, 0x43, 0xe1, 0x7e, 0x04,
0x5d, 0xbf, 0xee, 0xd2, 0x22, 0x91, 0x6c, 0x82, 0xe5, 0xe3, 0x6c, 0x2d, 0x9a, 0x01, 0x29, 0x52,
0x9d, 0x0c, 0xcf, 0xf5, 0xfb, 0xfb, 0x3c, 0x51, 0xe2, 0xe6, 0x75, 0x1f, 0x77, 0xfd, 0x79, 0xab,
0xb8, 0x8a, 0xdc, 0x37, 0x75, 0x61, 0xe8, 0xcf, 0x30, 0x85, 0x71, 0x16, 0x05, 0x0a, 0x43, 0x2b,
0xbd, 0xca, 0x41, 0x7f, 0x69, 0xb9, 0x2b, 0xbf, 0x4c, 0x63, 0x5c, 0x8d, 0xba, 0xa7, 0xb5, 0x17,
0x5d, 0x8f, 0x92, 0xf7, 0x6b, 0x8f, 0xe4, 0x4c, 0xea, 0xe2, 0x91, 0x37, 0x85, 0xd9, 0x97, 0x5e,
0x53, 0xcf, 0x72, 0xc1, 0x51, 0x9a, 0x41, 0xdc, 0xa0, 0x7e, 0x96, 0x0d, 0xdf, 0xc6, 0x0e, 0x2e,
0x01, 0xaa, 0x54, 0xfa, 0xfb, 0xf9, 0x1a, 0x6f, 0x2c, 0x45, 0x7a, 0x49, 0x8e, 0xa1, 0x33, 0x0d,
0xa2, 0x1c, 0x67, 0xfb, 0x39, 0x27, 0x69, 0x11, 0xfa, 0xa4, 0xf5, 0x99, 0x77, 0x0a, 0xdf, 0x96,
0xff, 0x97, 0x46, 0xeb, 0xa6, 0xe6, 0x8f, 0xff, 0x0e, 0x00, 0x00, 0xff, 0xff, 0xf1, 0xd8, 0x8b,
0xa5, 0x50, 0x0d, 0x00, 0x00,
}
lib.rs
#![no_std]
extern crate atsamd_hal as hal;
#[cfg(feature = "rt")]
extern crate cortex_m_rt;
#[cfg(feature = "rt")]
pub use cortex_m_rt::entry;
#[cfg(feature = "panic_halt")]
pub extern crate panic_halt;
use hal::prelude::*;
use hal::*;
pub use hal::common::*;
pub use hal::samd21::*;
pub use hal::target_device as pac;
use gpio::{Floating, Input, Port};
use hal::clock::GenericClockController;
use hal::sercom::{I2CMaster3, PadPin};
use hal::time::Hertz;
#[cfg(feature = "usb")]
use hal::usb::usb_device::bus::UsbBusAllocator;
#[cfg(feature = "usb")]
pub use hal::usb::UsbBus;
// The docs could be further improved with details of the specific channels etc
define_pins!(
/// Maps the pins to their arduino names and the numbers printed on the board.
    /// Information pulled from the datasheet and board files for the Arduino IDE: <https://wiki.seeedstudio.com/Wio-Lite-MG126/#tech-support>
struct Pins,
target_device: target_device,
/// Digital 32: SDA
pin d32 = a22,
/// Digital 33: SCL
pin d33 = a23,
/// Digital 5: PWM, TC
pin d5 = a15,
/// Digital 6: PWM, TCC
pin d6 = a20,
/// Digital 9
pin d9 = a7,
/// Digital 10
pin d10 = a18,
/// Digital 11
pin d11 = a16,
/// Digital 12
pin d12 = a19,
/// Digital 13
pin d13 = a17,
/// Analog 0: DAC
pin a0 = a2,
/// Analog 1
pin a1 = b8,
/// Analog 2
pin a2 = b9,
/// Analog 3: PWM, TCC
pin a3 = a4,
/// Analog 4: PWM, TCC
pin a4 = a5,
/// Analog 5
pin a5 = b2,
/// Digital 24: SCK
pin d24 = b11,
/// Digital 22: MISO
pin d22 = b12,
/// Digital 23: MOSI
pin d23 = b10,
/// Digital 0: RX
pin rx = b3,
/// Digital 1: TX
pin tx = a27,
pin usb_dm = a24,
pin usb_dp = a25,
// LED built into the board
// pin led_builtin = a17,
//pin bottom_pad = a28,
//pin adc_battery = b9,
);
#[cfg(feature = "usb")]
pub fn usb_allocator(
usb: pac::USB,
clocks: &mut GenericClockController,
pm: &mut pac::PM,
dm: gpio::Pa24<Input<Floating>>,
dp: gpio::Pa25<Input<Floating>>,
port: &mut Port,
) -> UsbBusAllocator<UsbBus> {
use gpio::IntoFunction;
let gclk0 = clocks.gclk0();
let usb_clock = &clocks.usb(&gclk0).unwrap();
UsbBusAllocator::new(UsbBus::new(
usb_clock,
pm,
dm.into_function(port),
dp.into_function(port),
usb,
))
}
/// Convenience for setting up the labelled SDA, SCL pins to
/// operate as an I2C master running at the specified frequency.
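///
/// A sketch of typical use, following the startup conventions of other
/// atsamd board crates (the clock setup shown is an assumption, not a
/// board-verified sequence):
///
/// ```ignore
/// let mut peripherals = pac::Peripherals::take().unwrap();
/// let mut clocks = GenericClockController::with_internal_32kosc(
///     peripherals.GCLK,
///     &mut peripherals.PM,
///     &mut peripherals.SYSCTRL,
///     &mut peripherals.NVMCTRL,
/// );
/// let mut pins = Pins::new(peripherals.PORT);
/// let i2c = i2c_master(
///     &mut clocks,
///     400.khz(),
///     peripherals.SERCOM3,
///     &mut peripherals.PM,
///     pins.d32, // SDA
///     pins.d33, // SCL
///     &mut pins.port,
/// );
/// ```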
pub fn i2c_master<F: Into<Hertz>>(
clocks: &mut GenericClockController,
bus_speed: F,
sercom3: pac::SERCOM3,
pm: &mut pac::PM,
sda: gpio::Pa22<Input<Floating>>,
scl: gpio::Pa23<Input<Floating>>,
port: &mut Port,
) -> hal::sercom::I2CMaster3<
hal::sercom::Sercom3Pad0<gpio::Pa22<gpio::PfC>>,
hal::sercom::Sercom3Pad1<gpio::Pa23<gpio::PfC>>,
> {
let gclk0 = clocks.gclk0();
I2CMaster3::new(
&clocks.sercom3_core(&gclk0).unwrap(),
bus_speed.into(),
sercom3,
pm,
sda.into_pad(port),
scl.into_pad(port),
)
}
stripe_requests.go
package p400
import (
"bytes"
"context"
"encoding/json"
"fmt"
"net/http"
"net/url"
"strconv"
"github.com/stripe/stripe-cli/pkg/stripe"
)
// Metadata belongs to the Stripe P400 reader object
// we don't currently make use of it directly for quickstart
type Metadata struct{}
// Reader represents the Stripe P400 reader object shape
type Reader struct {
ID string `json:"id"`
Object string `json:"object"`
DeviceSwVersion string `json:"device_software_version"`
DeviceType string `json:"device_type"`
IPAddress string `json:"ip_address"`
Label string `json:"label"`
Livemode bool `json:"livemode"`
Location string `json:"location"`
SerialNumber string `json:"serial_number"`
Status string `json:"status"`
Metadata Metadata `json:"metadata"`
BaseURL string `json:"base_url"`
}
type readersResponse struct {
Error string `json:"error"`
Object string `json:"object"`
URL string `json:"url"`
HasMore bool `json:"has_more"`
Data []Reader `json:"data"`
}
type createPaymentIntentResponse struct {
ID string `json:"id"`
}
type startNewRPCSessionResponse struct {
SDKRPCSessionToken string `json:"sdk_rpc_session_token"`
}
type getConnectionTokenResponse struct {
Secret string `json:"secret"`
}
// DiscoverReaders calls the Stripe API to get a list of currently registered P400 readers on the account
// it returns a slice of Reader values
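//
// An illustrative call (a sketch; tsCtx stands for a populated
// TerminalSessionContext and error handling is elided):
//
//	readers, err := DiscoverReaders(tsCtx)
//	if err != nil {
//		return err
//	}
//	for _, r := range readers {
//		fmt.Println(r.Label, r.IPAddress)
//	}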
func DiscoverReaders(tsCtx TerminalSessionContext) ([]Reader, error) {
parsedBaseURL, err := url.Parse(stripe.DefaultAPIBaseURL)
if err != nil {
return nil, err
}
var readersList []Reader
client := &stripe.Client{
BaseURL: parsedBaseURL,
APIKey: tsCtx.PstToken,
Verbose: false,
}
res, err := client.PerformRequest(context.TODO(), http.MethodGet, stripeTerminalReadersPath, "", nil)
if err != nil {
return readersList, err
}
if res.StatusCode != http.StatusOK {
if res.StatusCode >= 400 && res.StatusCode < 500 {
err = ErrStripeForbiddenResponse
} else {
fmt.Println(res)
fmt.Println(res.StatusCode)
err = ErrStripeGenericResponse
}
return readersList, err
}
var result readersResponse
defer res.Body.Close()
json.NewDecoder(res.Body).Decode(&result)
readersList = result.Data
return readersList, nil
}
// StartNewRPCSession calls the Stripe API for a new RPC session token for interacting with a P400 reader
// returns a session token when successful
func StartNewRPCSession(tsCtx TerminalSessionContext) (string, error) {
	httpclient := http.Client{}
	parsedBaseURL, err := url.Parse(stripe.DefaultAPIBaseURL)
	if err != nil {
		return "", err
	}
	stripeTerminalRPCSessionURL := fmt.Sprintf("%s%s", parsedBaseURL, rpcSessionPath)
	data := url.Values{}
	data.Set("pos_device_info[device_class]", tsCtx.DeviceInfo.DeviceClass)
	data.Set("pos_device_info[device_uuid]", tsCtx.DeviceInfo.DeviceUUID)
	data.Set("pos_device_info[host_os_version]", tsCtx.DeviceInfo.HostOSVersion)
	data.Set("pos_device_info[hardware_model][pos_info][description]", tsCtx.DeviceInfo.HardwareModel.POSInfo.Description)
	data.Set("pos_device_info[app_model][app_id]", tsCtx.DeviceInfo.AppModel.AppID)
	data.Set("pos_device_info[app_model][app_version]", tsCtx.DeviceInfo.AppModel.AppVersion)
	encodedURLData := data.Encode()
	urlDataBuffer := bytes.NewBuffer([]byte(encodedURLData))
	request, err := http.NewRequest("POST", stripeTerminalRPCSessionURL, urlDataBuffer)
	if err != nil {
		return "", err
	}
	request.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	request.Header.Set("Authorization", fmt.Sprintf("Bearer %v", tsCtx.PstToken))
	res, err := httpclient.Do(request)
	if err != nil {
		return "", err
	}
	if res.StatusCode != http.StatusOK {
		if res.StatusCode >= 400 && res.StatusCode < 500 {
			err = ErrStripeForbiddenResponse
		} else {
			err = ErrStripeGenericResponse
		}
		return "", err
	}
	var result startNewRPCSessionResponse
	defer res.Body.Close()
	json.NewDecoder(res.Body).Decode(&result)
	sessionToken := result.SDKRPCSessionToken
	return sessionToken, nil
}
// GetNewConnectionToken calls the Stripe API and requests a new connection token in order to start a new reader session
// it returns the connection token when successful
func GetNewConnectionToken(tsCtx TerminalSessionContext) (string, error) {
parsedBaseURL, err := url.Parse(stripe.DefaultAPIBaseURL)
if err != nil {
return "", err
}
client := &stripe.Client{
BaseURL: parsedBaseURL,
APIKey: tsCtx.APIKey,
Verbose: false,
}
res, err := client.PerformRequest(context.TODO(), http.MethodPost, stripeTerminalConnectionTokensPath, "", nil)
if err != nil {
return "", err
}
if res.StatusCode != http.StatusOK {
if res.StatusCode >= 400 && res.StatusCode < 500 {
err = ErrStripeForbiddenResponse
} else {
err = ErrStripeGenericResponse
}
return "", err
}
var result getConnectionTokenResponse
defer res.Body.Close()
json.NewDecoder(res.Body).Decode(&result)
pstToken := result.Secret
return pstToken, nil
}
// CreatePaymentIntent calls the Stripe API to create a new Payment Intent in order to later attach a collected P400 payment to
// it returns the Payment Intent Id
func CreatePaymentIntent(tsCtx TerminalSessionContext) (string, error) {
parsedBaseURL, err := url.Parse(stripe.DefaultAPIBaseURL)
if err != nil {
return "", err
}
amountStr := strconv.Itoa(tsCtx.Amount)
client := &stripe.Client{
BaseURL: parsedBaseURL,
APIKey: tsCtx.APIKey,
Verbose: false,
}
data := url.Values{}
data.Set("amount", amountStr)
data.Set("currency", tsCtx.Currency)
data.Set("payment_method_types[]", "card_present")
data.Set("capture_method", "manual")
data.Set("description", "Stripe CLI Test Payment")
res, err := client.PerformRequest(context.TODO(), http.MethodPost, stripeCreatePaymentIntentPath, data.Encode(), nil)
if err != nil {
return "", err
}
if res.StatusCode != http.StatusOK {
if res.StatusCode >= 400 && res.StatusCode < 500 {
err = ErrStripeForbiddenResponse
} else {
err = ErrStripeGenericResponse
}
return "", err
}
var result createPaymentIntentResponse
defer res.Body.Close()
json.NewDecoder(res.Body).Decode(&result)
paymentIntentID := result.ID
return paymentIntentID, nil
}
// CapturePaymentIntent manually captures the Payment Intent after a Payment Method is attached which is the required flow for collecting payments on the Terminal platform
func CapturePaymentIntent(tsCtx TerminalSessionContext) error {
parsedBaseURL, err := url.Parse(stripe.DefaultAPIBaseURL)
if err != nil {
return err
}
stripeCapturePaymentIntentURL := fmt.Sprintf(stripeCapturePaymentIntentPath, tsCtx.PaymentIntentID)
client := &stripe.Client{
BaseURL: parsedBaseURL,
APIKey: tsCtx.APIKey,
Verbose: false,
}
res, err := client.PerformRequest(context.TODO(), http.MethodPost, stripeCapturePaymentIntentURL, "", nil)
if err != nil {
return ErrCapturePaymentIntentFailed
}
if res.StatusCode != http.StatusOK {
if res.StatusCode >= 400 && res.StatusCode < 500 {
err = ErrStripeForbiddenResponse
} else {
err = ErrStripeGenericResponse
}
return err
}
res.Body.Close()
return nil
}
// RegisterReader calls the Stripe API to register a new P400 reader to an account
// it returns the registered Reader object if successful
func RegisterReader(regcode string, tsCtx TerminalSessionContext) (Reader, error) {
parsedBaseURL, err := url.Parse(stripe.DefaultAPIBaseURL)
var result Reader
if err != nil {
return result, err
}
client := &stripe.Client{
BaseURL: parsedBaseURL,
APIKey: tsCtx.APIKey,
Verbose: false,
}
data := url.Values{}
data.Set("registration_code", regcode)
res, err := client.PerformRequest(context.TODO(), http.MethodPost, stripeTerminalRegisterPath, data.Encode(), nil)
if err != nil {
return result, err
}
if res.StatusCode != http.StatusOK {
if res.StatusCode >= 400 && res.StatusCode < 500 {
err = ErrStripeForbiddenResponse
} else {
fmt.Println(res)
fmt.Println(res.StatusCode)
err = ErrStripeGenericResponse
}
return result, err
}
defer res.Body.Close()
json.NewDecoder(res.Body).Decode(&result)
return result, nil
}
main.rs
use tokio::sync::broadcast;
#[tokio::main]
async fn main() {
let (tx, mut rx1) = broadcast::channel(16);
let mut rx2 = tx.subscribe();
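    // Each broadcast receiver sees every value sent after it subscribed,
    // so both rx1 and rx2 observe 10 followed by 20.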
tokio::spawn(async move {
assert_eq!(rx1.recv().await.unwrap(), 10);
assert_eq!(rx1.recv().await.unwrap(), 20);
});
tokio::spawn(async move {
        assert_eq!(rx2.recv().await.unwrap(), 10);
        assert_eq!(rx2.recv().await.unwrap(), 20);
    });
tx.send(10).unwrap();
tx.send(20).unwrap();
}
index.ts
export * from './db-add-account';
export * from './db-delete-account';
export * from './db-list-accounts';
export * from './db-update-account';
|
track.js
import WaveformPlaylist from "waveform-playlist";
async function main() {
const playlist = WaveformPlaylist({
container: document.getElementById("playlist"),
timescale: true,
state: "cursor",
samplesPerPixel: 1024,
controls: {
show: true,
width: 200,
},
colors: {
waveOutlineColor: "#E0EFF1",
timeColor: "grey",
fadeColor: "black",
},
});
const ee = playlist.getEventEmitter();
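  // The playlist is driven through its event emitter: each transport button
  // below simply emits the corresponding named event ("play", "pause", ...).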
document.querySelector(".btn-play").addEventListener("click", () => {
ee.emit("play");
});
document.querySelector(".btn-pause").addEventListener("click", () => {
ee.emit("pause");
});
document.querySelector(".btn-stop").addEventListener("click", () => {
ee.emit("stop");
});
document.querySelector(".btn-rewind").addEventListener("click", () => {
ee.emit("rewind");
});
document.querySelector(".btn-fast-forward").addEventListener("click", () => {
ee.emit("fastforward");
});
playlist.load([
{
name: "Sonnet",
src: "/media/123",
},
]);
}
main();
ingressroute.go
// Copyright © 2018 Heptio
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v1beta1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// IngressRouteSpec defines the spec of the CRD
type IngressRouteSpec struct {
// Virtualhost appears at most once. If it is present, the object is considered
// to be a "root".
VirtualHost *VirtualHost `json:"virtualhost,omitempty"`
// Routes are the ingress routes. If TCPProxy is present, Routes is ignored.
Routes []Route `json:"routes"`
// TCPProxy holds TCP proxy information.
TCPProxy *TCPProxy `json:"tcpproxy,omitempty"`
}
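// An illustrative manifest built from these types (a sketch; the
// "contour.heptio.com/v1beta1" group/version is assumed from this package
// and should be checked against the Contour release in use):
//
//	apiVersion: contour.heptio.com/v1beta1
//	kind: IngressRoute
//	metadata:
//	  name: basic
//	spec:
//	  virtualhost:
//	    fqdn: foo.example.com
//	  routes:
//	  - match: /
//	    services:
//	    - name: s1
//	      port: 80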
// VirtualHost appears at most once. If it is present, the object is considered
// to be a "root".
type VirtualHost struct {
// The fully qualified domain name of the root of the ingress tree
// all leaves of the DAG rooted at this object relate to the fqdn
Fqdn string `json:"fqdn"`
// If present describes tls properties. The CNI names that will be matched on
// are described in fqdn, the tls.secretName secret must contain a
// matching certificate
TLS *TLS `json:"tls,omitempty"`
}
// TLS describes tls properties. The CNI names that will be matched on
// are described in fqdn, the tls.secretName secret must contain a
// matching certificate unless tls.passthrough is set to true.
type TLS struct {
// required, the name of a secret in the current namespace
SecretName string `json:"secretName,omitempty"`
// Minimum TLS version this vhost should negotiate
MinimumProtocolVersion string `json:"minimumProtocolVersion,omitempty"`
// If Passthrough is set to true, the SecretName will be ignored
// and the encrypted handshake will be passed through to the
// backing cluster.
Passthrough bool `json:"passthrough,omitempty"`
}
// Route contains the set of routes for a virtual host
type Route struct {
// Match defines the prefix match
Match string `json:"match"`
// Services are the services to proxy traffic
Services []Service `json:"services,omitempty"`
// Delegate specifies that this route should be delegated to another IngressRoute
Delegate *Delegate `json:"delegate,omitempty"`
// Enables websocket support for the route
EnableWebsockets bool `json:"enableWebsockets,omitempty"`
// Allow this path to respond to insecure requests over HTTP which are normally
// not permitted when a `virtualhost.tls` block is present.
PermitInsecure bool `json:"permitInsecure,omitempty"`
// Indicates that during forwarding, the matched prefix (or path) should be swapped with this value
PrefixRewrite string `json:"prefixRewrite,omitempty"`
// The timeout policy for this route
TimeoutPolicy *TimeoutPolicy `json:"timeoutPolicy,omitempty"`
	// The retry policy for this route
RetryPolicy *RetryPolicy `json:"retryPolicy,omitempty"`
}
// TCPProxy contains the set of services to proxy TCP connections.
type TCPProxy struct {
// Services are the services to proxy traffic
Services []Service `json:"services,omitempty"`
// Delegate specifies that this tcpproxy should be delegated to another IngressRoute
Delegate *Delegate `json:"delegate,omitempty"`
}
// Service defines an upstream to proxy traffic to
type Service struct {
	// Name is the name of the Kubernetes service to proxy traffic to.
	// Names defined here are used to look up the corresponding endpoints, which contain the IPs to route to.
Name string `json:"name"`
	// Port (defined as Integer) to proxy traffic to, since a service can define multiple ports
Port int `json:"port"`
	// Weight defines the percentage of traffic to send to this upstream
Weight int `json:"weight,omitempty"`
// HealthCheck defines optional healthchecks on the upstream service
HealthCheck *HealthCheck `json:"healthCheck,omitempty"`
// LB Algorithm to apply (see https://github.com/heptio/contour/blob/master/design/ingressroute-design.md#load-balancing)
Strategy string `json:"strategy,omitempty"`
// UpstreamValidation defines how to verify the backend service's certificate
UpstreamValidation *UpstreamValidation `json:"validation,omitempty"`
}
// Delegate allows for delegating VHosts to other IngressRoutes
type Delegate struct {
// Name of the IngressRoute
Name string `json:"name"`
// Namespace of the IngressRoute
Namespace string `json:"namespace,omitempty"`
}
// HealthCheck defines optional healthchecks on the upstream service
type HealthCheck struct {
// HTTP endpoint used to perform health checks on upstream service
Path string `json:"path"`
// The value of the host header in the HTTP health check request.
// If left empty (default value), the name "contour-envoy-healthcheck"
// will be used.
Host string `json:"host,omitempty"`
// The interval (seconds) between health checks
IntervalSeconds int64 `json:"intervalSeconds"`
// The time to wait (seconds) for a health check response
TimeoutSeconds int64 `json:"timeoutSeconds"`
// The number of unhealthy health checks required before a host is marked unhealthy
UnhealthyThresholdCount uint32 `json:"unhealthyThresholdCount"`
// The number of healthy health checks required before a host is marked healthy
HealthyThresholdCount uint32 `json:"healthyThresholdCount"`
}
// TimeoutPolicy defines the attributes associated with timeout
type TimeoutPolicy struct {
// Timeout for receiving a response from the server after processing a request from client.
// If not supplied the timeout duration is undefined.
Request string `json:"request"`
}
// RetryPolicy defines the attributes associated with the retry policy
type RetryPolicy struct {
// NumRetries is maximum allowed number of retries.
// If not supplied, the number of retries is zero.
NumRetries int `json:"count"`
// PerTryTimeout specifies the timeout per retry attempt.
// Ignored if NumRetries is not supplied.
PerTryTimeout string `json:"perTryTimeout,omitempty"`
}
// UpstreamValidation defines how to verify the backend service's certificate
type UpstreamValidation struct {
// Name of the Kubernetes secret be used to validate the certificate presented by the backend
CACertificate string `json:"caSecret"`
// Key which is expected to be present in the 'subjectAltName' of the presented certificate
SubjectName string `json:"subjectName"`
}
// Status reports the current state of the IngressRoute
type Status struct {
CurrentStatus string `json:"currentStatus"`
Description string `json:"description"`
}
// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// IngressRoute is an Ingress CRD specification
type IngressRoute struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata"`
Spec IngressRouteSpec `json:"spec"`
Status `json:"status"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// IngressRouteList is a list of IngressRoutes
type IngressRouteList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata"`
Items []IngressRoute `json:"items"`
}
requests_test.go
package flavors
import (
"testing"
"github.com/rackspace/gophercloud"
"github.com/rackspace/gophercloud/pagination"
th "github.com/rackspace/gophercloud/testhelper"
fake "github.com/rackspace/gophercloud/testhelper/client"
)
func TestList(t *testing.T) {
th.SetupHTTP()
defer th.TeardownHTTP()
HandleListCDNFlavorsSuccessfully(t)
count := 0
err := List(fake.ServiceClient()).EachPage(func(page pagination.Page) (bool, error) {
count++
actual, err := ExtractFlavors(page)
		if err != nil {
			t.Errorf("Failed to extract flavors: %v", err)
			return false, err
		}
expected := []Flavor{
Flavor{
ID: "europe",
Providers: []Provider{
Provider{
Provider: "Fastly",
Links: []gophercloud.Link{
gophercloud.Link{
Href: "http://www.fastly.com",
Rel: "provider_url",
},
},
},
},
Links: []gophercloud.Link{
gophercloud.Link{
Href: "https://www.poppycdn.io/v1.0/flavors/europe",
Rel: "self",
},
},
},
}
th.CheckDeepEquals(t, expected, actual)
return true, nil
})
th.AssertNoErr(t, err)
th.CheckEquals(t, 1, count)
}
func TestGet(t *testing.T) {
th.SetupHTTP()
defer th.TeardownHTTP()
HandleGetCDNFlavorSuccessfully(t)
expected := &Flavor{
ID: "asia",
Providers: []Provider{
Provider{
Provider: "ChinaCache",
Links: []gophercloud.Link{
gophercloud.Link{
Href: "http://www.chinacache.com",
Rel: "provider_url",
},
},
},
},
Links: []gophercloud.Link{
gophercloud.Link{
Href: "https://www.poppycdn.io/v1.0/flavors/asia",
Rel: "self",
},
},
}
actual, err := Get(fake.ServiceClient(), "asia").Extract()
th.AssertNoErr(t, err)
th.AssertDeepEquals(t, expected, actual)
}
enhanceCompanyInput.tsx
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { withRouter } from 'react-router-dom';
import { CompanyAutoComplete } from '../commons/components/company-input/company-autocomplete';
import { fetchCompanyAction, selectCompanyList } from '../ducks/company.duck';

const enhanceCompanyInput = () => {
const Wrapper = () => {
return class extends Component<any> {
componentWillMount() {
const { fetchCompanyAction } = this.props;
fetchCompanyAction();
}
componentWillUpdate(nextProps: any) {
const { fetchCompanyAction } = this.props;
}
render() {
const { suggestCompany } = this.props;
return <CompanyAutoComplete suggestData={suggestCompany} {...this.props} />;
}
};
};
const mapStateToProps = (state: any, ownProps: any) => {
return {
suggestCompany: selectCompanyList(state)
};
};
return withRouter(
connect(
mapStateToProps,
{
fetchCompanyAction
}
)(Wrapper())
);
};
export default enhanceCompanyInput;
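// Illustrative usage (a sketch; props pass through to CompanyAutoComplete,
// and withRouter requires the component to render inside a Router tree):
//   const CompanyField = enhanceCompanyInput();
//   // ...then render <CompanyField /> where the autocomplete is needed.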
record_batch_test.go
// Copyright 2019 The Cockroach Authors.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
package colserde_test
import (
"bytes"
"encoding/binary"
"fmt"
"math"
"math/rand"
"strings"
"testing"
"time"
"unsafe"
"github.com/apache/arrow/go/arrow"
"github.com/apache/arrow/go/arrow/array"
"github.com/apache/arrow/go/arrow/memory"
"github.com/cockroachdb/apd"
"github.com/cockroachdb/cockroach/pkg/col/colserde"
"github.com/cockroachdb/cockroach/pkg/col/typeconv"
"github.com/cockroachdb/cockroach/pkg/sql/types"
"github.com/cockroachdb/cockroach/pkg/testutils"
"github.com/cockroachdb/cockroach/pkg/util/leaktest"
"github.com/cockroachdb/cockroach/pkg/util/randutil"
"github.com/cockroachdb/cockroach/pkg/util/timeutil"
"github.com/stretchr/testify/require"
)
// randomDataFromType creates an *array.Data of length n and type t, filling it
// with random values and inserting nulls with probability nullProbability.
func randomDataFromType(rng *rand.Rand, t *types.T, n int, nullProbability float64) *array.Data {
if nullProbability < 0 || nullProbability > 1 {
panic(fmt.Sprintf("expected a value between 0 and 1 for nullProbability but got %f", nullProbability))
}
const (
// maxVarLen is the maximum length we allow variable length datatypes (e.g.
// strings) to be.
maxVarLen = 1024
charset = "㪊㪋㪌㪍㪎𢽙啟敍敎敏敚敐救敒敓敔敕敖敗敘教敏敖abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ😈💜╯‵Д′)╯彡┻━┻"
)
// valid represents the null bitmap.
valid := make([]bool, n)
for i := range valid {
if rng.Float64() >= nullProbability {
valid[i] = true
}
}
var builder array.Builder
switch typeconv.TypeFamilyToCanonicalTypeFamily[t.Family()] {
case types.BoolFamily:
builder = array.NewBooleanBuilder(memory.DefaultAllocator)
data := make([]bool, n)
for i := range data {
if rng.Float64() < 0.5 {
data[i] = true
}
}
builder.(*array.BooleanBuilder).AppendValues(data, valid)
case types.IntFamily:
switch t.Width() {
case 16:
builder = array.NewInt16Builder(memory.DefaultAllocator)
data := make([]int16, n)
for i := range data {
data[i] = int16(rng.Uint64())
}
builder.(*array.Int16Builder).AppendValues(data, valid)
case 32:
builder = array.NewInt32Builder(memory.DefaultAllocator)
data := make([]int32, n)
for i := range data {
data[i] = int32(rng.Uint64())
}
builder.(*array.Int32Builder).AppendValues(data, valid)
case 0, 64:
builder = array.NewInt64Builder(memory.DefaultAllocator)
data := make([]int64, n)
for i := range data {
data[i] = int64(rng.Uint64())
}
builder.(*array.Int64Builder).AppendValues(data, valid)
default:
panic(fmt.Sprintf("unexpected int width: %d", t.Width()))
}
case types.FloatFamily:
builder = array.NewFloat64Builder(memory.DefaultAllocator)
data := make([]float64, n)
for i := range data {
data[i] = rng.Float64() * math.MaxFloat64
}
builder.(*array.Float64Builder).AppendValues(data, valid)
case types.BytesFamily:
// Bytes can be represented 3 different ways. As variable-length bytes,
// variable-length strings, or fixed-width bytes.
representation := rng.Intn(3)
switch representation {
case 0:
builder = array.NewStringBuilder(memory.DefaultAllocator)
data := make([]string, n)
stringBuilder := &strings.Builder{}
for i := range data {
stringBuilder.Reset()
if valid[i] {
for j := 0; j < rng.Intn(maxVarLen)+1; j++ {
stringBuilder.WriteRune(rune(charset[rng.Intn(len(charset))]))
}
}
data[i] = stringBuilder.String()
}
builder.(*array.StringBuilder).AppendValues(data, valid)
case 1:
builder = array.NewBinaryBuilder(memory.DefaultAllocator, arrow.BinaryTypes.Binary)
data := make([][]byte, n)
for i := range data {
slice := make([]byte, rng.Intn(maxVarLen))
if valid[i] {
// Read always returns len(slice) and nil error.
_, _ = rng.Read(slice)
}
data[i] = slice
}
builder.(*array.BinaryBuilder).AppendValues(data, valid)
case 2:
width := rng.Intn(maxVarLen) + 1
builder = array.NewFixedSizeBinaryBuilder(memory.DefaultAllocator, &arrow.FixedSizeBinaryType{ByteWidth: width})
data := make([][]byte, n)
for i := range data {
slice := make([]byte, width)
if valid[i] {
_, _ = rng.Read(slice)
}
data[i] = slice
}
builder.(*array.FixedSizeBinaryBuilder).AppendValues(data, valid)
}
case types.DecimalFamily:
var err error
builder = array.NewBinaryBuilder(memory.DefaultAllocator, arrow.BinaryTypes.Binary)
data := make([][]byte, n)
for i := range data {
var d apd.Decimal
// int64(rng.Uint64()) to get negative numbers, too.
d.SetFinite(int64(rng.Uint64()), int32(rng.Intn(40)-20))
data[i], err = d.MarshalText()
if err != nil {
panic(err)
}
}
builder.(*array.BinaryBuilder).AppendValues(data, valid)
case types.TimestampTZFamily:
var err error
now := timeutil.Now()
builder = array.NewBinaryBuilder(memory.DefaultAllocator, arrow.BinaryTypes.Binary)
data := make([][]byte, n)
for i := range data {
delta := rng.Int63()
ts := now.Add(time.Duration(delta))
data[i], err = ts.MarshalBinary()
if err != nil {
panic(err)
}
}
builder.(*array.BinaryBuilder).AppendValues(data, valid)
case types.IntervalFamily:
builder = array.NewBinaryBuilder(memory.DefaultAllocator, arrow.BinaryTypes.Binary)
data := make([][]byte, n)
sizeOfInt64 := int(unsafe.Sizeof(int64(0)))
for i := range data {
data[i] = make([]byte, sizeOfInt64*3)
binary.LittleEndian.PutUint64(data[i][0:sizeOfInt64], rng.Uint64())
binary.LittleEndian.PutUint64(data[i][sizeOfInt64:sizeOfInt64*2], rng.Uint64())
binary.LittleEndian.PutUint64(data[i][sizeOfInt64*2:sizeOfInt64*3], rng.Uint64())
}
builder.(*array.BinaryBuilder).AppendValues(data, valid)
default:
panic(fmt.Sprintf("unsupported type %s", t))
}
return builder.NewArray().Data()
}
func TestRecordBatchSerializer(t *testing.T) {
defer leaktest.AfterTest(t)()
t.Run("UnsupportedSchema", func(t *testing.T) {
_, err := colserde.NewRecordBatchSerializer([]*types.T{})
require.True(t, testutils.IsError(err, "zero length"), err)
})
// Serializing and Deserializing an invalid schema is undefined.
t.Run("SerializeDifferentColumnLengths", func(t *testing.T) {
s, err := colserde.NewRecordBatchSerializer([]*types.T{types.Int, types.Int})
require.NoError(t, err)
b := array.NewInt64Builder(memory.DefaultAllocator)
b.AppendValues([]int64{1, 2}, nil /* valid */)
firstCol := b.NewArray().Data()
b.AppendValues([]int64{3}, nil /* valid */)
secondCol := b.NewArray().Data()
_, _, err = s.Serialize(&bytes.Buffer{}, []*array.Data{firstCol, secondCol})
require.True(t, testutils.IsError(err, "mismatched data lengths"), err)
})
}
func TestRecordBatchSerializerSerializeDeserializeRandom(t *testing.T) {
defer leaktest.AfterTest(t)()
rng, _ := randutil.NewPseudoRand()
const (
maxTypes = 16
maxDataLen = 2048
)
var (
typs = make([]*types.T, rng.Intn(maxTypes)+1)
data = make([]*array.Data, len(typs))
dataLen = rng.Intn(maxDataLen) + 1
nullProbability = rng.Float64()
buf = bytes.Buffer{}
)
for i := range typs {
typs[i] = typeconv.AllSupportedSQLTypes[rng.Intn(len(typeconv.AllSupportedSQLTypes))]
data[i] = randomDataFromType(rng, typs[i], dataLen, nullProbability)
}
s, err := colserde.NewRecordBatchSerializer(typs)
if err != nil {
t.Fatal(err)
}
// Run Serialize/Deserialize in a loop to test reuse.
for i := 0; i < 2; i++ {
buf.Reset()
_, _, err := s.Serialize(&buf, data)
require.NoError(t, err)
if buf.Len()%8 != 0 {
t.Fatal("message length must align to 8 byte boundary")
}
var deserializedData []*array.Data
require.NoError(t, s.Deserialize(&deserializedData, buf.Bytes()))
// Check the fields we care most about. We can't use require.Equal directly
// due to some unimportant differences (e.g. mutability of underlying
// buffers).
require.Equal(t, len(data), len(deserializedData))
for i := range data {
require.Equal(t, data[i].Len(), deserializedData[i].Len())
require.Equal(t, len(data[i].Buffers()), len(deserializedData[i].Buffers()))
require.Equal(t, data[i].NullN(), deserializedData[i].NullN())
require.Equal(t, data[i].Offset(), deserializedData[i].Offset())
decBuffers := deserializedData[i].Buffers()
for j, buf := range data[i].Buffers() {
if buf == nil {
if decBuffers[j].Len() != 0 {
t.Fatal("expected zero length serialization of nil buffer")
}
continue
}
require.Equal(t, buf.Len(), decBuffers[j].Len())
require.Equal(t, buf.Bytes(), decBuffers[j].Bytes())
}
}
}
}
func BenchmarkRecordBatchSerializerInt64(b *testing.B) {
rng, _ := randutil.NewPseudoRand()
var (
typs = []*types.T{types.Int}
buf = bytes.Buffer{}
deserializedData []*array.Data
)
s, err := colserde.NewRecordBatchSerializer(typs)
require.NoError(b, err)
for _, dataLen := range []int{1, 16, 256, 2048, 4096} {
// Only calculate useful bytes.
numBytes := int64(dataLen * 8)
data := []*array.Data{randomDataFromType(rng, typs[0], dataLen, 0 /* nullProbability */)}
b.Run(fmt.Sprintf("Serialize/dataLen=%d", dataLen), func(b *testing.B) {
b.SetBytes(numBytes)
for i := 0; i < b.N; i++ {
buf.Reset()
if _, _, err := s.Serialize(&buf, data); err != nil {
b.Fatal(err)
}
}
})
// buf should still have the result of the last serialization. It is still
// empty in cases in which we run only the Deserialize benchmarks.
if buf.Len() == 0 {
if _, _, err := s.Serialize(&buf, data); err != nil {
b.Fatal(err)
}
}
b.Run(fmt.Sprintf("Deserialize/dataLen=%d", dataLen), func(b *testing.B) {
b.SetBytes(numBytes)
for i := 0; i < b.N; i++ {
if err := s.Deserialize(&deserializedData, buf.Bytes()); err != nil {
b.Fatal(err)
}
deserializedData = deserializedData[:0]
}
})
}
}
| randomDataFromType |
player.js | var game = window.game;
var cursors;
var keys;
var sprite = 0;
var iter = 0;
var speed = 200;
var player = {};
var projectile = require("projectileFactory");
var init_char = require("character");
var particles = require("particles")
var shapes = require("shapes");
var meleeCooldown = false;
var meleeCooldownTime = 200;
var mouseBeenUp = true;
player.preload = function(){
}
player.create = function() {
player.character = init_char.getCurrentUser();
player.iframes = false;
var sprite = game.add.graphics(0, 0);
sprite.beginFill(0x222222);
sprite.drawCircle(0, 0, 32);
sprite.beginFill(0x1463ab);
sprite.drawCircle(0, 0, 25);
player.entity = game.add.sprite(game.world.centerX, game.world.centerY, sprite.generateTexture());
player.entity.name = "player";
sprite.destroy();
game.physics.p2.enable(player.entity);
cursors = game.input.keyboard.createCursorKeys();
keys = {
w: game.input.keyboard.addKey(Phaser.Keyboard.W),
a: game.input.keyboard.addKey(Phaser.Keyboard.A),
s: game.input.keyboard.addKey(Phaser.Keyboard.S),
d: game.input.keyboard.addKey(Phaser.Keyboard.D)
};
player.entity.body.setCollisionGroup(game.playerCollisionGroup)
player.entity.body.daddy = player;
player.entity.body.collides(game.allCollisionGroups);
player.entity.body.collideWorldBounds = true;
game.input.keyboard.addKey(Phaser.Keyboard.C).onDown.add(function () {
player.mutate();
}, this);
setInterval(function (){
var c = player.character.current_health;
var m = player.character.getStats().maxHealth;
if(c < m){
player.character.current_health++
}
},5000);
setInterval(player.mutate, 30000)
}
player.mutate = function (){
player.character.changeType()
particles.explosion(player.entity.x,player.entity.y);
}
player.render = function() {
player.entity.removeChild(sprite);
sprite = game.add.graphics(0, 0);
iter += 0.4;
if (iter > 10)
iter = -10;
this.drawWeapon(sprite);
sprite.beginFill(0xEDBE00);
sprite.drawCircle(0, 0, Math.abs(iter) + 5);
var child = player.entity.addChild(sprite);
}
player.drawWeapon = function(sprite) {
player.entity.body.angle = 0;
var sx = player.entity.x;
var sy = player.entity.y;
var tx = game.input.worldX;
var ty = game.input.worldY;
var angle = Phaser.Point.angle(new Phaser.Point(sx, sy), new Phaser.Point(tx, ty));
function | (diff, length){
var x = -Math.cos(angle + diff);
var y = -Math.sin(angle + diff);
sprite.lineTo(x*length, y*length);
}
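	// The helper above (invoked as lt(...) below) extends the current path by
	// one segment of `length` pixels in the aim direction rotated by `diff`
	// radians, e.g. lt(0, 35) draws toward the cursor and lt(Math.PI, 10)
	// draws away from it.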
switch(player.character.type) {
case "Range":
sprite.lineStyle(5, 0x222222, 1);
var x = -Math.cos(angle);
var y = -Math.sin(angle);
sprite.moveTo(x*16, y*16);
sprite.lineTo(x*25, y*25);
break;
case "Melee":
// sprite.fill(2, 0xEDBE00, 1);
sprite.lineStyle(5, 0x222222, 1);
var f = 1;
if (meleeCooldown) {
f = 1.5;
}
sprite.moveTo(0, 0);
lt(Math.PI,10)
lt(Math.PI/16,30*f)
lt(0,35*f)
lt(-Math.PI/16,30*f)
lt(-Math.PI,10)
break;
case "Magic":
sprite.lineStyle(5, 0x222222, 1);
var xa = -Math.cos(angle + Math.PI/4);
var ya = -Math.sin(angle + Math.PI/4);
var xb = -Math.cos(angle - Math.PI/4);
var yb = -Math.sin(angle - Math.PI/4);
// sprite.fill(2, 0xEDBE00, 1);
sprite.moveTo(xa*25, ya*25);
sprite.lineTo(xa, ya);
sprite.lineTo(xb*25, yb*25);
break;
		default:
			sprite.lineStyle(0, 0xEDBE00, 1);
			break;
	}
}
player.update = function() {
var nspeed = speed;
player.entity.body.setZeroVelocity();
var vert = false;
var hori = false;
if (cursors.up.isDown || keys.w.isDown)
vert = !vert;
if (cursors.down.isDown || keys.s.isDown)
vert = !vert;
if (cursors.left.isDown || keys.a.isDown)
hori = !hori;
if (cursors.right.isDown || keys.d.isDown)
hori = !hori;
	if (vert && hori)
		nspeed = speed/Math.sqrt(2);
if (cursors.up.isDown || keys.w.isDown)
player.entity.body.moveUp(nspeed);
if (cursors.down.isDown || keys.s.isDown)
player.entity.body.moveDown(nspeed);
if (cursors.left.isDown || keys.a.isDown)
player.entity.body.moveLeft(nspeed);
if (cursors.right.isDown || keys.d.isDown)
player.entity.body.moveRight(nspeed);
if(game.input.mousePointer.isUp) {
mouseBeenUp = true;
}
if(game.input.mousePointer.isDown) {
switch(player.character.type) {
case "Range":
projectile.spawnProjectile(player, "mouse", projectile.defaultProjectile);
break;
case "Melee":
				if (meleeCooldown || !mouseBeenUp)
break;
meleeCooldown = true;
mouseBeenUp = false;
var sx = player.entity.x;
var sy = player.entity.y;
var tx = game.input.worldX;
var ty = game.input.worldY;
var angle = Phaser.Point.angle(new Phaser.Point(sx, sy), new Phaser.Point(tx, ty));
var m_x = player.entity.x;
var m_y = player.entity.y;
var m_direction = angle;
var m_spread = 0.4;
var m_range = 75;
var m_mobs = require("mobFactory").findMobInCone(m_x,m_y,m_direction,m_spread,m_range);
for(var m in m_mobs){
var sx2 = player.entity.x;
var sy2 = player.entity.y;
var tx2 = m_mobs[m].entity.x;
var ty2 = m_mobs[m].entity.y;
var angle2 = Phaser.Point.angle(new Phaser.Point(sx2, sy2), new Phaser.Point(tx2, ty2));
var x_velocity = -Math.cos(angle2)*20;
var y_velocity = -Math.sin(angle2)*20;
m_mobs[m].move(x_velocity,y_velocity);
m_mobs[m].current_health--;
}
setTimeout(function () {
meleeCooldown = false;
}, meleeCooldownTime);
break;
case "Magic":
projectile.spawnProjectile(player, "mouse", projectile.magicMissile);
break;
default:
break;
}
}
}
module.exports = player; | lt |
devcontext.go | package v0
import (
"context"
"errors"
"time"
v0 "github.com/authzed/authzed-go/proto/authzed/api/v0"
v1 "github.com/authzed/authzed-go/proto/authzed/api/v1"
"github.com/rs/zerolog/log"
"github.com/shopspring/decimal"
"github.com/authzed/spicedb/internal/datastore"
"github.com/authzed/spicedb/internal/datastore/memdb"
"github.com/authzed/spicedb/internal/dispatch"
"github.com/authzed/spicedb/internal/dispatch/graph"
"github.com/authzed/spicedb/internal/namespace"
"github.com/authzed/spicedb/pkg/schemadsl/compiler"
"github.com/authzed/spicedb/pkg/schemadsl/input"
"github.com/authzed/spicedb/pkg/tuple"
)
// DevContext holds the various helper types for running the developer calls.
type DevContext struct {
Ctx context.Context
Datastore datastore.Datastore
Revision decimal.Decimal
Namespaces []*v0.NamespaceDefinition
Dispatcher dispatch.Dispatcher
RequestErrors []*v0.DeveloperError
NamespaceManager namespace.Manager
}
// NewDevContext creates a new DevContext from the specified request context, parsing and populating
// the datastore as needed.
func NewDevContext(ctx context.Context, requestContext *v0.RequestContext) (*DevContext, bool, error) {
ds, err := memdb.NewMemdbDatastore(0, 0*time.Second, 0*time.Second, 0*time.Second)
if err != nil {
return nil, false, err
}
dctx, ok, err := newDevContext(ctx, requestContext, ds)
if !ok || err != nil {
err := dctx.NamespaceManager.Close()
if err != nil {
return nil, false, err
}
err = ds.Close()
if err != nil {
return nil, false, err
}
}
return dctx, ok, err
}
func newDevContext(ctx context.Context, requestContext *v0.RequestContext, ds datastore.Datastore) (*DevContext, bool, error) {
nsm, err := namespace.NewCachingNamespaceManager(ds, 0*time.Second, nil)
if err != nil {
return nil, false, err
}
dispatcher := graph.NewLocalOnlyDispatcher(nsm, ds)
namespaces, devError, err := compile(requestContext.Schema)
if err != nil {
return &DevContext{Ctx: ctx, NamespaceManager: nsm}, false, err
}
if devError != nil {
return &DevContext{Ctx: ctx, NamespaceManager: nsm, RequestErrors: []*v0.DeveloperError{devError}}, false, nil
}
requestErrors, err := loadNamespaces(ctx, namespaces, nsm, ds)
if err != nil {
return &DevContext{Ctx: ctx, NamespaceManager: nsm}, false, err
}
if len(requestErrors) > 0 {
return &DevContext{Ctx: ctx, NamespaceManager: nsm, RequestErrors: requestErrors}, false, nil
}
if len(requestContext.LegacyNsConfigs) > 0 {
requestErrors, err := loadNamespaces(ctx, requestContext.LegacyNsConfigs, nsm, ds)
if err != nil {
return &DevContext{Ctx: ctx, NamespaceManager: nsm}, false, err
}
if len(requestErrors) > 0 {
return &DevContext{Ctx: ctx, NamespaceManager: nsm, RequestErrors: requestErrors}, false, nil
}
}
revision, requestErrors, err := loadTuples(ctx, requestContext.Relationships, nsm, ds)
if err != nil {
return &DevContext{Ctx: ctx, NamespaceManager: nsm, Namespaces: namespaces}, false, err
}
if len(requestErrors) == 0 {
err = requestContext.Validate()
if err != nil {
return &DevContext{Ctx: ctx, NamespaceManager: nsm, Namespaces: namespaces}, false, err
}
}
return &DevContext{
Ctx: ctx,
Datastore: ds,
Namespaces: namespaces,
Revision: revision,
Dispatcher: dispatcher,
RequestErrors: requestErrors,
NamespaceManager: nsm,
}, len(requestErrors) == 0, nil
}
func (dc *DevContext) dispose() {
datastore := dc.Datastore
if datastore != nil {
err := dc.NamespaceManager.Close()
if err != nil {
log.Ctx(dc.Ctx).Err(err).Msg("error when disposing of namespace manager in devcontext")
}
err = datastore.Close()
if err != nil {
log.Ctx(dc.Ctx).Err(err).Msg("error when disposing of datastore in devcontext")
}
}
}
func compile(schema string) ([]*v0.NamespaceDefinition, *v0.DeveloperError, error) {
empty := ""
namespaces, err := compiler.Compile([]compiler.InputSchema{
{
Source: input.InputSource("schema"),
SchemaString: schema,
},
}, &empty)
var contextError compiler.ErrorWithContext
if errors.As(err, &contextError) {
line, col, err := contextError.SourceRange.Start().LineAndColumn()
if err != nil {
return []*v0.NamespaceDefinition{}, nil, err
}
return []*v0.NamespaceDefinition{}, &v0.DeveloperError{
Message: contextError.Error(),
Kind: v0.DeveloperError_SCHEMA_ISSUE,
Source: v0.DeveloperError_SCHEMA,
Line: uint32(line) + 1, // 0-indexed in parser.
Column: uint32(col) + 1, // 0-indexed in parser.
}, nil
}
if err != nil {
return []*v0.NamespaceDefinition{}, nil, err
}
return namespaces, nil, nil
}
func loadTuples(ctx context.Context, tuples []*v0.RelationTuple, nsm namespace.Manager, ds datastore.Datastore) (decimal.Decimal, []*v0.DeveloperError, error) |
func loadNamespaces(ctx context.Context, namespaces []*v0.NamespaceDefinition, nsm namespace.Manager, ds datastore.Datastore) ([]*v0.DeveloperError, error) {
var errors []*v0.DeveloperError
for _, nsDef := range namespaces {
ts, terr := namespace.BuildNamespaceTypeSystemForDefs(nsDef, namespaces)
if terr != nil {
return errors, terr
}
tverr := ts.Validate(ctx)
if tverr == nil {
_, err := ds.WriteNamespace(ctx, nsDef)
if err != nil {
return errors, err
}
continue
}
errors = append(errors, &v0.DeveloperError{
Message: tverr.Error(),
Kind: v0.DeveloperError_SCHEMA_ISSUE,
Source: v0.DeveloperError_SCHEMA,
Context: nsDef.Name,
})
}
return errors, nil
}
| {
var errors []*v0.DeveloperError
var updates []*v1.RelationshipUpdate
for _, tpl := range tuples {
verr := tpl.Validate()
if verr != nil {
errors = append(errors, &v0.DeveloperError{
Message: verr.Error(),
Source: v0.DeveloperError_RELATIONSHIP,
Kind: v0.DeveloperError_PARSE_ERROR,
Context: tuple.String(tpl),
})
continue
}
err := validateTupleWrite(ctx, tpl, nsm)
if err != nil {
verrs, wireErr := rewriteGraphError(ctx, v0.DeveloperError_RELATIONSHIP, 0, 0, tuple.String(tpl), err)
if wireErr == nil {
errors = append(errors, verrs...)
continue
}
return decimal.NewFromInt(0), errors, wireErr
}
updates = append(updates, &v1.RelationshipUpdate{
Operation: v1.RelationshipUpdate_OPERATION_TOUCH,
Relationship: tuple.MustToRelationship(tpl),
})
}
revision, err := ds.WriteTuples(ctx, nil, updates)
return revision, errors, err
} |
client.py | import asyncio
import json
import ssl
from pathlib import Path
from typing import Any, Dict, Optional
import websockets
from flora.server.server import ssl_context_for_client
from flora.types.blockchain_format.sized_bytes import bytes32
from flora.util.config import load_config
from flora.util.json_util import dict_to_json_str
from flora.util.ws_message import WsRpcMessage, create_payload_dict
class DaemonProxy:
def __init__(self, uri: str, ssl_context: Optional[ssl.SSLContext]):
self._uri = uri
self._request_dict: Dict[bytes32, asyncio.Event] = {}
self.response_dict: Dict[bytes32, Any] = {}
self.ssl_context = ssl_context
def format_request(self, command: str, data: Dict[str, Any]) -> WsRpcMessage:
request = create_payload_dict(command, data, "client", "daemon")
return request
async def start(self):
self.websocket = await websockets.connect(self._uri, max_size=None, ssl=self.ssl_context)
async def listener():
while True:
try:
message = await self.websocket.recv()
except websockets.exceptions.ConnectionClosedOK:
return None
decoded = json.loads(message)
id = decoded["request_id"]
                if id in self._request_dict:
                    self.response_dict[id] = decoded
                    self._request_dict[id].set()
asyncio.create_task(listener())
await asyncio.sleep(1)
async def _get(self, request: WsRpcMessage) -> WsRpcMessage:
request_id = request["request_id"]
self._request_dict[request_id] = asyncio.Event()
string = dict_to_json_str(request)
asyncio.create_task(self.websocket.send(string))
async def | ():
await asyncio.sleep(30)
if request_id in self._request_dict:
print("Error, timeout.")
self._request_dict[request_id].set()
asyncio.create_task(timeout())
await self._request_dict[request_id].wait()
if request_id in self.response_dict:
response = self.response_dict[request_id]
self.response_dict.pop(request_id)
else:
response = None
self._request_dict.pop(request_id)
return response
async def start_service(self, service_name: str) -> WsRpcMessage:
data = {"service": service_name}
request = self.format_request("start_service", data)
response = await self._get(request)
return response
async def stop_service(self, service_name: str, delay_before_kill: int = 15) -> WsRpcMessage:
data = {"service": service_name}
request = self.format_request("stop_service", data)
response = await self._get(request)
return response
async def is_running(self, service_name: str) -> bool:
data = {"service": service_name}
request = self.format_request("is_running", data)
response = await self._get(request)
if "is_running" in response["data"]:
return bool(response["data"]["is_running"])
return False
async def ping(self) -> WsRpcMessage:
request = self.format_request("ping", {})
response = await self._get(request)
return response
async def close(self) -> None:
await self.websocket.close()
async def exit(self) -> WsRpcMessage:
request = self.format_request("exit", {})
return await self._get(request)
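# Minimal usage sketch for DaemonProxy via the helpers below (assumes a running
# daemon and a valid root_path; "farmer" is a hypothetical service name):
#   client = await connect_to_daemon_and_validate(root_path)
#   if client is not None:
#       await client.start_service("farmer")
#       await client.close()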
async def connect_to_daemon(self_hostname: str, daemon_port: int, ssl_context: Optional[ssl.SSLContext]) -> DaemonProxy:
"""
Connect to the local daemon.
"""
client = DaemonProxy(f"wss://{self_hostname}:{daemon_port}", ssl_context)
await client.start()
return client
async def connect_to_daemon_and_validate(root_path: Path) -> Optional[DaemonProxy]:
"""
Connect to the local daemon and do a ping to ensure that something is really
there and running.
"""
try:
net_config = load_config(root_path, "config.yaml")
crt_path = root_path / net_config["daemon_ssl"]["private_crt"]
key_path = root_path / net_config["daemon_ssl"]["private_key"]
ca_crt_path = root_path / net_config["private_ssl_ca"]["crt"]
ca_key_path = root_path / net_config["private_ssl_ca"]["key"]
ssl_context = ssl_context_for_client(ca_crt_path, ca_key_path, crt_path, key_path)
connection = await connect_to_daemon(net_config["self_hostname"], net_config["daemon_port"], ssl_context)
r = await connection.ping()
if "value" in r["data"] and r["data"]["value"] == "pong":
return connection
except Exception:
print("Daemon not started yet")
return None
return None
| timeout |
saga.js | // import { take, call, put, select } from 'redux-saga/effects';
// Individual exports for testing
export default function* notificationsSaga() {
// See example in containers/HomePage/saga.js
} | ||
base.ts | import * as fs from 'fs';
import * as path from 'path';
import * as sqlite3 from 'sqlite3';
export class | {
protected database: sqlite3.Database = null;
constructor() {
this.database = new sqlite3.Database(':memory:');
}
public async execute(sql: string): Promise<void> {
await this.initialize();
await this._execute(sql);
}
public async query(sql: string, parameters: any): Promise<any[]> {
await this.initialize();
return this._query(sql, parameters);
}
public valueToString(value: any): string {
if (typeof value === 'string') {
return `'${value}'`;
}
return value;
}
protected async _execute(sql: string): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: Error) => void) => {
this.database.exec(sql, (error: Error) => {
if (error) {
reject(error);
return;
}
resolve();
});
});
}
public _query(sql: string, parameters: any): Promise<any[]> {
return new Promise<any[]>((resolve: (rows: any[]) => void, reject: (error: Error) => void) => {
this.database.all(sql, parameters, (error: Error, rows: any[]) => {
if (error) {
reject(error);
return;
}
resolve(rows);
});
});
}
protected async initialize(): Promise<void> {
try {
let version: number = await this.getVersion();
while (true) {
version += 1;
const scriptPath: string = path.join('database', 'migration-scripts', `version-${version}.up.sql`);
if (!fs.existsSync(scriptPath)) {
break;
}
const script: string = fs.readFileSync(scriptPath, 'utf-8');
await this._execute(script);
await this.insertVersion(version);
}
} catch (error) {
if (error.message !== 'SQLITE_ERROR: no such table: VERSION') {
throw error;
}
await this.createVersionTable();
await this.initialize();
}
}
protected async createVersionTable(): Promise<void> {
await this._execute(`CREATE TABLE VERSION (
VERSION INT NOT NULL,
TIMESTAMP INT NOT NULL
);`);
}
protected async getVersion(): Promise<number> {
const rows: any[] = await this._query('SELECT * FROM VERSION ORDER BY TIMESTAMP DESC', undefined);
if (!rows.length) {
return 0;
}
return rows[0].VERSION;
}
protected async insertVersion(version: number): Promise<void> {
await this._execute(`INSERT INTO VERSION (VERSION, TIMESTAMP) VALUES (${version}, ${new Date().getTime()})`);
}
}
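// Minimal usage sketch (assumes migration scripts exist under
// database/migration-scripts; the SQL below is illustrative only):
//   const repo = new BaseRepository();
//   await repo.execute('CREATE TABLE USERS (ID INT, NAME TEXT);');
//   const rows = await repo.query('SELECT * FROM USERS WHERE ID = ?', [1]);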
| BaseRepository |
bool.rs | //! impl bool {}
#[cfg(not(bootstrap))]
#[lang = "bool"]
impl bool {
/// Returns `Some(t)` if the `bool` is `true`, or `None` otherwise.
///
/// # Examples
///
/// ```
/// #![feature(bool_to_option)]
///
/// assert_eq!(false.then(0), None);
/// assert_eq!(true.then(0), Some(0));
/// ```
#[unstable(feature = "bool_to_option", issue = "64260")]
#[inline]
pub fn then<T>(self, t: T) -> Option<T> {
if self {
Some(t)
} else {
None
}
}
/// Returns `Some(f())` if the `bool` is `true`, or `None` otherwise.
///
/// # Examples
///
/// ```
/// #![feature(bool_to_option)]
///
/// assert_eq!(false.then_with(|| 0), None);
/// assert_eq!(true.then_with(|| 0), Some(0));
/// ```
#[unstable(feature = "bool_to_option", issue = "64260")]
#[inline]
pub fn | <T, F: FnOnce() -> T>(self, f: F) -> Option<T> {
if self {
Some(f())
} else {
None
}
}
}
| then_with |
babylon.glTF2FileLoader.d.ts | declare module BABYLON {
enum GLTFLoaderCoordinateSystemMode {
AUTO = 0,
PASS_THROUGH = 1,
FORCE_RIGHT_HANDED = 2,
}
interface IGLTFLoaderData {
json: Object;
bin: ArrayBufferView;
}
interface IGLTFLoader {
importMeshAsync: (meshesNames: any, scene: Scene, data: IGLTFLoaderData, rootUrl: string, onSuccess: (meshes: AbstractMesh[], particleSystems: ParticleSystem[], skeletons: Skeleton[]) => void, onProgress: (event: ProgressEvent) => void, onError: (message: string) => void) => void;
loadAsync: (scene: Scene, data: IGLTFLoaderData, rootUrl: string, onSuccess: () => void, onProgress: (event: ProgressEvent) => void, onError: (message: string) => void) => void;
}
class GLTFFileLoader implements ISceneLoaderPluginAsync {
static CreateGLTFLoaderV1: (parent: GLTFFileLoader) => IGLTFLoader;
static CreateGLTFLoaderV2: (parent: GLTFFileLoader) => IGLTFLoader;
onParsed: (data: IGLTFLoaderData) => void;
static HomogeneousCoordinates: boolean;
static IncrementalLoading: boolean;
coordinateSystemMode: GLTFLoaderCoordinateSystemMode;
onTextureLoaded: (texture: BaseTexture) => void;
onMaterialLoaded: (material: Material) => void;
/**
     * Lets the user decide whether the material needs processing (such as precompilation) before it is assigned to meshes
*/
onBeforeMaterialReadyAsync: (material: Material, targetMesh: AbstractMesh, isLOD: boolean, callback: () => void) => void;
/**
* Raised when all LODs are complete (or if there is no LOD and model is complete)
*/
onComplete: () => void;
/**
* Raised when first LOD complete (or if there is no LOD and model is complete)
*/
onFirstLODComplete: () => void;
name: string;
extensions: ISceneLoaderPluginExtensions;
importMeshAsync(meshesNames: any, scene: Scene, data: any, rootUrl: string, onSuccess: (meshes: AbstractMesh[], particleSystems: ParticleSystem[], skeletons: Skeleton[]) => void, onProgress: (event: ProgressEvent) => void, onError: (message: string) => void): void;
loadAsync(scene: Scene, data: string | ArrayBuffer, rootUrl: string, onSuccess: () => void, onProgress: (event: ProgressEvent) => void, onError: (message: string) => void): void;
canDirectLoad(data: string): boolean;
private static _parse(data, onError);
private _getLoader(loaderData, onError);
private static _parseBinary(data, onError);
private static _parseV1(binaryReader, onError);
private static _parseV2(binaryReader, onError);
private static _parseVersion(version);
private static _compareVersion(a, b);
private static _decodeBufferToText(view);
}
}
declare module BABYLON.GLTF2 {
/**
* Enums
*/
enum EComponentType {
BYTE = 5120,
UNSIGNED_BYTE = 5121,
SHORT = 5122,
UNSIGNED_SHORT = 5123,
UNSIGNED_INT = 5125,
FLOAT = 5126,
}
enum EMeshPrimitiveMode {
POINTS = 0,
LINES = 1,
LINE_LOOP = 2,
LINE_STRIP = 3,
TRIANGLES = 4,
TRIANGLE_STRIP = 5,
TRIANGLE_FAN = 6,
}
enum ETextureMagFilter {
NEAREST = 9728,
LINEAR = 9729,
}
enum ETextureMinFilter {
NEAREST = 9728,
LINEAR = 9729,
NEAREST_MIPMAP_NEAREST = 9984,
LINEAR_MIPMAP_NEAREST = 9985,
NEAREST_MIPMAP_LINEAR = 9986,
LINEAR_MIPMAP_LINEAR = 9987,
}
enum ETextureWrapMode {
CLAMP_TO_EDGE = 33071,
MIRRORED_REPEAT = 33648,
REPEAT = 10497,
}
/**
* Interfaces
*/
interface IGLTFProperty {
extensions?: Object;
extras?: any;
}
interface IGLTFChildRootProperty extends IGLTFProperty {
name?: string;
}
interface IGLTFAccessorSparseIndices extends IGLTFProperty {
bufferView: number;
byteOffset?: number;
componentType: EComponentType;
}
interface IGLTFAccessorSparseValues extends IGLTFProperty {
bufferView: number;
byteOffset?: number;
}
interface IGLTFAccessorSparse extends IGLTFProperty {
count: number;
indices: IGLTFAccessorSparseIndices;
values: IGLTFAccessorSparseValues;
}
interface IGLTFAccessor extends IGLTFChildRootProperty {
bufferView?: number;
byteOffset?: number;
componentType: EComponentType;
normalized?: boolean;
count: number;
type: string;
max: number[];
min: number[];
sparse?: IGLTFAccessorSparse;
}
interface IGLTFAnimationChannel extends IGLTFProperty {
sampler: number;
target: IGLTFAnimationChannelTarget;
}
interface IGLTFAnimationChannelTarget extends IGLTFProperty {
node: number;
path: string;
}
interface IGLTFAnimationSampler extends IGLTFProperty {
input: number;
interpolation?: string;
output: number;
}
interface IGLTFAnimation extends IGLTFChildRootProperty {
channels: IGLTFAnimationChannel[];
samplers: IGLTFAnimationSampler[];
targets?: any[];
}
interface IGLTFAsset extends IGLTFChildRootProperty {
copyright?: string;
generator?: string;
version: string;
minVersion?: string;
}
interface IGLTFBuffer extends IGLTFChildRootProperty {
uri?: string;
byteLength: number;
loadedData: ArrayBufferView;
loadedObservable: Observable<IGLTFBuffer>;
}
interface IGLTFBufferView extends IGLTFChildRootProperty {
buffer: number;
byteOffset?: number;
byteLength: number;
byteStride?: number;
}
interface IGLTFCameraOrthographic extends IGLTFProperty {
xmag: number;
ymag: number;
zfar: number;
znear: number;
}
interface IGLTFCameraPerspective extends IGLTFProperty {
aspectRatio: number;
yfov: number;
zfar: number;
znear: number;
}
interface IGLTFCamera extends IGLTFChildRootProperty {
orthographic?: IGLTFCameraOrthographic;
perspective?: IGLTFCameraPerspective;
type: string;
}
interface IGLTFImage extends IGLTFChildRootProperty {
uri?: string;
mimeType?: string;
bufferView?: number;
}
interface IGLTFMaterialNormalTextureInfo extends IGLTFTextureInfo {
scale: number;
}
interface IGLTFMaterialOcclusionTextureInfo extends IGLTFTextureInfo {
strength: number;
}
interface IGLTFMaterialPbrMetallicRoughness {
baseColorFactor: number[];
baseColorTexture: IGLTFTextureInfo;
metallicFactor: number;
roughnessFactor: number;
metallicRoughnessTexture: IGLTFTextureInfo;
}
interface IGLTFMaterial extends IGLTFChildRootProperty {
pbrMetallicRoughness?: IGLTFMaterialPbrMetallicRoughness;
normalTexture?: IGLTFMaterialNormalTextureInfo;
occlusionTexture?: IGLTFMaterialOcclusionTextureInfo;
emissiveTexture?: IGLTFTextureInfo;
emissiveFactor?: number[];
alphaMode?: string;
alphaCutoff: number;
doubleSided?: boolean;
index?: number;
babylonMaterial?: Material;
}
interface IGLTFMeshPrimitive extends IGLTFProperty {
attributes: {
[name: string]: number;
};
indices?: number;
material?: number;
mode?: EMeshPrimitiveMode;
targets?: {
[name: string]: number;
}[];
}
interface IGLTFMesh extends IGLTFChildRootProperty {
primitives: IGLTFMeshPrimitive[];
weights?: number[];
}
interface IGLTFNode extends IGLTFChildRootProperty {
camera?: number;
children?: number[];
skin?: number;
matrix?: number[];
mesh?: number;
rotation?: number[];
scale?: number[];
translation?: number[];
weights?: number[];
index?: number;
parent?: IGLTFNode;
babylonMesh?: Mesh;
babylonBones?: {
[skin: number]: Bone;
};
babylonAnimationTargets?: Node[];
}
interface IGLTFSampler extends IGLTFChildRootProperty {
magFilter?: ETextureMagFilter;
minFilter?: ETextureMinFilter;
wrapS?: ETextureWrapMode;
wrapT?: ETextureWrapMode;
}
interface IGLTFScene extends IGLTFChildRootProperty {
nodes: number[];
}
interface IGLTFSkin extends IGLTFChildRootProperty {
inverseBindMatrices?: number;
skeleton?: number;
joints: number[];
index?: number;
babylonSkeleton?: Skeleton;
}
interface IGLTFTexture extends IGLTFChildRootProperty {
sampler?: number;
source: number;
url?: string;
dataReadyObservable?: Observable<IGLTFTexture>;
}
interface IGLTFTextureInfo {
index: number;
texCoord?: number;
}
interface IGLTF extends IGLTFProperty {
accessors?: IGLTFAccessor[];
animations?: IGLTFAnimation[];
asset: IGLTFAsset;
buffers?: IGLTFBuffer[];
bufferViews?: IGLTFBufferView[];
cameras?: IGLTFCamera[];
extensionsUsed?: string[];
extensionsRequired?: string[];
images?: IGLTFImage[];
materials?: IGLTFMaterial[];
meshes?: IGLTFMesh[];
nodes?: IGLTFNode[];
samplers?: IGLTFSampler[];
scene?: number;
scenes?: IGLTFScene[];
skins?: IGLTFSkin[];
textures?: IGLTFTexture[];
}
}
declare module BABYLON.GLTF2 {
class GLTFLoader implements IGLTFLoader, IDisposable {
private _parent;
private _gltf;
private _babylonScene;
private _rootUrl;
private _defaultMaterial;
private _successCallback;
private _progressCallback;
private _errorCallback;
private _renderReady;
private _disposed;
private _blockPendingTracking;
private _nonBlockingData;
private _rootMesh;
private _renderReadyObservable;
private _renderPendingCount;
private _loaderPendingCount;
static Extensions: {
[name: string]: GLTFLoaderExtension;
};
static RegisterExtension(extension: GLTFLoaderExtension): void;
readonly gltf: IGLTF;
readonly babylonScene: Scene;
executeWhenRenderReady(func: () => void): void;
constructor(parent: GLTFFileLoader);
dispose(): void;
importMeshAsync(meshesNames: any, scene: Scene, data: IGLTFLoaderData, rootUrl: string, onSuccess: (meshes: AbstractMesh[], particleSystems: ParticleSystem[], skeletons: Skeleton[]) => void, onProgress: (event: ProgressEvent) => void, onError: (message: string) => void): void;
loadAsync(scene: Scene, data: IGLTFLoaderData, rootUrl: string, onSuccess: () => void, onProgress: (event: ProgressEvent) => void, onError: (message: string) => void): void;
private _loadAsync(nodeNames, scene, data, rootUrl, onSuccess, onProgress, onError);
private _onError(message);
private _onProgress(event);
private _onRenderReady();
private _onLoaderComplete();
private _onLoaderFirstLODComplete();
private _loadData(data);
private _addRightHandToLeftHandRootTransform();
private _getMeshes();
private _getSkeletons();
private _getAnimationTargets();
private _showMeshes();
private _startAnimations();
private _loadScene(nodeNames);
private _loadNode(node);
private _loadMesh(node, mesh);
private _loadVertexDataAsync(primitive, onSuccess);
private _createMorphTargets(node, mesh, primitive, babylonMesh);
private _loadMorphTargetsData(mesh, primitive, vertexData, babylonMesh);
private _loadTransform(node);
private _loadSkin(skin);
private _createBone(node, skin, parent, localMatrix, baseMatrix, index);
private _loadBones(skin, inverseBindMatrixData);
private _loadBone(node, skin, inverseBindMatrixData, babylonBones);
private _getNodeMatrix(node);
private _traverseNodes(indices, action, parentNode?);
private _traverseNode(index, action, parentNode?); | private _buildUint8ArrayBuffer(buffer, byteOffset, byteLength, byteStride, bytePerComponent);
private _buildInt16ArrayBuffer(buffer, byteOffset, byteLength, byteStride, bytePerComponent);
private _buildUint16ArrayBuffer(buffer, byteOffset, byteLength, byteStride, bytePerComponent);
private _buildUint32ArrayBuffer(buffer, byteOffset, byteLength, byteStride, bytePerComponent);
private _buildFloat32ArrayBuffer(buffer, byteOffset, byteLength, byteStride, bytePerComponent);
private _extractInterleavedData(sourceBuffer, targetBuffer, bytePerComponent, stride, length);
private _loadBufferViewAsync(bufferView, byteOffset, byteLength, bytePerComponent, componentType, onSuccess);
private _loadAccessorAsync(accessor, onSuccess);
private _getByteStrideFromType(accessor);
blockPendingTracking: boolean;
addPendingData(data: any): void;
removePendingData(data: any): void;
addLoaderNonBlockingPendingData(data: any): void;
addLoaderPendingData(data: any): void;
removeLoaderPendingData(data: any): void;
private _getDefaultMaterial();
private _loadMaterialMetallicRoughnessProperties(material);
loadMaterial(material: IGLTFMaterial, assign: (babylonMaterial: Material, isNew: boolean) => void): void;
createPbrMaterial(material: IGLTFMaterial): void;
loadMaterialBaseProperties(material: IGLTFMaterial): void;
loadMaterialAlphaProperties(material: IGLTFMaterial, colorFactor?: number[]): void;
loadTexture(textureInfo: IGLTFTextureInfo): Texture;
}
}
declare module BABYLON.GLTF2 {
/**
* Utils functions for GLTF
*/
class GLTFUtils {
/**
         * Returns whether the given uri is a base64 data string
* @param uri: the uri to test
*/
static IsBase64(uri: string): boolean;
/**
* Decode the base64 uri
* @param uri: the uri to decode
*/
static DecodeBase64(uri: string): ArrayBuffer;
static ForEach(view: Uint16Array | Uint32Array | Float32Array, func: (nvalue: number, index: number) => void): void;
static GetTextureWrapMode(mode: ETextureWrapMode): number;
static GetTextureSamplingMode(magFilter: ETextureMagFilter, minFilter: ETextureMinFilter): number;
/**
* Decodes a buffer view into a string
* @param view: the buffer view
*/
static DecodeBufferToText(view: ArrayBufferView): string;
}
}
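// Usage sketch for GLTFUtils (illustrative; `uri` would typically come from an
// IGLTFImage or IGLTFBuffer entry):
//   if (BABYLON.GLTF2.GLTFUtils.IsBase64(uri)) {
//       const bytes = BABYLON.GLTF2.GLTFUtils.DecodeBase64(uri);
//   }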
declare module BABYLON.GLTF2 {
abstract class GLTFLoaderExtension {
enabled: boolean;
readonly abstract name: string;
protected loadMaterial(loader: GLTFLoader, material: IGLTFMaterial, assign: (babylonMaterial: Material, isNew: boolean) => void): boolean;
static _Extensions: GLTFLoaderExtension[];
static LoadMaterial(loader: GLTFLoader, material: IGLTFMaterial, assign: (babylonMaterial: Material, isNew: boolean) => void): boolean;
private static _ApplyExtensions(action);
}
}
declare module BABYLON.GLTF2.Extensions {
class MSFTLOD extends GLTFLoaderExtension {
/**
* Specify the minimal delay between LODs in ms (default = 250)
*/
static MinimalLODDelay: number;
readonly name: string;
protected loadMaterial(loader: GLTFLoader, material: IGLTFMaterial, assign: (babylonMaterial: Material, isNew: boolean) => void): boolean;
private loadMaterialLOD(loader, material, materialLODs, lod, assign);
}
}
declare module BABYLON.GLTF2.Extensions {
class KHRMaterialsPbrSpecularGlossiness extends GLTFLoaderExtension {
readonly name: string;
protected loadMaterial(loader: GLTFLoader, material: IGLTFMaterial, assign: (babylonMaterial: Material, isNew: boolean) => void): boolean;
private _loadSpecularGlossinessProperties(loader, material, properties);
}
} | private _loadAnimations();
private _loadAnimationChannel(animation, animationIndex, channelIndex);
private _loadBufferAsync(index, onSuccess);
private _buildInt8ArrayBuffer(buffer, byteOffset, byteLength, byteStride, bytePerComponent); |
monitor-classic.component.ts | import { Component, Input, Injector, ViewChild } from '@angular/core';
import { ComponentNames, LogCategories } from '../../shared/models/constants';
import { FeatureComponent } from '../../shared/components/feature-component';
import { FunctionMonitorInfo, MonitorConfigureInfo } from '../../shared/models/function-monitor';
import { Observable } from 'rxjs/Observable';
import { TranslateService } from '@ngx-translate/core';
import { PortalResources } from '../../shared/models/portal-resources';
import { FunctionAppService } from '../../shared/services/function-app.service';
import { FunctionMonitorService } from '../../shared/services/function-monitor.service';
import { TableFunctionMonitorComponent } from '../../table-function-monitor/table-function-monitor.component';
import * as moment from 'moment-mini-ts';
import { PortalService } from '../../shared/services/portal.service';
import { LogService } from '../../shared/services/log.service';
import { BroadcastEvent } from '../../shared/models/broadcast-event';
import { errorIds } from '../../shared/models/error-ids';
@Component({
selector: ComponentNames.monitorClassic,
templateUrl: './monitor-classic.component.html',
styleUrls: ['./../function-monitor.component.scss', './monitor-classic.component.scss']
})
export class | extends FeatureComponent<FunctionMonitorInfo> {
@ViewChild(TableFunctionMonitorComponent) tableFunctionMonitorComponent: TableFunctionMonitorComponent;
@Input() set functionMonitorInfoInput(functionMonitorInfo: FunctionMonitorInfo) {
this.successAggregate = this.errorsAggregate = this._translateService.instant(PortalResources.functionMonitor_loading);
this.monitorConfigureInfo = null;
this.setInput(functionMonitorInfo);
}
public successAggregateHeading: string;
public errorsAggregateHeading: string;
public successAggregate: string;
public errorsAggregate: string;
public functionId: string;
public functionMonitorInfo: FunctionMonitorInfo;
public monitorConfigureInfo: MonitorConfigureInfo;
constructor(
private _translateService: TranslateService,
private _functionAppService: FunctionAppService,
private _functionMonitorService: FunctionMonitorService,
private _portalService: PortalService,
private _logService: LogService,
injector: Injector) {
super(ComponentNames.monitorApplicationInsights, injector, 'dashboard');
this.featureName = ComponentNames.functionMonitor;
this._setHeaders();
}
protected setup(functionMonitorInfoInputEvent: Observable<FunctionMonitorInfo>) {
return functionMonitorInfoInputEvent
.switchMap(functionMonitorInfo => {
this.functionMonitorInfo = functionMonitorInfo;
return this._functionAppService
.getFunctionHostStatus(functionMonitorInfo.functionAppContext)
.flatMap(functionHost => {
if (functionHost.isSuccessful) {
return this._functionMonitorService.getDataForSelectedFunction(
functionMonitorInfo.functionAppContext,
functionMonitorInfo.functionInfo,
functionHost.result.id);
} else {
this.monitorConfigureInfo = {
functionMonitorInfo: this.functionMonitorInfo,
errorEvent: {
errorId: functionHost.error.errorId,
message: this._translateService.instant(PortalResources.monitorHostFetchFailed),
resourceId: functionMonitorInfo.functionAppContext.site.id
}
};
return Observable.of(null);
}
});
})
.do(data => {
this.functionId = !!data ? data.functionId : '';
this.successAggregate = !!data ? data.successCount.toString() : this._translateService.instant(PortalResources.appMonitoring_noData);
this.errorsAggregate = !!data ? data.failedCount.toString() : this._translateService.instant(PortalResources.appMonitoring_noData);
});
}
get shouldRenderMonitorConfigure() {
return this.monitorConfigureInfo !== null && this.monitorConfigureInfo.errorEvent !== null;
}
get shouldRenderAppInsightsUpsell() {
        return this.functionMonitorInfo !== null &&
            this.functionMonitorInfo.appInsightsFeatureEnabled;
}
public refreshMonitorClassicData() {
this.setInput(this.functionMonitorInfo);
this.tableFunctionMonitorComponent.refresh();
}
public configure() {
const appInsightBladeInput = {
detailBlade: 'AppServicesEnablementBlade',
detailBladeInputs: {
resourceUri: this.functionMonitorInfo.functionAppContext.site.id,
linkedComponent: null
},
extension: 'AppInsightsExtension'
};
this._portalService
.openBlade(appInsightBladeInput, ComponentNames.functionMonitor)
.subscribe(result => {
this._broadcastService.broadcastEvent<FunctionMonitorInfo>(BroadcastEvent.RefreshMonitoringView, this.functionMonitorInfo);
}, err => {
this._logService.error(LogCategories.applicationInsightsConfigure, errorIds.applicationInsightsConfigure, err);
});
}
private _setHeaders(): void {
const firstOfMonth = moment().startOf('month');
this.successAggregateHeading = `${this._translateService.instant(PortalResources.functionMonitor_successAggregate)} ${firstOfMonth.format('MMM Do')}`;
this.errorsAggregateHeading = `${this._translateService.instant(PortalResources.functionMonitor_errorsAggregate)} ${firstOfMonth.format('MMM Do')}`;
}
}
| MonitorClassicComponent |
OpenInBrowserRounded.js | "use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", { | value: true
});
exports["default"] = void 0;
var React = _interopRequireWildcard(require("react"));
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _default = (0, _createSvgIcon["default"])( /*#__PURE__*/React.createElement("path", {
d: "M19 4H5c-1.11 0-2 .9-2 2v12c0 1.1.9 2 2 2h3c.55 0 1-.45 1-1s-.45-1-1-1H5V8h14v10h-3c-.55 0-1 .45-1 1s.45 1 1 1h3c1.1 0 2-.9 2-2V6c0-1.1-.89-2-2-2zm-7.35 6.35l-2.79 2.79c-.32.32-.1.86.35.86H11v5c0 .55.45 1 1 1s1-.45 1-1v-5h1.79c.45 0 .67-.54.35-.85l-2.79-2.79c-.19-.2-.51-.2-.7-.01z"
}), 'OpenInBrowserRounded');
exports["default"] = _default; | |
bench.rs | #[macro_use]
extern crate criterion;
#[macro_use]
extern crate lazy_static;
use criterion::Criterion;
use std::time::Duration;
fn c() -> Criterion {
Criterion::default()
.sample_size(10) // must be >= 10 for Criterion v0.3
.warm_up_time(Duration::from_secs(1))
.with_plots()
}
fn git_hash() -> String {
use std::process::Command;
let output = Command::new("git")
.args(&["rev-parse", "--short", "HEAD"])
.output()
.unwrap();
String::from(String::from_utf8(output.stdout).unwrap().trim())
}
mod trie {
use criterion::{BatchSize, Criterion};
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::str;
use trie_rs::{Trie, TrieBuilder};
lazy_static! {
// Construct Japanese dictionary using EDICT (http://www.edrdg.org/jmdict/edict.html).
static ref TRIE_EDICT: Trie<u8> = {
let mut builder = TrieBuilder::new();
let repo_root = env::var("REPO_ROOT").expect("REPO_ROOT environment variable must be set.");
let edict2_path = format!("{}/benches/edict.furigana", repo_root);
println!("Reading dictionary file from: {}", edict2_path);
let mut n_words = 0;
for result in BufReader::new(File::open(edict2_path).unwrap()).lines() {
let l = result.unwrap();
builder.push(l);
n_words += 1;
}
println!("Read {} words", n_words);
builder.build()
// TODO print memory footprint compared to original `edict.furigana` file
};
}
pub fn exact_match(_: &mut Criterion) {
let times = 100;
super::c().bench_function(
&format!(
"[{}] Trie::exact_match() {} times",
super::git_hash(),
times
),
move |b| {
b.iter_batched(
|| &TRIE_EDICT,
|trie| {
// iter_batched() does not properly time `routine` time when `setup` time is far longer than `routine` time.
                        // The tested call finishes too quickly relative to build(), so loop many times.
let result = trie.exact_match("すしをにぎる");
for _ in 0..(times - 1) {
trie.exact_match("すしをにぎる");
}
assert_eq!(result, true);
},
BatchSize::SmallInput,
)
},
);
}
pub fn predictive_search(_: &mut Criterion) { | "[{}] Trie::predictive_search() {} times",
super::git_hash(),
times
),
move |b| {
b.iter_batched(
|| &TRIE_EDICT,
|trie| {
// iter_batched() does not properly time `routine` time when `setup` time is far longer than `routine` time.
                        // The tested call finishes too quickly relative to build(), so loop many times.
let results_in_u8s = trie.predictive_search("すし");
for _ in 0..(times - 1) {
trie.predictive_search("すし");
}
let results_in_str: Vec<&str> = results_in_u8s
.iter()
.map(|u8s| str::from_utf8(u8s).unwrap())
.collect();
assert_eq!(
results_in_str,
vec![
"すし",
"すしだね",
"すしづめ",
"すしのぐ",
"すしめし",
"すしや",
"すしをにぎる"
]
);
},
BatchSize::SmallInput,
)
},
);
}
pub fn common_prefix_search(_: &mut Criterion) {
let times = 100;
super::c().bench_function(
&format!(
"[{}] Trie::common_prefix_search() {} times",
super::git_hash(),
times
),
move |b| {
b.iter_batched(
|| &TRIE_EDICT,
|trie| {
// iter_batched() does not properly time `routine` time when `setup` time is far longer than `routine` time.
                        // The tested call finishes too quickly relative to build(), so loop many times.
let results_in_u8s = trie.common_prefix_search("すしをにぎる");
for _ in 0..(times - 1) {
trie.common_prefix_search("すしをにぎる");
}
let results_in_str: Vec<&str> = results_in_u8s
.iter()
.map(|u8s| str::from_utf8(u8s).unwrap())
.collect();
assert_eq!(results_in_str, vec!["す", "すし", "すしをにぎる"]);
},
BatchSize::SmallInput,
)
},
);
}
}
criterion_group!(
benches,
trie::exact_match,
trie::predictive_search,
trie::common_prefix_search
);
criterion_main!(benches); | let times = 100;
super::c().bench_function(
&format!( |
iterator.go | // Copyright 2014 The Cayley Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bolt
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"github.com/barakmich/glog"
"github.com/boltdb/bolt"
"github.com/google/cayley/graph"
"github.com/google/cayley/graph/iterator"
"github.com/google/cayley/quad"
)
var (
boltType graph.Type
bufferSize = 50
errNotExist = errors.New("quad does not exist")
)
func init() {
boltType = graph.RegisterIterator("bolt")
}
type Iterator struct {
uid uint64
tags graph.Tagger
bucket []byte
checkID []byte
dir quad.Direction
qs *QuadStore
result *Token
buffer [][]byte
offset int
done bool
size int64
err error
}
func | (bucket []byte, d quad.Direction, value graph.Value, qs *QuadStore) *Iterator {
tok := value.(*Token)
if !bytes.Equal(tok.bucket, nodeBucket) {
glog.Error("creating an iterator from a non-node value")
return &Iterator{done: true}
}
it := Iterator{
uid: iterator.NextUID(),
bucket: bucket,
dir: d,
qs: qs,
size: qs.SizeOf(value),
}
it.checkID = make([]byte, len(tok.key))
copy(it.checkID, tok.key)
return &it
}
func Type() graph.Type { return boltType }
func (it *Iterator) UID() uint64 {
return it.uid
}
func (it *Iterator) Reset() {
it.buffer = nil
it.offset = 0
it.done = false
}
func (it *Iterator) Tagger() *graph.Tagger {
return &it.tags
}
func (it *Iterator) TagResults(dst map[string]graph.Value) {
for _, tag := range it.tags.Tags() {
dst[tag] = it.Result()
}
for tag, value := range it.tags.Fixed() {
dst[tag] = value
}
}
func (it *Iterator) Clone() graph.Iterator {
out := NewIterator(it.bucket, it.dir, &Token{nodeBucket, it.checkID}, it.qs)
out.Tagger().CopyFrom(it)
return out
}
func (it *Iterator) Close() error {
it.result = nil
it.buffer = nil
it.done = true
return nil
}
func (it *Iterator) isLiveValue(val []byte) bool {
var entry IndexEntry
json.Unmarshal(val, &entry)
return len(entry.History)%2 != 0
}
func (it *Iterator) Next() bool {
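	// Next refills an in-memory buffer of up to bufferSize matching keys per
	// View transaction; a trailing nil entry marks that the prefix range is
	// exhausted, which Result() then reports as a nil value.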
if it.done {
return false
}
if len(it.buffer) <= it.offset+1 {
it.offset = 0
var last []byte
if it.buffer != nil {
last = it.buffer[len(it.buffer)-1]
}
it.buffer = make([][]byte, 0, bufferSize)
err := it.qs.db.View(func(tx *bolt.Tx) error {
i := 0
b := tx.Bucket(it.bucket)
cur := b.Cursor()
if last == nil {
k, v := cur.Seek(it.checkID)
if bytes.HasPrefix(k, it.checkID) {
if it.isLiveValue(v) {
var out []byte
out = make([]byte, len(k))
copy(out, k)
it.buffer = append(it.buffer, out)
i++
}
} else {
it.buffer = append(it.buffer, nil)
return errNotExist
}
} else {
k, _ := cur.Seek(last)
if !bytes.Equal(k, last) {
return fmt.Errorf("could not pick up after %v", k)
}
}
for i < bufferSize {
k, v := cur.Next()
if k == nil || !bytes.HasPrefix(k, it.checkID) {
it.buffer = append(it.buffer, nil)
break
}
if !it.isLiveValue(v) {
continue
}
var out []byte
out = make([]byte, len(k))
copy(out, k)
it.buffer = append(it.buffer, out)
i++
}
return nil
})
if err != nil {
if err != errNotExist {
glog.Errorf("Error nexting in database: %v", err)
it.err = err
}
it.done = true
return false
}
} else {
it.offset++
}
if it.Result() == nil {
it.done = true
return false
}
return true
}
func (it *Iterator) Err() error {
return it.err
}
func (it *Iterator) Result() graph.Value {
if it.done {
return nil
}
if it.result != nil {
return it.result
}
if it.offset >= len(it.buffer) {
return nil
}
if it.buffer[it.offset] == nil {
return nil
}
return &Token{bucket: it.bucket, key: it.buffer[it.offset]}
}
func (it *Iterator) NextPath() bool {
return false
}
// No subiterators.
func (it *Iterator) SubIterators() []graph.Iterator {
return nil
}
func PositionOf(tok *Token, d quad.Direction, qs *QuadStore) int {
if bytes.Equal(tok.bucket, spoBucket) {
switch d {
case quad.Subject:
return 0
case quad.Predicate:
return hashSize
case quad.Object:
return 2 * hashSize
case quad.Label:
return 3 * hashSize
}
}
if bytes.Equal(tok.bucket, posBucket) {
switch d {
case quad.Subject:
return 2 * hashSize
case quad.Predicate:
return 0
case quad.Object:
return hashSize
case quad.Label:
return 3 * hashSize
}
}
if bytes.Equal(tok.bucket, ospBucket) {
switch d {
case quad.Subject:
return hashSize
case quad.Predicate:
return 2 * hashSize
case quad.Object:
return 0
case quad.Label:
return 3 * hashSize
}
}
if bytes.Equal(tok.bucket, cpsBucket) {
switch d {
case quad.Subject:
return 2 * hashSize
case quad.Predicate:
return hashSize
case quad.Object:
return 3 * hashSize
case quad.Label:
return 0
}
}
panic("unreachable")
}
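// Worked example: in an spo-bucket key the subject hash occupies bytes
// [0, hashSize) and the predicate [hashSize, 2*hashSize), so
// PositionOf(spoToken, quad.Object, qs) returns 2*hashSize, the byte offset
// of the object hash within the key.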
func (it *Iterator) Contains(v graph.Value) bool {
val := v.(*Token)
if bytes.Equal(val.bucket, nodeBucket) {
return false
}
offset := PositionOf(val, it.dir, it.qs)
if len(val.key) != 0 && bytes.HasPrefix(val.key[offset:], it.checkID) {
// You may ask, why don't we check to see if it's a valid (not deleted) quad
// again?
//
// We've already done that -- in order to get the graph.Value token in the
// first place, we had to have done the check already; it came from a Next().
//
// However, if it ever starts coming from somewhere else, it'll be more
// efficient to change the interface of the graph.Value for LevelDB to a
// struct with a flag for isValid, to save another random read.
return true
}
return false
}
func (it *Iterator) Size() (int64, bool) {
return it.size, true
}
func (it *Iterator) Describe() graph.Description {
return graph.Description{
UID: it.UID(),
Name: it.qs.NameOf(&Token{it.bucket, it.checkID}),
Type: it.Type(),
Tags: it.tags.Tags(),
Size: it.size,
Direction: it.dir,
}
}
func (it *Iterator) Type() graph.Type { return boltType }
func (it *Iterator) Sorted() bool { return false }
func (it *Iterator) Optimize() (graph.Iterator, bool) {
return it, false
}
func (it *Iterator) Stats() graph.IteratorStats {
s, _ := it.Size()
return graph.IteratorStats{
ContainsCost: 1,
NextCost: 4,
Size: s,
}
}
var _ graph.Nexter = &Iterator{}
| NewIterator |
operations.rs | #![doc = "generated by AutoRust"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::models;
#[derive(Clone)]
pub struct Client {
endpoint: String,
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
scopes: Vec<String>,
pipeline: azure_core::Pipeline,
}
#[derive(Clone)]
pub struct ClientBuilder {
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
endpoint: Option<String>,
scopes: Option<Vec<String>>,
}
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
Self {
credential,
endpoint: None,
scopes: None,
}
}
pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
self.endpoint = Some(endpoint.into());
self
}
pub fn scopes(mut self, scopes: &[&str]) -> Self {
self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
self
}
pub fn build(self) -> Client {
let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
Client::new(endpoint, self.credential, scopes)
}
}
impl Client {
pub(crate) fn endpoint(&self) -> &str {
self.endpoint.as_str()
}
pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
self.credential.as_ref()
}
pub(crate) fn scopes(&self) -> Vec<&str> {
self.scopes.iter().map(String::as_str).collect()
}
pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> {
let mut context = azure_core::Context::default();
let mut request = request.into();
self.pipeline.send(&mut context, &mut request).await
}
pub fn new(
endpoint: impl Into<String>,
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
scopes: Vec<String>,
) -> Self {
let endpoint = endpoint.into();
let pipeline = azure_core::Pipeline::new(
option_env!("CARGO_PKG_NAME"),
option_env!("CARGO_PKG_VERSION"),
azure_core::ClientOptions::default(),
Vec::new(),
Vec::new(),
);
Self {
endpoint,
credential,
scopes,
pipeline,
}
}
pub fn health_alerts(&self) -> health_alerts::Client {
health_alerts::Client(self.clone())
}
pub fn operations(&self) -> operations::Client {
operations::Client(self.clone())
}
}
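// Minimal usage sketch (assumes a `credential` value built elsewhere, e.g. via
// an azure_identity credential type; it is not part of this file):
//   let client = ClientBuilder::new(credential).build();
//   let ops = client.operations().list().into_future().await?;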
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
Operations_List(#[from] operations::list::Error),
#[error(transparent)]
HealthAlerts_ListBySubscription(#[from] health_alerts::list_by_subscription::Error),
#[error(transparent)]
HealthAlerts_ListByResourceGroup(#[from] health_alerts::list_by_resource_group::Error),
#[error(transparent)]
HealthAlerts_Get(#[from] health_alerts::get::Error),
#[error(transparent)]
HealthAlerts_CreateOrUpdate(#[from] health_alerts::create_or_update::Error),
#[error(transparent)]
HealthAlerts_Update(#[from] health_alerts::update::Error),
#[error(transparent)]
HealthAlerts_Delete(#[from] health_alerts::delete::Error),
}
pub mod operations {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self) -> list::Builder {
list::Builder { client: self.0.clone() }
}
}
pub mod list {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum | {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationsList, Error>> {
Box::pin(async move {
let url_str = &format!("{}/providers/Microsoft.AlertsManagement/operations", self.client.endpoint(),);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2020-08-04-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OperationsList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
pub mod health_alerts {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list_by_subscription(&self, subscription_id: impl Into<String>) -> list_by_subscription::Builder {
list_by_subscription::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
pub fn list_by_resource_group(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_resource_group::Builder {
list_by_resource_group::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
rule_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
rule_name: rule_name.into(),
}
}
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
rule_name: impl Into<String>,
parameters: impl Into<models::HealthAlertResource>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
rule_name: rule_name.into(),
parameters: parameters.into(),
}
}
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
rule_name: impl Into<String>,
parameters: impl Into<models::HealthAlertResourcePatch>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
rule_name: rule_name.into(),
parameters: parameters.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
rule_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
rule_name: rule_name.into(),
}
}
}
pub mod list_by_subscription {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::HealthAlertsErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::HealthAlertResourceCollection, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.AlertsManagement/resourceHealthAlertRules",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2020-08-04-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertResourceCollection =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertsErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_by_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::HealthAlertsErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::HealthAlertResourceCollection, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.AlertsManagement/resourceHealthAlertRules",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2020-08-04-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertResourceCollection =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertsErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::HealthAlertsErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) rule_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::HealthAlertResource, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.AlertsManagement/resourceHealthAlertRules/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.rule_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2020-08-04-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertResource =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertsErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::HealthAlertsErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) rule_name: String,
pub(crate) parameters: models::HealthAlertResource,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::HealthAlertResource, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.AlertsManagement/resourceHealthAlertRules/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.rule_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2020-08-04-preview");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertResource =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertsErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::HealthAlertsErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) rule_name: String,
pub(crate) parameters: models::HealthAlertResourcePatch,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::HealthAlertResource, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.AlertsManagement/resourceHealthAlertRules/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.rule_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2020-08-04-preview");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertResource =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertsErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::HealthAlertsErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) rule_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.AlertsManagement/resourceHealthAlertRules/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.rule_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2020-08-04-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::HealthAlertsErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
__init__.py

import os
|
HashGenerator.py

import hashlib
import sys
import getpass
import argparse
import rx7 as rx
from LIB.Functions import pause, cls
from LIB.Hash import sa
def print_hashes(word, file=None, Print=True):
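    """Compute the hex digest of *word* for every algorithm in ``sa``, print each one when *Print* is set, and write them to *file* when given."""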
    word = bytes(word, encoding='utf-8')
LIST = []
for name,func in sa.items():
try:
result = func(word).hexdigest()
LIST.append(result)
if Print:
print(f' {name.upper()}:{" "*(10-len(name))}{result}')
except TypeError:
pass
    if file:
        rx.write(str(file), '\n'.join(LIST))
BANNER = '''
88 88 db .dP"Y8 88 88
88 88 dPYb `Ybo." 88 88
888888 dP__Yb o.`Y8b 888888
88 88 dP""""Yb 8bodP' 88 88
dP""b8 888888 88b 88 888888 88""Yb db 888888 dP"Yb 88""Yb
dP `" 88__ 88Yb88 88__ 88__dP dPYb 88 dP Yb 88__dP
Yb "88 88"" 88 Y88 88"" 88"Yb dP__Yb 88 Yb dP 88"Yb
YboodP 888888 88 Y8 888888 88 Yb dP""""Yb 88 YbodP 88 Yb
'''
if __name__ == "__main__":
if len(sys.argv) > 1:
parser = argparse.ArgumentParser(
'Hash Generator',
description='Generate Hash of a word in all hash types',
allow_abbrev=False,
)
parser.add_argument('HASH',
help="Word which you want to get its hashes"
)
parser.add_argument('-f','--output-file',
metavar='FILE',
help='The file to save hashes of HASH to it'
)
parser.add_argument('-q','--quiet', action='store_false',
help='Run app in quiet mode (Do not print the hashes)'
)
args = parser.parse_args()
hashed_file_name = args.output_file
word = args.HASH
quiet = args.quiet
cls()
rx.style.print(BANNER, 'gold_3b')
        print(f'''Here is list of hashes for "{rx.fg('dodger_blue_1')}{word}{rx.attr(0)}":''')
print_hashes(word, hashed_file_name, quiet)
else:
while True:
cls()
rx.style.print(BANNER, 'gold_3b')
print('Use: "HASH||FILE" to save output to FILE \n')
            inp = input('Enter String to Create Hashes: ')
if inp=='exit':
break
elif inp:
if '||' in inp:
inp = inp.split('||')
print(f'''Here is list of hashes for "{rx.fg('dodger_blue_1')}{inp[0]}{rx.attr(0)}":''')
print_hashes(inp[0],inp[1])
else:
print(f'''Here is list of hashes for "{rx.fg('dodger_blue_1')}{inp}{rx.attr(0)}":''')
print_hashes(inp)
pause()
parse_arf_result.go

package utils
import (
"bytes"
"fmt"
"html/template"
"io"
"net/url"
"regexp"
"sort"
"strings"
"text/template/parse"
"github.com/antchfx/xmlquery"
"github.com/pkg/errors"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime"
compv1alpha1 "github.com/openshift/compliance-operator/pkg/apis/compliance/v1alpha1"
)
const (
machineConfigFixType = "urn:xccdf:fix:script:ignition"
kubernetesFixType = "urn:xccdf:fix:script:kubernetes"
ocilCheckType = "http://scap.nist.gov/schema/ocil/2"
rulePrefix = "xccdf_org.ssgproject.content_rule_"
valuePrefix = "xccdf_org.ssgproject.content_value_"
ruleValueSuffix = ":var:1"
questionnaireSuffix = "_ocil:questionnaire:1"
questionSuffix = "_question:question:1"
ovalCheckPrefix = "oval:ssg-"
objValuePrefix = "oval:ssg-variable"
ovalCheckType = "http://oval.mitre.org/XMLSchema/oval-definitions-5"
dependencyAnnotationKey = "complianceascode.io/depends-on"
kubeDependencyAnnotationKey = "complianceascode.io/depends-on-obj"
remediationTypeAnnotationKey = "complianceascode.io/remediation-type"
enforcementTypeAnnotationKey = "complianceascode.io/enforcement-type"
optionalAnnotationKey = "complianceascode.io/optional"
valueInputRequiredAnnotationKey = "complianceascode.io/value-input-required"
	// index to trim the leading '{{' and trailing '}}'
trimStartIndex = 2
trimEndIndex = 2
)
// Constants useful for parsing warnings
const (
endPointTag = "ocp-api-endpoint"
dumpLocationClass = "ocp-dump-location"
filterTypeClass = "ocp-api-filter"
filteredEndpointClass = "filtered"
)
type ParseResult struct {
Id string
CheckResult *compv1alpha1.ComplianceCheckResult
Remediations []*compv1alpha1.ComplianceRemediation
}
type ResourcePath struct {
ObjPath string
DumpPath string
Filter string
}
// getPathsFromRuleWarning finds the API endpoint from in. The expected structure is:
//
// <warning category="general" lang="en-US"><code class="ocp-api-endpoint">/apis/config.openshift.io/v1/oauths/cluster
// </code></warning>
func GetPathFromWarningXML(in *xmlquery.Node) []ResourcePath {
apiPaths := []ResourcePath{}
codeNodes := in.SelectElements("//html:code")
for _, codeNode := range codeNodes {
if strings.Contains(codeNode.SelectAttr("class"), endPointTag) {
path := codeNode.InnerText()
if len(path) == 0 {
continue
}
dumpPath := path
var filter string
pathID := codeNode.SelectAttr("id")
if pathID != "" {
filterNode := in.SelectElement(fmt.Sprintf(`//*[@id="filter-%s"]`, pathID))
dumpNode := in.SelectElement(fmt.Sprintf(`//*[@id="dump-%s"]`, pathID))
if filterNode != nil && dumpNode != nil {
filter = filterNode.InnerText()
dumpPath = dumpNode.InnerText()
}
}
apiPaths = append(apiPaths, ResourcePath{ObjPath: path, DumpPath: dumpPath, Filter: filter})
}
}
return apiPaths
}
func warningHasApiObjects(in *xmlquery.Node) bool {
codeNodes := in.SelectElements("//html:code")
for _, codeNode := range codeNodes {
if codeNode.SelectAttr("class") == endPointTag {
return true
}
}
return false
}
type NodeByIdHashTable map[string]*xmlquery.Node
type nodeByIdHashVariablesTable map[string][]string
func newByIdHashTable(nodes []*xmlquery.Node) NodeByIdHashTable {
table := make(NodeByIdHashTable)
for i := range nodes {
ruleDefinition := nodes[i]
ruleId := ruleDefinition.SelectAttr("id")
table[ruleId] = ruleDefinition
}
return table
}
func newHashTableFromRootAndQuery(dsDom *xmlquery.Node, root, query string) NodeByIdHashTable {
benchmarkDom := dsDom.SelectElement(root)
rules := benchmarkDom.SelectElements(query)
return newByIdHashTable(rules)
}
func newRuleHashTable(dsDom *xmlquery.Node) NodeByIdHashTable {
return newHashTableFromRootAndQuery(dsDom, "//ds:component/xccdf-1.2:Benchmark", "//xccdf-1.2:Rule")
}
func NewOcilQuestionTable(dsDom *xmlquery.Node) NodeByIdHashTable {
return newHashTableFromRootAndQuery(dsDom, "//ds:component/ocil:ocil", "//ocil:boolean_question")
}
func newStateHashTable(dsDom *xmlquery.Node) NodeByIdHashTable {
return newHashTableFromRootAndQuery(dsDom, "//ds:component/oval-def:oval_definitions/oval-def:states", "*")
}
func newObjHashTable(dsDom *xmlquery.Node) NodeByIdHashTable {
return newHashTableFromRootAndQuery(dsDom, "//ds:component/oval-def:oval_definitions/oval-def:objects", "*")
}
func NewDefHashTable(dsDom *xmlquery.Node) NodeByIdHashTable {
return newHashTableFromRootAndQuery(dsDom, "//ds:component/oval-def:oval_definitions/oval-def:definitions", "*")
}
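// newValueListTable maps each OVAL test ID to the XCCDF variable names referenced
// by the states and objects that the test points at.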
func newValueListTable(dsDom *xmlquery.Node, statesTable, objectsTable NodeByIdHashTable) nodeByIdHashVariablesTable {
root := "//ds:component/oval-def:oval_definitions/oval-def:tests"
testsDom := dsDom.SelectElement(root).SelectElements("*")
table := make(nodeByIdHashVariablesTable)
for i := range testsDom {
testDefinition := testsDom[i]
testId := testDefinition.SelectAttr("id")
var valueListState []string
var valueListObject []string
var valueList []string
states := testDefinition.SelectElements("//ind:state")
if len(states) > 0 {
for i := range states {
if states[i] == nil {
continue
}
state, ok := statesTable[states[i].SelectAttr("state_ref")]
if !ok {
continue
}
valueListStateTemp, hasList := findAllVariablesFromState(state)
if hasList {
valueListState = append(valueListState, valueListStateTemp...)
}
}
}
objects := testDefinition.SelectElements("//ind:object")
if len(objects) > 0 {
for i := range objects {
if objects[i] == nil {
continue
}
object, ok := objectsTable[objects[i].SelectAttr("object_ref")]
if !ok {
continue
}
valueListObjectTemp, hasList := findAllVariablesFromObject(object)
if hasList {
                    valueListObject = append(valueListObject, valueListObjectTemp...)
}
}
}
if len(valueListState) > 0 {
valueList = append(valueList, valueListState...)
}
if len(valueListObject) > 0 {
valueList = append(valueList, valueListObject...)
}
if len(valueList) > 0 {
table[testId] = valueList
}
}
return table
}
func findAllVariablesFromState(node *xmlquery.Node) ([]string, bool) {
var valueList []string
nodes := node.SelectElements("*")
for i := range nodes {
if nodes[i].SelectAttr("var_ref") != "" {
dnsFriendlyFixId := strings.ReplaceAll(nodes[i].SelectAttr("var_ref"), "_", "-")
valueFormatted := strings.TrimPrefix(dnsFriendlyFixId, ovalCheckPrefix)
valueFormatted = strings.TrimSuffix(valueFormatted, ruleValueSuffix)
valueList = append(valueList, valueFormatted)
}
}
    return valueList, len(valueList) > 0
}
func findAllVariablesFromObject(node *xmlquery.Node) ([]string, bool) {
var valueList []string
nodes := node.SelectElements("//ind:var_ref")
for i := range nodes {
if nodes[i].InnerText() != "" {
dnsFriendlyFixId := strings.ReplaceAll(nodes[i].InnerText(), "_", "-")
valueFormatted := strings.TrimPrefix(dnsFriendlyFixId, ovalCheckPrefix)
valueFormatted = strings.TrimSuffix(valueFormatted, ruleValueSuffix)
valueList = append(valueList, valueFormatted)
}
}
    return valueList, len(valueList) > 0
}
func GetRuleOvalTest(rule *xmlquery.Node, defTable NodeByIdHashTable) NodeByIdHashTable {
var ovalRefEl *xmlquery.Node
testList := make(map[string]*xmlquery.Node)
for _, check := range rule.SelectElements("//xccdf-1.2:check") {
if check.SelectAttr("system") == ovalCheckType {
ovalRefEl = check.SelectElement("xccdf-1.2:check-content-ref")
break
}
}
if ovalRefEl == nil {
return testList
}
ovalCheckName := strings.TrimSpace(ovalRefEl.SelectAttr("name"))
ovalTest, ok := defTable[ovalCheckName]
if !ok {
return testList
}
ovalTests := ovalTest.SelectElements("//oval-def:criterion")
for i := range ovalTests {
if ovalTests[i].SelectAttr("test_ref") == "" {
continue
}
testList[ovalTests[i].SelectAttr("test_ref")] = ovalTests[i]
}
return testList
}
func removeDuplicate(input []string) []string {
keys := make(map[string]bool)
trimmedList := []string{}
for _, e := range input {
if _, value := keys[e]; !value {
keys[e] = true
trimmedList = append(trimmedList, e)
}
}
return trimmedList
}
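// getValueListUsedForRule resolves the XCCDF variables consumed by a rule's OVAL
// tests and keeps only those that can actually be set through the known variable list.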
func getValueListUsedForRule(rule *xmlquery.Node, ovalTable nodeByIdHashVariablesTable, defTable NodeByIdHashTable, variableList map[string]string) []string {
var valueList []string
ruleTests := GetRuleOvalTest(rule, defTable)
if len(ruleTests) == 0 {
return valueList
}
for test := range ruleTests {
valueListTemp, ok := ovalTable[test]
if !ok {
continue
}
valueList = append(valueList, valueListTemp...)
}
if len(valueList) == 0 {
return valueList
}
valueList = removeDuplicate(valueList)
    // remove duplicates because one rule can have different tests that use the same variable,
    // and we want to associate the rule with the value itself, not with a specific check
valueList = sort.StringSlice(valueList)
var settableValueList []string
for i := range valueList {
if _, ok := variableList[strings.ReplaceAll(valueList[i], "-", "_")]; ok {
settableValueList = append(settableValueList, valueList[i])
}
}
return settableValueList
}
func getRuleOcilQuestionID(rule *xmlquery.Node) string {
var ocilRefEl *xmlquery.Node
for _, check := range rule.SelectElements("//xccdf-1.2:check") {
if check.SelectAttr("system") == ocilCheckType {
ocilRefEl = check.SelectElement("xccdf-1.2:check-content-ref")
break
}
}
if ocilRefEl == nil {
return ""
}
    questionnaireName := ocilRefEl.SelectAttr("name")
    if !strings.HasSuffix(questionnaireName, questionnaireSuffix) {
        return ""
    }
    return strings.TrimSuffix(questionnaireName, questionnaireSuffix) + questionSuffix
}
func GetInstructionsForRule(rule *xmlquery.Node, ocilTable NodeByIdHashTable) string {
// convert rule's questionnaire ID to question ID
ruleQuestionId := getRuleOcilQuestionID(rule)
// look up the node
questionNode, ok := ocilTable[ruleQuestionId]
if !ok {
return ""
}
    // pull the question text out of the question node
textNode := questionNode.SelectElement("ocil:question_text")
if textNode == nil {
return ""
}
// if found, strip the last line
textSlice := strings.Split(strings.TrimSpace(textNode.InnerText()), "\n")
if len(textSlice) > 1 {
textSlice = textSlice[:len(textSlice)-1]
}
return strings.TrimSpace(strings.Join(textSlice, "\n"))
}
// ParseContent parses the DataStream and returns the XML document
func ParseContent(dsReader io.Reader) (*xmlquery.Node, error) {
dsDom, err := xmlquery.Parse(dsReader)
if err != nil {
return nil, err
}
return dsDom, nil
}
func ParseResultsFromContentAndXccdf(scheme *runtime.Scheme, scanName string, namespace string,
dsDom *xmlquery.Node, resultsReader io.Reader) ([]*ParseResult, error) {
resultsDom, err := xmlquery.Parse(resultsReader)
if err != nil {
return nil, err
}
allValues := xmlquery.Find(resultsDom, "//set-value")
valuesList := make(map[string]string)
for _, codeNode := range allValues {
valuesList[strings.TrimPrefix(codeNode.SelectAttr("idref"), valuePrefix)] = codeNode.InnerText()
}
ruleTable := newRuleHashTable(dsDom)
questionsTable := NewOcilQuestionTable(dsDom)
statesTable := newStateHashTable(dsDom)
objsTable := newObjHashTable(dsDom)
defTable := NewDefHashTable(dsDom)
ovalTestVarTable := newValueListTable(dsDom, statesTable, objsTable)
results := resultsDom.SelectElements("//rule-result")
parsedResults := make([]*ParseResult, 0)
var remErrs string
for i := range results {
result := results[i]
ruleIDRef := result.SelectAttr("idref")
if ruleIDRef == "" {
continue
}
resultRule := ruleTable[ruleIDRef]
if resultRule == nil {
continue
}
instructions := GetInstructionsForRule(resultRule, questionsTable)
ruleValues := getValueListUsedForRule(resultRule, ovalTestVarTable, defTable, valuesList)
resCheck, err := newComplianceCheckResult(result, resultRule, ruleIDRef, instructions, scanName, namespace, ruleValues)
if err != nil {
continue
}
if resCheck != nil {
pr := &ParseResult{
Id: ruleIDRef,
CheckResult: resCheck,
}
pr.Remediations, err = newComplianceRemediation(scheme, scanName, namespace, resultRule, valuesList)
if err != nil {
remErrs = "CheckID." + ruleIDRef + err.Error() + "\n"
}
parsedResults = append(parsedResults, pr)
}
}
if remErrs != "" {
return parsedResults, errors.New(remErrs)
}
return parsedResults, nil
}
// Returns a new complianceCheckResult if the check data is usable
func newComplianceCheckResult(result *xmlquery.Node, rule *xmlquery.Node, ruleIdRef, instructions, scanName, namespace string, ruleValues []string) (*compv1alpha1.ComplianceCheckResult, error) {
name := nameFromId(scanName, ruleIdRef)
mappedStatus, err := mapComplianceCheckResultStatus(result)
if err != nil {
return nil, err
}
if mappedStatus == compv1alpha1.CheckResultNoResult {
return nil, nil
}
mappedSeverity, err := mapComplianceCheckResultSeverity(rule)
if err != nil {
return nil, err
}
return &compv1alpha1.ComplianceCheckResult{
ObjectMeta: v1.ObjectMeta{
Name: name,
Namespace: namespace,
},
ID: ruleIdRef,
Status: mappedStatus,
Severity: mappedSeverity,
Instructions: instructions,
Description: complianceCheckResultDescription(rule),
Warnings: GetWarningsForRule(rule),
ValuesUsed: ruleValues,
}, nil
}
func getSafeText(nptr *xmlquery.Node, elem string) string {
elemNode := nptr.SelectElement(elem)
if elemNode == nil {
return ""
}
return elemNode.InnerText()
}
func complianceCheckResultDescription(rule *xmlquery.Node) string {
title := getSafeText(rule, "xccdf-1.2:title")
if title != "" {
title = title + "\n"
}
return title + getSafeText(rule, "xccdf-1.2:rationale")
}
func GetWarningsForRule(rule *xmlquery.Node) []string {
warningObjs := rule.SelectElements("//xccdf-1.2:warning")
warnings := []string{}
for _, warn := range warningObjs {
if warn == nil {
continue
}
// We skip this warning if it's relevant
// to parsing the API paths.
if warningHasApiObjects(warn) {
continue
}
warnings = append(warnings, XmlNodeAsMarkdown(warn))
}
if len(warnings) == 0 {
return nil
}
return warnings
}
func RuleHasApiObjectWarning(rule *xmlquery.Node) bool {
warningObjs := rule.SelectElements("//xccdf-1.2:warning")
for _, warn := range warningObjs {
if warn == nil {
continue
}
if warningHasApiObjects(warn) {
return true
}
}
return false
}
func mapComplianceCheckResultSeverity(result *xmlquery.Node) (compv1alpha1.ComplianceCheckResultSeverity, error) {
severityAttr := result.SelectAttr("severity")
if severityAttr == "" {
return "", errors.New("result node has no 'severity' attribute")
}
// All severities can be found in https://csrc.nist.gov/CSRC/media/Publications/nistir/7275/rev-4/final/documents/nistir-7275r4_updated-march-2012_clean.pdf
// section 6.6.4.2 table 9
switch severityAttr {
case "unknown":
return compv1alpha1.CheckResultSeverityUnknown, nil
case "info":
return compv1alpha1.CheckResultSeverityInfo, nil
case "low":
return compv1alpha1.CheckResultSeverityLow, nil
case "medium":
return compv1alpha1.CheckResultSeverityMedium, nil
case "high":
return compv1alpha1.CheckResultSeverityHigh, nil
}
return compv1alpha1.CheckResultSeverityUnknown, nil
}
func mapComplianceCheckResultStatus(result *xmlquery.Node) (compv1alpha1.ComplianceCheckStatus, error) {
resultEl := result.SelectElement("result")
if resultEl == nil {
return "", errors.New("result node has no 'result' attribute")
}
// All states can be found at https://csrc.nist.gov/CSRC/media/Publications/nistir/7275/rev-4/final/documents/nistir-7275r4_updated-march-2012_clean.pdf
// section 6.6.4.2, table 26
switch resultEl.InnerText() {
// The standard says that "Fixed means the rule failed initially but was then fixed"
case "pass", "fixed":
return compv1alpha1.CheckResultPass, nil
case "fail":
return compv1alpha1.CheckResultFail, nil
// Unknown state is when the rule runs to completion, but then the results can't be interpreted
case "error", "unknown":
return compv1alpha1.CheckResultError, nil
// Notchecked means the rule does not even have a check,
// and the administrators must inspect the rule manually (e.g. disable something in BIOS),
case "notchecked":
return compv1alpha1.CheckResultManual, nil
// informational means that the rule has a check which failed, but the severity is low, depending
// on the environment (e.g. disable USB support completely from the kernel cmdline)
case "informational":
return compv1alpha1.CheckResultInfo, nil
// We map notapplicable to Skipped. Notapplicable means the rule was selected
// but does not apply to the current configuration (e.g. arch-specific),
case "notapplicable":
return compv1alpha1.CheckResultNotApplicable, nil
case "notselected":
// We map notselected to nothing, as the test wasn't included in the benchmark
return compv1alpha1.CheckResultNoResult, nil
}
return compv1alpha1.CheckResultNoResult, fmt.Errorf("couldn't match %s to a known state", resultEl.InnerText())
}
func newComplianceRemediation(scheme *runtime.Scheme, scanName, namespace string, rule *xmlquery.Node, resultValues map[string]string) ([]*compv1alpha1.ComplianceRemediation, error) {
for _, fix := range rule.SelectElements("//xccdf-1.2:fix") {
if isRelevantFix(fix) {
return remediationFromFixElement(scheme, fix, scanName, namespace, resultValues)
}
}
return nil, nil
}
func isRelevantFix(fix *xmlquery.Node) bool {
if fix.SelectAttr("system") == machineConfigFixType {
return true
}
if fix.SelectAttr("system") == kubernetesFixType {
return true
}
return false
}
func nameFromId(scanName, ruleIdRef string) string {
ruleName := strings.TrimPrefix(ruleIdRef, rulePrefix)
dnsFriendlyFixId := strings.ReplaceAll(ruleName, "_", "-")
dnsFriendlyFixId = strings.ToLower(dnsFriendlyFixId)
return fmt.Sprintf("%s-%s", scanName, dnsFriendlyFixId)
}
func remediationFromFixElement(scheme *runtime.Scheme, fix *xmlquery.Node, scanName, namespace string, resultValues map[string]string) ([]*compv1alpha1.ComplianceRemediation, error) {
fixId := fix.SelectAttr("id")
if fixId == "" {
return nil, errors.New("there is no fix-ID attribute")
}
dnsFriendlyFixId := strings.ReplaceAll(fixId, "_", "-")
remName := fmt.Sprintf("%s-%s", scanName, dnsFriendlyFixId)
// TODO(OZZ) fix text
return remediationsFromString(scheme, remName, namespace, fix.InnerText(), resultValues)
}
func remediationsFromString(scheme *runtime.Scheme, name string, namespace string, fixContent string, resultValues map[string]string) ([]*compv1alpha1.ComplianceRemediation, error) {
    // TODO: find and substitute the values
fixWithValue, valuesUsedList, notFoundValueList, parsingError := parseValues(fixContent, resultValues)
if parsingError != nil {
return nil, parsingError
}
objs, err := ReadObjectsFromYAML(strings.NewReader(fixWithValue))
if err != nil {
return nil, err
}
rems := make([]*compv1alpha1.ComplianceRemediation, 0, len(objs))
for idx := range objs {
obj := objs[idx]
annotations := make(map[string]string)
if len(notFoundValueList) > 0 {
annotations = handleNotFoundValue(notFoundValueList, annotations)
}
if len(valuesUsedList) > 0 {
annotations = handleValueUsed(valuesUsedList, annotations)
}
if hasValueRequiredAnnotation(obj) {
if (len(notFoundValueList) == 0) && (len(valuesUsedList) == 0) {
return nil, errors.New("do not have any parsed xccdf variable, shoudn't any required values")
} else {
annotations = handleValueRequiredAnnotation(obj, annotations)
}
}
if hasDependencyAnnotation(obj) {
annotations = handleDependencyAnnotation(obj, annotations)
}
if hasOptionalAnnotation(obj) {
annotations = handleOptionalAnnotation(obj, annotations)
}
remType := compv1alpha1.ConfigurationRemediation
if hasTypeAnnotation(obj) {
remType = handleRemediationTypeAnnotation(obj)
}
if remType == compv1alpha1.EnforcementRemediation &&
hasEnforcementTypeAnnotation(obj) {
annotations = handleEnforcementTypeAnnotation(obj, annotations)
}
var remName string
if idx == 0 {
// Use result's name
remName = name
} else {
remName = fmt.Sprintf("%s-%d", name, idx)
}
rems = append(rems, &compv1alpha1.ComplianceRemediation{
ObjectMeta: v1.ObjectMeta{
Name: remName,
Namespace: namespace,
Annotations: annotations,
},
Spec: compv1alpha1.ComplianceRemediationSpec{
ComplianceRemediationSpecMeta: compv1alpha1.ComplianceRemediationSpecMeta{
Apply: false,
Type: remType,
},
Current: compv1alpha1.ComplianceRemediationPayload{
Object: obj,
},
},
Status: compv1alpha1.ComplianceRemediationStatus{
ApplicationState: compv1alpha1.RemediationPending,
},
})
}
return rems, nil
}
func toArrayByComma(format string) []string {
return strings.Split(format, ",")
}
// parseValues takes the original remediation content and a list of all values found in the
// ConfigMap, substitutes the values into the remediation content, and returns the processed
// remediation content, the list of values used, the list of unset values, and an error if any.
func parseValues(remContent string, resultValues map[string]string) (string, []string, []string, error) {
var valuesUsedList []string
var valuesMissingList []string
    // find everything that starts with {{ and ends with }}
re := regexp.MustCompile(`\{\{[^}]*\}\}`)
contentList := re.FindAllString(remContent, -1)
fixedText := remContent
if len(contentList) == 0 {
return remContent, valuesUsedList, valuesMissingList, nil
}
    // There are two types of content we need to process: one is a url-encoded machine config
    // source, e.g. {{ -a%20always0-F%20di }}; the other is not url-encoded, e.g. {{.var_some_value}}.
    // We take care of the url-encoded content first, and then feed the processed content to the
    // template engine again.
for _, content := range contentList {
        // take the leading '{{ ' and trailing ' }}' out of the content
trimmedContent := content[trimStartIndex:][:len(content)-trimStartIndex-trimEndIndex]
        // strip leading and trailing spaces
trimmedContent = strings.TrimSpace(trimmedContent)
        // trimmedContent should now contain only url-encoded content; check for illegal characters such as spaces
isIllegalURL := regexp.MustCompile(".*[\\ \"\\<\\>\\{\\}|\\\\^~\\[\\]].*")
if isIllegalURL.MatchString(trimmedContent) {
continue
}
var decodeErr error
preProcessedContent, decodeErr := url.QueryUnescape(trimmedContent)
if decodeErr != nil {
return remContent, valuesUsedList, valuesMissingList, errors.Wrap(decodeErr, "error while decode remediation context: ")
}
        // we don't need special processing if preProcessedContent is the same as the original content
if preProcessedContent == trimmedContent {
continue
}
fixedContent, usedVals, missingVals, err := processContent(preProcessedContent, resultValues)
if err != nil {
return remContent, valuesUsedList, valuesMissingList, errors.Wrap(err, "error while processing remediation context: ")
}
valuesUsedList = append(valuesUsedList, usedVals...)
valuesMissingList = append(valuesMissingList, missingVals...)
fixedText = strings.ReplaceAll(fixedText, content, url.PathEscape(fixedContent))
}
    // Now that the content is free of url-encoded strings, we can feed fixedText to the template
    // engine to process the general-case content, e.g. {{.<variable name>}}
fixedText, usedVals, missingVals, err := processContent(fixedText, resultValues)
if err != nil {
return remContent, valuesUsedList, valuesMissingList, errors.Wrap(err, "error while processing remediation context: ")
}
valuesUsedList = append(valuesUsedList, usedVals...)
valuesMissingList = append(valuesMissingList, missingVals...)
return fixedText, valuesUsedList, valuesMissingList, nil
}
func processContent(preProcessedContent string, resultValues map[string]string) (string, []string, []string, error) {
var valuesUsedList []string
var valuesMissingList []string
var valuesParsedList []string
t, err := template.New("").Option("missingkey=zero").Funcs(template.FuncMap{"toArrayByComma": toArrayByComma}).
Parse(preProcessedContent)
if err != nil {
return preProcessedContent, valuesUsedList, valuesMissingList, errors.Wrap(err, "wrongly formatted remediation context: ") //Error creating template // Wrongly formatted remediation context
}
buf := &bytes.Buffer{}
err = t.Execute(buf, resultValues)
if err != nil {
return preProcessedContent, valuesUsedList, valuesMissingList, errors.Wrap(err, "error while parsing variables into values: ")
}
fixedContent := buf.String()
    // Iterate through the template tree to get all parsed variables
valuesParsedList = getParsedValueName(t)
    for _, parsedVariable := range valuesParsedList {
        dnsFriendlyParsedVariable := strings.ReplaceAll(parsedVariable, "_", "-")
        if _, found := resultValues[parsedVariable]; found {
            valuesUsedList = append(valuesUsedList, dnsFriendlyParsedVariable)
        } else {
            valuesMissingList = append(valuesMissingList, dnsFriendlyParsedVariable)
        }
    }
return fixedContent, valuesUsedList, valuesMissingList, nil
}
func getParsedValueName(t *template.Template) []string {
valueToBeTrimmed := listNodeFields(t.Tree.Root, nil)
return trimToValue(valueToBeTrimmed)
}
// trimToValue reduces a list of {{value | urlquery}} actions to the bare value names
func trimToValue(listToBeTrimmed []string) []string {
trimmedValuesList := listToBeTrimmed[:0]
for _, oriVal := range listToBeTrimmed {
re := regexp.MustCompile("([a-zA-Z-0-9]+(_[a-zA-Z-0-9]+)+)")
        trimmedValueMatch := re.FindStringSubmatch(oriVal)
        if len(trimmedValueMatch) > 1 {
            trimmedValuesList = append(trimmedValuesList, trimmedValueMatch[0])
}
}
return trimmedValuesList
}
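// listNodeFields walks the template parse tree and collects the raw text of every
// action node ({{...}}), which is where the variable references live.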
func listNodeFields(node parse.Node, res []string) []string {
if node.Type() == parse.NodeAction {
res = append(res, node.String())
}
if ln, ok := node.(*parse.ListNode); ok {
for _, n := range ln.Nodes {
res = listNodeFields(n, res)
}
}
return res
}
func hasDependencyAnnotation(u *unstructured.Unstructured) bool {
return hasAnnotation(u, dependencyAnnotationKey) || hasAnnotation(u, kubeDependencyAnnotationKey)
}
func hasValueRequiredAnnotation(u *unstructured.Unstructured) bool {
return hasAnnotation(u, valueInputRequiredAnnotationKey)
}
func hasOptionalAnnotation(u *unstructured.Unstructured) bool {
return hasAnnotation(u, optionalAnnotationKey)
}
func hasTypeAnnotation(u *unstructured.Unstructured) bool {
return hasAnnotation(u, remediationTypeAnnotationKey)
}
func hasEnforcementTypeAnnotation(u *unstructured.Unstructured) bool {
return hasAnnotation(u, enforcementTypeAnnotationKey)
}
func hasAnnotation(u *unstructured.Unstructured, annotation string) bool {
annotations := u.GetAnnotations()
if annotations == nil {
return false
}
_, hasAnn := annotations[annotation]
return hasAnn
}
func handleDependencyAnnotation(u *unstructured.Unstructured, annotations map[string]string) map[string]string {
// We already assume this has some annotation
inAnns := u.GetAnnotations()
// parse
if dependencies, hasDepKey := inAnns[dependencyAnnotationKey]; hasDepKey {
// set dependencies
annotations[compv1alpha1.RemediationDependencyAnnotation] = dependencies
// reset metadata of output object
delete(inAnns, dependencyAnnotationKey)
}
if objDeps, hasKubeDepKey := inAnns[kubeDependencyAnnotationKey]; hasKubeDepKey {
// set dependencies
annotations[compv1alpha1.RemediationObjectDependencyAnnotation] = objDeps
// reset metadata of output object
delete(inAnns, kubeDependencyAnnotationKey)
}
u.SetAnnotations(inAnns)
return annotations
}
func handleValueUsed(valuesList []string, annotations map[string]string) map[string]string {
annotations[compv1alpha1.RemediationValueUsedAnnotation] = strings.Join(valuesList, ",")
return annotations
}
func handleNotFoundValue(notFoundValues []string, annotations map[string]string) map[string]string {
annotations[compv1alpha1.RemediationUnsetValueAnnotation] = strings.Join(notFoundValues, ",")
return annotations
}
func handleValueRequiredAnnotation(u *unstructured.Unstructured, annotations map[string]string) map[string]string {
// We already assume this has some annotation
inAnns := u.GetAnnotations()
// parse
if valueRequired, hasValueReqKey := inAnns[valueInputRequiredAnnotationKey]; hasValueReqKey {
// set required custom variable names
dnsFriendlyRequiredVariable := strings.ReplaceAll(valueRequired, "_", "-")
annotations[compv1alpha1.RemediationValueRequiredAnnotation] = dnsFriendlyRequiredVariable
// reset metadata of output object
delete(inAnns, valueInputRequiredAnnotationKey)
}
u.SetAnnotations(inAnns)
return annotations
}
func handleOptionalAnnotation(u *unstructured.Unstructured, annotations map[string]string) map[string]string {
// We already assume this has some annotation
inAnns := u.GetAnnotations()
// parse
if _, hasKey := inAnns[optionalAnnotationKey]; hasKey {
// set dependencies
annotations[compv1alpha1.RemediationOptionalAnnotation] = ""
// reset metadata of output object
delete(inAnns, optionalAnnotationKey)
}
u.SetAnnotations(inAnns)
return annotations
}
func handleRemediationTypeAnnotation(u *unstructured.Unstructured) compv1alpha1.RemediationType {
// We already assume this has some annotation
inAnns := u.GetAnnotations()
// parse
remType := inAnns[remediationTypeAnnotationKey]
// reset metadata of output object
delete(inAnns, enforcementTypeAnnotationKey)
u.SetAnnotations(inAnns)
return compv1alpha1.RemediationType(remType)
}
func handleEnforcementTypeAnnotation(u *unstructured.Unstructured, annotations map[string]string) map[string]string {
// We already assume this has some annotation
inAnns := u.GetAnnotations()
// parse
typeAnn, hasKey := inAnns[enforcementTypeAnnotationKey]
if hasKey {
// set dependencies
annotations[compv1alpha1.RemediationEnforcementTypeAnnotation] = typeAnn
// reset metadata of output object
delete(inAnns, enforcementTypeAnnotationKey)
}
u.SetAnnotations(inAnns)
return annotations
}
client.go

/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package grpcvtworkerclient contains the gRPC version of the vtworker client protocol.
package grpcvtworkerclient
import (
"flag"
"golang.org/x/net/context"
"google.golang.org/grpc"
"github.com/youtube/vitess/go/vt/grpcclient"
"github.com/youtube/vitess/go/vt/logutil"
"github.com/youtube/vitess/go/vt/vterrors"
"github.com/youtube/vitess/go/vt/worker/vtworkerclient"
logutilpb "github.com/youtube/vitess/go/vt/proto/logutil"
vtrpcpb "github.com/youtube/vitess/go/vt/proto/vtrpc"
vtworkerdatapb "github.com/youtube/vitess/go/vt/proto/vtworkerdata"
vtworkerservicepb "github.com/youtube/vitess/go/vt/proto/vtworkerservice"
)
var (
cert = flag.String("vtworker_client_grpc_cert", "", "the cert to use to connect")
key = flag.String("vtworker_client_grpc_key", "", "the key to use to connect")
ca = flag.String("vtworker_client_grpc_ca", "", "the server ca to use to validate servers when connecting")
name = flag.String("vtworker_client_grpc_server_name", "", "the server name to use to validate server certificate")
)
type gRPCVtworkerClient struct {
cc *grpc.ClientConn
c vtworkerservicepb.VtworkerClient
}
func gRPCVtworkerClientFactory(addr string) (vtworkerclient.Client, error) {
	// create the RPC client
	opt, err := grpcclient.SecureDialOption(*cert, *key, *ca, *name)
	if err != nil {
		return nil, err
	}
	cc, err := grpcclient.Dial(addr, grpcclient.FailFast(false), opt)
	if err != nil {
		return nil, vterrors.Errorf(vtrpcpb.Code_DEADLINE_EXCEEDED, "grpcclient.Dial() err: %v", err)
	}
	c := vtworkerservicepb.NewVtworkerClient(cc)
	return &gRPCVtworkerClient{
		cc: cc,
		c:  c,
	}, nil
}
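// eventStreamAdapter wraps the gRPC command stream so it satisfies logutil.EventStream.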
type eventStreamAdapter struct {
stream vtworkerservicepb.Vtworker_ExecuteVtworkerCommandClient
}
func (e *eventStreamAdapter) Recv() (*logutilpb.Event, error) {
le, err := e.stream.Recv()
if err != nil {
return nil, vterrors.FromGRPC(err)
}
return le.Event, nil
}
// ExecuteVtworkerCommand is part of the VtworkerClient interface.
func (client *gRPCVtworkerClient) ExecuteVtworkerCommand(ctx context.Context, args []string) (logutil.EventStream, error) {
query := &vtworkerdatapb.ExecuteVtworkerCommandRequest{
Args: args,
}
stream, err := client.c.ExecuteVtworkerCommand(ctx, query)
if err != nil {
return nil, vterrors.FromGRPC(err)
}
return &eventStreamAdapter{stream}, nil
}
// Close is part of the VtworkerClient interface.
func (client *gRPCVtworkerClient) Close() {
client.cc.Close()
}
func init() {
vtworkerclient.RegisterFactory("grpc", gRPCVtworkerClientFactory)
}
l13-diff-intro.component.ts

// Imports ____________________________________________________________________
import type { DisplayShortcut, Keybinding } from '../../../types';
import { isMacOs, isWindows, L13Component, L13Element, L13Query } from '../../@l13/core';
import { disableContextMenu, parseIcons } from '../../common';
import styles from '../styles';
import templates from '../templates';
import { L13DiffIntroViewModelService } from './l13-diff-intro.service';
import type { L13DiffIntroViewModel } from './l13-diff-intro.viewmodel';
// Variables __________________________________________________________________
const ALT = '⌥';
const CMD = '⌘';
const CTRL = '⌃';
const SHIFT = '⇧';
const macOSSymbols = {
Alt: ALT,
Cmd: CMD,
Command: CMD,
Control: CTRL,
Ctrl: CTRL,
Meta: CMD,
Option: ALT,
Shift: SHIFT,
};
const keyboardShortcuts:DisplayShortcut[] = [
{
description: 'Filter Diff Result',
key: 'Ctrl+F',
mac: 'Cmd+F',
},
{
description: 'Delete Selected Files',
key: 'Delete',
mac: 'Cmd+Backspace',
},
];
// Initialize _________________________________________________________________
// Exports ____________________________________________________________________
@L13Component({
name: 'l13-diff-intro',
service: L13DiffIntroViewModelService,
styles: [parseIcons(styles['l13-diff-intro/l13-diff-intro.css'])],
template: templates['l13-diff-intro/l13-diff-intro.html'],
})
export class L13DiffIntroComponent extends L13Element<L13DiffIntroViewModel> {
@L13Query('l13-diff-shortcuts')
private shortcuts:HTMLElement;
public constructor () {
super();
this.shortcuts.appendChild(createShortcutViews(keyboardShortcuts));
disableContextMenu(this);
}
}
// Functions __________________________________________________________________
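// Picks the platform-specific keybinding when one is defined (mac/win), otherwise the generic key.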
export function detectKeybinding ({ key, mac, win }:Keybinding) {
return isMacOs && mac ? mac : isWindows && win ? win : key;
}
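// Maps a key name to its macOS symbol (e.g. "Cmd" -> "⌘") on Mac, otherwise returns the key unchanged.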
export function getKeyLabel (key:string) {
return isMacOs ? (<any>macOSSymbols)[key] || key : key;
}
function createShortcutViews (shortcuts:DisplayShortcut[]) {
const fragment = document.createDocumentFragment();
shortcuts.forEach((shortcut) => fragment.appendChild(createShortcutView(shortcut)));
return fragment;
}
function createShortcutView ({ description, key, mac, win }:DisplayShortcut) {
key = detectKeybinding({ key, mac, win });
const dl = document.createElement('DL');
const dt = document.createElement('DT');
const dd = document.createElement('DD');
const div = document.createElement('DIV');
dt.textContent = description;
div.title = key;
div.appendChild(createShortcutKeys(key));
dd.appendChild(div);
dl.appendChild(dt);
dl.appendChild(dd);
return dl;
}
function createShortcutKeys (key:string) :DocumentFragment {
const fragment = document.createDocumentFragment();
key.split('+').forEach((value) => {
const span = document.createElement('SPAN');
span.textContent = getKeyLabel(value);
span.className = '-key';
fragment.appendChild(span);
});
return fragment;
}
__init__.py

from urllib.parse import urlencode
from django.conf import settings
from django.db.models import F
from ...checkout.utils import (
get_checkout_from_request, get_or_create_checkout_from_request)
from ...core.utils import get_paginator_items
from ...core.utils.filters import get_now_sorted_by
from ...core.utils.taxes import ZERO_TAXED_MONEY, TaxedMoney
from ..forms import ProductForm
from .availability import products_with_availability
def products_visible_to_user(user):
# pylint: disable=cyclic-import
from ..models import Product
if user.is_authenticated and user.is_active and user.is_staff:
return Product.objects.all()
return Product.objects.published()
def products_with_details(user):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'category__translations', 'collections__translations',
'images', 'variants__variant_images__image',
'attributes__values__translations',
'product_type__product_attributes__translations',
'product_type__product_attributes__values__translations')
return products
def products_for_products_list(user):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'images', 'variants__variant_images__image')
return products
def products_for_homepage(user, homepage_collection):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'images', 'variants__variant_images__image',
'collections')
products = products.filter(collections=homepage_collection)
return products
def get_product_images(product):
"""Return list of product images that will be placed in product gallery."""
return list(product.images.all())
def handle_checkout_form(request, product, create_checkout=False):
if create_checkout:
checkout = get_or_create_checkout_from_request(request)
else:
checkout = get_checkout_from_request(request)
form = ProductForm(
checkout=checkout, product=product, data=request.POST or None,
discounts=request.discounts, taxes=request.taxes)
return form, checkout
def products_for_checkout(user):
products = products_visible_to_user(user)
products = products.prefetch_related('variants__variant_images__image')
return products
def get_variant_url_from_product(product, attributes):
return '%s?%s' % (product.get_absolute_url(), urlencode(attributes))
def get_variant_url(variant):
attributes = {
str(attribute.pk): attribute
for attribute in variant.product.product_type.variant_attributes.all()}
return get_variant_url_from_product(variant.product, attributes)
def allocate_stock(variant, quantity):
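    """Reserve stock for an order by increasing the variant's allocated quantity."""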
variant.quantity_allocated = F('quantity_allocated') + quantity
variant.save(update_fields=['quantity_allocated'])
def deallocate_stock(variant, quantity):
variant.quantity_allocated = F('quantity_allocated') - quantity
variant.save(update_fields=['quantity_allocated'])
def decrease_stock(variant, quantity):
variant.quantity = F('quantity') - quantity
variant.quantity_allocated = F('quantity_allocated') - quantity
variant.save(update_fields=['quantity', 'quantity_allocated'])
def increase_stock(variant, quantity, allocate=False):
"""Return given quantity of product to a stock."""
variant.quantity = F('quantity') + quantity
update_fields = ['quantity']
if allocate:
variant.quantity_allocated = F('quantity_allocated') + quantity
update_fields.append('quantity_allocated')
variant.save(update_fields=update_fields)
def get_product_list_context(request, filter_set):
"""
:param request: request object
:param filter_set: filter set for product list
:return: context dictionary
"""
# Avoiding circular dependency
from ..filters import SORT_BY_FIELDS
qs = filter_set.qs
if not filter_set.form.is_valid():
qs = qs.none()
products_paginated = get_paginator_items(
qs, settings.PAGINATE_BY, request.GET.get('page'))
products_and_availability = list(products_with_availability(
        products_paginated, request.discounts, request.taxes, request.currency))
now_sorted_by = get_now_sorted_by(filter_set)
arg_sort_by = request.GET.get('sort_by')
is_descending = arg_sort_by.startswith('-') if arg_sort_by else False
return {
'filter_set': filter_set,
'products': products_and_availability,
'products_paginated': products_paginated,
'sort_by_choices': SORT_BY_FIELDS,
'now_sorted_by': now_sorted_by,
'is_descending': is_descending}
def collections_visible_to_user(user):
# pylint: disable=cyclic-import
from ..models import Collection
if user.is_authenticated and user.is_active and user.is_staff:
return Collection.objects.all()
return Collection.objects.published()
def calculate_revenue_for_variant(variant, start_date):
"""Calculate total revenue generated by a product variant."""
revenue = ZERO_TAXED_MONEY
for order_line in variant.order_lines.all():
if order_line.order.created >= start_date:
net = order_line.unit_price_net * order_line.quantity
gross = order_line.unit_price_gross * order_line.quantity
revenue += TaxedMoney(net, gross)
    return revenue
tests3.rs | // tests3.rs
// This test isn't testing our function -- make it do that in such a way that
// the test passes. Then write a second test that tests whether we get the result
// we expect to get when we call `is_even(5)`.
// Execute `rustlings hint tests3` for hints :)
// I AM NOT DONE
pub fn is_even(num: i32) -> bool {
num % 2 == 0
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn is_true_when_even() {
assert!(is_even(2));
}
}
linear_rank_metric.py | # -*- coding: utf-8 -*-
r"""
Generic structures for linear codes over the rank metric
Rank Metric
===========
In coding theory, the most common metric is the Hamming metric, where distance
between two codewords is given by the number of positions in which they differ.
An alternative to this is the rank metric. Take two fields, `F_q` and `F_{q^m}`,
and define a code `C` to be a set of vectors of length `n` with entries from
`F_{q^m}`. Let `c` be a codeword. We can represent it as an `m \times n` matrix
`M` over `F_q`.
A detailed description on the relationship between the two representations can
be found in :meth:`sage.coding.linear_rank_metric.to_matrix_representation`
and :meth:`sage.coding.linear_rank_metric.from_matrix_representation`.
We can define a metric using the rank of the matrix representation of the
codewords. A distance between two codewords `a, b` is the rank of the matrix
representation of `a - b`. A weight of a codeword `c` is the rank of the matrix
representation of `c`.
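
For a quick illustration, using the helper functions defined further down in
this module (the values below mirror their doctests)::

    sage: from sage.coding.linear_rank_metric import rank_weight, rank_distance
    sage: x = GF(64).gen()
    sage: a = vector(GF(64), (x + 1, x + 1, 1))
    sage: b = vector(GF(64), (1, 0, 0))
    sage: rank_weight(a, GF(4))
    2
    sage: rank_distance(a, b, GF(4))
    2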
This module allows representing rank metric codes which are linear over the
big field `F_{q^m}`, i.e. the usual linearity condition when the codewords are
considered in vector form. One can also consider rank metric codes which are
only linear over `F_q`, but these are not currently supported in SageMath.
Note that linear rank metric codes per the definition of this file are
mathematically just linear block codes, and so could be considered as a
:class:`sage.coding.linear_code.LinearCode`. However, since most of the
functionality of that class is specific to the Hamming metric, the two notions
are implemented as entirely different in SageMath. If you wish to investigate
Hamming-metric properties of a linear rank metric code ``C``, you can easily
convert it by calling ``C_hamm = LinearCode(C)``.
Linear Rank Metric Code and Gabidulin Codes
===========================================
The class :class:`sage.coding.linear_rank_metric.LinearRankMetricCode` is the
analog of :class:`sage.coding.linear_code.LinearCode`, i.e. it is a generator
matrix-based representation of a linear rank metric code without specific
knowledge on the structure of the code.
Gabidulin codes are the main family of structured linear rank metric codes.
These codes are the rank-metric analog of Reed-Solomon codes.
``AbstractLinearRankMetricCode``
--------------------------------
This is a base class designed to contain methods, features and parameters
shared by every linear rank metric code. For instance, generic algorithms for
computing the minimum distance, etc. Many of these algorithms are slow,
e.g. exponential in the code length. It also contains methods for swapping
between vector and matrix representation of elements.
``AbstractLinearRankMetricCode`` is an abstract class for linear rank metric codes,
so any linear rank metric code class should inherit from this class.
Also ``AbstractLinearRankMetricCode`` should never itself be instantiated.
See :class:`sage.coding.linear_rank_metric.AbstractLinearRankMetricCode`
for details and examples.
``LinearRankMetricCode``
------------------------
This class is used to represent arbitrary and unstructured linear rank metric
codes. It mostly relies directly on generic methods provided by
``AbstractLinearRankMetricCode``, which means that basic operations on the code
(e.g. computation of the minimum distance) will use slow algorithms.
A ``LinearRankMetricCode`` is instantiated by providing a generator::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: C
[3, 2] linear rank metric code over GF(64)/GF(4)
sage: C.generator_matrix()
[1 1 0]
[0 0 1]
sage: c = vector(GF(64), (1, 1, 1))
sage: c in C
True
Further references
------------------
Read more about
`rank metric and Gabidulin codes <https://en.wikipedia.org/wiki/Rank_error-correcting_code>`_
AUTHORS:
- Marketa Slukova (2019-08-16): initial version
TESTS::
sage: MS = MatrixSpace(GF(2),4,7)
sage: G = MS([[1,1,1,0,0,0,0], [1,0,0,1,1,0,0], [0,1,0,1,0,1,0], [1,1,0,1,0,0,1]])
sage: C = LinearCode(G)
sage: C == loads(dumps(C))
True
"""
# ****************************************************************************
# Copyright (C) 2019 MARKETA SLUKOVA <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# https://www.gnu.org/licenses/
# ****************************************************************************
from sage.categories.fields import Fields
from sage.matrix.constructor import Matrix
from sage.structure.element import is_Matrix, is_Vector
from sage.modules.free_module_element import vector
from sage.rings.infinity import Infinity
from .linear_code_no_metric import AbstractLinearCodeNoMetric
from .linear_code import LinearCodeGeneratorMatrixEncoder
from .decoder import Decoder
def to_matrix_representation(v, sub_field=None, basis=None):
r"""
Return a matrix representation of ``v`` over ``sub_field`` in terms of
``basis``.
Let `(b_1, b_2, \ldots, b_m)`, `b_i \in GF(q^m)`, be a basis of `GF(q^m)` as
a vector space over `GF(q)`. Take an element `x \in GF(q^m)`. We can write
`x` as `x = u_1 b_1 + u_2 b_2 + \ldots u_m b_m`, where `u_i \in GF(q)`. This
way we can represent an element from `GF(q^m)` as a vector of length `m`
over `GF(q)`.
Given a vector ``v`` of length `n` over some field `F_{q^m}`, we can
represent each entry as a vector of length `m`, yielding an `m \times n`
matrix over ``sub_field``. In case ``sub_field`` is not given, we take the
prime subfield `F_p` of `F_{q^m}`.
INPUT:
- ``v`` -- a vector over some field `F_{q^m}`
- ``sub_field`` -- (default: ``None``) a sub field of `F_{q^m}`. If not
specified, it is the prime subfield `F_p` of `F_{q^m}`.
- ``basis`` -- (default: ``None``) a basis of `F_{q^m}` as a vector space over
``sub_field``. If not specified, given that `q = p^s`, let
`1,\beta,\ldots,\beta^{sm}` be the power basis that SageMath uses to
represent `F_{q^m}`. The default basis is then `1,\beta,\ldots,\beta^{m-1}`.
EXAMPLES::
sage: from sage.coding.linear_rank_metric import to_matrix_representation
sage: x = GF(64).gen()
sage: a = vector(GF(64), (x + 1, x + 1, 1))
sage: to_matrix_representation(a, GF(4))
[1 1 1]
[1 1 0]
[0 0 0]
sage: m = Matrix(GF(4), [[1, 1, 1], [1, 1, 0], [0, 0, 0]])
sage: to_matrix_representation(m)
Traceback (most recent call last):
...
TypeError: Input must be a vector
"""
if not is_Vector(v):
raise TypeError("Input must be a vector")
base_field = v.base_ring()
if not sub_field:
sub_field = base_field.prime_subfield()
n = v.length()
m = base_field.degree()//sub_field.degree()
extension, to_big_field, from_big_field = base_field.vector_space(sub_field, basis, map=True)
return Matrix(sub_field, m, n, lambda i, j: from_big_field(v[j])[i])
def from_matrix_representation(w, base_field=None, basis=None):
r"""
Return a vector representation of a matrix ``w`` over ``base_field`` in terms
of ``basis``.
Given an `m \times n` matrix over `F_q` and some ``basis`` of `F_{q^m}`
over `F_q`, we can represent each of its columns as an element of `F_{q^m}`,
yielding a vector of length `n` over `F_q`.
In case ``base_field`` is not given, we take `F_{q^m}`, the field extension of
`F_q` of degree `m`, the number of rows of ``w``.
INPUT:
- ``w`` -- a matrix over some field `F_q`
- ``base_field`` -- (default: ``None``) an extension field of `F_q`. If not
specified, it is the field `F_{q^m}`, where `m` is the number of rows of
``w``.
- ``basis`` -- (default: ``None``) a basis of `F_{q^m}` as a vector space over
``F_q``. If not specified, given that `q = p^s`, let
`1,\beta,\ldots,\beta^{sm}` be the power basis that SageMath uses to
represent `F_{q^m}`. The default basis is then `1,\beta,\ldots,\beta^{m-1}`.
EXAMPLES::
sage: from sage.coding.linear_rank_metric import from_matrix_representation
sage: m = Matrix(GF(4), [[1, 1, 1], [1, 1, 0], [0, 0, 0]])
sage: from_matrix_representation(m)
(z6 + 1, z6 + 1, 1)
sage: v = vector(GF(4), (1, 0, 0))
sage: from_matrix_representation(v)
Traceback (most recent call last):
...
TypeError: Input must be a matrix
"""
if not is_Matrix(w):
raise TypeError("Input must be a matrix")
sub_field = w.base_ring()
if not base_field:
base_field = sub_field.extension(w.nrows())
v = []
extension, to_big_field, from_big_field = base_field.vector_space(sub_field, basis, map=True)
for i in range(w.ncols()):
v.append(to_big_field(w.column(i)))
return vector(v)
def rank_weight(c, sub_field=None, basis=None):
r"""
Return the rank of ``c`` as a matrix over ``sub_field``.
If ``c`` is a vector over some field `F_{q^m}`, the function converts it
into a matrix over `F_q`.
INPUT:
- ``c`` -- a vector over some field `F_{q^m}`; or a matrix over `F_q`
- ``sub_field`` -- (default: ``None``) a sub field of `F_{q^m}`. If not
specified, it is the prime subfield `F_p` of `F_{q^m}`.
- ``basis`` -- (default: ``None``) a basis of `F_{q^m}` as a vector space over
``sub_field``. If not specified, given that `q = p^s`, let
`1,\beta,\ldots,\beta^{sm}` be the power basis that SageMath uses to
represent `F_{q^m}`. The default basis is then `1,\beta,\ldots,\beta^{m-1}`.
EXAMPLES::
sage: from sage.coding.linear_rank_metric import rank_weight
sage: x = GF(64).gen()
sage: a = vector(GF(64), (x + 1, x + 1, 1))
sage: rank_weight(a, GF(4))
2
"""
if is_Vector(c):
c = to_matrix_representation(c, sub_field, basis)
return c.rank()
def rank_distance(a, b, sub_field=None, basis=None):
r"""
Return the rank of ``a`` - ``b`` as a matrix over ``sub_field``.
Take two vectors ``a``, ``b`` over some field `F_{q^m}`. This function
converts them to matrices over `F_q` and calculates the rank of their
difference.
If ``sub_field`` is not specified, we take the prime subfield `F_q` of
`F_{q^m}`.
INPUT:
- ``a`` -- a vector over some field `F_{q^m}`
- ``b`` -- a vector over some field `F_{q^m}`
- ``sub_field`` -- (default: ``None``) a sub field of `F_{q^m}`. If not
specified, it is the prime subfield `F_p` of `F_{q^m}`.
- ``basis`` -- (default: ``None``) a basis of `F_{q^m}` as a vector space over
``sub_field``. If not specified, given that `q = p^s`, let
`1,\beta,\ldots,\beta^{sm}` be the power basis that SageMath uses to
represent `F_{q^m}`. The default basis is then `1,\beta,\ldots,\beta^{m-1}`.
EXAMPLES::
sage: from sage.coding.linear_rank_metric import rank_distance
sage: x = GF(64).gen()
sage: a = vector(GF(64), (x + 1, x + 1, 1))
sage: b = vector(GF(64), (1, 0, 0))
sage: rank_distance(a, b, GF(4))
2
sage: c = vector(GF(4), (1, 0, 0))
sage: rank_distance(a, c, GF(4))
Traceback (most recent call last):
...
ValueError: The base field of (z6 + 1, z6 + 1, 1) and (1, 0, 0) has to be the same
sage: d = Matrix(GF(64), (1, 0, 0))
sage: rank_distance(a, d, GF(64))
Traceback (most recent call last):
...
TypeError: Both inputs have to be vectors
sage: e = vector(GF(64), (1, 0))
sage: rank_distance(a, e, GF(64))
Traceback (most recent call last):
...
ValueError: The length of (z6 + 1, z6 + 1, 1) and (1, 0) has to be the same
"""
if not (a.base_ring() == b.base_ring()):
raise ValueError("The base field of {} and {} has to be the same".format(a, b))
if not (is_Vector(a) and is_Vector(b)):
raise TypeError("Both inputs have to be vectors")
if not len(a) == len(b):
raise ValueError("The length of {} and {} has to be the same".format(a, b))
a = to_matrix_representation(a, sub_field, basis)
b = to_matrix_representation(b, sub_field, basis)
return (a - b).rank()
class AbstractLinearRankMetricCode(AbstractLinearCodeNoMetric):
r"""
Abstract class for linear rank metric codes.
This class contains methods that can be used on families of linear rank
metric codes. Every linear rank metric code class should inherit from this
abstract class.
This class is intended for codes which are linear over the ``base_field``.
Codewords of rank metric codes have two representations. They can either be
written as a vector of length `n` over `GF(q^m)`, or an `m \times n` matrix
over `GF(q)`. This implementation principally uses the vector representation.
However, one can always get the matrix representation using the
:meth:`sage.coding.linear_rank_metric.AbstractLinearRankMetricCode.to_matrix`
method. To go back to a vector, use the
:meth:`sage.coding.linear_rank_metric.AbstractLinearRankMetricCode.from_matrix`
method.
Instructions on how to make a new family of rank metric codes are analogous
to making a new family of linear codes over the Hamming metric, instructions
for which are in :class:`sage.coding.linear_code.AbstractLinearCode`. For an
example, see
:meth:`sage.coding.linear_rank_metric.AbstractLinearRankMetricCode.__init__`
.. WARNING::
A lot of methods of the abstract class rely on the knowledge of a generator matrix.
It is thus strongly recommended to set an encoder with a generator matrix implemented
as a default encoder.
"""
_registered_encoders = {}
_registered_decoders = {}
def __init__(self, base_field, sub_field, length, default_encoder_name,
default_decoder_name, basis=None):
r"""
Initialize mandatory parameters that every linear rank metric code has.
This method only exists for inheritance purposes as it initializes
parameters that need to be known by every linear rank metric code.
The class :class:`sage.coding.linear_rank_metric.AbstractLinearRankMetricCode`
should never be directly instantiated.
INPUT:
- ``base_field`` -- the base field of ``self``
- ``sub_field`` -- the sub field of ``self``
- ``length`` -- the length of ``self`` (a Python int or a Sage Integer),
must be > 0 and at most the degree of the field extension
- ``default_encoder_name`` -- the name of the default encoder of ``self``
- ``default_decoder_name`` -- the name of the default decoder of ``self``
- ``basis`` -- (default: ``None``) a basis of `F_{q^m}` as a vector space over
``sub_field``. If not specified, given that `q = p^s`, let
`1,\beta,\ldots,\beta^{sm}` be the power basis that SageMath uses to
represent `F_{q^m}`. The default basis is then `1,\beta,\ldots,\beta^{m-1}`.
EXAMPLES:
The following example demonstrates how to use subclass
`AbstractLinearRankMetricCode` for representing a new family of rank
metric codes. The example is a rank repetition code::
sage: from sage.coding.linear_rank_metric import AbstractLinearRankMetricCode
sage: class RankRepetitionCode(AbstractLinearRankMetricCode):
....: def __init__(self, base_field, sub_field, length):
....: sage.coding.linear_rank_metric.AbstractLinearRankMetricCode.__init__(self, base_field, sub_field, length, "GeneratorMatrix", "NearestNeighbor")
....: beta = base_field.gen()
....: self._generator_matrix = matrix(base_field, [[ beta^i for i in range(length) ]])
....: def generator_matrix(self):
....: return self._generator_matrix
....: def _repr_(self):
....: return "[%d, %d] rank-metric repetition code over GF(%s)" % (self.length(), self.dimension(), self.base_field().cardinality())
We now instantiate a member of our newly made code family::
sage: C = RankRepetitionCode(GF(8), GF(2), 3)
We can check its existence and parameters::
sage: C
[3, 1] rank-metric repetition code over GF(8)
We can encode a vector::
sage: word = vector(C.base_field(), [1])
sage: E = codes.encoders.LinearCodeSystematicEncoder(C)
sage: codeword = E(word)
sage: codeword
(1, z3, z3^2)
We can get the matrix representation of the codeword::
sage: C.matrix_form_of_vector(codeword)
[1 0 0]
[0 1 0]
[0 0 1]
We can decode the vector representation of the codeword::
sage: D = codes.decoders.LinearRankMetricCodeNearestNeighborDecoder(C)
sage: D.decode_to_code(codeword)
(1, z3, z3^2)
sage: D.decode_to_message(codeword)
(1)
We can check that it is truly a part of the framework category::
sage: C.parent()
<class '__main__.RankRepetitionCode_with_category'>
sage: C.category()
Category of facade finite dimensional vector spaces with basis over Finite Field in z3 of size 2^3
And any method that works on rank metric linear codes works for our new dummy code::
sage: C.minimum_distance()
3
sage: C.metric()
'rank'
TESTS:
If ``sub_field`` is not a field, an error is raised::
sage: C = RankRepetitionCode(GF(8), ZZ, 3)
Traceback (most recent call last):
...
ValueError: 'sub_field' must be a field (and Integer Ring is not one)
If ``sub_field`` is not a subfield of ``base_field``, an error is raised::
sage: C = RankRepetitionCode(GF(8), GF(3), 2)
Traceback (most recent call last):
...
ValueError: 'sub_field' has to be a subfield of 'base_field'
"""
self._registered_decoders["NearestNeighbor"] = LinearRankMetricCodeNearestNeighborDecoder
if not sub_field.is_field():
raise ValueError("'sub_field' must be a field (and {} is not one)".format(sub_field))
if not (sub_field.degree().divides(base_field.degree()) and (sub_field.prime_subfield() == base_field.prime_subfield())):
raise ValueError("'sub_field' has to be a subfield of 'base_field'")
m = base_field.degree() // sub_field.degree()
self._extension_degree = m
self._sub_field = sub_field
self._generic_constructor = LinearRankMetricCode
super(AbstractLinearRankMetricCode, self).__init__(base_field, length, default_encoder_name, default_decoder_name, "rank")
def sub_field(self):
r"""
Return the sub field of ``self``.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: C.sub_field()
Finite Field in z2 of size 2^2
"""
return self._sub_field
def extension_degree(self):
r"""
Return `m`, the degree of the field extension of ``self``.
Let ``base_field`` be `GF(q^m)` and ``sub_field`` be `GF(q)`. Then this
function returns `m`.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: C.extension_degree()
3
"""
return self._extension_degree
def field_extension(self):
r"""
Return the field extension of ``self``.
Let ``base_field`` be some field `F_{q^m}` and ``sub_field`` `F_{q}`.
This function returns the vector space of dimension `m` over `F_{q}`.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: C.field_extension()
Vector space of dimension 3 over Finite Field in z2 of size 2^2
"""
return self.base_field().vector_space(self.sub_field(), map=False)
def rank_distance_between_vectors(self, left, right):
r"""
Return the rank of the matrix of ``left`` - ``right``.
INPUT:
- ``left`` -- a vector over the ``base_field`` of ``self``
- ``right`` -- a vector over the ``base_field`` of ``self``
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: x = GF(64).gen()
sage: a = vector(GF(64), (x + 1, x + 1, 1))
sage: b = vector(GF(64), (1, 0, 0))
sage: C.rank_distance_between_vectors(a, b)
2
"""
return rank_distance(left, right, self.sub_field())
def minimum_distance(self):
r"""
Return the minimum distance of ``self``.
This algorithm simply iterates over all the elements of the code and
returns the minimum weight.
EXAMPLES::
sage: F.<a> = GF(8)
sage: G = Matrix(F, [[1,a,a^2,0]])
sage: C = codes.LinearRankMetricCode(G, GF(2))
sage: C.minimum_distance()
3
"""
d = Infinity
for c in self:
if c == self.zero():
continue
d = min(self.rank_weight_of_vector(c), d)
return d
def rank_weight_of_vector(self, word):
r"""
Return the weight of the word, i.e. its rank.
INPUT:
- ``word`` -- a vector over the ``base_field`` of ``self``
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: x = GF(64).gen()
sage: a = vector(GF(64), (x + 1, x + 1, 1))
sage: C.rank_weight_of_vector(a)
2
"""
return rank_weight(word, self.sub_field())
def matrix_form_of_vector(self, word):
r"""
Return the matrix representation of a word.
INPUT:
- ``word`` -- a vector over the ``base_field`` of ``self``
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: x = GF(64).gen()
sage: a = vector(GF(64), (x + 1, x + 1, 1))
sage: C.matrix_form_of_vector(a)
[1 1 1]
[1 1 0]
[0 0 0]
"""
return to_matrix_representation(word, self.sub_field())
def vector_form_of_matrix(self, word):
r"""
Return the vector representation of a word.
INPUT:
- ``word`` -- a matrix over the ``sub_field`` of ``self``
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: x = GF(64).gen()
sage: m = Matrix(GF(4), [[1, 1, 1], [1, 1, 0], [0, 0, 0]])
sage: C.vector_form_of_matrix(m)
(z6 + 1, z6 + 1, 1)
"""
return from_matrix_representation(word, self.base_field())
class LinearRankMetricCode(AbstractLinearRankMetricCode):
r"""
Linear rank metric codes over a finite field, represented using a
generator matrix.
This class should be used for arbitrary and unstructured linear rank metric
codes. This means that basic operations on the code, such as the computation
of the minimum distance, will use generic, slow algorithms.
If you are looking for constructing a code from a more specific family, see
if the family has been implemented by investigating ``codes.<tab>``. These
more specific classes use properties particular to that family to allow
faster algorithms, and could also have family-specific methods.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: C
[3, 2] linear rank metric code over GF(64)/GF(4)
sage: C.base_field()
Finite Field in z6 of size 2^6
sage: C.sub_field()
Finite Field in z2 of size 2^2
sage: C.length()
3
sage: C.dimension()
2
sage: C[2]
(z6, z6, 0)
sage: E = codes.encoders.LinearCodeGeneratorMatrixEncoder(C)
sage: word = vector(C.base_field(), [1, 0])
sage: E(word)
(1, 1, 0)
"""
def __init__(self, generator, sub_field=None, basis=None):
r"""
See the docstring for :meth:`LinearRankMetricCode`.
INPUT:
- ``generator`` -- a generator matrix over the ``base_field`` with
dimension `k \times n`, where `k` is the dimension of the code and
`n` its length; or a code over a finite field
- ``sub_field`` -- (default: ``None``) the sub field of ``self``, if not
specified, it is the prime field of ``base_field``
- ``basis`` -- (default: ``None``) a basis of `F_{q^m}` as a vector space over
``sub_field``. If not specified, given that `q = p^s`, let
`1,\beta,\ldots,\beta^{sm}` be the power basis that SageMath uses to
represent `F_{q^m}`. The default basis is then `1,\beta,\ldots,\beta^{m-1}`.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4)) # indirect doctest
sage: C
[3, 2] linear rank metric code over GF(64)/GF(4)
"""
base_field = generator.base_ring()
if not base_field.is_field():
raise ValueError("'generator' must be defined on a field (not a ring)")
if not sub_field:
sub_field = base_field.prime_subfield()
try:
gen_basis = None
if hasattr(generator,"nrows"): # generator matrix case
if generator.rank() < generator.nrows():
gen_basis = generator.row_space().basis()
else:
gen_basis = generator.basis() # vector space etc. case
if not gen_basis is None:
from sage.matrix.constructor import matrix
generator = matrix(base_field, gen_basis)
if generator.nrows() == 0:
raise ValueError("this linear code contains no non-zero vector")
except AttributeError:
# Assume input is an AbstractLinearRankMetricCode, extract its generator matrix
generator = generator.generator_matrix()
self._generator_matrix = generator
self._length = generator.ncols()
super(LinearRankMetricCode, self).__init__(base_field, sub_field, self._length, "GeneratorMatrix", "NearestNeighbor", basis)
def _repr_(self):
r"""
Return a string representation of ``self``.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: C
[3, 2] linear rank metric code over GF(64)/GF(4)
"""
R = self.base_field()
S = self.sub_field()
        if R in Fields() and S in Fields():
return "[%s, %s] linear rank metric code over GF(%s)/GF(%s)"%(self.length(), self.dimension(), R.cardinality(), S.cardinality())
else:
return "[%s, %s] linear rank metric code over %s/%s"%(self.length(), self.dimension(), R, S)
def _latex_(self):
r"""
Return a latex representation of ``self``.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: latex(C)
[3, 2]\textnormal{ Linear rank metric code over }\Bold{F}_{2^{6}}/\Bold{F}_{2^{2}}
"""
return "[%s, %s]\\textnormal{ Linear rank metric code over }%s/%s"\
% (self.length(), self.dimension(), self.base_field()._latex_(), self.sub_field()._latex_())
def generator_matrix(self, encoder_name=None, **kwargs):
r"""
Return a generator matrix of ``self``.
INPUT:
- ``encoder_name`` -- (default: ``None``) name of the encoder which will be
used to compute the generator matrix. ``self._generator_matrix``
will be returned if default value is kept.
- ``kwargs`` -- all additional arguments are forwarded to the construction of the
encoder that is used.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: C.generator_matrix()
[1 1 0]
[0 0 1]
"""
if encoder_name is None or encoder_name == 'GeneratorMatrix':
g = self._generator_matrix
else:
g = super(LinearRankMetricCode, self).generator_matrix(encoder_name, **kwargs)
g.set_immutable()
return g
####################### decoders ###############################
class LinearRankMetricCodeNearestNeighborDecoder(Decoder):
r"""
Construct a decoder for Linear Rank Metric Codes.
This decoder will decode to the nearest codeword found.
"""
def __init__(self, code):
r"""
INPUT:
- ``code`` -- A code associated to this decoder
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: D = codes.decoders.LinearRankMetricCodeNearestNeighborDecoder(C)
sage: D
Nearest neighbor decoder for [3, 2] linear rank metric code over GF(64)/GF(4)
"""
super(LinearRankMetricCodeNearestNeighborDecoder, self).__init__(code, code.ambient_space(), \
code._default_encoder_name)
def __eq__(self, other):
r"""
Test equality between LinearRankMetricCodeNearestNeighborDecoder objects.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: D1 = codes.decoders.LinearRankMetricCodeNearestNeighborDecoder(C)
sage: D2 = codes.decoders.LinearRankMetricCodeNearestNeighborDecoder(C)
sage: D1 == D2
True
"""
return isinstance(other, LinearRankMetricCodeNearestNeighborDecoder)\
and self.code() == other.code()
def _repr_(self):
r"""
Return a string representation of ``self``.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: D = codes.decoders.LinearRankMetricCodeNearestNeighborDecoder(C)
sage: D
Nearest neighbor decoder for [3, 2] linear rank metric code over GF(64)/GF(4)
"""
return "Nearest neighbor decoder for %s" % self.code()
def _latex_(self):
r"""
Return a latex representation of ``self``.
EXAMPLES::
sage: G = Matrix(GF(64), [[1,1,0], [0,0,1]])
sage: C = codes.LinearRankMetricCode(G, GF(4))
sage: D = codes.decoders.LinearRankMetricCodeNearestNeighborDecoder(C)
sage: latex(D)
\textnormal{Nearest neighbor decoder for }[3, 2]\textnormal{ Linear rank metric code over }\Bold{F}_{2^{6}}/\Bold{F}_{2^{2}}
"""
return "\\textnormal{Nearest neighbor decoder for }%s" % self.code()._latex_()
def decode_to_code(self, r):
r"""
        Correct the errors in ``r`` and return a codeword.

        INPUT:

        - ``r`` -- a received word, i.e. a vector in the ambient space of ``self``

        OUTPUT:

        - a codeword of ``self``
EXAMPLES::
sage: F.<a> = GF(4)
sage: G = Matrix(F, [[1,1,0]])
sage: C = codes.LinearRankMetricCode(G, GF(2))
sage: D = codes.decoders.LinearRankMetricCodeNearestNeighborDecoder(C)
sage: D.decode_to_code(vector(F, [a, a, 1]))
(a, a, 0)
"""
C = self.code()
c_min = C.zero()
h_min = C.rank_weight_of_vector(r)
for c in C:
if C.rank_weight_of_vector(c-r) < h_min:
h_min = C.rank_weight_of_vector(c-r)
c_min = c
c_min.set_immutable()
return c_min
    def decoding_radius(self):
        r"""
        Return maximal number of errors ``self`` can decode.

        EXAMPLES::

            sage: F.<a> = GF(8)
            sage: G = Matrix(F, [[1,a,a^2,0]])
            sage: C = codes.LinearRankMetricCode(G, GF(2))
            sage: D = codes.decoders.LinearRankMetricCodeNearestNeighborDecoder(C)
            sage: D.decoding_radius()
            1
        """
        return (self.code().minimum_distance() - 1) // 2


####################### registration ###############################

LinearRankMetricCode._registered_encoders["GeneratorMatrix"] = LinearCodeGeneratorMatrixEncoder
fluentbit.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package datasenders // import "github.com/open-telemetry/opentelemetry-collector-contrib/testbed/datasenders"
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"os/exec"
"strconv"
"time"
"go.opentelemetry.io/collector/consumer"
"go.opentelemetry.io/collector/model/pdata"
"github.com/open-telemetry/opentelemetry-collector-contrib/testbed/testbed"
)
type FluentBitFileLogWriter struct {
testbed.DataSenderBase
file *os.File
parsersFile *os.File
}
// Ensure FluentBitFileLogWriter implements LogDataSender
var _ testbed.LogDataSender = (*FluentBitFileLogWriter)(nil)
// NewFluentBitFileLogWriter creates a new data sender that will write log entries to a
// file, to be tailed by FluentBit and sent to the collector.
func NewFluentBitFileLogWriter(host string, port int) *FluentBitFileLogWriter {
file, err := ioutil.TempFile("", "perf-logs.json")
if err != nil {
panic("failed to create temp file")
}
parsersFile, err := ioutil.TempFile("", "parsers.json")
if err != nil {
panic("failed to create temp file")
}
f := &FluentBitFileLogWriter{
DataSenderBase: testbed.DataSenderBase{
Port: port,
Host: host,
},
file: file,
parsersFile: parsersFile,
}
f.setupParsers()
return f
}
func (f *FluentBitFileLogWriter) Capabilities() consumer.Capabilities {
return consumer.Capabilities{MutatesData: false}
}
func (f *FluentBitFileLogWriter) Start() error {
if _, err := exec.LookPath("fluent-bit"); err != nil {
return err
}
return nil
}
func (f *FluentBitFileLogWriter) setupParsers() {
_, err := f.parsersFile.Write([]byte(`
[PARSER]
Name json
Format json
Time_Key time
Time_Format %d/%m/%Y:%H:%M:%S %z
`))
if err != nil {
panic("failed to write parsers")
}
f.parsersFile.Close()
}
func (f *FluentBitFileLogWriter) ConsumeLogs(_ context.Context, logs pdata.Logs) error {
for i := 0; i < logs.ResourceLogs().Len(); i++ {
for j := 0; j < logs.ResourceLogs().At(i).InstrumentationLibraryLogs().Len(); j++ {
ills := logs.ResourceLogs().At(i).InstrumentationLibraryLogs().At(j)
for k := 0; k < ills.LogRecords().Len(); k++ {
_, err := f.file.Write(append(f.convertLogToJSON(ills.LogRecords().At(k)), '\n'))
if err != nil {
					return err
				}
			}
		}
	}
	return nil
}
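
// Illustrative shape of one line emitted by convertLogToJSON (the "env" key is
// a made-up example attribute; actual keys come from the record's attributes):
//
//	{"time":"02/01/2006:15:04:05Z","log":"the log message","env":"prod"}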
func (f *FluentBitFileLogWriter) convertLogToJSON(lr pdata.LogRecord) []byte {
rec := map[string]string{
"time": time.Unix(0, int64(lr.Timestamp())).Format("02/01/2006:15:04:05Z"),
}
rec["log"] = lr.Body().StringVal()
lr.Attributes().Range(func(k string, v pdata.Value) bool {
switch v.Type() {
case pdata.ValueTypeString:
rec[k] = v.StringVal()
case pdata.ValueTypeInt:
rec[k] = strconv.FormatInt(v.IntVal(), 10)
case pdata.ValueTypeDouble:
rec[k] = strconv.FormatFloat(v.DoubleVal(), 'f', -1, 64)
case pdata.ValueTypeBool:
rec[k] = strconv.FormatBool(v.BoolVal())
default:
panic("missing case")
}
return true
})
b, err := json.Marshal(rec)
if err != nil {
panic("failed to write log: " + err.Error())
}
return b
}
func (f *FluentBitFileLogWriter) Flush() {
_ = f.file.Sync()
}
func (f *FluentBitFileLogWriter) GenConfigYAMLStr() string {
// Note that this generates a receiver config for agent.
return fmt.Sprintf(`
fluentforward:
endpoint: "%s"`, f.GetEndpoint())
}
func (f *FluentBitFileLogWriter) Extensions() map[string]string {
return map[string]string{
"fluentbit": fmt.Sprintf(`
fluentbit:
executable_path: fluent-bit
tcp_endpoint: "%s"
config: |
[SERVICE]
parsers_file %s
[INPUT]
Name tail
parser json
path %s
`, f.GetEndpoint(), f.parsersFile.Name(), f.file.Name()),
}
}
func (f *FluentBitFileLogWriter) ProtocolName() string {
return "fluentforward"
}
pvaSchema.ts | // Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

const checkSchemaFor = (schema, definition) => {
if (schema?.content?.definitions) {
return schema.content.definitions[definition] !== undefined;
}
return false;
};
export const checkForPVASchema = (schema) => checkSchemaFor(schema, 'Microsoft.VirtualAgents.Recognizer');
export const checkForOrchestratorSchema = (schema) => checkSchemaFor(schema, 'Microsoft.OrchestratorRecognizer');
executor.rs | use std::io::{BufRead, Read, Write};
use std::path::Path;
use csv;
use rusqlite;
use csv_utils::string_to_csv_output;
use db_utils::{escape_columns, escape_values, AllString};
use errors::{Result, ResultExt};
/// Main struct that parses the CSV and put the data into a SQLite
pub struct Executor<W: Write> {
conn: rusqlite::Connection,
output: W,
delimiter: u8,
}
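
// Illustrative usage sketch (not taken from this crate's docs): load a single
// in-memory CSV and query it. The first reader becomes `table1`, the second
// `table2`, and so on.
//
//     let input = std::io::Cursor::new("a,b\n1,2\n");
//     let mut exec = Executor::new(
//         vec![std::io::BufReader::new(input)],
//         Vec::new(),          // any `Write` sink works as output
//         b',',
//         50,
//     )?;
//     exec.write_query_results("SELECT a, b FROM table1")?;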
impl<W> Executor<W>
where
W: Write,
{
pub fn new<R>(
readers: Vec<R>,
output: W,
delimiter: u8,
batch_insert_number: usize,
) -> Result<Executor<W>>
where
R: BufRead,
{
let conn = Self::create_database()?;
Self::process_csv_files(readers, delimiter, batch_insert_number, &conn)?;
Ok(Executor {
conn,
output,
delimiter,
})
}
fn create_database() -> Result<rusqlite::Connection> {
Ok(rusqlite::Connection::open_in_memory().chain_err(|| "Opening memory database.")?)
}
fn process_csv_files<R>(
readers: Vec<R>,
delimiter: u8,
batch_insert_number: usize,
conn: &rusqlite::Connection,
) -> Result<()>
where
R: Read,
{
for (i, reader) in readers.into_iter().enumerate() {
let table_number = i + 1;
let mut csv_reader = csv::ReaderBuilder::new()
.delimiter(delimiter)
.from_reader(reader);
let columns = Self::get_csv_columns(&mut csv_reader)?;
Self::create_table(&conn, &columns, table_number)?;
Self::fill_data(
&conn,
&columns,
table_number,
batch_insert_number,
csv_reader,
)?;
}
Ok(())
}
fn get_csv_columns<R>(csv_reader: &mut csv::Reader<R>) -> Result<csv::StringRecord>
where
R: Read,
{
Ok(csv_reader
.headers()
.chain_err(|| "Reading headers")?
.clone())
}
fn create_table(
conn: &rusqlite::Connection,
columns: &csv::StringRecord,
table_number: usize,
) -> Result<()> {
let quoted_columns: Vec<String> = columns
.iter()
.map(|c| format!("\"{}\" VARCHAR NULL", c))
.collect();
let create_query = format!(
"CREATE TABLE table{} ({})",
table_number,
quoted_columns.join(", ")
);
conn.execute(&create_query, &[])
.chain_err(|| format!("Error creating the database. Used query {}", create_query))?;
Ok(())
}
    fn fill_data<R>(
conn: &rusqlite::Connection,
columns: &csv::StringRecord,
table_number: usize,
batch_insert_number: usize,
mut reader: csv::Reader<R>,
) -> Result<()>
where
R: Read,
{
let quoted_columns = escape_columns(columns);
let insert = format!(
"INSERT INTO table{} ({}) VALUES\n",
table_number,
quoted_columns.join(", ")
);
let mut rows: Vec<String> = vec![];
for (i, row) in reader.records().enumerate() {
let row = row.chain_err(|| "Error reading row")?;
let db_row = escape_values(&row);
rows.push(format!("({})", db_row.join(", ")));
if i % batch_insert_number == 0 {
Self::batch_insert(&conn, &insert, &mut rows)?;
}
}
Self::batch_insert(&conn, &insert, &mut rows)?;
Ok(())
}
/// Consume rows vector and write them into sqlite
fn batch_insert(
conn: &rusqlite::Connection,
insert: &str,
rows: &mut Vec<String>,
    ) -> Result<()> {
        // Skip empty batches: otherwise the trailing flush could emit an
        // invalid `INSERT ... VALUES` statement with no value tuples.
        if rows.is_empty() {
            return Ok(());
        }
        let mut batch = insert.to_owned();
batch.push_str(&rows.join(",\n"));
conn.execute(&batch, &[])
.chain_err(|| "Error running insert query.")?;
rows.clear();
Ok(())
}
fn delimiter_to_string(&self) -> String {
let mut delimiter = String::new();
delimiter.push(self.delimiter as char);
delimiter
}
/// Run the query and write its result as CSV into the specified output stream
pub fn write_query_results(&mut self, query: &str) -> Result<()> {
let delimiter = self.delimiter_to_string();
let mut prepared = Self::prepare_query(&self.conn, query)?;
let output_error = "Error writing on selected output";
Self::write_headers(&prepared, &mut self.output, &output_error, &delimiter)?;
let mut rows = prepared
.query(&[])
.chain_err(|| "Error binding parameters")?;
Self::write_rows(&mut rows, &mut self.output, &output_error, &delimiter)?;
self.output.flush().chain_err(|| "Error writing results")?;
Ok(())
}
fn prepare_query<'a>(
conn: &'a rusqlite::Connection,
query: &str,
) -> Result<rusqlite::Statement<'a>> {
Ok(conn
.prepare(query)
.chain_err(|| format!("Error preparing query: {}", query))?)
}
fn write_headers(
prepared: &rusqlite::Statement,
output: &mut W,
output_error: &str,
delimiter: &str,
) -> Result<()> {
let columns_names = prepared
.column_names()
.iter()
.map(|c| format!("\"{}\"", c))
.collect::<Vec<String>>()
.join(&delimiter);
writeln!(output, "{}", columns_names).chain_err(|| output_error)?;
Ok(())
}
fn write_rows(
rows: &mut rusqlite::Rows,
output: &mut W,
output_error: &str,
delimiter: &str,
) -> Result<()> {
while let Some(row) = rows.next() {
let row = row.chain_err(|| "Error reading results")?;
let output_rows = (0..row.column_count())
.map(|r| row.get::<usize, AllString>(r).into())
.map(|r| string_to_csv_output(&r))
.collect::<Vec<String>>()
.join(&delimiter);
writeln!(output, "{}", output_rows).chain_err(|| output_error)?;
}
Ok(())
}
pub fn dump_database<P>(&self, output: P) -> Result<()>
where
P: AsRef<Path>,
{
self.conn
.backup(rusqlite::DatabaseName::Main, output, None)
.chain_err(|| "Failed to dump database")?;
Ok(())
}
}
utils.ts | import chalk from 'chalk';
import { writeFile, mkdir } from 'fs';
import type { Alias } from './types';
/**
* Convert Windows backslash paths to slash paths: foo\\bar ➔ foo/bar
* @param string
* @returns
*/
export function slash(string: string) {
return string.replace(/\\/g, '/')
}
/**
 * Split String on Separator into Array
 * @param string
 * @param separator
 */
export function split(string: string, separator: string) {
	return string.split(separator);
}
/**
*
* @param string
* @param color
*/
export function log(string: string, color: string = 'red') {
return console.log(chalk.keyword(color)(`
[vite-aliases]: ${string}
`));
}
/**
* Creates a Logfile
* If needed it will also create a Logfolder
* @param path
* @param data
*/
export function createLogfile(path: string, data: Alias[]) {
mkdir(`${path}`, { recursive: true }, (error) => {
writeFile(`${path}/vite-aliases.json`, JSON.stringify(data), (error) => {
if(error) {
				log('An Error occurred while creating the Logfile.');
}
});
if(error) {
			log('An Error occurred while creating the Logfolder.');
}
});
log('Logfile created!', 'green');
}
missing_event_generic_on_module_with_instance.rs | use frame_support::construct_runtime;
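// UI-test fixture: judging by the file name, `balances::<Instance1>::{Event}`
// below deliberately omits the generic argument on `Event`, so this
// `construct_runtime!` invocation is expected to fail to compile.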
construct_runtime! {
pub enum Runtime where
Block = Block,
NodeBlock = Block,
		UncheckedExtrinsic = UncheckedExtrinsic
	{
		System: system::{Pallet},
		Balance: balances::<Instance1>::{Event},
	}
}

fn main() {}
Control.js | const net = require('net');
const mergeOptions = require('merge-options');
const EffectInterface = require('./EffectInterface');
const CustomMode = require('./CustomMode');
const { AsyncEffectInterface } = require("./AsyncEffectInterface");
const PORT = 5577;
// some controllers send their responses in multiple chunks, and we only know that we got the full message, if the controller doesn't send something for a while
const RESPONSE_TIMEOUT = 500; // 0.5 sec
const patterns = Object.freeze({
seven_color_cross_fade: 0x25,
red_gradual_change: 0x26,
green_gradual_change: 0x27,
blue_gradual_change: 0x28,
yellow_gradual_change: 0x29,
cyan_gradual_change: 0x2a,
purple_gradual_change: 0x2b,
white_gradual_change: 0x2c,
red_green_cross_fade: 0x2d,
red_blue_cross_fade: 0x2e,
green_blue_cross_fade: 0x2f,
seven_color_strobe_flash: 0x30,
red_strobe_flash: 0x31,
green_strobe_flash: 0x32,
blue_stobe_flash: 0x33,
yellow_strobe_flash: 0x34,
cyan_strobe_flash: 0x35,
purple_strobe_flash: 0x36,
white_strobe_flash: 0x37,
seven_color_jumping: 0x38,
});
/**
* @typedef {Object} QueryResponse
* @property {number} type
* @property {boolean} on
* @property {string} mode
* @property {number} speed
* @property {object} color
* @property {number} color.red
* @property {number} color.green
* @property {number} color.blue
* @property {number} warm_white
* @property {number} cold_white
*/
/*
* Helper functions
*/
function determineMode(resp) {
if (resp.readUInt8(3) === 0x61 || (resp.readUInt8(3) === 0 && resp.readUInt8(4) === 0x61)) {
return "color";
} else if (resp.readUInt8(3) === 0x62) {
return "special";
} else if (resp.readUInt8(3) === 0x60) {
return "custom";
    } else if (resp.readUInt8(3) >= 0x25 && resp.readUInt8(3) <= 0x38) { // we can ignore byte 4 here, since it is always 0x21 and resp.readUInt16BE(3) is >= 9505
return "pattern";
} else if (resp.readUInt16BE(3) >= 0x64 && resp.readUInt16BE(3) <= 0x018f) {
return "ia_pattern";
} else {
return null;
}
}
function determinePattern(resp) {
if (resp.readUInt8(3) >= 0x25 && resp.readUInt8(3) <= 0x38) {
for (let pattern_name in patterns) {
if (patterns[pattern_name] === resp.readUInt8(3)) return pattern_name;
}
}
if (resp.readUInt16BE(3) >= 0x64 && resp.readUInt16BE(3) <= 0x018f) {
return resp.readUInt16BE(3) - 99;
}
return null;
}
function delayToSpeed(delay) {
delay = clamp(delay, 1, 31);
delay -= 1; // bring into interval [0, 30]
return 100 - (delay / 30 * 100);
}
function speedToDelay(speed) {
speed = clamp(speed, 0, 100);
return (30 - ((speed / 100) * 30)) + 1;
}
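// e.g. speedToDelay(100) === 1 (fastest) and speedToDelay(0) === 31 (slowest);
// delayToSpeed maps the delay range [1, 31] back onto a 0-100 speed.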
function clamp(value, min, max) {
return Math.min(max, Math.max(min, value));
}
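
/*
 * Minimal usage sketch (illustrative; the address below is a placeholder):
 *
 *     const light = new Control("192.168.1.100", { ack: Control.ackMask(0x0F) });
 *     light.turnOn()
 *         .then(() => light.setColorWithBrightness(255, 0, 0, 50))
 *         .catch(console.error);
 */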
class Control {
/**
* Create a new Control instance. This does not connect to the controller, yet.
* @param {String} address IP or hostname of the controller
* @param {Object} options
* @param {boolean} options.wait_for_reply [Deprecated] Wait for the controllers to send data as acknowledgement. (Default: true)
* @param {boolean} options.log_all_received Print all received bytes into stdout for debug purposes (Default: false)
* @param {boolean} options.apply_masks Set the mask bit in setColor and setWarmWhite (Default: false)
* @param {boolean} options.cold_white_support Send a different version of the color change packets, which also set the cold white values (Default: false)
* @param {Number} options.connect_timeout Duration in milliseconds after which the connection attempt will be cancelled if the connection can not be established (Default: null [No timeout])
* @param {Number} options.command_timeout Duration in milliseconds after which an acknowledged command will be regarded as failed. Set to null to disable. (Default: 1000)
* @param {Object} options.ack
	 * @param {boolean} options.ack.power Wait for controller to send data to acknowledge power change commands (Default: true)
	 * @param {boolean} options.ack.color Wait for controller to send data to acknowledge color change commands (Default: true)
	 * @param {boolean} options.ack.pattern Wait for controller to send data to acknowledge built-in pattern change commands (Default: true)
* @param {boolean} options.ack.custom_pattern Wait for controller to send data to acknowledge custom pattern change commands (Default: true)
*/
constructor(address, options = {}) {
this._address = address;
if ("wait_for_reply" in options) {
options.ack = (options.wait_for_reply) ? Control.ackMask(0x0F) : Control.ackMask(0);
}
this._options = mergeOptions({
log_all_received: false,
apply_masks: false,
ack: {
power: true,
color: true,
pattern: true,
custom_pattern: true
},
connect_timeout: null,
command_timeout: 1000,
cold_white_support: false,
}, options);
this._commandQueue = [];
this._socket = null;
this._receivedData = Buffer.alloc(0);
this._receiveTimeout = null;
this._connectTimeout = null;
this._commandTimeout = null;
this._preventDataSending = false;
// store the values of the last sent/received values to enable the convenience methods
this._lastColor = { red: 0, green: 0, blue: 0 };
this._lastWW = 0;
this._lastCW = 0;
}
static get patternNames() {
return Object.keys(patterns);
}
static ackMask(mask) {
return {
power: (mask & 0x01) > 0,
color: (mask & 0x02) > 0,
pattern: (mask & 0x04) > 0,
custom_pattern: (mask & 0x08) > 0
};
}
_receiveData(empty, data) {
if (this._commandTimeout !== null) { // we have received _something_ so the command cannot timeout anymore
clearTimeout(this._commandTimeout);
this._commandTimeout = null;
}
if (empty) {
// no data, so request is instantly finished
// this can happend when a command is sent without waiting for a reply or when a timeout is reached
let finished_command = this._commandQueue[0];
if (finished_command != undefined) {
const resolve = finished_command.resolve;
if (resolve != undefined) {
resolve(this._receivedData);
}
}
// clear received data
this._receivedData = Buffer.alloc(0);
this._commandQueue.shift();
this._handleNextCommand();
} else {
this._receivedData = Buffer.concat([ this._receivedData, data ]);
if (this._receiveTimeout != null) clearTimeout(this._receiveTimeout);
// since we don't know how long the response is going to be, set a timeout after which we consider the
// whole message to be received
this._receiveTimeout = setTimeout(() => {
this._receiveData(true);
}, RESPONSE_TIMEOUT);
}
}
_handleCommandTimeout() {
this._commandTimeout = null;
let timedout_command = this._commandQueue[0];
if (timedout_command !== undefined) {
const reject = timedout_command.reject;
if (reject != undefined) {
reject(new Error("Command timed out"));
}
}
this._receivedData = Buffer.alloc(0); // just for good measure
this._commandQueue.shift();
this._handleNextCommand();
}
_handleNextCommand() {
if(this._commandQueue.length == 0) {
if(this._socket != null) this._socket.end();
this._socket = null;
} else {
let cmd = this._commandQueue[0];
if(!cmd.expect_reply) {
this._socket.write(cmd.command, "binary", () => {
this._receiveData(true);
});
} else {
this._socket.write(cmd.command, "binary", () => {
if (this._options.command_timeout === null) return;
this._commandTimeout = setTimeout(() => {
this._handleCommandTimeout();
}, this._options.command_timeout);
});
}
}
}
_sendCommand(buf, expect_reply, resolve, reject) {
// calculate checksum
let checksum = 0;
for (let byte of buf.values()) {
checksum += byte;
}
checksum &= 0xFF;
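		// e.g. the power-on frame [0x71, 0x23, 0x0f] sums to 0xa3, so 0xa3 gets appended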
// append checksum to command buffer
let command = Buffer.concat([ buf, Buffer.from( [checksum] ) ]);
if (this._commandQueue.length == 0 && this._socket == null) {
this._commandQueue.push({ expect_reply, resolve, reject, command });
this._preventDataSending = false;
this._socket = net.connect(PORT, this._address, () => {
if (this._connectTimeout != null) {
clearTimeout(this._connectTimeout);
this._connectTimeout = null;
}
if (!this._preventDataSending) { // prevent "write after end" errors
this._handleNextCommand(); // which is the "first" command in this case
}
});
this._socket.on('error', (err) => {
this._socketErrorHandler(err, reject);
});
this._socket.on('data', (data) => {
if (this._options.log_all_received) {
console.log("Received:", data.toString("hex").replace(/(\w{2})/g, "$1 "));
}
this._receiveData(false, data);
});
if (this._options.connect_timeout != null) {
this._connectTimeout = setTimeout(() => {
this._socketErrorHandler(new Error("Connection timeout reached"), reject);
}, this._options.connect_timeout);
}
} else {
this._commandQueue.push({ expect_reply, resolve, reject, command });
}
}
_socketErrorHandler(err, reject) {
this._preventDataSending = true;
reject(err);
if(this._socket != null) this._socket.end();
this._socket = null;
// also reject all commands currently in the queue
for(let c of this._commandQueue) {
let reject = c.reject;
if(reject != undefined) {
reject(err);
}
}
this._commandQueue = []; // reset commandqueue so commands dont get stuck if the controller becomes unavailable
}
_buildColorChangeCommand(red, green, blue, ww, cw, mask, permanent = true) {
const first_byte = permanent ? 0x31 : 0x41; // specifies the command
red = clamp(red, 0, 255);
green = clamp(green, 0, 255);
blue = clamp(blue, 0, 255);
ww = clamp(ww, 0, 255); // warm white
let cmd_buf;
if (this._options.cold_white_support) {
cw = clamp(cw, 0, 255); // cold white
cmd_buf = Buffer.from([ first_byte, red, green, blue, ww, cw, mask, 0x0f ]);
} else if(this._options.custom_SPI){
const temp_buf = this.toHex([red, green, blue, ww, cw, mask])
console.log(temp_buf);
			cmd_buf = Buffer.from(`b0b1b2b300010147000d4101${temp_buf}320000f063e2`, 'hex');
} else if(this._options.custom_HF) {
const postfix = (0x130 + red+green+blue) & 0xff;
cmd_buf = Buffer.from([ first_byte, red, green, blue, ww, mask, 0xf0, 0x0f, postfix ]);
} else if(this._options.custom_ZJ) {
const postfix = (0x130 + red+green+blue) & 0xff;
cmd_buf = Buffer.from([ first_byte, red, green, blue, ww, mask, 0x0f, 0x0f, postfix ]);
} else {
cmd_buf = Buffer.from([ first_byte, red, green, blue, ww, mask, 0x0f ]);
}
console.log(cmd_buf);
return cmd_buf;
}
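	// Mask semantics (relevant when apply_masks is enabled): 0xF0 updates only the
	// RGB channels, 0x0F only the white channel(s), and 0 updates everything at
	// once; see setColor() and setWarmWhite() below.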
_sendColorChangeCommand(red, green, blue, ww, cw, mask, callback) {
const cmd_buf = this._buildColorChangeCommand(red, green, blue, ww, cw, mask);
const promise = new Promise((resolve, reject) => {
this._sendCommand(cmd_buf, this._options.ack.color, resolve, reject);
}).then(data => {
return (data.length > 0 || !this._options.ack.color);
}).then(result => {
if (result) {
this._lastColor = { red, green, blue };
this._lastWW = ww;
this._lastCW = cw;
}
return result;
});
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
/**
* Sets the power state either to on or off
* @param {Boolean} on
* @param {function} callback called with (err, success)
* @returns {Promise<boolean>}
*/
setPower(on, callback) {
let cmd_buf = Buffer.from([0x71, (on) ? 0x23 : 0x24, 0x0f]);
const promise = new Promise((resolve, reject) => {
this._sendCommand(cmd_buf, this._options.ack.power, resolve, reject);
}).then(data => {
return (data.length > 0 || !this._options.ack.power); // the responses vary from controller to controller and I don't know what they mean
});
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
/**
* Convenience method to call setPower(true)
* @param {function} callback
* @returns {Promise<boolean>}
*/
turnOn(callback) {
return this.setPower(true, callback);
}
/**
* Convenience method to call setPower(false)
* @param {function} callback
* @returns {Promise<boolean>}
*/
turnOff(callback) {
return this.setPower(false, callback);
}
/**
* Sets the color and warm white values of the controller.
* Also saves the values for further calls to setColor, setWarmWhite, etc
* @param {Number} red
* @param {Number} green
* @param {Number} blue
* @param {Number} ww
* @param {function} callback called with (err, success)
* @returns {Promise<boolean>}
*/
setColorAndWarmWhite(red, green, blue, ww, callback) {
if (this._options.apply_masks) {
console.warn("WARNING: Masks are enabled, but a method which does not use them was called.");
}
return this._sendColorChangeCommand(red, green, blue, ww, this._lastCW, 0, callback);
}
/**
* Sets the color and white values of the controller.
* Also saves the values for further calls to setColor, setWarmWhite, etc
* @param {Number} red
* @param {Number} green
* @param {Number} blue
* @param {Number} ww warm white
* @param {Number} cw cold white
* @param {function} callback called with (err, success)
* @returns {Promise<boolean>}
*/
setColorAndWhites(red, green, blue, ww, cw, callback) {
if (this._options.apply_masks) {
console.warn("WARNING: Masks are enabled, but a method which does not use them was called.");
}
return this._sendColorChangeCommand(red, green, blue, ww, cw, 0, callback);
}
/**
* Sets the color values of the controller.
* Depending on apply_masks, only the color values, or color values as well as previous warm white values will be sent
* @param {Number} red
* @param {Number} green
* @param {Number} blue
* @param {function} callback called with (err, success)
* @returns {Promise<boolean>}
*/
setColor(red, green, blue, callback) {
if (this._options.apply_masks) {
return this._sendColorChangeCommand(red, green, blue, 0, 0, 0xF0, callback);
} else {
return this.setColorAndWhites(red, green, blue, this._lastWW, this._lastCW, callback);
}
}
/**
* Sets the warm white values of the controller.
* Depending on apply_masks, only the warm white values, or warm white values as well as previous color values will be sent
* @param {Number} ww
* @param {function} callback called with (err, success)
* @returns {Promise<boolean>}
*/
setWarmWhite(ww, callback) {
if (this._options.apply_masks) {
return this._sendColorChangeCommand(0, 0, 0, ww, this._lastCW, 0x0F, callback);
} else {
return this.setColorAndWarmWhite(this._lastColor.red, this._lastColor.green, this._lastColor.blue, ww, callback);
}
}
/**
* Sets the white values of the controller.
* Depending on apply_masks, only the cold white values, or cold white values as well as previous color values will be sent
* @param {Number} ww warm white
* @param {Number} cw cold white
* @param {function} callback called with (err, success)
* @returns {Promise<boolean>}
*/
setWhites(ww, cw, callback) {
if (cw != 0 && !this._options.cold_white_support) {
console.warn("WARNING: Cold white support is not enabled, but the cold white value was set to a non-zero value.");
}
if (this._options.apply_masks) {
return this._sendColorChangeCommand(0, 0, 0, ww, cw, 0x0F, callback);
} else {
return this.setColorAndWhites(this._lastColor.red, this._lastColor.green, this._lastColor.blue, ww, cw, callback);
}
}
/**
* Convenience method to scale down the colors with a brightness value between 0 and 100
* If you send red, green and blue to 0, this sets the color to white with the specified brightness (but not warm white!)
* @param {Number} red
* @param {Number} green
* @param {Number} blue
* @param {Number} brightness
* @param {function} callback
* @returns {Promise<boolean>}
*/
setColorWithBrightness(red, green, blue, brightness, callback) {
brightness = clamp(brightness, 0, 100);
let r = (255/100) * brightness;
let g = (255/100) * brightness;
let b = (255/100) * brightness;
if(red > 0 || green > 0 || blue > 0) {
r = Math.round(clamp(red, 0, 255) / 100 * brightness);
g = Math.round(clamp(green, 0, 255) / 100 * brightness);
b = Math.round(clamp(blue, 0, 255) / 100 * brightness);
}
return this.setColor(r, g, b, callback);
}
/**
* Sets the controller to display one of the predefined patterns
* @param {String} pattern Name of the pattern
* @param {Number} speed between 0 and 100
* @param {function} callback
* @returns {Promise<boolean>}
*/
setPattern(pattern, speed, callback) {
const pattern_code = patterns[pattern];
if (pattern_code == undefined) {
const promise = Promise.reject(new Error("Invalid pattern"));
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
const delay = speedToDelay(speed);
const cmd_buf = Buffer.from([0x61, pattern_code, delay, 0x0f]);
const promise = new Promise((resolve, reject) => {
this._sendCommand(cmd_buf, this._options.ack.pattern, resolve, reject);
}).then(data => {
return (data.length > 0 || !this._options.ack.pattern);
});
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
/**
* Sets the controller to display one of the predefined patterns
* @param {Number} code Code of the pattern, between 1 and 300
* @param {Number} speed between 0 and 100
* @param {function} callback
* @returns {Promise<boolean>}
*/
setIAPattern(code, speed, callback) {
if (code < 1 || code > 300) {
const promise = Promise.reject(new Error("Invalid code"));
if (callback && typeof callback === 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
let cmd_buf = null;
if(this._options.custom_SPI) {
            const temp_buf = this.toHex([code, speed]);
            cmd_buf = Buffer.from(`b0b1b2b30001010d000542${temp_buf}64bf58`, 'hex'); // Buffer.from replaces the deprecated new Buffer()
} else {
code += 99;
let bufferArray = [ 0x61 ];
bufferArray.push(code >> 8);
bufferArray.push(code & 0xff);
bufferArray.push(speed);
bufferArray.push(0x0f);
cmd_buf = Buffer.from(bufferArray);
}
const promise = new Promise((resolve, reject) => {
this._sendCommand(cmd_buf, this._options.ack.pattern, resolve, reject);
}).then(data => {
return (data.length > 0 || !this._options.ack.pattern);
});
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
/**
* Sets the controller to display a custom pattern
* @param {CustomMode} pattern
* @param {Number} speed
* @param {function} callback
* @returns {Promise<boolean>}
*/
setCustomPattern(pattern, speed, callback) {
if (!(pattern instanceof CustomMode)) {
const promise = Promise.reject(new Error("Invalid pattern"));
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
let delay = speedToDelay(speed);
// construct command buffer
let cmd_buf_values = [ 0x51 ];
for(let i = 0; i < 16; i++) {
if (pattern.colors[i]) {
cmd_buf_values.push(pattern.colors[i].red, pattern.colors[i].green, pattern.colors[i].blue, 0);
} else {
cmd_buf_values.push(1, 2, 3, 0);
}
}
cmd_buf_values.push(delay);
switch (pattern.transitionType) {
case "fade":
cmd_buf_values.push(0x3a);
break;
case "jump":
cmd_buf_values.push(0x3b);
break;
case "strobe":
cmd_buf_values.push(0x3c);
break;
}
cmd_buf_values.push(0xff, 0x0f);
const cmd_buf = Buffer.from(cmd_buf_values);
const promise = new Promise((resolve, reject) => {
this._sendCommand(cmd_buf, this._options.ack.custom_pattern, resolve, reject);
}).then(data => {
return (data.length > 0 || !this._options.ack.custom_pattern);
});
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
/**
* @deprecated Creates a new EffectInterface, which establishes a persistent connection to the controller
* @param {function} callback
* @returns {Promise<EffectInterface>}
*/
startEffectMode(callback) {
const promise = new Promise((resolve, reject) => {
new EffectInterface(this._address, PORT, this._options, (err, effect_interface) => {
if (err) return reject(err);
resolve(effect_interface);
});
});
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
/**
* Get a new instance of the AsyncEffectInterface, which is used to create and drive a persistent connection to
* the controller in order to create dynamic effects
* @returns {AsyncEffectInterface}
*/
getAsyncEffectMode() {
return new AsyncEffectInterface(this._address, PORT, this, this._options.ack.color, this._options.apply_masks);
}
/**
     * Queries the controller for its current state
* This method stores the color and ww values for future calls to setColor, setWarmWhite, etc.
* It will also set apply_masks to true for controllers which require it.
* @param {function} callback
* @returns {Promise<QueryResponse>}
*/
queryState(callback) {
let cmd_buf = Buffer.from([0x81, 0x8a, 0x8b]);
const promise = new Promise((resolve, reject) => {
this._sendCommand(cmd_buf, true, resolve, reject);
}).then(data => {
if(data.length < 14) throw new Error("Only got short reply");
const mode = determineMode(data);
let state = {
type: data.readUInt8(1),
on: (data.readUInt8(2) == 0x23),
mode: mode,
pattern: determinePattern(data),
speed: (mode !== "ia_pattern") ? delayToSpeed(data.readUInt8(5)) : data.readUInt8(5),
color: {
red: data.readUInt8(6),
green: data.readUInt8(7),
blue: data.readUInt8(8)
},
warm_white: data.readUInt8(9),
cold_white: data.readUInt8(11)
};
this._lastColor = { red: state.color.red, green: state.color.green, blue: state.color.blue };
this._lastWW = state.warm_white;
this._lastCW = state.cold_white;
switch (state.type) {
case 0x25:
this._options.apply_masks = true;
break;
case 0x35:
this._options.apply_masks = true;
this._options.cold_white_support = true;
break;
case 0x44:
this._options.apply_masks = true;
break;
// otherwise do not change any options
}
return state;
});
if (callback && typeof callback == 'function') {
promise.then(callback.bind(null, null), callback);
}
return promise;
}
toHex(byteArray) {
return Array.from(byteArray, function(byte) {
return ('0' + (byte & 0xFF).toString(16)).slice(-2);
}).join('')
}
}
module.exports = Control;
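// Usage sketch (added for illustration, not part of the original module): a
// minimal way to exercise the API above. The 'magic-home' require path and the
// Control constructor arguments are assumptions -- the constructor sits outside
// this excerpt.
/*
const { Control } = require('magic-home');
const light = new Control('192.168.1.123');
light.queryState()
    .then(state => (state.on ? light.setColor(255, 120, 0) : light.turnOn()))
    .then(ok => console.log('command acknowledged:', ok))
    .catch(console.error);
*/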
| clamp |
dev.py | from pyredis import RedisConnection
from pprint import pprint
# 1. Object Creation
# pass everything you would pass to redis.Redis()
redis_args = {
'host': 'localhost',
# 'password': 'redis1234',
# 'port': 1234,
}
with RedisConnection(**redis_args) as my_redis:
my_redis.set('key', 'value')
# 2. Redis Get and Set
# redis set
with RedisConnection(**redis_args) as my_redis:
    my_redis.set('a_string', 'my_string value')
my_redis.set('a_list', [1, 4, 3, 2])
my_redis.set('a_dict', {'key_1': 'val_1', 'key_2': 'val_2'})
# redis get
with RedisConnection(**redis_args) as my_redis:
data = my_redis.get('a_dict')
# data is already converted to a dict
print(type(data))
# 3. Handle Lists and Dicts
# get multiple keys / data
with RedisConnection(**redis_args) as my_redis:
# get all keys that start with a_
pattern = 'a_'
keys = my_redis.get_key_pattern(pattern)
print(f"list of all keys that start with {pattern}: {keys}")
data = my_redis.get_data_for_keys(keys)
print(f"data of all keys that start with {pattern}: {data}")
# or retrieve the data as a key: data dictionary for a specific pattern
print('data as key: data dictionary for a pattern:')
data = my_redis.get_keys('a_')
pprint(data)
# set all entries of a dictionary to redis
data = {'a': 12, 'b': 'myvalue'} | # you can continue working with the keys
keys = my_redis.set_dict(data)
print(my_redis.get('a'))
print(my_redis.get(keys[1]))
# 4. Fallback
# or work directly on the redis.Redis() object as you would with the official package
# by using the RedisConnection.R attribute
with RedisConnection(**redis_args) as my_redis:
print('access redis client through object...')
print(my_redis.R.get('a_dict')) | with RedisConnection(**redis_args) as my_redis: |
main.go | package main
import (
"encoding/json"
"errors"
"flag"
"fmt"
"io/ioutil"
"net/http"
"os"
"time"
"github.com/Netflix/titus-executor/api/netflix/titus"
"github.com/golang/protobuf/proto" // nolint: staticcheck
)
func checkTaskIdentityPayload(taskIdent *titus.TaskIdentity) error {
if taskIdent.Ipv4Address == nil || *taskIdent.Ipv4Address == "" {
return errors.New("field 'IPv4Address' unset")
}
if taskIdent.UnixTimestampSec == nil || *taskIdent.UnixTimestampSec == uint64(0) {
return errors.New("field 'UnixTimestampSec' unset")
}
// TODO: fill in more
return nil
}
// This implements a fake metatron identity service for testing. It fetches the task identity
// document, does a small amount of validation, and writes it in JSON to `/task-identity`.
func | () {
sleep := flag.Int("sleep", 0, "sleep for this many seconds after fetching")
flag.Parse()
res, err := http.Get("http://169.254.169.254/nflx/v1/task-identity")
if err != nil {
panic(err)
}
defer res.Body.Close() // nolint: errcheck
if res.StatusCode != 200 {
panic(fmt.Errorf("expected 200 from task identity endpoint, but got %d", res.StatusCode))
}
docBytes, err := ioutil.ReadAll(res.Body)
if err != nil {
panic(err)
}
taskIdentDoc := new(titus.TaskIdentityDocument)
if err = proto.Unmarshal(docBytes, taskIdentDoc); err != nil {
panic(err)
}
taskIdent := new(titus.TaskIdentity)
if err = proto.Unmarshal(taskIdentDoc.Identity, taskIdent); err != nil {
panic(err)
}
f, err := os.OpenFile("/task-identity", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) // nolint: gosec
if err != nil {
panic(err)
}
defer f.Close() // nolint: errcheck
if err = json.NewEncoder(f).Encode(taskIdent); err != nil {
panic(err)
}
if err = checkTaskIdentityPayload(taskIdent); err != nil {
panic(err)
}
if *sleep != 0 {
time.Sleep(time.Duration(*sleep) * time.Second)
}
}
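// Manual check (added for illustration): the document consumed above can also be
// fetched by hand from inside a task. Both commands below are assumptions about
// the operator's tooling, not part of this program.
//
//	curl -s http://169.254.169.254/nflx/v1/task-identity | protoc --decode_raw
//	cat /task-identity | jq .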
| main |
grep.rs | use std::path::PathBuf;
use std::process::Command;
use anyhow::{Context, Result};
use structopt::StructOpt;
use filter::{
matcher::{Bonus, MatchType},
subprocess::Exec,
FilterContext, Source,
};
use icon::IconPainter;
use utility::is_git_repo;
use crate::app::Params;
use crate::cmd::cache::{cache_exists, send_response_from_cache, SendResponse};
use crate::process::light::{set_current_dir, LightCommand};
use crate::tools::rg::JsonLine;
const RG_ARGS: [&str; 7] = [
"rg",
"--column",
"--line-number",
"--no-heading",
"--color=never",
"--smart-case",
"",
];
// Ref https://github.com/liuchengxu/vim-clap/issues/533
#[cfg(windows)]
const RG_EXEC_CMD: &str = "rg --column --line-number --no-heading --color=never --smart-case '' .";
#[cfg(not(windows))]
const RG_EXEC_CMD: &str = "rg --column --line-number --no-heading --color=never --smart-case ''";
#[derive(StructOpt, Debug, Clone)]
pub struct Grep {
/// Specify the query string for GREP_CMD.
#[structopt(index = 1, short, long)]
grep_query: String,
/// Specify the grep command to run, normally rg will be used.
///
    /// In case clap cannot recognize such an option: --cmd "rg --vimgrep ... "fn ul"".
/// |-----------------|
/// this can be seen as an option by mistake.
#[structopt(short, long, required_if("sync", "true"))]
grep_cmd: Option<String>,
/// Delegate to -g option of rg
#[structopt(short = "g", long = "glob")]
glob: Option<String>,
/// Specify the working directory of CMD
#[structopt(long = "cmd-dir", parse(from_os_str))]
cmd_dir: Option<PathBuf>,
    /// Read input from a cached grep tempfile; only absolute file paths are supported.
#[structopt(long = "input", parse(from_os_str))]
input: Option<PathBuf>,
/// Synchronous filtering, returns after the input stream is complete.
#[structopt(short, long)]
sync: bool,
}
fn prepare_sync_grep_cmd(cmd_str: &str, cmd_dir: Option<PathBuf>) -> (Command, Vec<&str>) {
let args = cmd_str
.split_whitespace()
// If cmd_str contains a quoted option, that's problematic.
//
// Ref https://github.com/liuchengxu/vim-clap/issues/595
.map(|s| {
if s.len() > 2 {
if s.starts_with('"') && s.chars().nth_back(0).unwrap() == '"' {
&s[1..s.len() - 1]
} else {
s
}
} else {
s
}
})
.chain(std::iter::once("--json")) // Force using json format.
.collect::<Vec<&str>>();
let mut cmd = Command::new(args[0]);
set_current_dir(&mut cmd, cmd_dir);
(cmd, args)
}
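// A quick sanity check (added for illustration) of the quote stripping above;
// assumes it sits in this module so `prepare_sync_grep_cmd` is in scope.
#[cfg(test)]
mod sync_cmd_tests {
    use super::*;

    #[test]
    fn strips_surrounding_quotes_and_appends_json() {
        let (_cmd, args) = prepare_sync_grep_cmd(r#"rg --max-columns "512""#, None);
        // `"512"` loses its surrounding quotes; `--json` is always appended.
        assert_eq!(args, vec!["rg", "--max-columns", "512", "--json"]);
    }
}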
impl Grep {
pub fn run(&self, params: Params) -> Result<()> {
if self.sync {
self.sync_run(params)?;
} else {
self.dyn_run(params)?;
}
Ok(())
}
    /// Runs the grep command and returns once its output stream is complete.
    ///
    /// Writes the output to the cache file if necessary.
fn sync_run(
&self,
Params {
number,
winwidth,
icon_painter,
..
}: Params,
) -> Result<()> {
let grep_cmd = self
.grep_cmd
.clone()
.context("--grep-cmd is required when --sync is on")?;
let (mut cmd, mut args) = prepare_sync_grep_cmd(&grep_cmd, self.cmd_dir.clone());
        // We split out the grep opts and query to work around a possible escaping issue in clap.
args.push(&self.grep_query);
if let Some(ref g) = self.glob {
args.push("-g");
args.push(g);
}
// currently vim-clap only supports rg.
// Ref https://github.com/liuchengxu/vim-clap/pull/60
if cfg!(windows) {
args.push(".");
}
cmd.args(&args[1..]);
let mut light_cmd = LightCommand::new_grep(&mut cmd, None, number, None, None);
let execute_info = light_cmd.execute(&args)?;
let enable_icon = icon_painter.is_some();
let (lines, indices): (Vec<String>, Vec<Vec<usize>>) = execute_info
.lines
.iter()
.filter_map(|s| serde_json::from_str::<JsonLine>(s).ok())
.map(|line| line.build_grep_line(enable_icon))
.unzip();
let total = lines.len();
let (lines, indices, truncated_map) = printer::truncate_grep_lines(
lines,
indices,
winwidth.unwrap_or(80),
if enable_icon { Some(2) } else { None },
);
if truncated_map.is_empty() {
utility::println_json!(total, lines, indices);
} else {
utility::println_json!(total, lines, indices, truncated_map);
}
Ok(())
}
/// Runs grep using the dyn filter.
///
    /// Tries the cache first.
fn dyn_run(
&self,
Params {
number,
winwidth,
icon_painter,
no_cache,
}: Params,
) -> Result<()> {
let do_dyn_filter = |source: Source<std::iter::Empty<_>>| {
filter::dyn_run(
&self.grep_query,
source,
FilterContext::new(
None,
number,
winwidth,
icon_painter,
MatchType::IgnoreFilePath,
),
vec![Bonus::None],
)
};
let source: Source<std::iter::Empty<_>> = if let Some(ref tempfile) = self.input {
Source::File(tempfile.clone())
} else if let Some(ref dir) = self.cmd_dir {
if !no_cache {
if let Ok((cached_file, _)) = cache_exists(&RG_ARGS, dir) {
return do_dyn_filter(Source::File(cached_file));
}
}
Exec::shell(RG_EXEC_CMD).cwd(dir).into()
} else {
Exec::shell(RG_EXEC_CMD).into()
};
do_dyn_filter(source)
}
}
#[derive(StructOpt, Debug, Clone)]
pub struct RipGrepForerunner {
/// Specify the working directory of CMD
#[structopt(long = "cmd-dir", parse(from_os_str))]
cmd_dir: Option<PathBuf>,
/// Specify the threshold for writing the output of command to a tempfile.
#[structopt(long = "output-threshold", default_value = "30000")]
output_threshold: usize,
}
impl RipGrepForerunner {
/// Skip the forerunner job if `cmd_dir` is not a git repo.
///
/// Only spawn the forerunner job for git repo for now.
fn should_skip(&self) -> bool {
if let Some(ref dir) = self.cmd_dir {
if !is_git_repo(dir) {
return true;
}
} else if let Ok(dir) = std::env::current_dir() {
if !is_git_repo(&dir) {
return true;
}
}
false
}
pub fn run(
self, | Params {
number,
icon_painter,
no_cache,
..
}: Params,
) -> Result<()> {
if !no_cache {
if let Some(ref dir) = self.cmd_dir {
if let Ok((cache, total)) = cache_exists(&RG_ARGS, dir) {
send_response_from_cache(
&cache,
total,
SendResponse::Json,
Some(IconPainter::Grep),
);
return Ok(());
}
}
}
if self.should_skip() {
return Ok(());
}
let mut cmd = Command::new(RG_ARGS[0]);
// Do not use --vimgrep here.
cmd.args(&RG_ARGS[1..]);
set_current_dir(&mut cmd, self.cmd_dir.clone());
let mut light_cmd = LightCommand::new_grep(
&mut cmd,
self.cmd_dir,
number,
icon_painter,
Some(self.output_threshold),
);
light_cmd.execute(&RG_ARGS)?.print();
Ok(())
}
} | |
user.js | module.exports = router => {
const userController = require('../../plugins/wechat')
const mongoose = require('mongoose')
const Topic = mongoose.model('Topic')
const Team = mongoose.model('Team')
const User = mongoose.model('User')
const Comment = mongoose.model('Comment')
const Forward = mongoose.model('Forward')
const Message = mongoose.model('Message')
router.post("/openid", userController.login)
    //Get city info via reverse geocoding
router.post('/reverseGeocoder', userController.reverseGeocoder)
    //Update the user's profile info when they re-authorize
router.put('/user', async (req, res) => {
const model = await User.findByIdAndUpdate(req.body.userid, {
"$set": {
"nickName": req.body.nickName,
"avatarUrl": req.body.avatarUrl
}
}, {
"fields": { "nickName": 1, "avatarUrl": 1 },
"new": true
})
res.send(model)
})
    //Get user info, topics, and teams
router.post('/user', async (req, res) => {
let data = await User.findById(req.body.id, { openid: 0 })
// .populate('topics', 'content images locationName location good')
.populate({
path: 'topics',
select: 'content images locationName location good forwardCount owner createdAt',
populate: { path: 'owner', select: 'nickName avatarUrl' }
})
.populate({
path: 'teams',
select: 'postUrl locationName good collect forwardCount location owner createdAt status memberNum hasJoin',
populate: { path: 'owner', select: 'nickName avatarUrl' }
})
.populate({
path: 'joinedTeams',
select: 'postUrl locationName good collect forwardCount location owner createdAt status memberNum hasJoin',
populate: { path: 'owner', select: 'nickName avatarUrl' }
})
//fail // .populate('teams', 'postUrl locationName good collect location owner.nickName owner.avatarUrl')
.lean()
const fans = await User.find({
follow: req.body.id
}, { _id: 1 })
if (data.follow)
data.followCount = data.follow.length
else
data.followCount = 0
if (fans)
data.fansCount = fans.length
else
data.fansCount = 0
// data.teams.commentCount = comments.length
// data.teams.forEach(item => {
// item.goodCount = item.good.length
// item.collectCount = item.collect.length
// });
// data.topics.forEach(item => {
// item.goodCount = item.good.length
// });
        //Forwarded topics and teams
let forwards_topic = await Forward.find({
owner: req.body.id,
ref: 'Topic'
}).populate({
path: 'from',
model: 'Topic',
populate: { path: 'owner', select: 'nickName avatarUrl' }
}).populate('owner', 'nickName avatarUrl').lean()
let forwards_team = await Forward.find({
owner: req.body.id,
ref: 'Team'
}).populate({
path: 'from',
model: 'Team',
populate: { path: 'owner', select: 'nickName avatarUrl' }
}).populate('owner', 'nickName avatarUrl').lean()
let arr = []
forwards_topic.forEach((item) => {
item.from&&arr.push(item.from)
})
forwards_team.forEach((item) => {
item.from&&arr.push(item.from)
})
// console.log(forwards_topic[1].from.distance)
try {
let total = data.teams.concat(data.topics||[]).concat(arr).concat(data.joinedTeams||[])
let p = [
userController.addDistance(req.body.lat, req.body.lng, total),
userController.addCommentCount(total)
]
await Promise.all(p)
} catch (err) {
console.log(err)
}
        //Sort by creation time, newest first
data.topics.push.apply(data.topics, forwards_topic);
data.topics.sort(function (a, b) {
let date1 = a['createdAt'] || '2019-10-18T17:51:17.846Z'
let date2 = b['createdAt'] || '2019-10-18T17:51:17.846Z'
return date1 > date2 ? -1 : 1
})
data.teams.push.apply(data.teams, forwards_team);
data.teams.sort(function (a, b) {
let date1 = a['createdAt'] || '2019-10-18T17:51:17.846Z'
let date2 = b['createdAt'] || '2019-10-18T17:51:17.846Z'
return date1 > date2 ? -1 : 1
})
        //Unread messages
const messages = await Message.find({
"$or": [
{ participant: data._id, status: 1 },
{ participant: data._id, status: 2 },
{ participant: data._id, status: 4 },
{ owner: data._id, status: 0 }
]
}).lean()
messages != '' && messages.sort(function (a, b) {
let date1 = a['updatedAt'] || '2019-10-18T17:51:17.846Z'
let date2 = b['updatedAt'] || '2019-10-18T17:51:17.846Z'
return date1 > date2 ? -1 : 1
})
let unRead = false
const latestRM = data.latestReadMsg ? data.latestReadMsg.toString() : ''
if (messages != '' && messages[0]._id != latestRM) {
unRead = true
}
data.unRead = unRead
res.send(data)
})
    //Get a user's basic profile info
router.get('/userBasicInfo/:id', async (req, res) => {
const model = await User.findById(req.params.id,
{ no: 1, nickName: 1, avatarUrl: 1, virtualAvatarUrl: 1, intro: 1, phone: 1, wechat: 1, collectionsOpen: 1 }).lean()
res.send(model)
})
    //Update a user's basic profile info
router.put('/userBasicInfo', async (req, res) => {
await User.findByIdAndUpdate(req.body.userid, {
"$set": req.body.info
})
res.send({ success: true })
})
    //Get the users I follow
router.get('/user/follow/:id', async (req, res) => {
const data = await User.findById(req.params.id, { follow: 1 })
.populate('follow', 'nickName avatarUrl intro')
.lean()
res.send(data)
})
    //Get my followers
router.get('/user/fans/:id', async (req, res) => {
const data = await User.find({
follow: req.params.id
}, { nickName: 1, avatarUrl: 1, intro: 1 })
.lean()
res.send(data)
})
    //Follow a user
router.post('/follow', async (req, res) => {
await User.findByIdAndUpdate(req.body.myid, {
"$addToSet": {
"follow": req.body.userid
}
})
res.send({ success: true, msg: '关注成功' })
})
    //Unfollow a user
router.post('/followCancel', async (req, res) => {
await User.findByIdAndUpdate(req.body.myid, {
"$pull": {
"follow": req.body.userid
}
})
res.send({ success: true })
})
    //Get the list of collected (bookmarked) teams
router.post('/user/collection', async (req, res) => {
const collections = await Team.find({
collect: req.body.id
}, { postUrl: 1, locationName: 1, good: 1, collect: 1, location: 1, createdAt: 1, forwardCount:1, status:1, memberNum:1, hasJoin:1 })
.populate('owner', 'nickName avatarUrl intro').lean()
await userController.addDistance(req.body.lat, req.body.lng, collections)
await userController.addCommentCount(collections)
// const data = collections.map(item => {
// item.goodCount = item.good.length
// item.collectCount = item.collect.length
// return item
// })
res.send(collections)
})
| //获取评论信息
router.get('/messages/comment/:id', async (req, res) => {
const teams = await Team.find({
owner: req.params.id
}, { _id: 1 })
const teamIdArr = teams.map(v => v._id)
const topics = await Topic.find({
owner: req.params.id
}, { _id: 1 })
const topicIdArr = topics.map(v => v._id)
const idArr = teamIdArr.concat(topicIdArr)
const comments = await Comment.find({
to: { $in: idArr }
}).populate('owner', 'nickName avatarUrl')
.lean()
res.send(comments)
})
    //Remove a previously joined activity from history
router.delete('/joinedTeam', async (req, res) => {
await User.findByIdAndUpdate(req.body.userid, {
"$pull": {
"joinedTeams": req.body.teamid
}
})
res.send({ success: true })
})
} | |
exceptions.py | # -*- coding: utf-8 -*-
"""Exceptions used in this module"""
class CoincError(Exception):
|
class ConfigError(CoincError):
"""Raised when there are invalid value filled in Configuration Sheet
Extends:
CoincError
"""
pass
class QueryError(CoincError):
"""Raised when invalid query were given
Extends:
CoincError
"""
pass
class AppIDError(CoincError):
"""Raised when App ID can not be used
Extends:
CoincError
"""
pass
class ApiError(CoincError):
"""Raised when API is unreachable or return bad response
Extends:
CoincError
"""
pass
class UnknownPythonError(CoincError):
"""Raised when Python runtime version can not be correctly detacted
Extends:
CoincError
"""
pass
| """Base Class used to declare other errors for Coinc
Extends:
Exception
"""
pass |
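# Usage sketch (added for illustration): because every error above derives from
# CoincError, callers can catch the base class as a catch-all. `fetch_rates`
# and `notify` are hypothetical helpers, not part of this module.
#
# try:
#     rates = fetch_rates(app_id)
# except AppIDError:
#     notify("App ID rejected, check your configuration")
# except CoincError as err:  # ConfigError, QueryError, ApiError, ...
#     notify("Coinc failed: {}".format(err))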
geolite2.go | package geolite2
import (
"math"
"net"
"go.uber.org/zap"
"github.com/oschwald/geoip2-golang"
"github.com/vu-ngoc-son/XDP-p2p-router/database"
"github.com/vu-ngoc-son/XDP-p2p-router/internal/common"
"github.com/vu-ngoc-son/XDP-p2p-router/internal/logger"
)
type GeoLite2 struct {
ASN, City, Country *geoip2.Reader
HostPublicIP string
HostLongitude, HostLatitude float64
HostASN uint
HostISP string
HostCountryCode string
}
func NewGeoLite2(asnDBPath, cityDBPath, countryDBPath, hostPublicIP string) (*GeoLite2, error) {
asnDB, err := geoip2.Open(asnDBPath)
if err != nil {
return nil, err
}
cityDB, err := geoip2.Open(cityDBPath)
if err != nil {
return nil, err
}
countryDB, err := geoip2.Open(countryDBPath)
if err != nil {
return nil, err
}
asnRecord, err := asnDB.ASN(net.ParseIP(hostPublicIP))
if err != nil {
return nil, err
}
cityRecord, err := cityDB.City(net.ParseIP(hostPublicIP))
if err != nil {
return nil, err
}
countryRecord, err := countryDB.Country(net.ParseIP(hostPublicIP))
if err != nil {
return nil, err
}
return &GeoLite2{
ASN: asnDB,
City: cityDB,
Country: countryDB,
HostPublicIP: hostPublicIP,
HostLatitude: cityRecord.Location.Latitude,
HostLongitude: cityRecord.Location.Longitude,
HostASN: asnRecord.AutonomousSystemNumber,
HostISP: asnRecord.AutonomousSystemOrganization,
HostCountryCode: countryRecord.Country.IsoCode,
}, nil
}
func (g *GeoLite2) Close() {
myLogger := logger.GetLogger()
err := g.ASN.Close()
if err != nil {
myLogger.Error("failed to close asn db", zap.Error(common.ErrFailedToCloseGeoLite2))
}
err = g.Country.Close()
if err != nil {
myLogger.Error("failed to close country db", zap.Error(common.ErrFailedToCloseGeoLite2))
}
err = g.City.Close()
if err != nil {
myLogger.Error("failed to close city db", zap.Error(common.ErrFailedToCloseGeoLite2))
}
myLogger.Info("close geolite2 dbs successfully")
}
func (g *GeoLite2) IPInfo(IP net.IP, IPNumber uint32, rxPkt, rxByte uint64) (*database.Peers, error) {
myLogger := logger.GetLogger()
if common.IsPrivateIP(IP) {
return &database.Peers{
IpAddress: IP.String(),
IpNumber: IPNumber,
Asn: g.HostASN,
Isp: g.HostISP,
CountryCode: g.HostCountryCode,
Longitude: g.HostLongitude,
Latitude: g.HostLatitude,
Distance: 0.0,
TotalBytes: rxByte,
TotalPackets: rxPkt,
}, nil
}
asnRecord, err := g.ASN.ASN(IP)
if err != nil {
myLogger.Error("error while querying asn", zap.Error(common.ErrFailedToQueryGeoLite2))
return nil, err
}
cityRecord, err := g.City.City(IP)
if err != nil {
myLogger.Error("error while querying city", zap.Error(common.ErrFailedToQueryGeoLite2))
return nil, err
}
latitude := cityRecord.Location.Latitude
longitude := cityRecord.Location.Longitude
distance := g.DistanceToHost(latitude, longitude)
countryRecord, err := g.Country.Country(IP)
if err != nil {
myLogger.Error("error while querying country", zap.Error(common.ErrFailedToQueryGeoLite2))
return nil, err
}
countryCode := countryRecord.Country.IsoCode
if countryCode == "" {
countryCode = "OTHER"
}
myLogger.Info("get peer info successfully", zap.String("peer_address", IP.String()), zap.Float64("distance", distance))
return &database.Peers{
IpAddress: IP.String(),
IpNumber: IPNumber,
Asn: asnRecord.AutonomousSystemNumber,
Isp: asnRecord.AutonomousSystemOrganization,
CountryCode: countryCode,
Longitude: longitude,
Latitude: latitude,
Distance: distance,
TotalBytes: rxByte,
TotalPackets: rxPkt,
}, nil
}
func (g *GeoLite2) HostInfo() (*database.Hosts, error) {
myLogger := logger.GetLogger()
IP := net.ParseIP(g.HostPublicIP)
asnRecord, err := g.ASN.ASN(IP)
if err != nil {
myLogger.Error("error while querying asn", zap.Error(common.ErrFailedToQueryGeoLite2))
return nil, err
}
cityRecord, err := g.City.City(IP)
if err != nil {
myLogger.Error("error while querying city", zap.Error(common.ErrFailedToQueryGeoLite2))
return nil, err
}
countryRecord, err := g.Country.Country(IP)
if err != nil {
myLogger.Error("error while querying country", zap.Error(common.ErrFailedToQueryGeoLite2))
return nil, err
}
latitude := cityRecord.Location.Latitude
longitude := cityRecord.Location.Longitude
distance := 0.0
myLogger.Info("get host info successfully")
return &database.Hosts{
Ip: g.HostPublicIP,
Asn: asnRecord.AutonomousSystemNumber,
Isp: asnRecord.AutonomousSystemOrganization,
CountryCode: countryRecord.Country.IsoCode,
Longitude: longitude,
Latitude: latitude,
Distance: distance,
}, nil
}
// DistanceToHost references: https://www.geodatasource.com/developers/go
func (g *GeoLite2) DistanceToHost(latitude, longitude float64) float64 {
myLogger := logger.GetLogger()
const PI float64 = math.Pi
radianLat1 := PI * latitude / 180
radianLat2 := PI * g.HostLatitude / 180
theta := longitude - g.HostLongitude
radianTheta := PI * theta / 180
dist := math.Sin(radianLat1)*math.Sin(radianLat2) + math.Cos(radianLat1)*math.Cos(radianLat2)*math.Cos(radianTheta)
if dist > 1 {
dist = 1
}
dist = math.Acos(dist)
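	// Convert the central angle to kilometres: radians -> degrees, then degrees ->
	// arc-minutes (x60; one arc-minute of a great circle is about one nautical
	// mile = 1.1515 statute miles), then statute miles -> km (x1.609344).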
dist = dist * 180 / PI
dist = dist * 60 * 1.1515
dist = dist * 1.609344 //Kms
	if math.IsNaN(dist) { // dist == math.NaN() is always false: NaN never equals itself
		myLogger.Debug("got NaN result when calculating host distance",
zap.Float64("longitude", longitude), | return 0.0
}
myLogger.Debug("distance calculated",
zap.Float64("longitude", longitude),
zap.Float64("latitude", latitude),
zap.Float64("distance (Kms)", dist),
)
return dist
} | zap.Float64("latitude", latitude),
zap.Float64("distance (Kms)", dist),
) |
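// Sanity sketch (added for illustration): with identical coordinates the
// law-of-cosines term is sin²φ + cos²φ = 1 and acos(1) = 0, so the distance is
// ~0 km. Assumes a _test.go file with "testing" imported and the package
// logger initialised before the call.
//
//	func TestDistanceToHostSamePoint(t *testing.T) {
//		g := &GeoLite2{HostLatitude: 21.0278, HostLongitude: 105.8342}
//		if d := g.DistanceToHost(21.0278, 105.8342); d > 1e-3 {
//			t.Fatalf("expected ~0 km for identical coordinates, got %f", d)
//		}
//	}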
providers.py | # Copyright (c) 2011, Found IT A/S and Piped Project Contributors.
# See LICENSE for details.
from twisted.application import internet, service, strports
from twisted.conch import manhole, manhole_ssh, error as conch_error
from twisted.conch.insults import insults
from twisted.conch.ssh import keys
from twisted.cred import error, portal
from twisted.internet import defer
from twisted.python import reflect
from zope import interface
from piped import resource
class ManholeProvider(object, service.MultiService):
""" Embeds manholes in Piped services.
Configuration example::
manholes:
my_manhole:
enabled: true # defaults to true
port: 10022 # defaults to 10022
# or instead of port, you may use a strport
# listen: 10222
keys:
public_key_file: path # or public_key: str
private_key_file: path # or private_key: str
checkers: # multiple checkers are allowed
inmemory:
checker: twisted.cred.checkers.InMemoryUsernamePasswordDatabaseDontUse
arguments:
username: password
"""
interface.classProvides(resource.IResourceProvider)
def __init__(self):
service.MultiService.__init__(self)
def configure(self, runtime_environment):
self.setName('manhole')
self.setServiceParent(runtime_environment.application)
self.manholes = runtime_environment.get_configuration_value('manholes', dict())
for manhole_name, manhole_configuration in self.manholes.items():
if not manhole_configuration.get('enabled', True):
continue # this manhole has been disabled, so don't create it
manholeservice = ManholeService(manhole_configuration)
manholeservice.setName(manhole_name)
manholeservice.setServiceParent(self)
manholeservice.configure(runtime_environment)
class PipedManhole(manhole.ColoredManhole):
""" A colored manhole that handles a few extra key combinations. """
def connectionMade(self):
r = manhole.ColoredManhole.connectionMade(self)
        # add a key handler for what my MacBook sends when I'm hitting backspace
self.keyHandlers['\x08'] = self.handle_BACKSPACE
self.keyHandlers['\x01'] = self.handle_HOME # CTRL-A
self.keyHandlers['\x05'] = self.handle_END # CTRL-E
self.keyHandlers['\x15'] = self.handle_BACKSPACE_LINE # CTRL-U
self.keyHandlers['\x17'] = self.handle_BACKSPACE_WORD # CTRL-W
return r
def handle_BACKSPACE_LINE(self):
while self.lineBufferIndex > 0:
self.handle_BACKSPACE()
WORD_DELIMITERS = """ .;:({['\""""
def handle_BACKSPACE_WORD(self):
self.handle_BACKSPACE()
while self.lineBufferIndex > 0 and self.lineBuffer[self.lineBufferIndex-1] not in self.WORD_DELIMITERS:
self.handle_BACKSPACE()
class PipedConchFactory(manhole_ssh.ConchFactory):
""" A conch factory that can be initialized with an explicit pair of
public_key, private_key to use.
"""
def __init__(self, portal, private_key=None, public_key=None, **kw):
manhole_ssh.ConchFactory.__init__(self, portal)
if private_key:
self.privateKeys = {
'ssh-rsa' : keys.Key.fromString(private_key)
}
if public_key:
self.publicKeys = {
'ssh-rsa' : keys.Key.fromString(public_key)
}
class ManholeService(service.MultiService):
""" A configurable manhole service.
See ManholeProvider for a configuration example.
"""
protocolFactory = PipedManhole
conchFactory = PipedConchFactory
def __init__(self, manhole_configuration):
service.MultiService.__init__(self)
self.manhole_configuration = manhole_configuration
def configure(self, runtime_environment):
self.runtime_environment = runtime_environment
self.key_config = self._normalize_key_config(self.manhole_configuration.get('keys', dict()))
factory = self._make_factory()
listen = str(self.manhole_configuration.get('listen', self.manhole_configuration.get('port', 10022)))
tcpservice = strports.service(listen, factory)
tcpservice.setName(self.name)
tcpservice.setServiceParent(self)
self._configure_dependencies(self.manhole_configuration.get('dependencies', dict()))
def _configure_dependencies(self, dependency_map):
for dependency_key, dependency_configuration in dependency_map.items():
if isinstance(dependency_configuration, basestring):
dependency_configuration = dependency_map[dependency_key] = dict(provider=dependency_configuration)
self.dependencies = self.runtime_environment.create_dependency_map(self, **dependency_map)
def _normalize_key_config(self, key_config):
private_key_file = key_config.pop('private_key_file', None)
public_key_file = key_config.pop('public_key_file', None)
if private_key_file:
private_key_file = getattr(private_key_file, 'path', private_key_file)
key_config['private_key'] = open(private_key_file).read()
if public_key_file:
public_key_file = getattr(public_key_file, 'path', public_key_file)
key_config['public_key'] = open(public_key_file).read()
return key_config
def _make_factory(self):
checkers = self._make_checkers()
realm = PipedTerminalRealm()
portal_ = MultipleCheckersPortal(realm, checkers)
def chainProtocolFactory():
return insults.ServerProtocol(self.protocolFactory, namespace=self._get_namespace())
realm.chainedProtocolFactory = chainProtocolFactory
factory = self.conchFactory(portal_, **self.key_config)
return factory
def _make_checkers(self):
cs = list()
for checker_config in self.manhole_configuration['checkers'].values():
checker_name = checker_config.pop('checker')
checker_factory = reflect.namedAny(checker_name)
checker = checker_factory(**checker_config.get('arguments', dict()))
cs.append(checker)
return cs
def _get_namespace(self):
namespace = dict(runtime_environment=self.runtime_environment, dependencies=self.dependencies)
for key, value in self.manhole_configuration.get('namespace', dict()).items():
namespace[key] = reflect.namedAny(value)
return namespace | chainedProtocol.avatar = avatar
manhole_ssh.TerminalSessionTransport.__init__(self, proto, chainedProtocol, avatar, width, height)
class PipedTerminalRealm(manhole_ssh.TerminalRealm):
transportFactory = PipedTerminalSessionTransport
def _getAvatar(self, avatarId):
avatar = manhole_ssh.TerminalRealm._getAvatar(self, avatarId)
avatar.avatarId = avatarId
return avatar
class MultipleCheckersPortal(portal.Portal):
""" A Portal subclass that authenticates against multiple checkers. """
def registerChecker(self, checker, *credentialInterfaces):
if not credentialInterfaces:
credentialInterfaces = checker.credentialInterfaces
for credentialInterface in credentialInterfaces:
self.checkers.setdefault(credentialInterface, list()).append(checker)
@defer.inlineCallbacks
def login(self, credentials, mind, *interfaces):
for i in self.checkers:
if i.providedBy(credentials):
for checker in self.checkers[i]:
try:
avatar_id = yield checker.requestAvatarId(credentials)
avatar = yield self.realm.requestAvatar(avatar_id, mind, *interfaces)
defer.returnValue(avatar)
except conch_error.ValidPublicKey:
# This is how SSHPublicKeyDatabase says "Your public key is okay, now prove you have
# the private key to continue".
raise
except error.UnauthorizedLogin:
continue
raise error.UnauthorizedLogin()
ifac = interface.providedBy(credentials)
raise error.UnhandledCredentials("No checker for %s" % ', '.join(map(reflect.qual, ifac))) |
class PipedTerminalSessionTransport(manhole_ssh.TerminalSessionTransport):
def __init__(self, proto, chainedProtocol, avatar, width, height): |
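# Usage sketch (added for illustration): with the configuration from the
# ManholeProvider docstring above, an operator could attach with a stock
# OpenSSH client and land in the colored REPL, e.g.:
#
#   ssh -p 10022 username@localhost
#
# The REPL namespace then contains `runtime_environment` plus any configured
# `dependencies` and `namespace` entries (see _get_namespace).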
q_lambda.py | import numpy as np
from mushroom_rl.algorithms.value.td import TD
from mushroom_rl.utils.eligibility_trace import EligibilityTrace
from mushroom_rl.utils.table import Table
class | (TD):
"""
Q(Lambda) algorithm.
"Learning from Delayed Rewards". Watkins C.J.C.H.. 1989.
"""
def __init__(self, mdp_info, policy, learning_rate, lambda_coeff,
trace='replacing'):
"""
Constructor.
Args:
lambda_coeff (float): eligibility trace coefficient;
trace (str, 'replacing'): type of eligibility trace to use.
"""
Q = Table(mdp_info.size)
self._lambda = lambda_coeff
self.e = EligibilityTrace(Q.shape, trace)
self._add_save_attr(
_lambda='primitive',
e='pickle'
)
super().__init__(mdp_info, policy, Q, learning_rate)
def _update(self, state, action, reward, next_state, absorbing):
q_current = self.Q[state, action]
q_next = np.max(self.Q[next_state, :]) if not absorbing else 0.
delta = reward + self.mdp_info.gamma*q_next - q_current
self.e.update(state, action)
self.Q.table += self.alpha(state, action) * delta * self.e.table
self.e.table *= self.mdp_info.gamma * self._lambda
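        # In standard notation (added note), the update above is Watkins' Q(lambda):
        #   delta_t  = r_{t+1} + gamma * max_{a'} Q(s_{t+1}, a') - Q(s_t, a_t)
        #   Q(s, a) += alpha(s_t, a_t) * delta_t * e(s, a)   for every pair (s, a)
        #   e(s, a)  = gamma * lambda * e(s, a)              after each step
        # so each state-action pair is credited in proportion to its eligibility trace.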
def episode_start(self):
self.e.reset()
super().episode_start()
| QLambda |
index.ts | export * from './Popconfirm.style'; |
||
uuid.go | package uuid
import guid "github.com/satori/go.uuid"
type GUID string
func New() GUID {
return GUID(guid.NewV4().String())
}
func | (str string) error {
_, err := guid.FromString(str)
return err
}
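// Usage sketch (added for illustration):
//
//	id := uuid.New()                        // random RFC 4122 v4 GUID
//	if err := uuid.IsGUID(string(id)); err != nil {
//		// the string is not a valid GUID
//	}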
| IsGUID |
converter.go | package translator
import (
"fmt"
"strings"
"k8s.io/apimachinery/pkg/util/sets"
"github.com/gogo/protobuf/types"
"github.com/gogo/protobuf/proto"
errors "github.com/rotisserie/eris"
gatewayv1 "github.com/solo-io/gloo/projects/gateway/pkg/api/v1"
"github.com/solo-io/gloo/projects/gateway/pkg/defaults"
gloov1 "github.com/solo-io/gloo/projects/gloo/pkg/api/v1"
matchersv1 "github.com/solo-io/gloo/projects/gloo/pkg/api/v1/core/matchers"
glooutils "github.com/solo-io/gloo/projects/gloo/pkg/utils"
"github.com/solo-io/solo-kit/pkg/api/v1/resources"
"github.com/solo-io/solo-kit/pkg/api/v1/resources/core"
"github.com/solo-io/solo-kit/pkg/api/v2/reporter"
)
const (
unnamedRouteName = "<unnamed>"
defaultTableWeight = 0
)
var (
NoActionErr = errors.New("invalid route: route must specify an action")
MatcherCountErr = errors.New("invalid route: routes with delegate actions must omit or specify a single matcher")
MissingPrefixErr = errors.New("invalid route: routes with delegate actions must use a prefix matcher")
InvalidPrefixErr = errors.New("invalid route: route table matchers must begin with the prefix of their parent route's matcher")
InvalidHeaderErr = errors.New("invalid route: route table matchers must have all headers that were specified on their parent route's matcher")
InvalidQueryParamErr = errors.New("invalid route: route table matchers must have all query params that were specified on their parent route's matcher")
InvalidMethodErr = errors.New("invalid route: route table matchers must have all methods that were specified on their parent route's matcher")
DelegationCycleErr = func(cycleInfo string) error {
return errors.Errorf("invalid route: delegation cycle detected: %s", cycleInfo)
}
InvalidRouteTableForDelegatePrefixErr = func(delegatePrefix, prefixString string) error {
return errors.Wrapf(InvalidPrefixErr, "required prefix: %v, prefix: %v", delegatePrefix, prefixString)
}
InvalidRouteTableForDelegateHeadersErr = func(delegateHeaders, childHeaders []*matchersv1.HeaderMatcher) error {
return errors.Wrapf(InvalidHeaderErr, "required headers: %v, headers: %v", delegateHeaders, childHeaders)
}
InvalidRouteTableForDelegateQueryParamsErr = func(delegateQueryParams, childQueryParams []*matchersv1.QueryParameterMatcher) error {
return errors.Wrapf(InvalidQueryParamErr, "required query params: %v, query params: %v", delegateQueryParams, childQueryParams)
}
InvalidRouteTableForDelegateMethodsErr = func(delegateMethods, childMethods []string) error {
return errors.Wrapf(InvalidMethodErr, "required methods: %v, methods: %v", delegateMethods, childMethods)
}
TopLevelVirtualResourceErr = func(rtRef core.Metadata, err error) error {
return errors.Wrapf(err, "on sub route table %s", rtRef.Ref().Key())
}
)
type RouteConverter interface {
// Converts a VirtualService to a set of Gloo API routes (i.e. routes on a Proxy resource).
	// A non-nil error indicates an unexpected internal failure; all configuration errors are added to the given report object.
ConvertVirtualService(virtualService *gatewayv1.VirtualService, reports reporter.ResourceReports) ([]*gloov1.Route, error)
}
func NewRouteConverter(selector RouteTableSelector, indexer RouteTableIndexer) RouteConverter {
return &routeVisitor{
routeTableSelector: selector,
routeTableIndexer: indexer,
}
}
// We define this interface to abstract both virtual services and route tables.
type resourceWithRoutes interface {
InputResource() resources.InputResource
GetRoutes() []*gatewayv1.Route
}
type visitableVirtualService struct {
*gatewayv1.VirtualService
}
func (v *visitableVirtualService) GetRoutes() []*gatewayv1.Route {
return v.GetVirtualHost().GetRoutes()
}
func (v *visitableVirtualService) InputResource() resources.InputResource {
return v.VirtualService
}
type visitableRouteTable struct {
*gatewayv1.RouteTable
}
func (v *visitableRouteTable) InputResource() resources.InputResource {
return v.RouteTable
}
// Implements Converter interface by recursively visiting a routing resource
type routeVisitor struct {
// Used to select route tables for delegated routes.
routeTableSelector RouteTableSelector
// Used to sort route tables when multiple ones are matched by a selector.
routeTableIndexer RouteTableIndexer
}
// Helper object used to store information about previously visited routes.
type routeInfo struct {
// The matcher for the route
matcher *matchersv1.Matcher
// The options on the route.
options *gloov1.RouteOptions
// Used to build the name of the route as we traverse the tree.
name string
// Is true if any route on the current route tree branch is explicitly named by the user.
hasName bool
// Whether any child route objects should inherit headers, methods, and query param matchers from the parent.
inheritableMatchers bool
}
// Helper object for reporting errors and warnings
type reporterHelper struct {
reports reporter.ResourceReports
topLevelVirtualService *gatewayv1.VirtualService
}
func (r *reporterHelper) addError(resource resources.InputResource, err error) {
r.reports.AddError(resource, err)
// If the resource is a Route Table, also add the error to the top level virtual service.
if rt, ok := resource.(*gatewayv1.RouteTable); ok {
r.reports.AddError(r.topLevelVirtualService, TopLevelVirtualResourceErr(rt.GetMetadata(), err))
}
}
func (r *reporterHelper) addWarning(resource resources.InputResource, err error) {
r.reports.AddWarning(resource, err.Error())
// If the resource is a Route Table, also add the warning to the top level virtual service.
if rt, ok := resource.(*gatewayv1.RouteTable); ok {
r.reports.AddWarning(r.topLevelVirtualService, TopLevelVirtualResourceErr(rt.GetMetadata(), err).Error())
}
}
func (rv *routeVisitor) ConvertVirtualService(virtualService *gatewayv1.VirtualService, reports reporter.ResourceReports) ([]*gloov1.Route, error) {
wrapper := &visitableVirtualService{VirtualService: virtualService}
return rv.visit(
wrapper,
nil,
nil,
&reporterHelper{
reports: reports,
topLevelVirtualService: virtualService,
},
)
}
// Performs a depth-first, in-order traversal of a route tree rooted at the given resource.
// The additional arguments are used to store the state of the traversal of the current branch of the route tree.
func (rv *routeVisitor) visit(
resource resourceWithRoutes,
parentRoute *routeInfo,
visitedRouteTables gatewayv1.RouteTableList,
reporterHelper *reporterHelper,
) ([]*gloov1.Route, error) {
var routes []*gloov1.Route
for _, gatewayRoute := range resource.GetRoutes() {
// Clone route to be safe, since we might mutate it
routeClone := proto.Clone(gatewayRoute).(*gatewayv1.Route)
// Determine route name
name, routeHasName := routeName(resource.InputResource(), routeClone, parentRoute)
routeClone.Name = name
// If the parent route is not nil, this route has been delegated to and we need to perform additional operations
if parentRoute != nil {
var err error
routeClone, err = validateAndMergeParentRoute(routeClone, parentRoute)
if err != nil {
reporterHelper.addError(resource.InputResource(), err)
continue
}
}
switch action := routeClone.Action.(type) {
case *gatewayv1.Route_DelegateAction:
// Validate the matcher of the delegate route
delegateMatcher, err := getDelegateRouteMatcher(routeClone)
if err != nil {
reporterHelper.addError(resource.InputResource(), err)
continue
}
// Determine the route tables to delegate to
routeTables, err := rv.routeTableSelector.SelectRouteTables(action.DelegateAction, resource.InputResource().GetMetadata().Namespace)
if err != nil {
reporterHelper.addWarning(resource.InputResource(), err)
continue
}
// Default missing weights to 0
for _, routeTable := range routeTables {
if routeTable.GetWeight() == nil {
routeTable.Weight = &types.Int32Value{Value: defaultTableWeight}
}
}
routeTablesByWeight, sortedWeights := rv.routeTableIndexer.IndexByWeight(routeTables)
// Process the route tables in order by weight
for _, weight := range sortedWeights {
routeTablesForWeight := routeTablesByWeight[weight]
var rtRoutesForWeight []*gloov1.Route
for _, routeTable := range routeTablesForWeight {
// Check for delegation cycles
if err := checkForCycles(routeTable, visitedRouteTables); err != nil {
// Note that we do not report the error on the table we are currently visiting, but on the
// one we are about to visit, since that is the one that started the cycle.
reporterHelper.addError(routeTable, err)
continue
}
// Collect information about this route that are relevant when visiting the delegated route table
currentRouteInfo := &routeInfo{
matcher: delegateMatcher,
options: routeClone.Options,
name: name,
hasName: routeHasName,
inheritableMatchers: routeClone.InheritableMatchers.GetValue(),
}
// Make a copy of the existing set of visited route tables. We need to pass this information into
// the recursive call and we do NOT want the original slice to be modified.
visitedRtCopy := append(append([]*gatewayv1.RouteTable{}, visitedRouteTables...), routeTable)
// Recursive call
subRoutes, err := rv.visit(
&visitableRouteTable{routeTable},
currentRouteInfo,
visitedRtCopy,
reporterHelper,
)
if err != nil {
return nil, err
}
rtRoutesForWeight = append(rtRoutesForWeight, subRoutes...)
}
// If we have multiple route tables with this weight, we want to try and sort the resulting routes in
// order to protect against short-circuiting, e.g. we want to avoid `/foo` coming before `/foo/bar`.
if len(routeTablesForWeight) > 1 {
glooutils.SortRoutesByPath(rtRoutesForWeight)
}
routes = append(routes, rtRoutesForWeight...)
}
default:
// If there are no named routes on this branch of the route tree, then wipe the name.
if !routeHasName {
routeClone.Name = ""
}
// if this is a routeAction pointing to an upstream without specifying the namespace, set the namespace to that of the parent resource
if action, ok := routeClone.Action.(*gatewayv1.Route_RouteAction); ok |
glooRoute, err := convertSimpleAction(routeClone)
if err != nil {
reporterHelper.addError(resource.InputResource(), err)
continue
}
routes = append(routes, glooRoute)
}
}
// Append source metadata to all the routes
for _, r := range routes {
if err := appendSource(r, resource.InputResource()); err != nil {
// should never happen
return nil, err
}
}
return routes, nil
}
// Returns the name of the route and a flag that is true if either the route or the parent route are explicitly named.
// Route names have the following format: "vs:myvirtualservice_route:myfirstroute_rt:myroutetable_route:<unnamed>"
func routeName(resource resources.InputResource, route *gatewayv1.Route, parentRouteInfo *routeInfo) (string, bool) {
var prefix string
if parentRouteInfo != nil {
prefix = parentRouteInfo.name + "_"
}
resourceKindName := ""
switch resource.(type) {
case *gatewayv1.VirtualService:
resourceKindName = "vs"
case *gatewayv1.RouteTable:
resourceKindName = "rt"
default:
// Should never happen
}
resourceName := resource.GetMetadata().Name
var isRouteNamed bool
routeDisplayName := route.Name
if routeDisplayName == "" {
routeDisplayName = unnamedRouteName
} else {
isRouteNamed = true
}
// If the current route has no name, but the parent one does, then we consider the resulting route to be named.
isRouteNamed = isRouteNamed || (parentRouteInfo != nil && parentRouteInfo.hasName)
return fmt.Sprintf("%s%s:%s_route:%s", prefix, resourceKindName, resourceName, routeDisplayName), isRouteNamed
}
func convertSimpleAction(simpleRoute *gatewayv1.Route) (*gloov1.Route, error) {
matchers := []*matchersv1.Matcher{defaults.DefaultMatcher()}
if len(simpleRoute.Matchers) > 0 {
matchers = simpleRoute.Matchers
}
glooRoute := &gloov1.Route{
Matchers: matchers,
Options: simpleRoute.Options,
Name: simpleRoute.Name,
}
switch action := simpleRoute.Action.(type) {
case *gatewayv1.Route_RedirectAction:
glooRoute.Action = &gloov1.Route_RedirectAction{
RedirectAction: action.RedirectAction,
}
case *gatewayv1.Route_DirectResponseAction:
glooRoute.Action = &gloov1.Route_DirectResponseAction{
DirectResponseAction: action.DirectResponseAction,
}
case *gatewayv1.Route_RouteAction:
glooRoute.Action = &gloov1.Route_RouteAction{
RouteAction: action.RouteAction,
}
case *gatewayv1.Route_DelegateAction:
// Should never happen
return nil, errors.New("internal error: expected simple route action but found delegation!")
default:
return nil, NoActionErr
}
return glooRoute, nil
}
// If any of the matching route tables has already been visited, then we have a delegation cycle.
func checkForCycles(toVisit *gatewayv1.RouteTable, visited gatewayv1.RouteTableList) error {
for _, alreadyVisitedTable := range visited {
if toVisit == alreadyVisitedTable {
return DelegationCycleErr(
buildCycleInfoString(append(append(gatewayv1.RouteTableList{}, visited...), toVisit)),
)
}
}
return nil
}
func getDelegateRouteMatcher(route *gatewayv1.Route) (*matchersv1.Matcher, error) {
switch len(route.GetMatchers()) {
case 0:
return defaults.DefaultMatcher(), nil
case 1:
matcher := route.GetMatchers()[0]
if matcher.GetPathSpecifier() == nil {
return defaults.DefaultMatcher(), nil // no path specifier provided, default to '/' prefix matcher
}
if matcher.GetPrefix() == "" {
return nil, MissingPrefixErr
}
return matcher, nil
default:
return nil, MatcherCountErr
}
}
func validateAndMergeParentRoute(child *gatewayv1.Route, parent *routeInfo) (*gatewayv1.Route, error) {
// inherit inheritance config from parent if unset
if child.InheritableMatchers == nil {
child.InheritableMatchers = &types.BoolValue{
Value: parent.inheritableMatchers,
}
}
// inherit route table config from parent
if parent.inheritableMatchers {
for _, childMatch := range child.Matchers {
childMatch.Headers = append(parent.matcher.Headers, childMatch.Headers...)
childMatch.Methods = append(parent.matcher.Methods, childMatch.Methods...)
childMatch.QueryParameters = append(parent.matcher.QueryParameters, childMatch.QueryParameters...)
}
}
// Verify that the matchers are compatible with the parent prefix
if err := isRouteTableValidForDelegateMatcher(parent.matcher, child); err != nil {
return nil, err
}
// Merge plugins from parent routes
merged, err := mergeRoutePlugins(child.GetOptions(), parent.options)
if err != nil {
// Should never happen
return nil, errors.Wrapf(err, "internal error: merging route plugins from parent to delegated route")
}
child.Options = merged
return child, nil
}
func isRouteTableValidForDelegateMatcher(parentMatcher *matchersv1.Matcher, childRoute *gatewayv1.Route) error {
// If the route has no matchers, we fall back to the default prefix matcher like for regular routes.
// In these case, we only accept it if the parent also uses the default matcher.
if len(childRoute.Matchers) == 0 && parentMatcher.GetPrefix() != defaults.DefaultMatcher().GetPrefix() {
return InvalidRouteTableForDelegatePrefixErr(parentMatcher.GetPrefix(), defaults.DefaultMatcher().GetPrefix())
}
for _, childMatch := range childRoute.Matchers {
// ensure all sub-routes in the delegated route table match the parent prefix
if pathString := glooutils.PathAsString(childMatch); !strings.HasPrefix(pathString, parentMatcher.GetPrefix()) {
return InvalidRouteTableForDelegatePrefixErr(parentMatcher.GetPrefix(), pathString)
}
// ensure all headers in the delegated route table are a superset of those from the parent route resource
childHeaderNameToHeader := map[string]*matchersv1.HeaderMatcher{}
for _, childHeader := range childMatch.Headers {
childHeaderNameToHeader[childHeader.Name] = childHeader
}
for _, parentHeader := range parentMatcher.Headers {
if childHeader, ok := childHeaderNameToHeader[parentHeader.GetName()]; !ok {
return InvalidRouteTableForDelegateHeadersErr(parentMatcher.Headers, childMatch.Headers)
} else if !parentHeader.Equal(childHeader) {
return InvalidRouteTableForDelegateHeadersErr(parentMatcher.Headers, childMatch.Headers)
}
}
// ensure all query parameters in the delegated route table are a superset of those from the parent route resource
childQueryParamNameToHeader := map[string]*matchersv1.QueryParameterMatcher{}
for _, childQueryParam := range childMatch.QueryParameters {
childQueryParamNameToHeader[childQueryParam.Name] = childQueryParam
}
for _, parentQueryParameter := range parentMatcher.QueryParameters {
if childQueryParam, ok := childQueryParamNameToHeader[parentQueryParameter.GetName()]; !ok {
return InvalidRouteTableForDelegateQueryParamsErr(parentMatcher.QueryParameters, childMatch.QueryParameters)
} else if !parentQueryParameter.Equal(childQueryParam) {
return InvalidRouteTableForDelegateQueryParamsErr(parentMatcher.QueryParameters, childMatch.QueryParameters)
}
}
// ensure all HTTP methods in the delegated route table are a superset of those from the parent route resource
childMethodsSet := sets.NewString(childMatch.Methods...)
if !childMethodsSet.HasAll(parentMatcher.Methods...) {
return InvalidRouteTableForDelegateMethodsErr(parentMatcher.Methods, childMatch.Methods)
}
}
return nil
}
// Handles new and deprecated format for referencing a route table
// TODO: remove this function when we remove the deprecated fields from the API
func getRouteTableRef(delegate *gatewayv1.DelegateAction) *core.ResourceRef {
if delegate.Namespace != "" || delegate.Name != "" {
return &core.ResourceRef{
Namespace: delegate.Namespace,
Name: delegate.Name,
}
}
return delegate.GetRef()
}
func buildCycleInfoString(routeTables gatewayv1.RouteTableList) string {
var visitedTables []string
for _, rt := range routeTables {
visitedTables = append(visitedTables, fmt.Sprintf("[%s]", rt.Metadata.Ref().Key()))
}
return strings.Join(visitedTables, " -> ")
}
| {
parentNamespace := resource.InputResource().GetMetadata().Namespace
if upstream := action.RouteAction.GetSingle().GetUpstream(); upstream != nil && upstream.GetNamespace() == "" {
upstream.Namespace = parentNamespace
}
if multiDests := action.RouteAction.GetMulti().GetDestinations(); multiDests != nil {
for _, dest := range multiDests {
if upstream := dest.GetDestination().GetUpstream(); upstream != nil && upstream.GetNamespace() == "" {
upstream.Namespace = parentNamespace
}
}
}
} |
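// Illustration (added; the resource names are made up): a delegation that the
// validation rules above accept, and one they reject.
//
//	parent route (VirtualService): prefix=/api, headers=[x-tenant], delegate -> my-rt
//	valid child (RouteTable):      prefix=/api/v1, headers=[x-tenant, x-debug]
//	invalid child:                 prefix=/other -> InvalidRouteTableForDelegatePrefixErr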
info.go | package model
import (
"time"
"github.com/copernet/whccommon/model"
)
func GetInfoForNetwork() (*model.Block, error) {
var block model.Block
err := db.Table("blocks").
Select("block_height, block_time, whccount").
Last(&block).Error
if err != nil |
return &block, nil
}
func GetTotalWormholeTxCount() (int, error) {
var count int
row := db.Table("blocks").
Select("sum(whccount) as count").Row()
err := row.Scan(&count)
if err != nil {
return 0, err
}
return count, nil
}
func GetWormholeTxCountInPeriod(from int64) (int, error) {
now := time.Now().Unix()
var count int
row := db.Table("blocks").
Select("sum(whccount) as count").Where("block_time between ? and ?", from, now).
Row()
err := row.Scan(&count)
if err != nil {
return 0, err
}
return count, nil
}
| {
return nil, err
} |
atof.rs | //! Benchmarks for the lexical string-to-float conversion routines.
use std::env;
use std::fs;
use std::path::PathBuf;
#[macro_use]
extern crate bencher;
#[macro_use]
extern crate lazy_static;
extern crate lexical_core;
extern crate serde_json;
use bencher::{black_box, Bencher};
use lexical_core::parse as lexical_parse;
/// Return the `target/debug` directory path.
pub fn debug_dir() -> PathBuf {
env::current_exe()
.expect("unittest executable path")
.parent()
.expect("unittest executable directory")
.parent()
.expect("debug directory")
.to_path_buf()
}
/// Return the `target` directory path.
pub fn target_dir() -> PathBuf {
debug_dir()
.parent()
.expect("target directory")
.to_path_buf()
}
/// Return the project directory path.
pub fn project_dir() -> PathBuf {
target_dir()
.parent()
.expect("rust directory")
.parent()
.expect("project directory")
.to_path_buf()
}
/// Return the `data` directory path.
pub fn data_dir() -> PathBuf {
let mut dir = project_dir();
dir.push("data");
dir
}
fn parse_json(name: &str) -> Vec<String> {
let mut path = data_dir();
path.push(name);
let s = fs::read_to_string(path).unwrap();
serde_json::from_str(&s).unwrap()
}
lazy_static! {
static ref DENORMAL_DATA: Vec<String> = parse_json("denormal_halfway.json");
static ref LARGE_DATA: Vec<String> = parse_json("large_halfway.json");
static ref DIGITS2_DATA: Vec<String> = parse_json("digits2.json");
static ref DIGITS8_DATA: Vec<String> = parse_json("digits8.json");
static ref DIGITS16_DATA: Vec<String> = parse_json("digits16.json");
static ref DIGITS32_DATA: Vec<String> = parse_json("digits32.json");
static ref DIGITS64_DATA: Vec<String> = parse_json("digits64.json");
}
fn denormal10(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[0].as_bytes()).unwrap());
})
}
fn denormal20(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[1].as_bytes()).unwrap());
})
}
fn denormal30(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[2].as_bytes()).unwrap());
})
}
fn denormal40(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[3].as_bytes()).unwrap());
})
}
fn denormal50(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[4].as_bytes()).unwrap());
})
}
fn denormal100(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[5].as_bytes()).unwrap());
})
}
fn denormal200(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[6].as_bytes()).unwrap());
})
}
fn denormal400(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[7].as_bytes()).unwrap());
})
}
fn denormal800(bench: &mut Bencher) |
fn denormal1600(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[9].as_bytes()).unwrap());
})
}
fn denormal3200(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[10].as_bytes()).unwrap());
})
}
fn denormal6400(bench: &mut Bencher) {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[11].as_bytes()).unwrap());
})
}
fn large10(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[0].as_bytes()).unwrap());
})
}
fn large20(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[1].as_bytes()).unwrap());
})
}
fn large30(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[2].as_bytes()).unwrap());
})
}
fn large40(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[3].as_bytes()).unwrap());
})
}
fn large50(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[4].as_bytes()).unwrap());
})
}
fn large100(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[5].as_bytes()).unwrap());
})
}
fn large200(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[6].as_bytes()).unwrap());
})
}
fn large400(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[7].as_bytes()).unwrap());
})
}
fn large800(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[8].as_bytes()).unwrap());
})
}
fn large1600(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[9].as_bytes()).unwrap());
})
}
fn large3200(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[10].as_bytes()).unwrap());
})
}
fn large6400(bench: &mut Bencher) {
let data: &[String] = &LARGE_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[11].as_bytes()).unwrap());
})
}
fn digits2(bench: &mut Bencher) {
let data: &[String] = &DIGITS2_DATA;
bench.iter(|| {
for value in data.iter() {
black_box(lexical_parse::<f64>(value.as_bytes()).unwrap());
}
})
}
fn digits8(bench: &mut Bencher) {
let data: &[String] = &DIGITS8_DATA;
bench.iter(|| {
for value in data.iter() {
black_box(lexical_parse::<f64>(value.as_bytes()).unwrap());
}
})
}
fn digits16(bench: &mut Bencher) {
let data: &[String] = &DIGITS16_DATA;
bench.iter(|| {
for value in data.iter() {
black_box(lexical_parse::<f64>(value.as_bytes()).unwrap());
}
})
}
fn digits32(bench: &mut Bencher) {
let data: &[String] = &DIGITS32_DATA;
bench.iter(|| {
for value in data.iter() {
black_box(lexical_parse::<f64>(value.as_bytes()).unwrap());
}
})
}
fn digits64(bench: &mut Bencher) {
let data: &[String] = &DIGITS64_DATA;
bench.iter(|| {
for value in data.iter() {
black_box(lexical_parse::<f64>(value.as_bytes()).unwrap());
}
})
}
benchmark_group!(denormal, denormal10, denormal20, denormal30, denormal40, denormal50, denormal100, denormal200, denormal400, denormal800, denormal1600, denormal3200, denormal6400);
benchmark_group!(large, large10, large20, large30, large40, large50, large100, large200, large400, large800, large1600, large3200, large6400);
benchmark_group!(digits, digits2, digits8, digits16, digits32, digits64);
benchmark_main!(denormal, large, digits);
| {
let data: &[String] = &DENORMAL_DATA;
bench.iter(|| {
black_box(lexical_parse::<f64>(data[8].as_bytes()).unwrap());
})
} |
server.js | const express = require('express')
const routes = require('./routes')
const nunjucks = require('nunjucks')
const methodOverride = require('method-override')
const server = express()
server.use(express.urlencoded({ extended: true }));
server.use(express.static('public'))
server.use(methodOverride("_method"));
server.use(routes)
server.set('view engine', 'njk')
nunjucks.configure('views', {
express: server,
autoescape:false,
noCache:true,
}) |
server.listen(5555, function() {
    console.log('Server is running on port 5555')
}) | |
naca4.py | """
Python 2 and 3 code to generate 4 and 5 digit NACA profiles
The NACA airfoils are airfoil shapes for aircraft wings developed
by the National Advisory Committee for Aeronautics (NACA).
The shape of the NACA airfoils is described using a series of
digits following the word "NACA". The parameters in the numerical
code can be entered into equations to precisely generate the
cross-section of the airfoil and calculate its properties.
https://en.wikipedia.org/wiki/NACA_airfoil
Ports of the Matlab code available here:
http://www.mathworks.com/matlabcentral/fileexchange/19915-naca-4-digit-airfoil-generator
http://www.mathworks.com/matlabcentral/fileexchange/23241-naca-5-digit-airfoil-generator
Copyright (C) 2011 by Dirk Gorissen <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from math import cos, sin, tan
from math import atan
from math import pi
from math import pow
from math import sqrt
def linspace(start,stop,np):
"""
Emulate Matlab linspace
"""
return [start+(stop-start)*i/(np-1) for i in range(np)]
def interpolate(xa,ya,queryPoints):
"""
A cubic spline interpolation on a given set of points (x,y)
Recalculates everything on every call which is far from efficient but does the job for now
should eventually be replaced by an external helper class
"""
# PreCompute() from Paint Mono which in turn adapted:
# NUMERICAL RECIPES IN C: THE ART OF SCIENTIFIC COMPUTING
# ISBN 0-521-43108-5, page 113, section 3.3.
# http://paint-mono.googlecode.com/svn/trunk/src/PdnLib/SplineInterpolator.cs
#number of points
n = len(xa)
u, y2 = [0]*n, [0]*n
for i in range(1,n-1):
# This is the decomposition loop of the tridiagonal algorithm.
# y2 and u are used for temporary storage of the decomposed factors.
wx = xa[i + 1] - xa[i - 1]
sig = (xa[i] - xa[i - 1]) / wx
p = sig * y2[i - 1] + 2.0
y2[i] = (sig - 1.0) / p
ddydx = (ya[i + 1] - ya[i]) / (xa[i + 1] - xa[i]) - (ya[i] - ya[i - 1]) / (xa[i] - xa[i - 1])
u[i] = (6.0 * ddydx / wx - sig * u[i - 1]) / p
y2[n - 1] = 0
# This is the backsubstitution loop of the tridiagonal algorithm
#((int i = n - 2; i >= 0; --i):
for i in range(n-2,-1,-1):
y2[i] = y2[i] * y2[i + 1] + u[i]
# interpolate() adapted from Paint Mono which in turn adapted:
# NUMERICAL RECIPES IN C: THE ART OF SCIENTIFIC COMPUTING
# ISBN 0-521-43108-5, page 113, section 3.3.
# http://paint-mono.googlecode.com/svn/trunk/src/PdnLib/SplineInterpolator.cs
    results = [0]*len(queryPoints)
#loop over all query points
for i in range(len(queryPoints)):
# bisection. This is optimal if sequential calls to this
# routine are at random values of x. If sequential calls
# are in order, and closely spaced, one would do better
        # to store previous values of klo and khi and test if they are still valid on the next call.
klo = 0
khi = n - 1
while (khi - klo > 1):
k = (khi + klo) >> 1
if (xa[k] > queryPoints[i]):
khi = k
else:
klo = k
h = xa[khi] - xa[klo]
a = (xa[khi] - queryPoints[i]) / h
b = (queryPoints[i] - xa[klo]) / h
# Cubic spline polynomial is now evaluated.
results[i] = a * ya[klo] + b * ya[khi] + ((a * a * a - a) * y2[klo] + (b * b * b - b) * y2[khi]) * (h * h) / 6.0
return results
def naca4(number, n, finite_TE = False, half_cosine_spacing = False):
"""
Returns 2*n+1 points in [0 1] for the given 4 digit NACA number string
"""
m = float(number[0])/100.0
p = float(number[1])/10.0
t = float(number[2:])/100.0
a0 = +0.2969
a1 = -0.1260
a2 = -0.3516
a3 = +0.2843
if finite_TE:
a4 = -0.1015 # For finite thick TE
else:
a4 = -0.1036 # For zero thick TE
if half_cosine_spacing:
beta = linspace(0.0,pi,n+1)
x = [(0.5*(1.0-cos(xx))) for xx in beta] # Half cosine based spacing
else:
x = linspace(0.0,1.0,n+1)
yt = [5*t*(a0*sqrt(xx)+a1*xx+a2*pow(xx,2)+a3*pow(xx,3)+a4*pow(xx,4)) for xx in x]
xc1 = [xx for xx in x if xx <= p]
xc2 = [xx for xx in x if xx > p]
if p == 0:
xu = x
yu = yt
xl = x
yl = [-xx for xx in yt]
xc = xc1 + xc2
zc = [0]*len(xc)
else:
yc1 = [m/pow(p,2)*xx*(2*p-xx) for xx in xc1]
yc2 = [m/pow(1-p,2)*(1-2*p+xx)*(1-xx) for xx in xc2]
zc = yc1 + yc2
dyc1_dx = [m/pow(p,2)*(2*p-2*xx) for xx in xc1]
dyc2_dx = [m/pow(1-p,2)*(2*p-2*xx) for xx in xc2]
dyc_dx = dyc1_dx + dyc2_dx
theta = [atan(xx) for xx in dyc_dx]
xu = [xx - yy * sin(zz) for xx,yy,zz in zip(x,yt,theta)]
yu = [xx + yy * cos(zz) for xx,yy,zz in zip(zc,yt,theta)]
xl = [xx + yy * sin(zz) for xx,yy,zz in zip(x,yt,theta)]
yl = [xx - yy * cos(zz) for xx,yy,zz in zip(zc,yt,theta)]
X = xu[::-1] + xl[1:]
Z = yu[::-1] + yl[1:]
return X,Z |
def naca5(number, n, finite_TE = False, half_cosine_spacing = False):
"""
Returns 2*n+1 points in [0 1] for the given 5 digit NACA number string
"""
naca1 = int(number[0])
naca23 = int(number[1:3])
naca45 = int(number[3:])
cld = naca1*(3.0/2.0)/10.0
p = 0.5*naca23/100.0
t = naca45/100.0
a0 = +0.2969
a1 = -0.1260
a2 = -0.3516
a3 = +0.2843
if finite_TE:
a4 = -0.1015 # For finite thickness trailing edge
else:
a4 = -0.1036 # For zero thickness trailing edge
if half_cosine_spacing:
beta = linspace(0.0,pi,n+1)
        x = [(0.5*(1.0-cos(xx))) for xx in beta] # Half cosine based spacing
else:
x = linspace(0.0,1.0,n+1)
yt = [5*t*(a0*sqrt(xx)+a1*xx+a2*pow(xx,2)+a3*pow(xx,3)+a4*pow(xx,4)) for xx in x]
P = [0.05,0.1,0.15,0.2,0.25]
M = [0.0580,0.1260,0.2025,0.2900,0.3910]
K = [361.4,51.64,15.957,6.643,3.230]
m = interpolate(P,M,[p])[0]
k1 = interpolate(M,K,[m])[0]
xc1 = [xx for xx in x if xx <= p]
xc2 = [xx for xx in x if xx > p]
xc = xc1 + xc2
if p == 0:
xu = x
yu = yt
xl = x
        yl = [-xx for xx in yt]
zc = [0]*len(xc)
else:
yc1 = [k1/6.0*(pow(xx,3)-3*m*pow(xx,2)+ pow(m,2)*(3-m)*xx) for xx in xc1]
yc2 = [k1/6.0*pow(m,3)*(1-xx) for xx in xc2]
zc = [cld/0.3 * xx for xx in yc1 + yc2]
dyc1_dx = [cld/0.3*(1.0/6.0)*k1*(3*pow(xx,2)-6*m*xx+pow(m,2)*(3-m)) for xx in xc1]
dyc2_dx = [cld/0.3*(1.0/6.0)*k1*pow(m,3)]*len(xc2)
dyc_dx = dyc1_dx + dyc2_dx
theta = [atan(xx) for xx in dyc_dx]
xu = [xx - yy * sin(zz) for xx,yy,zz in zip(x,yt,theta)]
yu = [xx + yy * cos(zz) for xx,yy,zz in zip(zc,yt,theta)]
xl = [xx + yy * sin(zz) for xx,yy,zz in zip(x,yt,theta)]
yl = [xx - yy * cos(zz) for xx,yy,zz in zip(zc,yt,theta)]
X = xu[::-1] + xl[1:]
Z = yu[::-1] + yl[1:]
return X,Z
def naca(number, n, finite_TE = False, half_cosine_spacing = False):
if len(number)==4:
return naca4(number, n, finite_TE, half_cosine_spacing)
elif len(number)==5:
return naca5(number, n, finite_TE, half_cosine_spacing)
else:
        raise ValueError("NACA number must have 4 or 5 digits: %r" % number)
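# Minimal usage sketch (the profile string and point count below are
# illustrative, not from the original module):
#
#     X, Z = naca('2412', 120, finite_TE=False, half_cosine_spacing=True)
#     assert len(X) == len(Z) == 2 * 120 + 1  # upper surface (reversed) + lower surface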
class Display(object):
def __init__(self):
import matplotlib.pyplot as plt
self.plt = plt
self.h = []
self.label = []
self.fig, self.ax = self.plt.subplots()
self.plt.axis('equal')
self.plt.xlabel('x')
self.plt.ylabel('y')
self.ax.grid(True)
def plot(self, X, Y,label=''):
h, = self.plt.plot(X, Y, '-', linewidth = 1)
self.h.append(h)
self.label.append(label)
def show(self):
self.plt.axis((-0.1,1.1)+self.plt.axis()[2:])
self.ax.legend(self.h, self.label)
self.plt.show()
def demo(profNaca = ['0009', '2414', '6409'], nPoints = 240, finite_TE = False, half_cosine_spacing = False):
#profNaca = ['0009', '0012', '2414', '2415', '6409' , '0006', '0008', '0010', '0012', '0015']
d = Display()
for i,p in enumerate(profNaca):
X,Y = naca(p, nPoints, finite_TE, half_cosine_spacing)
d.plot(X, Y, p)
d.show() | |
declarations.d.ts | declare namespace NodeJS { | UPLOAD_BUCKET_ID: string;
UPLOAD_BUCKET_REGION: string;
}
} | export interface ProcessEnv {
TABLE_REGION: string;
TABLE_NAME_DEPLOYMENTS: string;
TABLE_NAME_ALIASES: string; |
DeprecatedTypeControl.js | /* SPDX-License-Identifier: Apache-2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
import React, { useContext } from "react";
import { RequestContext } from "../../contexts/RequestContext";
import { TypesContext } from "../../contexts/TypesContext";
import "./resource-selector.scss"
/*
 * The DeprecatedTypeControl provides a checkbox that the user can check to
 * include deprecated types. When the option is checked, deprecated types are
 * read by the explorer and displayed in diagrams and on details panels. When
 * it is not checked, deprecated types are not read by the explorer.
*/
export default function | () {
const requestContext = useContext(RequestContext);
const typesContext = useContext(TypesContext);
const updateDeprecatedTypeOption = () => {
requestContext.updateDeprecatedTypeOption();
};
return (
<div className="resource-controls">
      <label htmlFor="cbDeprecation">Include deprecated types: </label>
<input type="checkbox"
id="cbDeprecation"
name="cbDeprecation"
onChange={updateDeprecatedTypeOption}
checked={ requestContext.deprecatedTypeOption }
value={ requestContext.deprecatedTypeOption } />
<br />
</div>
);
}
| DeprecatedTypeControl |
test_wrappers.py | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import ANY, Mock
import pytest
import torch
from torch.utils.data.dataloader import DataLoader
from pytorch_lightning.core.mixins import DeviceDtypeModuleMixin
from pytorch_lightning.lite import LightningLite
from pytorch_lightning.lite.wrappers import _LiteDataLoader, _LiteModule, _LiteOptimizer
from tests.helpers.runif import RunIf
class EmptyLite(LightningLite):
def run(self):
pass
def test_lite_module_wraps():
"""Test that the wrapped module is accessible via the property."""
module = Mock()
assert _LiteModule(module, Mock()).module is module
@RunIf(min_gpus=1)
@pytest.mark.parametrize(
"precision, input_type, expected_type",
[
(32, torch.float16, torch.float32),
(32, torch.float32, torch.float32),
(32, torch.float64, torch.float32),
(32, torch.int, torch.int),
(16, torch.float32, torch.float16),
(16, torch.float64, torch.float16),
(16, torch.long, torch.long),
pytest.param("bf16", torch.float32, torch.bfloat16, marks=RunIf(min_torch="1.10")),
pytest.param("bf16", torch.float64, torch.bfloat16, marks=RunIf(min_torch="1.10")),
pytest.param("bf16", torch.bool, torch.bool, marks=RunIf(min_torch="1.10")),
],
)
def test_lite_module_forward_conversion(precision, input_type, expected_type):
"""Test that the LiteModule performs autocasting on the input tensors and during forward()."""
lite = EmptyLite(precision=precision, accelerator="gpu", devices=1)
device = torch.device("cuda", 0)
def check_autocast(forward_input):
assert precision != 16 or torch.is_autocast_enabled()
return forward_input
module = Mock(wraps=torch.nn.Identity(), side_effect=check_autocast)
lite_module = _LiteModule(module, lite._precision_plugin).to(device)
out = lite_module(torch.tensor([1, 2, 3], dtype=input_type, device=device))
assert module.call_args[0][0].dtype == expected_type
assert out.dtype == input_type or out.dtype == torch.get_default_dtype()
@pytest.mark.parametrize(
"device", [torch.device("cpu"), pytest.param(torch.device("cuda", 0), marks=RunIf(min_gpus=1))]
)
@pytest.mark.parametrize("dtype", [torch.float32, torch.float16])
def test_lite_module_device_dtype_propagation(device, dtype):
"""Test that the LiteModule propagates device and dtype properties to its submodules (e.g. torchmetrics)."""
class DeviceModule(DeviceDtypeModuleMixin):
pass
device_module = DeviceModule()
lite_module = _LiteModule(device_module, Mock())
lite_module.to(device)
assert device_module.device == device
assert lite_module.device == device
lite_module.to(dtype)
assert device_module.dtype == dtype
assert lite_module.dtype == dtype
def test_lite_dataloader_iterator():
"""Test that the iteration over a LiteDataLoader wraps the iterator of the underlying dataloader (no automatic
device placement)."""
dataloader = DataLoader(range(5), batch_size=2)
lite_dataloader = _LiteDataLoader(dataloader)
assert len(lite_dataloader) == len(dataloader) == 3
iterator = iter(dataloader)
lite_iterator = iter(lite_dataloader)
assert torch.equal(next(iterator), next(lite_iterator))
assert torch.equal(next(iterator), next(lite_iterator))
assert torch.equal(next(iterator), next(lite_iterator))
with pytest.raises(StopIteration):
next(iterator)
with pytest.raises(StopIteration):
next(lite_iterator)
@pytest.mark.parametrize(
"src_device, dest_device",
[
(torch.device("cpu"), torch.device("cpu")),
pytest.param(torch.device("cpu"), torch.device("cuda", 0), marks=RunIf(min_gpus=1)),
pytest.param(torch.device("cuda", 0), torch.device("cpu"), marks=RunIf(min_gpus=1)),
],
)
def test_lite_dataloader_device_placement(src_device, dest_device):
"""Test that the LiteDataLoader moves data to the device in its iterator."""
sample0 = torch.tensor(0, device=src_device)
sample1 = torch.tensor(1, device=src_device)
sample2 = {"data": torch.tensor(2, device=src_device)}
sample3 = {"data": torch.tensor(3, device=src_device)}
dataloader = DataLoader([sample0, sample1, sample2, sample3], batch_size=2)
lite_dataloader = _LiteDataLoader(dataloader=dataloader, device=dest_device)
iterator = iter(lite_dataloader)
batch0 = next(iterator)
assert torch.equal(batch0, torch.tensor([0, 1], device=dest_device))
batch1 = next(iterator)
assert torch.equal(batch1["data"], torch.tensor([2, 3], device=dest_device))
def test_lite_optimizer_wraps():
"""Test that the LiteOptimizer fully wraps the optimizer.""" | assert lite_optimizer.optimizer is optimizer
assert isinstance(lite_optimizer, optimizer_cls)
def test_lite_optimizer_state_dict():
"""Test that the LiteOptimizer calls into the strategy to collect the state."""
optimizer = Mock()
strategy = Mock()
lite_optimizer = _LiteOptimizer(optimizer=optimizer, strategy=strategy)
lite_optimizer.state_dict()
strategy.optimizer_state.assert_called_with(optimizer)
def test_lite_optimizer_steps():
"""Test that the LiteOptimizer forwards the step() and zero_grad() calls to the wrapped optimizer."""
optimizer = Mock()
strategy = Mock()
strategy.optimizer_step.return_value = 123
lite_optimizer = _LiteOptimizer(optimizer=optimizer, strategy=strategy)
step_output = lite_optimizer.step()
assert step_output == 123
strategy.optimizer_step.assert_called_once()
strategy.optimizer_step.assert_called_with(optimizer, opt_idx=0, closure=ANY, model=strategy.model) | optimizer_cls = torch.optim.SGD
optimizer = Mock(spec=optimizer_cls)
lite_optimizer = _LiteOptimizer(optimizer, Mock()) |
types.rs | use crate::pack;
use std::{io, path::PathBuf, sync::Arc};
use tempfile::NamedTempFile;
/// Configuration for [write_stream_to_directory][pack::Bundle::write_stream_to_directory()] or
/// [write_to_directory_eagerly][pack::Bundle::write_to_directory_eagerly()] | pub struct Options {
/// The amount of threads to use at most when resolving the pack. If `None`, all logical cores are used.
pub thread_limit: Option<usize>,
/// Determine how much processing to spend on protecting against corruption or recovering from errors.
pub iteration_mode: pack::data::iter::Mode,
/// The version of pack index to write, should be [`pack::index::Version::default()`]
pub index_kind: pack::index::Version,
}
/// Returned by [write_stream_to_directory][pack::Bundle::write_stream_to_directory()] or
/// [write_to_directory_eagerly][pack::Bundle::write_to_directory_eagerly()]
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
pub struct Outcome {
/// The successful result of the index write operation
pub index: pack::index::write::Outcome,
/// The version of the pack
pub pack_kind: pack::data::Version,
/// The path to the pack index file
pub index_path: Option<PathBuf>,
/// The path to the pack data file
pub data_path: Option<PathBuf>,
}
impl Outcome {
/// Instantiate a bundle from the newly written index and data file that are represented by this `Outcome`
pub fn to_bundle(&self) -> Option<Result<pack::Bundle, pack::bundle::Error>> {
self.index_path.as_ref().map(pack::Bundle::at)
}
}
pub(crate) struct PassThrough<R> {
pub reader: R,
pub writer: Option<Arc<parking_lot::Mutex<NamedTempFile>>>,
}
impl<R> io::Read for PassThrough<R>
where
R: io::Read,
{
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let bytes_read = self.reader.read(buf)?;
if let Some(writer) = self.writer.as_mut() {
use io::Write;
writer.lock().write_all(&buf[..bytes_read])?;
}
Ok(bytes_read)
}
}
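// Note: the `Read` impl above tees every byte pulled from `reader` into the
// optional tempfile, so the pack stream can be persisted while it is consumed.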
impl<R> io::BufRead for PassThrough<R>
where
R: io::BufRead,
{
fn fill_buf(&mut self) -> io::Result<&[u8]> {
self.reader.fill_buf()
}
fn consume(&mut self, amt: usize) {
self.reader.consume(amt)
}
} | #[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))] |
stack.go | package merr
import (
"fmt"
"path/filepath"
"runtime"
"strings"
"text/tabwriter"
)
// MaxStackSize indicates the maximum number of stack frames which will be
// stored when embedding stack traces in errors.
var MaxStackSize = 50
// Stacktrace represents a stack trace at a particular point in execution.
type Stacktrace struct {
frames []uintptr
}
func | (skip int) Stacktrace {
stackSlice := make([]uintptr, MaxStackSize+skip)
// incr skip once for newStacktrace, and once for runtime.Callers
l := runtime.Callers(skip+2, stackSlice)
return Stacktrace{frames: stackSlice[:l]}
}
// Frame returns the first frame in the stack.
func (s Stacktrace) Frame() runtime.Frame {
if len(s.frames) == 0 {
panic("cannot call Frame on empty stack")
}
frame, _ := runtime.CallersFrames([]uintptr(s.frames)).Next()
return frame
}
// Frames returns all runtime.Frame instances for this stack.
func (s Stacktrace) Frames() []runtime.Frame {
if len(s.frames) == 0 {
return nil
}
out := make([]runtime.Frame, 0, len(s.frames))
frames := runtime.CallersFrames([]uintptr(s.frames))
for {
frame, more := frames.Next()
out = append(out, frame)
if !more {
break
}
}
return out
}
// String returns a string representing the top-most frame of the stack.
func (s Stacktrace) String() string {
if len(s.frames) == 0 {
return ""
}
frame := s.Frame()
file, dir := filepath.Base(frame.File), filepath.Dir(frame.File)
dir = filepath.Base(dir) // only want the first dirname, ie the pkg name
return fmt.Sprintf("%s/%s:%d", dir, file, frame.Line)
}
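// Illustrative call-site sketch (not part of the package API):
//
//	st := newStacktrace(0)   // skip 0: the trace starts at the immediate caller
//	fmt.Println(st.String()) // e.g. "merr/stack.go:42" — top frame only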
// FullString returns the full stack trace.
func (s Stacktrace) FullString() string {
sb := new(strings.Builder)
tw := tabwriter.NewWriter(sb, 0, 4, 4, ' ', 0)
for _, frame := range s.Frames() {
file := fmt.Sprintf("%s:%d", frame.File, frame.Line)
fmt.Fprintf(tw, "%s\t%s\n", file, frame.Function)
}
if err := tw.Flush(); err != nil {
panic(err)
}
return sb.String()
}
| newStacktrace |
09.py | from random import randrange
from time import time
def bubble_sort(arr):
for i in range(len(arr)):
for j in range(len(arr)-1, i, -1):
if arr[j] < arr[j-1]:
                # swap the elements
arr[j], arr[j-1] = arr[j-1], arr[j]
return arr
def opt_bubble_sort(arr): | :
swap = False
for i in range(len(arr)-1):
if arr[i] > arr[i+1]:
arr[i], arr[i+1] = arr[i+1], arr[i]
swap = True
if not swap:
break
swap = False
        for j in range(len(arr)-1, 0, -1):
            if arr[j] < arr[j-1]:
                # swap the elements
                arr[j], arr[j-1] = arr[j-1], arr[j]
                swap = True
return arr
# measure the algorithm's running time on a random array
def check_time_in_random_arr(f):
arr = [randrange(100) for i in range(1100)]
start = time()
f(arr)
end = time()
return end - start
# measure the algorithm's running time on an already sorted array
def check_time(f):
arr = [i for i in range(1100)]
start = time()
f(arr)
end = time()
return end - start
bubble_sort_time = check_time(bubble_sort)
opt_bubble_sort_time = check_time(opt_bubble_sort)
bubble_sort_time2 = check_time_in_random_arr(bubble_sort)
opt_bubble_sort_time2 = check_time_in_random_arr(opt_bubble_sort)
print('''
Running time on an already sorted array:\n
Plain bubble sort: {}\n
Optimized bubble sort: {}\n
Running time on a random array:\n
Plain bubble sort: {}\n
Optimized bubble sort: {}'''.format(bubble_sort_time, opt_bubble_sort_time, bubble_sort_time2, opt_bubble_sort_time2))
|
while True |
idbobjectstore_get3.any.js | // META: title=IDBObjectStore.get() - key is a Date
// META: script=support.js
// @author Microsoft <https://www.microsoft.com>
"use strict";
let db;
const t = async_test();
const record = { key: new Date(), property: "data" }; |
const open_rq = createdb(t);
open_rq.onupgradeneeded = event => {
db = event.target.result;
db.createObjectStore("store", { keyPath: "key" })
.add(record);
};
open_rq.onsuccess = event => {
const rq = db.transaction("store")
.objectStore("store")
.get(record.key);
rq.onsuccess = t.step_func(event => {
assert_equals(event.target.result.key.valueOf(), record.key.valueOf());
assert_equals(event.target.result.property, record.property);
t.done();
});
}; | |
android.py | # Copyright (C) 2017 shimoda [email protected]
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from __future__ import print_function
__author__ = 'Damon Kohler <[email protected]>'
import sys
import time
import collections
import json
import os
import socket
from logging import warning as warn
PORT = os.environ.get('AP_PORT')
HOST = os.environ.get('AP_HOST')
HANDSHAKE = os.environ.get('AP_HANDSHAKE')
Result = collections.namedtuple('Result', 'id,result,error')
class Android(object):
def __init__(self, addr=None):
if addr is None:
addr = HOST, PORT
if True:
try:
self.conn = socket.create_connection(addr)
except:
self.conn = self.launchSL4A(addr)
if sys.version_info[0] == 2:
self.client = self.conn.makefile('rw')
else:
self.client = self.conn.makefile('rw', encoding='utf-8')
self.id = 0
if HANDSHAKE is not None:
self._authenticate(HANDSHAKE)
def _rpc(self, method, *args):
data = {'id': self.id,
'method': method,
'params': args}
request = json.dumps(data)
self.client.write(request+'\n')
self.client.flush()
response = self.client.readline()
self.id += 1
result = json.loads(response)
if result['error'] is not None:
print(result['error'])
# namedtuple doesn't work with unicode keys.
return Result(id=result['id'], result=result['result'],
error=result['error'], )
def __getattr__(self, name):
def rpc_call(*args):
return self._rpc(name, *args)
return rpc_call
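    # Illustrative sketch: any attribute access becomes an SL4A JSON-RPC call
    # (method names depend on the device's SL4A build — `makeToast` is an
    # assumed example):
    #
    #     droid = Android()
    #     droid.makeToast('Hello')  # sends {"method": "makeToast", "params": ["Hello"], ...}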
if True:
def launchSL4A(self, addr):
if addr[0] is None:
addr = ("127.0.0.1", addr[1])
if addr[1] is None:
|
sl4a = 'com.googlecode.android_scripting'
cmd = ('am start -a %s.action.LAUNCH_SERVER '
'--ei %s.extra.USE_SERVICE_PORT %s '
'%s/.activity.ScriptingLayerServiceLauncher '
% (sl4a, sl4a, addr[1], sl4a))
warn("launch SL4A with %s" % str(addr))
os.system(cmd)
time.sleep(2)
return socket.create_connection(addr)
# vi: et:ts=4:nowrap
| addr = (addr[0], "8888") |
main.py | '''Game main module.
Contains the entry point used by the run_game.py script.
Feel free to put all your game code here, or in other modules in this "gamelib"
package.
'''
import pygame
import pygame.display
import pygame.surface
import pygame.event
import pygame.image
import pygame.transform
from gamelib.scene import MainScene
from gamelib.sprite import Spritey
def main():
| ''' entry point of the game '''
pygame.init()
real_screen = pygame.display.set_mode([640*2, 480*2])
clock = pygame.time.Clock()
scene = MainScene()
while scene:
scene = scene.update()
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_q or event.type == pygame.QUIT:
scene = None
if scene:
scene.draw(real_screen)
pygame.display.flip()
clock.tick(60) |
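        # Scene-as-state-machine: update() returns the next scene, or None to
        # quit, so this loop doubles as the game's state transition logic.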
|
main_test.go | package main
import (
"bytes"
"fmt"
"io"
"os"
"strings"
"testing"
"github.com/pkg/errors"
)
var stderr io.Writer = os.Stderr
func pipeStderr(t *testing.T, r *os.File, wait chan<- struct{}) {
defer func() {
close(wait)
if err := r.Close(); err != nil {
t.Error(err)
}
}()
var buf [4096]byte
for {
n, err := r.Read(buf[:])
if err != nil && err != io.EOF {
t.Error(err)
return
}
n1, err1 := stderr.Write(buf[:n])
if err1 != nil {
t.Error(err1)
return
}
if n1 != n {
t.Error(io.ErrShortWrite)
return
}
if err == io.EOF {
return
}
}
}
func testStderr(t *testing.T, f func(), expected string) {
r, w, err := os.Pipe()
if err != nil {
panic(err)
}
realStderr := os.Stderr
os.Stderr = w
wait := make(chan struct{})
go pipeStderr(t, r, wait)
var buf bytes.Buffer
oldStderr := stderr
stderr = &buf
defer func() {
if err := w.Close(); err != nil {
t.Error(err)
}
<-wait
os.Stderr = realStderr
stderr = oldStderr
if actual := strings.Replace(buf.String(), "\r\n", "\n", -1); !strings.EqualFold(actual, expected) {
t.Error("Expected stderr does not match actual stderr.")
t.Errorf("expected: %q", expected)
t.Errorf("actual: %q", actual)
}
}()
f()
}
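// Note: while the test runs, the package-level `stderr` writer points at the
// in-memory buffer, so everything the pipe goroutine drains from the fake
// os.Stderr lands in `buf` for the final comparison.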
func testExit(t *testing.T, f func(), expectedCode int) {
type exited int
oldExit := exit
exit = func(code int) {
panic(exited(code))
}
defer func() {
exit = oldExit
if r := recover(); r == nil {
t.Errorf("Expected call to exit(%d), but exit was not called.", expectedCode)
} else if code, ok := r.(exited); ok {
if int(code) != expectedCode {
t.Errorf("Expected call to exit(%d), but exit(%d) was called.", expectedCode, int(code))
}
} else {
panic(r)
}
}()
f()
}
func | (t *testing.T) {
testStderr(t, func() {
testExit(t, usage, 2)
}, "usage: git last-modified [<options>] [[--] <path>...]\n"+
" -commit-date\n"+
" \tUse the commit date for the last commit this file was involved in instead of the author date.\n"+
" -n\tDry run. Implies -v. Don't modify any file modification times.\n"+
" -q\tQuiet. Don't warn about files specified on the command line that are not in Git.\n"+
" -v\tVerbose. Print each filename and modification time as they are processed.\n")
}
func TestCheckError(t *testing.T) {
testStderr(t, func() {
checkError(nil) // should not exit
}, "")
err := errors.New("test")
testStderr(t, func() {
testExit(t, func() {
checkError(err)
}, 1)
}, "git-last-modified: test\n")
*flagVerbose = true
defer func() {
*flagVerbose = false
}()
testStderr(t, func() {
testExit(t, func() {
checkError(err)
}, 1)
}, fmt.Sprintf("git-last-modified: test%+v\n", err.(interface{ StackTrace() errors.StackTrace }).StackTrace()))
}
| TestUsage |
es_sinker.rs | use crate::{BlockData, BlockSimplified, BlockWithMetadata};
use anyhow::Result;
use elasticsearch::http::response::Response;
use elasticsearch::indices::{
IndicesCreateParts, IndicesExistsParts, IndicesGetMappingParts, IndicesPutMappingParts,
};
use elasticsearch::{
BulkOperation, BulkOperations, BulkParts, DeleteByQueryParts, DeleteParts, Elasticsearch,
GetParts,
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use starcoin_crypto::HashValue;
use starcoin_logger::prelude::*;
use tokio::sync::RwLock;
#[derive(Clone, Debug)]
pub struct IndexConfig {
pub block_index: String,
pub uncle_block_index: String,
pub txn_info_index: String,
}
impl IndexConfig {
pub fn new_with_prefix(prefix: impl AsRef<str>) -> Self {
Self {
block_index: format!("{}.blocks", prefix.as_ref()),
uncle_block_index: format!("{}.uncle_blocks", prefix.as_ref()),
txn_info_index: format!("{}.txn_infos", prefix.as_ref()),
}
}
}
impl Default for IndexConfig {
fn default() -> Self {
Self {
block_index: "blocks".to_string(),
uncle_block_index: "uncle_blocks".to_string(),
txn_info_index: "txn_infos".to_string(),
}
}
}
#[derive(Debug, Default)]
pub struct EsSinker {
es: Elasticsearch,
config: IndexConfig,
state: RwLock<SinkState>,
}
#[derive(Clone, Debug, Default)]
struct SinkState {
tip: Option<LocalTipInfo>,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct LocalTipInfo {
pub block_hash: HashValue,
pub block_number: u64,
}
impl EsSinker {
pub fn new(es: Elasticsearch, config: IndexConfig) -> Self {
Self {
es,
config,
state: Default::default(),
}
}
async fn create_index_if_not_exists(&self, index: &str) -> Result<()> {
let exists = self
.es
.indices()
.exists(IndicesExistsParts::Index(&[index]))
.send()
.await?
.status_code()
.is_success();
if !exists {
self.es
.indices()
.create(IndicesCreateParts::Index(index))
.send()
.await?
.error_for_status_code()?;
}
Ok(())
}
/// init es indices
pub async fn init_indices(&self) -> Result<()> {
let block_index = self.config.block_index.as_str();
let uncle_block_index = self.config.uncle_block_index.as_str();
let txn_info_index = self.config.txn_info_index.as_str();
self.create_index_if_not_exists(block_index).await?;
self.create_index_if_not_exists(uncle_block_index).await?;
self.create_index_if_not_exists(txn_info_index).await?;
let tip = self.get_remote_tip_header().await?;
self.state.write().await.tip = tip.clone();
if let Some(tip_info) = tip {
info!(
"remote tips: {}, {}",
tip_info.block_hash, tip_info.block_number
);
}
Ok(())
}
pub async fn update_remote_tip_header(
&self,
block_hash: HashValue,
block_number: u64,
) -> Result<serde_json::Value> {
let tip_info = LocalTipInfo {
block_number,
block_hash,
};
let body = serde_json::json!({
"_meta": {
"tip": tip_info
}
});
let block_index = self.config.block_index.as_str(); | .indices()
.put_mapping(IndicesPutMappingParts::Index(&[block_index]))
.body(body)
.send()
.await?;
let resp = resp.error_for_status_code()?;
let data = resp.json().await?;
// self.state.write().await.tip = Some(tip_info);
Ok(data)
}
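    // The tip is persisted inside the block index's mapping `_meta` field,
    // e.g. `{"_meta": {"tip": {"block_hash": "...", "block_number": 42}}}`,
    // so it survives restarts without needing a dedicated state index.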
pub async fn update_local_tip_header(
&self,
block_hash: HashValue,
block_number: u64,
) -> Result<()> {
let tip_info = LocalTipInfo {
block_number,
block_hash,
};
self.state.write().await.tip = Some(tip_info);
Ok(())
}
pub async fn get_local_tip_header(&self) -> Result<Option<LocalTipInfo>> {
let tip = self.state.read().await.tip.clone();
Ok(tip)
}
async fn get_remote_tip_header(&self) -> Result<Option<LocalTipInfo>> {
let block_index = self.config.block_index.as_str();
let resp_data: Value = self
.es
.indices()
.get_mapping(IndicesGetMappingParts::Index(&[block_index]))
.send()
.await?
.error_for_status_code()?
.json()
.await?;
let v = resp_data[block_index]["mappings"]["_meta"]["tip"].clone();
let tip_info: Option<LocalTipInfo> = serde_json::from_value(v)?;
Ok(tip_info)
}
pub async fn rollback_to_last_block(&self) -> Result<()> {
let tip_header = self.get_local_tip_header().await?;
if tip_header.is_none() {
return Ok(());
}
let tip_header = tip_header.unwrap();
let block_index = self.config.block_index.as_str();
let block_id = tip_header.block_hash.to_string();
let data: Value = self
.es
.get(GetParts::IndexId(block_index, block_id.as_str()))
.send()
.await?
.error_for_status_code()?
.json()
.await?;
let parent_hash: HashValue = if data["found"].as_bool().unwrap() {
serde_json::from_value(data["_source"]["header"]["parent_hash"].clone())?
} else {
anyhow::bail!("cannot get block data with id {}", block_id);
};
// first, rollback tip header
let rollback_to = (parent_hash, tip_header.block_number - 1);
self.update_remote_tip_header(rollback_to.0, rollback_to.1)
.await?;
self.update_local_tip_header(rollback_to.0, rollback_to.1)
.await?;
// delete block
{
let resp = self
.es
.delete(DeleteParts::IndexId(block_index, block_id.as_str()))
.send()
.await?;
let exception = resp.exception().await?;
if let Some(ex) = exception {
anyhow::bail!("{}", serde_json::to_string(&ex)?);
}
}
// delete related txn infos.
{
let txn_info_index = self.config.txn_info_index.as_str();
let search_condition = serde_json::json!({
"query": {
"match": {
"block_hash": block_id,
}
}
});
let resp = self
.es
.delete_by_query(DeleteByQueryParts::Index(&[txn_info_index]))
.body(search_condition)
.send()
.await?;
// check response
if resp.status_code().is_client_error() || resp.status_code().is_server_error() {
let exception = resp.exception().await?;
if let Some(ex) = exception {
anyhow::bail!("{}", serde_json::to_string(&ex)?);
}
} else {
let delete_response: serde_json::Value = resp.json().await?;
let total_txn = delete_response["total"].as_u64();
let deleted_txns = delete_response["deleted"].as_u64();
info!(
"cleanup block {}, total txns: {:?}, delete {:?} txns",
block_id, total_txn, deleted_txns
);
}
}
info!(
"Rollback to block: {}, height: {}",
rollback_to.0, rollback_to.1
);
Ok(())
}
pub async fn repair_block(&self, block: BlockData) -> Result<()> {
self.bulk(vec![block]).await?;
Ok(())
}
// bulk insert data into es.
pub async fn bulk(&self, blocks: Vec<BlockData>) -> anyhow::Result<()> {
if blocks.is_empty() {
return Ok(());
}
let mut bulk_operations = BulkOperations::new();
let block_index = self.config.block_index.as_str();
let txn_info_index = self.config.txn_info_index.as_str();
let uncle_index = self.config.uncle_block_index.as_str();
for blockdata in blocks {
let BlockData { block, txns_data } = blockdata;
bulk_operations.push(
BulkOperation::index(BlockWithMetadata {
block: block.clone(),
metadata: txns_data[0].block_metadata.clone(),
})
.id(block.header.block_hash.to_string())
.index(block_index),
)?;
for txn_data in txns_data {
bulk_operations.push(
BulkOperation::index(txn_data.clone())
.id(txn_data.info.transaction_hash.to_string())
.index(txn_info_index),
)?;
}
//add uncle
if !block.uncles.is_empty() {
for uncle in block.uncles {
bulk_operations.push(
BulkOperation::index(BlockSimplified {
header: uncle.clone(),
uncle_block_number: block.header.number,
})
.id(uncle.block_hash.to_string())
.index(uncle_index),
)?;
}
}
}
let resp = self
.es
.bulk(BulkParts::None)
.body(vec![bulk_operations])
.send()
.await?;
EsSinker::check_status_code(resp).await
}
// bulk insert data into es.
pub async fn bulk_uncle(&self, uncle_blocks: Vec<BlockData>) -> anyhow::Result<()> {
if uncle_blocks.is_empty() {
return Ok(());
}
let mut bulk_operations = BulkOperations::new();
let block_index = self.config.uncle_block_index.as_str();
for blockdata in uncle_blocks {
let BlockData { block, txns_data } = blockdata;
bulk_operations.push(
BulkOperation::index(BlockWithMetadata {
block: block.clone(),
metadata: txns_data[0].block_metadata.clone(),
})
.id(block.header.block_hash.to_string())
.index(block_index),
)?;
}
let resp = self
.es
.bulk(BulkParts::None)
.body(vec![bulk_operations])
.send()
.await?;
EsSinker::check_status_code(resp).await
}
async fn check_status_code(resp: Response) -> anyhow::Result<()> {
// check response
if resp.status_code().is_client_error() || resp.status_code().is_server_error() {
let exception = resp.exception().await?;
if let Some(ex) = exception {
anyhow::bail!("{}", serde_json::to_string(&ex)?);
}
} else {
let bulk_response: serde_json::Value = resp.json().await?;
if let Some(true) = bulk_response["errors"].as_bool() {
anyhow::bail!(
"[es] bulk error: {}",
serde_json::to_string(&bulk_response)?
);
}
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::LocalTipInfo;
use crate::{EsSinker, IndexConfig};
use elasticsearch::http::transport::SingleNodeConnectionPool;
use elasticsearch::http::Url;
use elasticsearch::Elasticsearch;
use starcoin_crypto::HashValue;
use std::str::FromStr;
#[tokio::test(threaded_scheduler)]
#[ignore]
async fn test_update_tip_header() {
let es_url = "http://localhost:9200";
let transport = elasticsearch::http::transport::TransportBuilder::new(
SingleNodeConnectionPool::new(Url::from_str(es_url).unwrap()),
)
.build()
.unwrap();
let es = Elasticsearch::new(transport);
let sinker = EsSinker {
es,
config: IndexConfig::default(),
state: Default::default(),
};
let v = sinker.get_local_tip_header().await.unwrap();
assert!(v.is_none());
let tip_info = LocalTipInfo {
block_hash: HashValue::random(),
block_number: 1,
};
let _ = sinker
.update_remote_tip_header(tip_info.block_hash, tip_info.block_number)
.await
.unwrap();
let v = sinker.get_local_tip_header().await.unwrap().unwrap();
assert_eq!(v, tip_info);
}
} | let resp = self
.es |
proto_validation.go | /*
* Copyright (c) Facebook, Inc. and its affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
package servicers
import (
"errors"
"magma/orc8r/cloud/go/serde"
stateservice "magma/orc8r/cloud/go/services/state"
"magma/orc8r/lib/go/protos"
"github.com/thoas/go-funk"
)
// ValidateGetStatesRequest checks that all required fields exist
func ValidateGetStatesRequest(req *protos.GetStatesRequest) error {
if err := enforceNetworkID(req.NetworkID); err != nil {
return err
}
if funk.IsEmpty(req.Ids) && funk.IsEmpty(req.TypeFilter) && funk.IsEmpty(req.IdFilter) {
return errors.New("at least one filter criteria must be specified in the request")
}
return nil
}
// ValidateDeleteStatesRequest checks that all required fields exist
func ValidateDeleteStatesRequest(req *protos.DeleteStatesRequest) error {
if err := enforceNetworkID(req.NetworkID); err != nil {
return err
}
if funk.IsEmpty(req.Ids) {
return errors.New("States value must be specified and non-empty")
}
return nil
}
// ValidateSyncStatesRequest checks that all required fields exist
func ValidateSyncStatesRequest(req *protos.SyncStatesRequest) error {
if req.GetStates() == nil || len(req.GetStates()) == 0 {
return errors.New("States value must be specified and non-empty")
}
return nil
}
// PartitionStatesBySerializability checks that each state is deserializable.
// If a state is not deserializable, we will send back the states type, key, and error.
func PartitionStatesBySerializability(req *protos.ReportStatesRequest) ([]*protos.State, []*protos.IDAndError, error) {
validatedStates := []*protos.State{} | invalidStates := []*protos.IDAndError{}
states := req.GetStates()
if states == nil || len(states) == 0 {
return nil, nil, errors.New("States value must be specified and non-empty")
}
for _, state := range states {
model, err := serde.Deserialize(stateservice.SerdeDomain, state.GetType(), state.GetValue())
if err != nil {
stateAndError := &protos.IDAndError{
Type: state.Type,
DeviceID: state.DeviceID,
Error: err.Error(), // deserialization error
}
invalidStates = append(invalidStates, stateAndError)
} else {
if err := model.(serde.ValidateableBinaryConvertible).ValidateModel(); err != nil {
stateAndError := &protos.IDAndError{
Type: state.Type,
DeviceID: state.DeviceID,
Error: err.Error(), // validation error
}
invalidStates = append(invalidStates, stateAndError)
continue
}
validatedStates = append(validatedStates, state)
}
}
return validatedStates, invalidStates, nil
}
func enforceNetworkID(networkID string) error {
if len(networkID) == 0 {
return errors.New("Network ID must be specified")
}
return nil
} | |
salesforce.js | 'use strict'
const logger = require('winston')
const jsforce = require('jsforce')
const _ = require('lodash')
const config = require('../../config/server')
const DEVICE_FIELD_NAME = `${config.salesforce.namespace}__XI_Device_ID__c`
const DEVICE_FIELD_NAME_WITHOUT_XI = `${config.salesforce.namespace}__Device_ID__c`
const END_USER_FIELD_NAME = `${config.salesforce.namespace}__XI_End_User_ID__c`
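// e.g. with a hypothetical namespace "acme", DEVICE_FIELD_NAME resolves to
// "acme__XI_Device_ID__c" — the usual <namespace>__<Field_Name>__c form for
// Salesforce custom fields.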
const salesforce = {
login () {
if (!this.loggedIn) {
if (!(config.salesforce.user && config.salesforce.pass && config.salesforce.token)) {
this.loggedIn = Promise.reject('Environment variables are missing')
} else {
this.connection = new jsforce.Connection()
this.loggedIn = this.connection.login(config.salesforce.user, `${config.salesforce.pass}${config.salesforce.token}`)
}
this.loggedIn
.then(() => logger.info('salesforce#login: success'))
.catch((err) => logger.error('salesforce#login: error', err))
}
return this.loggedIn
},
/**
* @param {Array} assets
*/
addAssets (assets) {
assets = assets.map((a) => ({
Name: a.name || a.serialNumber,
SerialNumber: a.serialNumber,
[DEVICE_FIELD_NAME_WITHOUT_XI]: a.id || a.deviceId,
Contact: { [END_USER_FIELD_NAME]: a.organizationId }
}))
return this.login()
.then(() => this.connection.sobject('Asset').upsertBulk(assets, DEVICE_FIELD_NAME_WITHOUT_XI))
.then((results) => {
results.forEach((result, idx) => {
if (result.success) {
logger.info('Salesforce #addAssets', `inserted successfully: ${assets[idx].SerialNumber}`)
} else {
throw result
}
})
})
.catch((err) => {
logger.error('Salesforce #addAssets', err)
throw new Error(err)
})
},
/**
* @param {Array} cases
*/
addCases (cases) {
cases = cases.map((c) => ({
Subject: c.subject,
Description: c.description,
[DEVICE_FIELD_NAME]: c.id || c.deviceId
}))
return this.login()
.then(() => this.connection.sobject('Case').insert(cases))
.then((results) => {
results.forEach((result, idx) => {
if (!result.success) {
throw result
}
logger.info('Salesforce #addCases', `inserted successfully: ${cases[idx].Subject}`)
})
})
.catch((err) => {
logger.error('Salesforce #addCases', err)
})
},
/**
* @param {Array} contacts
*/
addContacts (contacts) {
    contacts = _.uniqBy(contacts.map((c) => ({
      Email: config.salesforce.user,
      [END_USER_FIELD_NAME]: c.organizationId
    })), END_USER_FIELD_NAME)
const chunksOfContacts = _.chunk(contacts, 10)
return this.login()
.then(() => Promise.all(chunksOfContacts.map((chunk) => this.connection.sobject('Contact').upsert(chunk, END_USER_FIELD_NAME))))
.then((chunkOfResults) => _.flatten(chunkOfResults))
.then((results) => {
results.forEach((result, idx) => { | })
})
.catch((err) => {
logger.error('Salesforce #addContacts', err)
})
},
/**
* @param {String} id
*/
retrieveContact (id) {
return this.login()
.then(() => this.connection.sobject('Contact').retrieve(id))
},
/**
* @return {Promise} user email as a promise
*/
getUserEmail () {
return this.login()
.then(() => this.connection.query(`SELECT Id, Email FROM User WHERE Id = '${this.connection.userInfo.id}'`))
.then((result) => result.records[0].Email)
}
}
module.exports = salesforce | if (!result.success) {
throw result
}
logger.info('Salesforce #addContacts', `inserted successfully: ${JSON.stringify(contacts[idx])}`) |
bin.rs | use fr::{Simplify, C, Error, Program};
use clap::{clap_app, crate_version, AppSettings};
use std::{
fs::{read_to_string, write},
process::exit
};
enum Target {
C, BrainFuck
}
fn | () -> Result<(), Error> {
let matches = clap_app!(fr =>
(version: crate_version!())
(author: "Adam McDaniel <[email protected]>")
(about: "Compiles code written in the Free programming language")
(@arg input: +takes_value +required "Path to free file to compile")
(@arg output: +takes_value "Path to output file")
// (@arg leak_check: --leakcheck "Add memory leak checks")
(@group target =>
(@arg c: -c --c "Compile to C")
(@arg bf: -b --bf "Compile to SMPL/Brainfuck")
)
)
.setting(AppSettings::ArgRequiredElseHelp)
.get_matches();
let target = if matches.is_present("bf") { Target::BrainFuck }
else { Target::C };
let optimization = 10;
// let leak_check = matches.is_present("leak_check");
let output_file = match matches.value_of("output") {
Some(file) => file,
None => match target {
Target::C => "out.c",
Target::BrainFuck => "out.smpl",
}
};
if let Some(file) = matches.value_of("input") {
if let Ok(contents) = read_to_string(file) {
let compiled = optimize(match Program::from(contents).compile() {
Ok(c) => c,
Err(e) => {
println!("Could not compile program: {:#?}", e);
exit(1);
}
}, optimization);
let output_contents = match target {
Target::C => C::simplify(compiled),
Target::BrainFuck => compiled
};
            match write(&output_file, &output_contents) {
                Ok(_) => println!("Successfully compiled program to {}", output_file),
                Err(e) => {
                    println!("Could not write output file: {:?}", e);
                    exit(1);
                }
            }
}
}
Ok(())
}
pub fn optimize(s: impl ToString, level: usize) -> String {
let mut compiled = s.to_string().chars().filter(|ch| ['>', '<', ',', '.', '[', ']', '+', '-', '*', '?', '&'].contains(ch)).collect::<String>();
let original_len = compiled.len();
for n in 1..level+1 {
let to = ">".repeat(n);
let back = "<".repeat(n);
let move1 = to.clone() + &back;
let move2 = back + &to;
for _ in 0..10 {
compiled = compiled.replace(&move1, "").replace(&move2, "");
}
}
compiled
} | main |
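// Illustrative effect of `optimize` (assumed input/output pair): "><" and
// "<>" round trips are no-op pointer moves, so e.g.
//
//     assert_eq!(optimize("+>><<-", 10), "+-");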
test_ValWatcher.py | import pytest
from riotwatcher import ValWatcher
@pytest.mark.val | class TestValWatcher:
def test_require_api_key(self):
with pytest.raises(ValueError):
ValWatcher(None)
def test_allows_positional_api_key(self):
ValWatcher("RGAPI-this-is-a-fake")
def test_allows_keyword_api_key(self):
ValWatcher(api_key="RGAPI-this-is-a-fake") | @pytest.mark.usefixtures("reset_globals") |
fmr.rs | #[doc = "Register `FMR` reader"]
pub struct R(crate::R<FMR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<FMR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<FMR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<FMR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `FMR` writer"]
pub struct W(crate::W<FMR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<FMR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<FMR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<FMR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `ENCF0` reader - ENable Compare Fault Channel 0"]
pub struct ENCF0_R(crate::FieldReader<bool, bool>);
impl ENCF0_R {
pub(crate) fn new(bits: bool) -> Self {
ENCF0_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ENCF0_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ENCF0` writer - ENable Compare Fault Channel 0"]
pub struct ENCF0_W<'a> {
w: &'a mut W,
}
impl<'a> ENCF0_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
#[doc = "Field `ENCF1` reader - ENable Compare Fault Channel 1"]
pub struct ENCF1_R(crate::FieldReader<bool, bool>);
impl ENCF1_R {
pub(crate) fn new(bits: bool) -> Self {
ENCF1_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ENCF1_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ENCF1` writer - ENable Compare Fault Channel 1"]
pub struct ENCF1_W<'a> {
w: &'a mut W,
}
impl<'a> ENCF1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
self.w
}
}
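// Typical svd2rust usage sketch (the peripheral handle `pwm` is an assumed name):
//
//     pwm.fmr.write(|w| w.encf0().set_bit().encf1().clear_bit());
//     let fault0_enabled = pwm.fmr.read().encf0().bit_is_set();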
impl R {
#[doc = "Bit 0 - ENable Compare Fault Channel 0"]
#[inline(always)]
pub fn encf0(&self) -> ENCF0_R {
ENCF0_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - ENable Compare Fault Channel 1"]
#[inline(always)]
pub fn encf1(&self) -> ENCF1_R {
ENCF1_R::new(((self.bits >> 1) & 0x01) != 0)
}
} | pub fn encf0(&mut self) -> ENCF0_W {
ENCF0_W { w: self }
}
#[doc = "Bit 1 - ENable Compare Fault Channel 1"]
#[inline(always)]
pub fn encf1(&mut self) -> ENCF1_W {
ENCF1_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Fault Mode Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmr](index.html) module"]
pub struct FMR_SPEC;
impl crate::RegisterSpec for FMR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [fmr::R](R) reader structure"]
impl crate::Readable for FMR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [fmr::W](W) writer structure"]
impl crate::Writable for FMR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets FMR to value 0"]
impl crate::Resettable for FMR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
} | impl W {
#[doc = "Bit 0 - ENable Compare Fault Channel 0"]
#[inline(always)] |
reducer.ts | import { Action, Location } from 'history';
import { AnyAction, Reducer } from 'redux';
import { LOCATION_CHANGE } from './actions';
export type RouterState = {
location?: Location | null;
action?: Action | null;
previousLocations?: { location?: Location | null; action?: Action | null }[];
};
export const createRouterReducer = ({ savePreviousLocations = 0 }): Reducer => {
const initialState: RouterState = {
location: null,
action: null,
};
// eslint-disable-next-line no-restricted-globals
const numLocationToTrack = isNaN(savePreviousLocations) ? 0 : savePreviousLocations;
if (numLocationToTrack) initialState.previousLocations = []; | if (type === LOCATION_CHANGE) {
const { location, action } = payload || {};
const previousLocations = numLocationToTrack // @ts-ignore
? [{ location, action }, ...state.previousLocations.slice(0, numLocationToTrack)]
: undefined;
return { ...state, location, action, previousLocations };
}
return state;
};
}; |
return (state = initialState, { type, payload } = {} as AnyAction) => { |
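// Editor's note: an illustrative wiring of createRouterReducer into a plain Redux
// store; the store setup below is an assumption about the host app, not this file.
import { createStore, combineReducers } from 'redux';

const routerReducer = createRouterReducer({ savePreviousLocations: 5 });
const store = createStore(combineReducers({ router: routerReducer }));
// Dispatching LOCATION_CHANGE updates `location`/`action` and, because
// savePreviousLocations > 0, prepends the new entry to `previousLocations`.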
common.py | # YOLOv5 common modules
import math
from copy import copy
from pathlib import Path
import numpy as np
import pandas as pd
import requests
import torch
import torch.nn as nn
from PIL import Image
from torch.cuda import amp
from utils.datasets import letterbox
from utils.general import non_max_suppression, make_divisible, scale_coords, increment_path, xyxy2xywh, save_one_box
from utils.plots import colors, plot_one_box
from utils.torch_utils import time_synchronized
def autopad(k, p=None): # kernel, padding
# Pad to 'same'
if p is None:
p = k // 2 if isinstance(k, int) else [x // 2 for x in k] # auto-pad
return p
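# e.g. autopad(3) -> 1 and autopad((5, 3)) -> [2, 1], giving 'same' output size at stride 1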
def DWConv(c1, c2, k=1, s=1, act=True):
# Depthwise convolution
return Conv(c1, c2, k, s, g=math.gcd(c1, c2), act=act)
class Conv(nn.Module):
# Standard convolution
def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups
super(Conv, self).__init__()
self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False)
self.bn = nn.BatchNorm2d(c2)
self.act = nn.SiLU() if act is True else (act if isinstance(act, nn.Module) else nn.Identity())
def forward(self, x):
return self.act(self.bn(self.conv(x)))
def fuseforward(self, x):
return self.act(self.conv(x))
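# e.g. Conv(3, 32, k=3, s=2) maps (b,3,640,640) -> (b,32,320,320) via autopad p=1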
class TransformerLayer(nn.Module):
# Transformer layer https://arxiv.org/abs/2010.11929 (LayerNorm layers removed for better performance)
def __init__(self, c, num_heads):
super().__init__()
self.q = nn.Linear(c, c, bias=False)
self.k = nn.Linear(c, c, bias=False)
self.v = nn.Linear(c, c, bias=False)
self.ma = nn.MultiheadAttention(embed_dim=c, num_heads=num_heads)
self.fc1 = nn.Linear(c, c, bias=False)
self.fc2 = nn.Linear(c, c, bias=False)
def forward(self, x):
x = self.ma(self.q(x), self.k(x), self.v(x))[0] + x
x = self.fc2(self.fc1(x)) + x
return x
class TransformerBlock(nn.Module):
# Vision Transformer https://arxiv.org/abs/2010.11929
def | (self, c1, c2, num_heads, num_layers):
super().__init__()
self.conv = None
if c1 != c2:
self.conv = Conv(c1, c2)
self.linear = nn.Linear(c2, c2) # learnable position embedding
self.tr = nn.Sequential(*[TransformerLayer(c2, num_heads) for _ in range(num_layers)])
self.c2 = c2
def forward(self, x):
if self.conv is not None:
x = self.conv(x)
b, _, w, h = x.shape
p = x.flatten(2)
p = p.unsqueeze(0)
p = p.transpose(0, 3)
p = p.squeeze(3)
e = self.linear(p)
x = p + e
x = self.tr(x)
x = x.unsqueeze(3)
x = x.transpose(0, 3)
x = x.reshape(b, self.c2, w, h)
return x
class Bottleneck(nn.Module):
# Standard bottleneck
def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): # ch_in, ch_out, shortcut, groups, expansion
super(Bottleneck, self).__init__()
c_ = int(c2 * e) # hidden channels
self.cv1 = Conv(c1, c_, 1, 1)
self.cv2 = Conv(c_, c2, 3, 1, g=g)
self.add = shortcut and c1 == c2
def forward(self, x):
return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x))
class BottleneckCSP(nn.Module):
# CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks
def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion
super(BottleneckCSP, self).__init__()
c_ = int(c2 * e) # hidden channels
self.cv1 = Conv(c1, c_, 1, 1)
self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False)
self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False)
self.cv4 = Conv(2 * c_, c2, 1, 1)
self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3)
self.act = nn.LeakyReLU(0.1, inplace=True)
self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])
def forward(self, x):
y1 = self.cv3(self.m(self.cv1(x)))
y2 = self.cv2(x)
return self.cv4(self.act(self.bn(torch.cat((y1, y2), dim=1))))
class C3(nn.Module):
# CSP Bottleneck with 3 convolutions
def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion
super(C3, self).__init__()
c_ = int(c2 * e) # hidden channels
self.cv1 = Conv(c1, c_, 1, 1)
self.cv2 = Conv(c1, c_, 1, 1)
self.cv3 = Conv(2 * c_, c2, 1) # act=FReLU(c2)
self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])
# self.m = nn.Sequential(*[CrossConv(c_, c_, 3, 1, g, 1.0, shortcut) for _ in range(n)])
def forward(self, x):
return self.cv3(torch.cat((self.m(self.cv1(x)), self.cv2(x)), dim=1))
class C3TR(C3):
# C3 module with TransformerBlock()
def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5):
super().__init__(c1, c2, n, shortcut, g, e)
c_ = int(c2 * e)
self.m = TransformerBlock(c_, c_, 4, n)
class SPP(nn.Module):
# Spatial pyramid pooling layer used in YOLOv3-SPP
def __init__(self, c1, c2, k=(5, 9, 13)):
super(SPP, self).__init__()
c_ = c1 // 2 # hidden channels
self.cv1 = Conv(c1, c_, 1, 1)
self.cv2 = Conv(c_ * (len(k) + 1), c2, 1, 1)
self.m = nn.ModuleList([nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2) for x in k])
def forward(self, x):
x = self.cv1(x)
return self.cv2(torch.cat([x] + [m(x) for m in self.m], 1))
class Focus(nn.Module):
# Focus wh information into c-space
def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups
super(Focus, self).__init__()
self.conv = Conv(c1 * 4, c2, k, s, p, g, act)
# self.contract = Contract(gain=2)
def forward(self, x): # x(b,c,w,h) -> y(b,4c,w/2,h/2)
return self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1))
# return self.conv(self.contract(x))
class Contract(nn.Module):
# Contract width-height into channels, i.e. x(1,64,80,80) to x(1,256,40,40)
def __init__(self, gain=2):
super().__init__()
self.gain = gain
def forward(self, x):
N, C, H, W = x.size()  # assert (H % s == 0) and (W % s == 0), 'Indivisible gain'
s = self.gain
x = x.view(N, C, H // s, s, W // s, s) # x(1,64,40,2,40,2)
x = x.permute(0, 3, 5, 1, 2, 4).contiguous() # x(1,2,2,64,40,40)
return x.view(N, C * s * s, H // s, W // s) # x(1,256,40,40)
class Expand(nn.Module):
# Expand channels into width-height, i.e. x(1,64,80,80) to x(1,16,160,160)
def __init__(self, gain=2):
super().__init__()
self.gain = gain
def forward(self, x):
N, C, H, W = x.size()  # assert C % s ** 2 == 0, 'Indivisible gain'
s = self.gain
x = x.view(N, s, s, C // s ** 2, H, W) # x(1,2,2,16,80,80)
x = x.permute(0, 3, 4, 1, 5, 2).contiguous() # x(1,16,80,2,80,2)
return x.view(N, C // s ** 2, H * s, W * s) # x(1,16,160,160)
class Concat(nn.Module):
# Concatenate a list of tensors along dimension
def __init__(self, dimension=1):
super(Concat, self).__init__()
self.d = dimension
def forward(self, x):
return torch.cat(x, self.d)
class NMS(nn.Module):
# Non-Maximum Suppression (NMS) module
conf = 0.25 # confidence threshold
iou = 0.45 # IoU threshold
classes = None # (optional list) filter by class
max_det = 1000 # maximum number of detections per image
def __init__(self):
super(NMS, self).__init__()
def forward(self, x):
return non_max_suppression(x[0], self.conf, iou_thres=self.iou, classes=self.classes, max_det=self.max_det)
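# Typically appended after the Detect() head; x[0] is the raw prediction tensor and
# the output is a per-image list of (n, 6) tensors: xyxy box, confidence, class index.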
class AutoShape(nn.Module):
# input-robust model wrapper for passing cv2/np/PIL/torch inputs. Includes preprocessing, inference and NMS
conf = 0.25 # NMS confidence threshold
iou = 0.45 # NMS IoU threshold
classes = None # (optional list) filter by class
max_det = 1000 # maximum number of detections per image
def __init__(self, model):
super(AutoShape, self).__init__()
self.model = model.eval()
def autoshape(self):
print('AutoShape already enabled, skipping... ') # model already converted to model.autoshape()
return self
@torch.no_grad()
def forward(self, imgs, size=640, augment=False, profile=False):
# Inference from various sources. For height=640, width=1280, RGB images example inputs are:
# filename: imgs = 'data/images/zidane.jpg'
# URI: = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/zidane.jpg'
# OpenCV: = cv2.imread('image.jpg')[:,:,::-1] # HWC BGR to RGB x(640,1280,3)
# PIL: = Image.open('image.jpg') # HWC x(640,1280,3)
# numpy: = np.zeros((640,1280,3)) # HWC
# torch: = torch.zeros(16,3,320,640) # BCHW (scaled to size=640, 0-1 values)
# multiple: = [Image.open('image1.jpg'), Image.open('image2.jpg'), ...] # list of images
t = [time_synchronized()]
p = next(self.model.parameters()) # for device and type
if isinstance(imgs, torch.Tensor): # torch
with amp.autocast(enabled=p.device.type != 'cpu'):
return self.model(imgs.to(p.device).type_as(p), augment, profile) # inference
# Pre-process
n, imgs = (len(imgs), imgs) if isinstance(imgs, list) else (1, [imgs]) # number of images, list of images
shape0, shape1, files = [], [], [] # image and inference shapes, filenames
for i, im in enumerate(imgs):
f = f'image{i}' # filename
if isinstance(im, str): # filename or uri
im, f = np.asarray(Image.open(requests.get(im, stream=True).raw if im.startswith('http') else im)), im
elif isinstance(im, Image.Image): # PIL Image
im, f = np.asarray(im), getattr(im, 'filename', f) or f
files.append(Path(f).with_suffix('.jpg').name)
if im.shape[0] < 5: # image in CHW
im = im.transpose((1, 2, 0)) # reverse dataloader .transpose(2, 0, 1)
im = im[:, :, :3] if im.ndim == 3 else np.tile(im[:, :, None], 3) # enforce 3ch input
s = im.shape[:2] # HWC
shape0.append(s) # image shape
g = (size / max(s)) # gain
shape1.append([y * g for y in s])
imgs[i] = im if im.data.contiguous else np.ascontiguousarray(im) # update
shape1 = [make_divisible(x, int(self.stride.max())) for x in np.stack(shape1, 0).max(0)] # inference shape
x = [letterbox(im, new_shape=shape1, auto=False)[0] for im in imgs] # pad
x = np.stack(x, 0) if n > 1 else x[0][None] # stack
x = np.ascontiguousarray(x.transpose((0, 3, 1, 2))) # BHWC to BCHW
x = torch.from_numpy(x).to(p.device).type_as(p) / 255. # uint8 to fp16/32
t.append(time_synchronized())
with amp.autocast(enabled=p.device.type != 'cpu'):
# Inference
y = self.model(x, augment, profile)[0] # forward
t.append(time_synchronized())
# Post-process
y = non_max_suppression(y, self.conf, iou_thres=self.iou, classes=self.classes, max_det=self.max_det) # NMS
for i in range(n):
scale_coords(shape1, y[i][:, :4], shape0[i])
t.append(time_synchronized())
return Detections(imgs, y, files, t, self.names, x.shape)
class Detections:
# detections class for YOLOv5 inference results
def __init__(self, imgs, pred, files, times=None, names=None, shape=None):
super(Detections, self).__init__()
d = pred[0].device # device
gn = [torch.tensor([*[im.shape[i] for i in [1, 0, 1, 0]], 1., 1.], device=d) for im in imgs] # normalizations
self.imgs = imgs # list of images as numpy arrays
self.pred = pred # list of tensors pred[0] = (xyxy, conf, cls)
self.names = names # class names
self.files = files # image filenames
self.xyxy = pred # xyxy pixels
self.xywh = [xyxy2xywh(x) for x in pred] # xywh pixels
self.xyxyn = [x / g for x, g in zip(self.xyxy, gn)] # xyxy normalized
self.xywhn = [x / g for x, g in zip(self.xywh, gn)] # xywh normalized
self.n = len(self.pred) # number of images (batch size)
self.t = tuple((times[i + 1] - times[i]) * 1000 / self.n for i in range(3)) if times is not None else None  # timestamps (ms)
self.s = shape # inference BCHW shape
def display(self, pprint=False, show=False, save=False, crop=False, render=False, save_dir=Path('')):
for i, (im, pred) in enumerate(zip(self.imgs, self.pred)):
s = f'image {i + 1}/{len(self.pred)}: {im.shape[0]}x{im.shape[1]} '  # avoid shadowing built-in str
if pred is not None:
for c in pred[:, -1].unique():
n = (pred[:, -1] == c).sum() # detections per class
str += f"{n} {self.names[int(c)]}{'s' * (n > 1)}, " # add to string
if show or save or render or crop:
for *box, conf, cls in pred: # xyxy, confidence, class
label = f'{self.names[int(cls)]} {conf:.2f}'
if crop:
save_one_box(box, im, file=save_dir / 'crops' / self.names[int(cls)] / self.files[i])
else: # all others
plot_one_box(box, im, label=label, color=colors(cls))
im = Image.fromarray(im.astype(np.uint8)) if isinstance(im, np.ndarray) else im # from np
if pprint:
print(s.rstrip(', '))
if show:
im.show(self.files[i]) # show
if save:
f = self.files[i]
im.save(save_dir / f) # save
print(f"{'Saved' * (i == 0)} {f}", end=',' if i < self.n - 1 else f' to {save_dir}\n')
if render:
self.imgs[i] = np.asarray(im)
def print(self):
self.display(pprint=True) # print results
print(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {tuple(self.s)}' % self.t)
def show(self):
self.display(show=True) # show results
def save(self, save_dir='runs/hub/exp'):
save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/hub/exp', mkdir=True) # increment save_dir
self.display(save=True, save_dir=save_dir) # save results
def crop(self, save_dir='runs/hub/exp'):
save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/hub/exp', mkdir=True) # increment save_dir
self.display(crop=True, save_dir=save_dir) # crop results
print(f'Saved results to {save_dir}\n')
def render(self):
self.display(render=True) # render results
return self.imgs
def pandas(self):
# return detections as pandas DataFrames, i.e. print(results.pandas().xyxy[0])
new = copy(self) # return copy
ca = 'xmin', 'ymin', 'xmax', 'ymax', 'confidence', 'class', 'name' # xyxy columns
cb = 'xcenter', 'ycenter', 'width', 'height', 'confidence', 'class', 'name' # xywh columns
for k, c in zip(['xyxy', 'xyxyn', 'xywh', 'xywhn'], [ca, ca, cb, cb]):
a = [[x[:5] + [int(x[5]), self.names[int(x[5])]] for x in x.tolist()] for x in getattr(self, k)] # update
setattr(new, k, [pd.DataFrame(x, columns=c) for x in a])
return new
def tolist(self):
# return a list of Detections objects, i.e. 'for result in results.tolist():'
x = [Detections([self.imgs[i]], [self.pred[i]], [self.files[i]], names=self.names, shape=self.s) for i in range(self.n)]  # pass files/names to the correct parameters
for d in x:
for k in ['imgs', 'pred', 'xyxy', 'xyxyn', 'xywh', 'xywhn']:
setattr(d, k, getattr(d, k)[0]) # pop out of list
return x
def __len__(self):
return self.n
class Classify(nn.Module):
# Classification head, i.e. x(b,c1,20,20) to x(b,c2)
def __init__(self, c1, c2, k=1, s=1, p=None, g=1): # ch_in, ch_out, kernel, stride, padding, groups
super(Classify, self).__init__()
self.aap = nn.AdaptiveAvgPool2d(1) # to x(b,c1,1,1)
self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g) # to x(b,c2,1,1)
self.flat = nn.Flatten()
def forward(self, x):
z = torch.cat([self.aap(y) for y in (x if isinstance(x, list) else [x])], 1) # cat if list
return self.flat(self.conv(z)) # flatten to x(b,c2) | __init__ |
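# Editor's note: an illustrative end-to-end use of the AutoShape/Detections pipeline
# above; loading via torch.hub is the usual entry point, and the model name is assumed.
#   model = torch.hub.load('ultralytics/yolov5', 'yolov5s')  # AutoShape-wrapped model
#   results = model('data/images/zidane.jpg', size=640)      # Detections instance
#   results.print()                                          # speed and per-class counts
#   df = results.pandas().xyxy[0]                            # per-image DataFrame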
ModuleProgressContent.tsx | import {PureComponent} from "react";
import React from "react";
import {PropsInterface} from "../../interfaces/interfaces/PropsInterface";
import {Card} from "../Card";
import {TitleSubtitleInterface} from "../../interfaces/interfaces/TitleSubtitleInterface";
import {ProgressLabel, ProgressLabelInterfaceProps} from "../Progress";
import {ListSeparator, ListSeparatorInterfacePropsVariant} from "../List";
import {Separator} from "../Separator";
export interface ModuleProgressContentInterfaceProps extends PropsInterface, TitleSubtitleInterface {
fluid?: boolean;
list: ModuleProgressContentInterfacePropsList[];
zebra?: boolean;
variant?: ListSeparatorInterfacePropsVariant;
}
export interface ModuleProgressContentInterfacePropsList extends ProgressLabelInterfaceProps {
separator?: boolean;
styleContent?: any;
}
export class | extends PureComponent<ModuleProgressContentInterfaceProps> {
render() {
const template = (
<ListSeparator
separator={this.props.separator}
variant={this.props.variant || "inline"}
default
zebra={this.props.zebra}
list={this.props.list}
data={(item: ModuleProgressContentInterfacePropsList) => (
<ProgressLabel title={item.title} subTitle={item.subTitle} label={item.label} value={item.value} style={{marginBottom: 5}}/>
)}
/>
);
if (this.props.card) {
return (
<Card
styleContent={this.props.styleContent}
className={this.props.className}
title={this.props.title}
subTitle={this.props.subTitle}
style={this.props.style}
fluid={this.props.fluid}
>
{this.props.children && (
<>
{this.props.children}
<Separator style={{marginLeft: -15, marginRight: -15}}/>
</>
)}
{template}
</Card>
)
}
return template;
}
}
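// Editor's note: an illustrative render of the component above; the prop values are
// invented for the example and mirror the fields consumed by ProgressLabel.
// <ModuleProgressContent
//   card
//   title="Module progress"
//   subTitle="Current sprint"
//   variant="inline"
//   zebra
//   list={[
//     { title: 'Setup', label: '100%', value: 100 },
//     { title: 'API integration', label: '40%', value: 40 },
//   ]}
// />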
| ModuleProgressContent |
config.py | '''
Configuration object
====================
The :class:`Config` object is an instance of a modified Python ConfigParser.
See the `ConfigParser documentation
<http://docs.python.org/library/configparser.html>`_ for more information.
Kivy has a configuration file which determines the default settings. In
order to change these settings, you can alter this file manually or use
the Config object. Please see the :ref:`Configure Kivy` section for more
information.
Note: To avoid instances where the config settings do not work or they are
not applied before window creation (like setting an initial window size),
Config.set should be used before importing any modules that affect the
application window (i.e. importing Window). Ideally, these settings should
be declared right at the start of your main.py script.
Usage of the Config object
--------------------------
To read a configuration token from a particular section::
>>> from kivy.config import Config
>>> Config.getint('kivy', 'show_fps')
0
Change the configuration and save it::
>>> Config.set('postproc', 'retain_time', '50')
>>> Config.write()
.. versionchanged:: 1.7.1
The ConfigParser should work correctly with utf-8 now. The values are
converted from ascii to unicode only when needed. The method get() returns
utf-8 strings.
Available configuration tokens
------------------------------
.. |log_levels| replace:: 'debug', 'info', 'warning', 'error' or 'critical'
:kivy:
`desktop`: int, 0 or 1
This option controls desktop OS specific features, such as enabling
drag-able scroll-bar in scroll views, disabling of bubbles in
TextInput etc. 0 is disabled, 1 is enabled.
`exit_on_escape`: int, 0 or 1
Enables exiting kivy when escape is pressed.
0 is disabled, 1 is enabled.
`keyboard_layout`: string
Identifier of the layout to use.
`keyboard_mode`: string
Specifies the keyboard mode to use. It can be one of the following:
* '' - Let Kivy choose the best option for your current platform.
* 'system' - real keyboard.
* 'dock' - one virtual keyboard docked to a screen side.
* 'multi' - one virtual keyboard for every widget request.
* 'systemanddock' - virtual docked keyboard plus input from real
keyboard.
* 'systemandmulti' - analogous.
`log_dir`: string
Path of log directory.
`log_enable`: int, 0 or 1
Activate file logging. 0 is disabled, 1 is enabled.
`log_level`: string, one of |log_levels|
Set the minimum log level to use.
`log_name`: string
Format string to use for the filename of log file.
`window_icon`: string
Path of the window icon. Use this if you want to replace the default
pygame icon.
:postproc:
`double_tap_distance`: float
Maximum distance allowed for a double tap, normalized inside the range
0 - 1000.
`double_tap_time`: int
Time allowed for the detection of double tap, in milliseconds.
`ignore`: list of tuples
List of regions where new touches are ignored.
This configuration token can be used to resolve hotspot problems
with DIY hardware. The format of the list must be::
ignore = [(xmin, ymin, xmax, ymax), ...]
All the values must be inside the range 0 - 1.
`jitter_distance`: int
Maximum distance for jitter detection, normalized inside the range 0
- 1000.
`jitter_ignore_devices`: string, separated with commas
List of devices to ignore from jitter detection.
`retain_distance`: int
If the touch moves more than is indicated by retain_distance, it will
not be retained. Argument should be an int between 0 and 1000.
`retain_time`: int
Time allowed for a retain touch, in milliseconds.
`triple_tap_distance`: float
Maximum distance allowed for a triple tap, normalized inside the range
0 - 1000.
`triple_tap_time`: int
Time allowed for the detection of triple tap, in milliseconds.
:graphics:
`fbo`: string, one of 'hardware', 'software' or 'force-hardware'
Selects the FBO backend to use.
`fullscreen`: int or string, one of 0, 1, 'fake' or 'auto'
Activate fullscreen. If set to `1`, a resolution of `width`
times `height` pixels will be used.
If set to `auto`, your current display's resolution will be
used instead. This is most likely what you want.
If you want to place the window in another display,
use `fake` and adjust `width`, `height`, `top` and `left`.
`height`: int
Height of the :class:`~kivy.core.window.Window`, not used if
`fullscreen` is set to `auto`.
`left`: int
Left position of the :class:`~kivy.core.window.Window`.
`maxfps`: int, defaults to 60
Maximum FPS allowed.
`multisamples`: int, defaults to 2
Sets the `MultiSample Anti-Aliasing (MSAA)
<http://en.wikipedia.org/wiki/Multisample_anti-aliasing>`_ level.
Increasing this value results in smoother graphics but at the cost of
processing time.
.. note::
This feature is limited by device hardware support and will have no
effect on devices which do not support the level of MSAA requested.
`position`: string, one of 'auto' or 'custom'
Position of the window on your display. If `auto` is used, you have no
control of the initial position: `top` and `left` are ignored.
`show_cursor`: int, one of 0 or 1
Show the cursor on the screen.
`top`: int
Top position of the :class:`~kivy.core.window.Window`.
`resizable`: int, one of 0 or 1
If 0, the window will have a fixed size. If 1, the window will be
resizable.
`rotation`: int, one of 0, 90, 180 or 270
Rotation of the :class:`~kivy.core.window.Window`.
`width`: int
Width of the :class:`~kivy.core.window.Window`, not used if
`fullscreen` is set to `auto`.
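For example, forcing a fixed 1280x720 window from the config file
(the values here are illustrative)::

    [graphics]
    fullscreen = 0
    width = 1280
    height = 720
    resizable = 0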
:input:
You can create new input devices using this syntax::
# example of input provider instance
yourid = providerid,parameters
# example for tuio provider
default = tuio,127.0.0.1:3333
mytable = tuio,192.168.0.1:3334
.. seealso::
Check the providers in kivy.input.providers for the syntax to use
inside the configuration file.
:widgets:
`scroll_distance`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_distance`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_friction`: float
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_friction`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_timeout`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_timeout`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_stoptime`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_stoptime`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
`scroll_moves`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_moves`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
:modules:
You can activate modules with this syntax::
modulename =
Anything after the = will be passed to the module as arguments.
Check the specific module's documentation for a list of accepted
arguments.
.. versionchanged:: 1.8.0
`systemanddock` and `systemandmulti` has been added as possible values for
`keyboard_mode` in the kivy section. `exit_on_escape` has been added
to the kivy section.
.. versionchanged:: 1.2.0
`resizable` has been added to graphics section.
.. versionchanged:: 1.1.0
tuio no longer listens by default. Window icons are not copied to
user directory anymore. You can still set a new window icon by using the
``window_icon`` config setting.
.. versionchanged:: 1.0.8
`scroll_timeout`, `scroll_distance` and `scroll_friction` have been added.
`list_friction`, `list_trigger_distance` and `list_friction_bound`
have been removed. `keyboard_type` and `keyboard_layout` have been
removed from the widget. `keyboard_mode` and `keyboard_layout` have
been added to the kivy section.
'''
__all__ = ('Config', 'ConfigParser')
try:
from ConfigParser import ConfigParser as PythonConfigParser
except ImportError:
from configparser import RawConfigParser as PythonConfigParser
from os import environ
from os.path import exists
from kivy import kivy_config_fn
from kivy.logger import Logger, logger_config_update
from collections import OrderedDict
from kivy.utils import platform
from kivy.compat import PY2, string_types
from weakref import ref
_is_rpi = exists('/opt/vc/include/bcm_host.h')
# Version number of current configuration format
KIVY_CONFIG_VERSION = 10
Config = None
'''Kivy configuration object. Its :attr:`~kivy.config.ConfigParser.name` is
`'kivy'`
'''
class ConfigParser(PythonConfigParser, object):
'''Enhanced ConfigParser class that supports the addition of default
sections and default values.
By default, the kivy ConfigParser instance, :attr:`~kivy.config.Config`,
is given the name `'kivy'` and the ConfigParser instance used by App,
:meth:`~kivy.app.App.build_settings`, is given the name `'app'`.
:Parameters:
`name`: string
The name of the instance. See :attr:`name`. Defaults to `''`.
.. versionchanged:: 1.8.1
Each ConfigParser can now be named, :attr:`name`. You can get the
ConfigParser associated with a name using :meth:`get_configparser`.
In addition, you can now control the config values with
:class:`~kivy.properties.ConfigParserProperty`.
.. versionadded:: 1.0.7
'''
def __init__(self, name=''):
PythonConfigParser.__init__(self)
self._sections = OrderedDict()
self.filename = None
self._callbacks = []
self.name = name
def add_callback(self, callback, section=None, key=None):
'''Add a callback to be called when a specific section/key changed. If
you don't specify a section or a key, it will call the callback
for all section/keys changes.
Callbacks will receive 3 arguments: the section, key and value.
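Example::

    def on_maxfps(section, key, value):
        print('maxfps changed to', value)

    Config.add_callback(on_maxfps, 'graphics', 'maxfps')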
.. versionadded:: 1.4.1
'''
if section is None and key is not None:
raise Exception('You cannot specify a key without a section')
self._callbacks.append((callback, section, key))
def remove_callback(self, callback, section=None, key=None):
'''Removes a callback added with :meth:`add_callback`.
:meth:`remove_callback` must be called with the same parameters as
:meth:`add_callback`.
Raises a `ValueError` if not found.
.. versionadded:: 1.8.1
'''
self._callbacks.remove((callback, section, key))
def _do_callbacks(self, section, key, value):
for callback, csection, ckey in self._callbacks:
if csection is not None and csection != section:
continue
elif ckey is not None and ckey != key:
continue
callback(section, key, value)
def read(self, filename):
'''Read only one filename. In contrast to the original ConfigParser of
Python, this one is able to read only one file at a time. The last
read file will be used for the :meth:`write` method.
.. versionchanged:: 1.8.1
:meth:`read` now calls the callbacks if read changed any values.
'''
if not isinstance(filename, string_types):
raise Exception('Only one filename is accepted ({})'.format(
string_types.__name__))
self.filename = filename
# If we try to open directly the configuration file in utf-8,
# we correctly get the unicode value by default.
# But, when we try to save it again, all the values we didn't changed
# are still unicode, and then the PythonConfigParser internal do
# a str() conversion -> fail.
# Instead we currently do the conversion to utf-8 when values are
# "get()", but we internally store them in ascii.
#with codecs.open(filename, 'r', encoding='utf-8') as f:
# self.readfp(f)
old_vals = {sect: {k: v for k, v in self.items(sect)} for sect in
self.sections()}
PythonConfigParser.read(self, filename)
# when reading new file, sections/keys are only increased, not removed
f = self._do_callbacks
for section in self.sections():
if section not in old_vals: # new section
for k, v in self.items(section):
f(section, k, v)
continue
old_keys = old_vals[section]
for k, v in self.items(section): # just update new/changed keys
if k not in old_keys or v != old_keys[k]:
f(section, k, v)
def set(self, section, option, value):
'''Functions similarly to PythonConfigParser's set method, except that
the value is implicitly converted to a string.
'''
e_value = value
if not isinstance(value, string_types):
# might be boolean, int, etc.
e_value = str(value)
if PY2:
if isinstance(value, unicode):
e_value = value.encode('utf-8')
ret = PythonConfigParser.set(self, section, option, e_value)
self._do_callbacks(section, option, value)
return ret
def setall(self, section, keyvalues):
'''Set a lot of keys/values in one section at the same time.
'''
for key, value in keyvalues.items():
self.set(section, key, value)
def get(self, section, option, **kwargs):
value = PythonConfigParser.get(self, section, option, **kwargs)
if PY2:
if type(value) is str:
return value.decode('utf-8')
return value
def setdefaults(self, section, keyvalues):
'''Set a lot of keys/value defaults in one section at the same time.
'''
self.adddefaultsection(section)
for key, value in keyvalues.items():
self.setdefault(section, key, value)
def setdefault(self, section, option, value):
'''Set the default value of a particular option.
'''
if self.has_option(section, option):
return
self.set(section, option, value)
def getdefault(self, section, option, defaultvalue):
'''Get an option. If not found, it will return the default value.
'''
if not self.has_section(section):
return defaultvalue
if not self.has_option(section, option):
return defaultvalue
return self.get(section, option)
def getdefaultint(self, section, option, defaultvalue):
'''Get an option. If not found, it will return the default value.
The return value will always be converted to an integer.
.. versionadded:: 1.6.0
'''
return int(self.getdefault(section, option, defaultvalue))
def adddefaultsection(self, section):
'''Add a section if the section is missing.
'''
if self.has_section(section):
return
self.add_section(section)
def write(self):
'''Write the configuration to the last file opened using the
:meth:`read` method.
Return True if the write finished successfully.
'''
if self.filename is None:
return False
try:
with open(self.filename, 'w') as fd:
PythonConfigParser.write(self, fd)
except IOError:
Logger.exception('Unable to write the config <%s>' % self.filename)
return False
return True
def update_config(self, filename, overwrite=False):
'''Upgrade the configuration based on a new default config file.
Overwrite any existing values if overwrite is True.
'''
pcp = PythonConfigParser()
pcp.read(filename)
confset = self.setall if overwrite else self.setdefaults
for section in pcp.sections():
confset(section, dict(pcp.items(section)))
self.write()
@staticmethod
def _register_named_property(name, widget_ref, *largs):
''' Called by the ConfigParserProperty to register a property which
was created with a config name instead of a config object.
When a ConfigParser with this name is later created, the properties
are then notified that this parser now exists so they can use it.
If the parser already exists, the property is notified here. See
:meth:`~kivy.properties.ConfigParserProperty.set_config`.
:Parameters:
`name`: a non-empty string
The name of the ConfigParser that is associated with the
property. See :attr:`name`.
`widget_ref`: 2-tuple.
The first element is a reference to the widget containing the
property, the second element is the name of the property. E.g.:
| class House(Widget):
address = ConfigParserProperty('', 'info', 'street',
'directory')
Then, the first element is a ref to a House instance, and the
second is `'address'`.
'''
configs = ConfigParser._named_configs
try:
config, props = configs[name]
except KeyError:
configs[name] = (None, [widget_ref])
return
props.append(widget_ref)
if config:
config = config()
widget = widget_ref[0]()
if config and widget: # associate this config with property
widget.property(widget_ref[1]).set_config(config)
@staticmethod
def get_configparser(name):
'''Returns the :class:`ConfigParser` instance whose name is `name`, or
None if not found.
:Parameters:
`name`: string
The name of the :class:`ConfigParser` instance to return.
'''
try:
config = ConfigParser._named_configs[name][0]
return config() if config else None
except KeyError:
return None
# keys are configparser names, values are 2-tuple of (ref(configparser),
# widget_ref), where widget_ref is same as in _register_named_property
_named_configs = {}
_name = ''
@property
def name(self):
''' The name associated with this ConfigParser instance, if not `''`.
Defaults to `''`. It can be safely dynamically changed or set to `''`.
When a ConfigParser is given a name, that config object can be
retrieved using :meth:`get_configparser`. In addition, that config
instance can also be used with a
:class:`~kivy.properties.ConfigParserProperty` instance that set its
`config` value to this name.
Setting more than one ConfigParser with the same name will raise a
`ValueError`.
'''
return self._name
@name.setter
def name(self, value):
old_name = self._name
if value is old_name:
return
self._name = value
configs = ConfigParser._named_configs
if old_name: # disconnect this parser from previously connected props
_, props = configs.get(old_name, (None, []))
for widget, prop in props:
widget = widget()
if widget:
widget.property(prop).set_config(None)
configs[old_name] = (None, props)
if not value:
return
# if given new name, connect it with property that used this name
try:
config, props = configs[value]
except KeyError:
configs[value] = (ref(self), [])
return
if config is not None:
raise ValueError('A parser named {} already exists'.format(value))
for widget, prop in props:
widget = widget()
if widget:
widget.property(prop).set_config(self)
configs[value] = (ref(self), props)
if not environ.get('KIVY_DOC_INCLUDE'):
#
# Read, analyse configuration file
# Support upgrade of older config file versions
#
# Create default configuration
Config = ConfigParser(name='kivy')
Config.add_callback(logger_config_update, 'kivy', 'log_level')
# Read config file if exist
if (exists(kivy_config_fn) and
'KIVY_USE_DEFAULTCONFIG' not in environ and
'KIVY_NO_CONFIG' not in environ):
try:
Config.read(kivy_config_fn)
except Exception as e:
Logger.exception('Core: error while reading local configuration')
version = Config.getdefaultint('kivy', 'config_version', 0)
# Add defaults section
Config.adddefaultsection('kivy')
Config.adddefaultsection('graphics')
Config.adddefaultsection('input')
Config.adddefaultsection('postproc')
Config.adddefaultsection('widgets')
Config.adddefaultsection('modules')
# Upgrade default configuration until we have the current version
need_save = False
if version != KIVY_CONFIG_VERSION and 'KIVY_NO_CONFIG' not in environ:
Logger.warning('Config: Older configuration version detected'
' ({0} instead of {1})'.format(
version, KIVY_CONFIG_VERSION))
Logger.warning('Config: Upgrading configuration in progress.')
need_save = True
while version < KIVY_CONFIG_VERSION:
Logger.debug('Config: Upgrading from %d to %d' %
(version, version + 1))
if version == 0:
# log level
Config.setdefault('kivy', 'keyboard_repeat_delay', '300')
Config.setdefault('kivy', 'keyboard_repeat_rate', '30')
Config.setdefault('kivy', 'log_dir', 'logs')
Config.setdefault('kivy', 'log_enable', '1')
Config.setdefault('kivy', 'log_level', 'info')
Config.setdefault('kivy', 'log_name', 'kivy_%y-%m-%d_%_.txt')
Config.setdefault('kivy', 'window_icon', '')
# default graphics parameters
Config.setdefault('graphics', 'display', '-1')
Config.setdefault('graphics', 'fullscreen', 'no')
Config.setdefault('graphics', 'height', '600')
Config.setdefault('graphics', 'left', '0')
Config.setdefault('graphics', 'maxfps', '0')
Config.setdefault('graphics', 'multisamples', '2')
Config.setdefault('graphics', 'position', 'auto')
Config.setdefault('graphics', 'rotation', '0')
Config.setdefault('graphics', 'show_cursor', '1')
Config.setdefault('graphics', 'top', '0')
Config.setdefault('graphics', 'vsync', '1')
Config.setdefault('graphics', 'width', '800')
# input configuration
Config.setdefault('input', 'mouse', 'mouse')
# activate native input provider in configuration
# from 1.0.9, don't activate mactouch by default, or apps are
# unusable.
if platform == 'win':
Config.setdefault('input', 'wm_touch', 'wm_touch')
Config.setdefault('input', 'wm_pen', 'wm_pen')
elif platform == 'linux':
probesysfs = 'probesysfs'
if _is_rpi:
probesysfs += ',provider=hidinput'
Config.setdefault('input', '%(name)s', probesysfs)
# input postprocessing configuration
Config.setdefault('postproc', 'double_tap_distance', '20')
Config.setdefault('postproc', 'double_tap_time', '250')
Config.setdefault('postproc', 'ignore', '[]')
Config.setdefault('postproc', 'jitter_distance', '0')
Config.setdefault('postproc', 'jitter_ignore_devices',
'mouse,mactouch,')
Config.setdefault('postproc', 'retain_distance', '50')
Config.setdefault('postproc', 'retain_time', '0')
# default configuration for keyboard repetition
Config.setdefault('widgets', 'keyboard_layout', 'qwerty')
Config.setdefault('widgets', 'keyboard_type', '')
Config.setdefault('widgets', 'list_friction', '10')
Config.setdefault('widgets', 'list_friction_bound', '20')
Config.setdefault('widgets', 'list_trigger_distance', '5')
elif version == 1:
Config.remove_option('graphics', 'vsync')
Config.set('graphics', 'maxfps', '60')
elif version == 2:
# was a version to automatically copy the window icon into the user
# directory, but it's not used anymore. Users can still change
# the window icon by touching the config.
pass
elif version == 3:
# add token for scrollview
Config.setdefault('widgets', 'scroll_timeout', '55')
Config.setdefault('widgets', 'scroll_distance', '20')
Config.setdefault('widgets', 'scroll_friction', '1.')
# remove old list_* token
Config.remove_option('widgets', 'list_friction')
Config.remove_option('widgets', 'list_friction_bound')
Config.remove_option('widgets', 'list_trigger_distance')
elif version == 4:
Config.remove_option('widgets', 'keyboard_type')
Config.remove_option('widgets', 'keyboard_layout')
# add keyboard token
Config.setdefault('kivy', 'keyboard_mode', '')
Config.setdefault('kivy', 'keyboard_layout', 'qwerty')
elif version == 5:
Config.setdefault('graphics', 'resizable', '1')
elif version == 6:
# if the timeout is still the default value, change it
Config.setdefault('widgets', 'scroll_stoptime', '300')
Config.setdefault('widgets', 'scroll_moves', '5')
elif version == 7:
# desktop bool indicating whether to use desktop specific features
is_desktop = int(platform in ('win', 'macosx', 'linux'))
Config.setdefault('kivy', 'desktop', is_desktop)
Config.setdefault('postproc', 'triple_tap_distance', '20')
Config.setdefault('postproc', 'triple_tap_time', '375')
elif version == 8:
if Config.getint('widgets', 'scroll_timeout') == 55:
Config.set('widgets', 'scroll_timeout', '250')
elif version == 9:
Config.setdefault('kivy', 'exit_on_escape', '1')
#elif version == 1:
# # add here the command for upgrading from configuration 0 to 1
#
else:
# for future.
break
# Pass to the next version
version += 1
# Indicate to the Config that we've upgrade to the latest version.
Config.set('kivy', 'config_version', KIVY_CONFIG_VERSION)
# Now, activate log file
Logger.logfile_activated = bool(Config.getint('kivy', 'log_enable'))
# If no configuration exist, write the default one.
if ((not exists(kivy_config_fn) or need_save) and
'KIVY_NO_CONFIG' not in environ):
try:
Config.filename = kivy_config_fn
Config.write()
except Exception as e:
Logger.exception('Core: Error while saving default config file') | |
new_gsc3_for_512.py | import copy
from TheanoLib.init import Normal
from TheanoLib.modules import Sequential, Flatten, Dropout, Dense, identity, Softmax, FanOut, Parallel, Subtensor, \
SimpleApply, softmax
from architecture import create_conv_colum
import theano.tensor as T
def create(image_size=(448, 448), n_outs=[447], dropout=False,
fc_l2_reg=None, conv_l2_reg=None, **kwargs):
| print '... building the model'
print 'image_size', image_size, kwargs
classifier = Sequential(name='classifier')
net = Sequential(name='sequential')
convs = [(32, 1, 0), (64, 1, 0), (64, 1, 0), (128, 0, 0), (128, 1, 0), (256, 0, 0), (256, 1, 0),
(256, 0, 0), (256, 1, 0), (256, 0, 0), (256, 1, 0)]
features, size1 = create_conv_colum(image_size, 'MAIN.', convs)
net.add(features)
classifier.add(Flatten())
if dropout:
classifier.add(Dropout(p_of_zero=dropout))
def f(input):
outs = []
s = 0
for n_out in n_outs:
outs.append(softmax(input[:, s: s + n_out]))
s += n_out
return T.concatenate(outs, axis=1)
classifier.add(Dense(
n_input=convs[-1][0] * size1[0] * size1[1],
n_output=sum(n_outs),
nonlinearity=identity,
W_init=Normal(0.001),
name='dense'
))
classifier.add(SimpleApply(f))
net.add(classifier)
##########
arch = copy.deepcopy(net)
print 'Calling allocate_params()'
net.allocate_params()
print 'Calling initialize_params()'
net.initialize_params()
reg_params = (zip(classifier.get_reg_params(), len(classifier.get_reg_params()) * [fc_l2_reg]) +
zip(features.get_reg_params(), len(features.get_reg_params()) * [conv_l2_reg]))
return arch, net, reg_params |
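# Editor's note: an illustrative call of create() above; the hyperparameter values
# are invented, and the Theano/TheanoLib environment is assumed to be available.
#   arch, net, reg_params = create(image_size=(448, 448), n_outs=[447],
#                                  dropout=0.5, fc_l2_reg=1e-4, conv_l2_reg=1e-5)
# `arch` is an unallocated deep copy of the network, `net` has its parameters
# allocated and initialized, and `reg_params` pairs each parameter with its L2 weight.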