file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
intTestSupport.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
/*
* This file contains support functions to make integration testing easier
*/
import { DebugClient } from 'vscode-debugadapter-testsupport';
const ImplementsBreakpointLocation = Symbol();
/**
* Simple breakpoint location params (based on what the debug test client accepts)
*/
export class BreakpointLocation {
[ImplementsBreakpointLocation]: 'BreakpointLocation';
public constructor (
/** The path to the source file in which to set a breakpoint */
public readonly path: string,
/** The line number in the file to set a breakpoint on */
public readonly line: number,
/** Optional breakpoint column */
public readonly column?: number,
/** Whether or not we should assert if the bp is verified or not */
public readonly verified?: boolean) {}
public toString(): string {
return `${this.path}:${this.line}:${this.column} verified: ${this.verified}`;
}
}
| */
export async function launchTestAdapter(client: DebugClient, launchConfig: any) {
let init = client.waitForEvent('initialized');
await client.launch(launchConfig);
await init;
await client.configurationDoneRequest();
}
/**
* Easier way to set breakpoints for testing
* @param client DebugClient
* @param location Breakpoint location
*/
export function setBreakpoint(client: DebugClient, location: { path: string, line: number, column?: number, verified?: boolean }) {
return client.setBreakpointsRequest({
lines: [location.line],
breakpoints: [{ line: location.line, column: location.column }],
source: { path: location.path }
});
}
/**
* Set a conditional breakpoint in a file
* @param client DebugClient
* @param location Desired breakpoint location
* @param condition The condition on which the breakpoint should be hit
*/
export function setConditionalBreakpoint(client: DebugClient, location: { path: string, line: number, column?: number, verified?: boolean }, condition: string) {
return client.setBreakpointsRequest({
lines: [location.line],
breakpoints: [{ line: location.line, column: location.column, condition }],
source: { path: location.path }
});
} | /**
* Launch an instance of chrome and wait for the debug adapter to initialize and attach
* @param client Debug Client
* @param launchConfig The launch config to use |
page-expand.js | import Component from '/material/script/Component.js';
import '../../views/page/view-page.js';
import '../../views/section/view-section.js';
import '../../views/article/view-article.js';
import '../../views/source/view-source.js';
import '../../views/import/view-import.js';
import '/material/components/expand/material-expand.js';
const component = Component.meta(import.meta.url, 'page-expand');
/**
*
*/
class | extends Component {
/**
*
*/
constructor() {
super(component);
}
}
Component.define(component, ExpandPage);
// #region [Private]
// #endregion
| ExpandPage |
config.py | base_path = '~/.weevely/'
# History path
history_path = '~/.weevely/history'
# Session path
sessions_path = '~/.weevely/sessions/'
sessions_ext = '.session'
# Supported Channels
channels = [
# Obfuscated channel inside POST requests introduced
# in Weevely 3.6
'ObfPost',
]
# Append random GET parameters to every request to
# make sure the page is not cache by proxies.
add_random_param_nocache = False
# Add additional headers to be sent at every request e.g.
# additional_headers = [
# ( 'Authentication', 'Basic QWxhZGRpbjpvcGVuIHNlc2FtBl==' )
# ]
additional_headers = []
# Agents and obfuscators used by generator.py
agent_templates_folder_path = 'bd/agents/'
obfuscators_templates_folder_path = 'bd/obfuscators/'
#######################################
# Resolve given paths - DO NOT CHANGE #
#######################################
import os, sys
base_path = os.path.expanduser(base_path)
history_path = os.path.expanduser(history_path)
sessions_path = os.path.expanduser(sessions_path)
weevely_path = os.path.dirname(os.path.realpath(sys.argv[0]))
agent_templates_folder_path = os.path.join(
weevely_path,
agent_templates_folder_path
)
obfuscators_templates_folder_path = os.path.join(
weevely_path,
obfuscators_templates_folder_path
) | # Base path for log files and sessions |
|
diviner_test.go | // Copyright 2019 GRAIL, Inc. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
package diviner
import (
"testing"
"time"
)
func TestReplicates(t *testing.T) {
var r Replicates
r.Set(2)
if !r.Contains(2) {
t.Error("not contains 3")
}
if r.Completed(3) {
t.Error("complete 3")
} | r.Set(1)
if !r.Completed(3) {
t.Error("not complete 3")
}
r.Clear(0)
if r.Completed(3) {
t.Error("complete 3")
}
if got, want := r.Count(), 2; got != want {
t.Errorf("got %v, want %v", got, want)
}
seen := make(map[int]bool)
for num, r := r.Next(); num != -1; num, r = r.Next() {
if seen[num] {
t.Errorf("seen twice: %d", num)
}
seen[num] = true
}
if got, want := len(seen), 2; got != want {
t.Fatalf("got %v, want %v", got, want)
}
if !seen[1] || !seen[2] {
t.Errorf("not seen 1, 2: %v", seen)
}
}
func TestReplicatedTrial(t *testing.T) {
rep := replicatedTrial(
Run{Replicate: 0, State: Success, Metrics: []Metrics{{"x": 1.0}}},
Run{Replicate: 0, State: Pending, Metrics: []Metrics{{"x": 0.5}}},
Run{Replicate: 1, State: Success, Metrics: []Metrics{{"x": 2.0}}},
)
if got, want := rep.Metrics, (Metrics{"x": 1.5}); !got.Equal(want) {
t.Errorf("got %v, want %v", got, want)
}
if got, want := rep.Replicates.Count(), 2; got != want {
t.Errorf("got %v, want %v", got, want)
}
if rep.Pending {
t.Error("rep was pending")
}
var (
now = time.Now()
then = now.Add(-time.Minute)
)
rep = replicatedTrial(
Run{Replicate: 0, State: Pending, Updated: then, Metrics: []Metrics{{"x": 1.0}}},
Run{Replicate: 0, State: Pending, Updated: now, Metrics: []Metrics{{"x": 0.5}}},
Run{Replicate: 1, State: Success, Metrics: []Metrics{{"x": 2.0}}},
)
if got, want := rep.Metrics, (Metrics{"x": 1.25}); !got.Equal(want) {
t.Errorf("got %v, want %v", got, want)
}
if got, want := rep.Replicates.Count(), 2; got != want {
t.Errorf("got %v, want %v", got, want)
}
if !rep.Pending {
t.Error("rep was not pending")
}
}
func replicatedTrial(runs ...Run) Trial {
trials := make([]Trial, len(runs))
for i, run := range runs {
trials[i] = run.Trial()
}
return ReplicatedTrial(trials)
} | r.Set(0) |
utils.rs | pub fn color_string_splitter(color: &String) -> Vec<&str> {
if color.contains(",") {
color.trim().split(",").collect::<Vec<&str>>()
} else {
color.trim().split_ascii_whitespace().collect::<Vec<&str>>() | }
} |
|
actions.d.tsx | import {Artist, Album} from 'definitions';
import {DSEntity, DSArtist, DSAlbum, Release} from 'compactd-models';
interface StoreActionBase {
type: string;
};
export interface ToggleDownloadsAction {
type: 'cassette/store/TOGGLE_DOWNLOADS';
} |
export interface ToggleSearchAction {
type: 'cassette/store/TOGGLE_SEARCH';
}
export interface SetSearchResultsAction {
type: 'cassette/store/SET_SEARCH_RESULTS';
query: string;
results?: DSEntity[];
}
export interface SelectDSArtistAction {
type: 'cassette/store/SELECT_DS_ARTIST',
artist: string;
}
export interface ResolveDSArtistAction {
type: 'cassette/store/RESOLVE_DS_ARTIST';
artist: DSArtist;
id: string;
}
export interface SelectDSAlbumAction {
type: 'cassette/store/SELECT_DS_ALBUM';
album: string;
}
export interface ResolveDSAlbumAction {
type: 'cassette/store/RESOLVE_DS_ALBUM';
album: DSAlbum;
id: string;
}
export interface SetStoreScopeAction {
type: 'cassette/store/SET_STORE_SCOPE';
scope: 'artist' | 'album' | 'search';
}
export interface SelectResultsAction {
type: 'cassette/store/SELECT_RESULTS';
album: string;
}
export interface ResolveResultsAction {
type: 'cassette/store/RESOLVE_RESULTS';
results: Release [];
id: string;
}
export interface DownloadResult {
type: 'cassette/store/DOWNLOAD_RESULT';
result: {
id: string;
hash: string;
name: string;
}
}
export interface UpdateProgress {
type: 'cassette/store/UPDATE_DL_PROGRESS';
hash: string;
progress: number;
}
export type StoreAction =
ToggleDownloadsAction
| ToggleSearchAction
| SetSearchResultsAction
| ResolveDSArtistAction
| SelectDSArtistAction
| ResolveDSAlbumAction
| SelectDSAlbumAction
| SelectResultsAction
| ResolveResultsAction
| DownloadResult
| UpdateProgress;
export type StoreActions = {
toggleDownloads: () => void;
toggleSearch: () => void;
searchDatasource: (q: string) => void;
selectDSArtist: (artist: string) => void;
selectDSAlbum: (album: string) => void;
goBackToSearch: () => void;
loadResults: (artist: string, album: string) => void;
downloadResult: (release: Release, album: DSAlbum) => void;
initResults: () => void;
} | |
rleplus_test.go | package rleplus_test
import (
"fmt"
"math"
"sort"
"testing"
"github.com/filecoin-project/go-lotus/extern/rleplus"
bitvector "github.com/filecoin-project/go-lotus/extern/rleplus/internal"
"gotest.tools/assert"
)
func TestRleplus(t *testing.T) | {
t.Run("Encode", func(t *testing.T) {
// Encode an intset
ints := []uint64{
// run of 1
0,
// gap of 1
// run of 1
2,
// gap of 1
// run of 3
4, 5, 6,
// gap of 4
// run of 17
11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
}
expectedBits := []byte{
0, 0, // version
1, // first bit
1, // run of 1
1, // gap of 1
1, // run of 1
1, // gap of 1
0, 1, 1, 1, 0, 0, // run of 3
0, 1, 0, 0, 1, 0, // gap of 4
// run of 17 < 0 0 (varint) >
0, 0,
1, 0, 0, 0, 1, 0, 0, 0,
}
v := bitvector.BitVector{}
for _, bit := range expectedBits {
v.Push(bit)
}
actualBytes, _, err := rleplus.Encode(ints)
assert.NilError(t, err)
assert.Equal(t, len(v.Buf), len(actualBytes))
for idx, expected := range v.Buf {
assert.Equal(
t,
fmt.Sprintf("%08b", expected),
fmt.Sprintf("%08b", actualBytes[idx]),
)
}
})
t.Run("Encode allows all runs sizes possible uint64", func(t *testing.T) {
// create a run of math.MaxUint64
ints := []uint64{math.MaxUint64}
_, _, err := rleplus.Encode(ints)
assert.NilError(t, err)
})
t.Run("Decode", func(t *testing.T) {
testCases := [][]uint64{
{},
{1},
{0},
{0, 1, 2, 3},
{
// run of 1
0,
// gap of 1
// run of 1
2,
// gap of 1
// run of 3
4, 5, 6,
// gap of 4
// run of 17
11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
},
}
for _, tc := range testCases {
encoded, _, err := rleplus.Encode(tc)
assert.NilError(t, err)
result, err := rleplus.Decode(encoded)
assert.NilError(t, err)
sort.Slice(tc, func(i, j int) bool { return tc[i] < tc[j] })
sort.Slice(result, func(i, j int) bool { return result[i] < result[j] })
assert.Equal(t, len(tc), len(result))
for idx, expected := range tc {
assert.Equal(t, expected, result[idx])
}
}
})
t.Run("Decode version check", func(t *testing.T) {
_, err := rleplus.Decode([]byte{0xff})
assert.Error(t, err, "invalid RLE+ version")
})
t.Run("Decode returns an error with a bad encoding", func(t *testing.T) {
// create an encoding with a buffer with a run which is too long
_, err := rleplus.Decode([]byte{0xe0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff})
assert.Error(t, err, "invalid encoding for RLE+ version 0")
})
t.Run("outputs same as reference implementation", func(t *testing.T) {
// Encoding bitvec![LittleEndian; 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
// in the Rust reference implementation gives an encoding of [223, 145, 136, 0] (without version field)
// The bit vector is equivalent to the integer set { 0, 2, 4, 5, 6, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27 }
// This is the above reference output with a version header "00" manually added
referenceEncoding := []byte{124, 71, 34, 2}
expectedNumbers := []uint64{0, 2, 4, 5, 6, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27}
encoded, _, err := rleplus.Encode(expectedNumbers)
assert.NilError(t, err)
// Our encoded bytes are the same as the ref bytes
assert.Equal(t, len(referenceEncoding), len(encoded))
for idx, expected := range referenceEncoding {
assert.Equal(t, expected, encoded[idx])
}
decoded, err := rleplus.Decode(referenceEncoding)
assert.NilError(t, err)
// Our decoded integers are the same as expected
sort.Slice(decoded, func(i, j int) bool { return decoded[i] < decoded[j] })
assert.Equal(t, len(expectedNumbers), len(decoded))
for idx, expected := range expectedNumbers {
assert.Equal(t, expected, decoded[idx])
}
})
t.Run("RunLengths", func(t *testing.T) {
testCases := []struct {
ints []uint64
first byte
runs []uint64
}{
// empty
{},
// leading with ones
{[]uint64{0}, 1, []uint64{1}},
{[]uint64{0, 1}, 1, []uint64{2}},
{[]uint64{0, 0xffffffff, 0xffffffff + 1}, 1, []uint64{1, 0xffffffff - 1, 2}},
// leading with zeroes
{[]uint64{1}, 0, []uint64{1, 1}},
{[]uint64{2}, 0, []uint64{2, 1}},
{[]uint64{10, 11, 13, 20}, 0, []uint64{10, 2, 1, 1, 6, 1}},
{[]uint64{10, 11, 11, 13, 20, 10, 11, 13, 20}, 0, []uint64{10, 2, 1, 1, 6, 1}},
}
for _, testCase := range testCases {
first, runs := rleplus.RunLengths(testCase.ints)
assert.Equal(t, testCase.first, first)
assert.Equal(t, len(testCase.runs), len(runs))
for idx, runLength := range testCase.runs {
assert.Equal(t, runLength, runs[idx])
}
}
})
} |
|
get_user_team_associations_for_user.py | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all user team associations for a single user.
To determine which users exist, run get_all_users.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
""" |
# Import appropriate modules from the client library.
from googleads import dfp
USER_ID = 'INSERT_USER_ID_HERE'
def main(client, user_id):
# Initialize appropriate service.
user_team_association_service = client.GetService(
'UserTeamAssociationService', version='v201505')
# Create query.
values = [{
'key': 'userId',
'value': {
'xsi_type': 'NumberValue',
'value': user_id
}
}]
query = 'WHERE userId = :userId'
# Create a filter statement.
statement = dfp.FilterStatement(query, values)
# Get user team associations by statement.
while True:
response = user_team_association_service.getUserTeamAssociationsByStatement(
statement.ToStatement())
if 'results' in response:
# Display results.
for user_team_association in response['results']:
print ('User team association between user with ID \'%s\' and team with'
' ID \'%s\' was found.' % (user_team_association['userId'],
user_team_association['teamId']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, USER_ID) | |
start.js | var dataset = [];
document.addEventListener('DOMContentLoaded', function () {
d3.dsv(",", "text/plain")("https://rawgit.com/danilomalzao/teste/master/leilaoxmercado/dump.csv?v5", function (leilao) {
dataset = leilao.map(function (val) {val.source = 1; return val});
d3.dsv(",", "text/plain")("https://rawgit.com/danilomalzao/teste/master/leilaoxmercado/dump_zap.csv?v9", function (mercado) {
dataset = dataset.concat(mercado.map(function (val) {val.source = 2; return val}));
function redraw(start) {
var activeFolder = start / qntItem;
d3.select("#buttons").selectAll('a').classed('active_link', function (d, i) {
return d3.select("table").select('tbody').selectAll("tr").size() > 0
&& i == (activeFolder + 1);
});
d3.select("table").select('tbody').selectAll("tr")
.style("display", function (d, i) {
return (i >= start && i < start + qntItem) ? null : "none";
})
};
var qntItem = 20;
var head = {
// coord_x,coord_y,size,address,price,type,regiao_id,bairro,farmac_dist,mercado_dist,restaurante_dist
"Tipo": "type",
"Preço (R$)": "price",
"Area m²": "size",
Bairro: "bairro",
"Farmácia próxima": "farmac_dist",
"Mercado próximo": "mercado_dist",
"Restaurante próximo": "restaurante_dist",
Região: "regiao_id",
Endereço: "address"
};
//1: left, 2: center, 3: right, 4: justify
var headAlign = {
Endereço: 2,
Tamanho: 1,
"Tipo de Imóvel": 1,
Preço: 1,
Bairro: 2,
Região: 1,
"Farmácia": 1,
"Mercado": 1,
"Restaurante": 1
};
var sizeMD = {
//Ano: 1,
// Dia: 2,
// Turno: 1,
// Bairro: 2,
// Lugar: 3,
// Marca: 3
};
var formatField = {
"farmac_dist": function (dist) {
return (dist * 1000) + "m";
},
"mercado_dist": function (dist) {
return (dist * 1000) + "m";
},
"restaurante_dist": function (dist) {
return (dist * 1000) + "m";
},
"price": function (price) {
return price + "k";
},
"size": function (size) {
return parseInt(size) ? (size + "m²") : "Não informado";
},
"bairro": function (bairro) {
return bairro || "Não informado";
},
"regiao_id": function (regId, data) {
var regions = {
1: "BARREIRO",
2: "CENTRO-SUL",
3: "LESTE",
4: "NORDESTE",
5: "NOROESTE",
6: "NORTE",
7: "OESTE",
8: "PAMPULHA",
9: "VENDA NOVA",
"-1": "REGIAO_METROPOLITANA"
};
return regions[regId] || "REGIAO_METROPOLITANA";
},
"type": function (type, data) {
return ((type == 3 || type == 4) ? "Apartamento" : "Casa") + " - "
+ (data.source == 1 ? "Leilão" : "Mercado");
},
DATAOCORRENCIA: function (date) {
var regex = /([0-9]{1,2})[\\\/-]([0-9]{1,2})[\\\/-]([0-9]{4})/g;
var m;
if ((m = regex.exec(date)) !== null) {
// This is necessary to avoid infinite loops with zero-width matches
if (m.index === regex.lastIndex) {
regex.lastIndex++;
}
date = m[3] + "/" + m[2] + "/" + m[1];
}
return date;
}
};
var ordHead = {
hName: Object.keys(head)[0],
asc: true
};
var portion = 0;
var search = "";
for (var iData = 0, lenData = dataset.length, fieldKeys = Object.keys(formatField); iData < lenData; iData++) {
var curData = dataset[iData];
for (var iField = 0, lenField = fieldKeys.length; iField < lenField; iField++)
dataset[iData][fieldKeys[iField]] = formatField[fieldKeys[iField]](dataset[iData][fieldKeys[iField]], dataset[iData]);
}
var sortByColumn = (function (dataset, head) {
return function sortByColumn(name, hName) {
if (ordHead.hName == hName)
ordHead.asc = !ordHead.asc;
else {
ordHead.hName = hName;
ordHead.asc = true;
}
dataset.sort(function (a, b) {
var aName = (a[name] && a[name].trim()) || "";
var bName = (b[name] && b[name].trim()) || "";
var val;
if ($.isNumeric(aName) && $.isNumeric(bName))
val = aName - bName;
else if (aName == bName)
val = 0;
else
val = aName > bName ? 1 : -1;
return (ordHead.asc) ? val : -val;
});
bindToData(dataset, head);
portion = 0;
redraw(0);
}
})(dataset, head);
sortByColumn(head[ordHead.hName], ordHead.hName);
d3.select("input[name=search]").on("keyup", function () {
if (this.value === undefined || this.value === false)
search = "";
else
search = this.value;
bindToData(dataset, head);
portion = 0;
redraw(0);
});
d3.select("select").on("change", function (dataset, head) {
return function () {
qntItem = parseInt(this.options[this.selectedIndex].value);
portion = 0;
bindToData(dataset, head);
redraw(0);
};
}(dataset, head));
d3.select("table").append('thead');
d3.select("table").select('thead').append('tr');
d3.select("table").append('tbody');
function bindToData(dataset, head) {
var _dataset = [];
for (var iData = 0, lenData = dataset.length; iData < lenData; iData++) {
var contains = false;
var fullContent = "";
for (var iHead = 0, hKeys = Object.keys(head), lenHead = hKeys.length; iHead < lenHead; iHead++) {
fullContent += dataset[iData][head[hKeys[iHead]]] + "��";
}
if (search === "" || fullContent.toLowerCase().indexOf(search.toLowerCase()) != -1)
_dataset.push(dataset[iData]);
}
d3.select("table").select('thead').select('tr')
.selectAll('th').data([]).exit().remove();
d3.select("table").select('thead').select('tr')
.selectAll('th').data(Object.keys(head))
.enter().append('th').on("click", function (d, i) {
return sortByColumn(head[d], d);
})
.text(function (d) {
return d
})
.each(function (d, i) {
var curClass = this.className + " col-md-" + (Object.values(sizeMD)[i]) + " ";
if (ordHead.hName == d && ordHead.asc)
curClass += "fa fa-sort-amount-asc";
else if (ordHead.hName == d && !ordHead.asc)
curClass += "fa fa-sort-amount-desc";
else
curClass += "fa fa-sort";
this.className = curClass;
});
d3.select("table").select('tbody')
.selectAll("tr").data([]).exit().remove();
var tr = d3.select("table").select('tbody')
.selectAll("tr").data(_dataset);
tr.enter().append("tr");
tr.exit().remove();
var td = d3.select("table").select('tbody')
.selectAll("tr").selectAll("td").data(function (d, i) {
function headToArr(d) {
var arr = [];
for (var hArr = Object.keys(head), i = 0;
i < hArr.length; i++) {
arr.push(d[head[hArr[i]]]);
}
return arr;
};
return headToArr(d);
});
td.enter().append("td")
.each(function (d, i) {
var curClass = this.className + " col-md-" + (Object.values(sizeMD)[i]) + " ";
switch (Object.values(headAlign)[i]) {
case 1:
curClass += "t_left";
case 2:
curClass += "t_center";
case 3:
curClass += "t_right";
case 4:
curClass += "t_justify";
}
this.className = curClass;
})
.text(function (d) {
return d;
});
td.exit().remove();
createIndex(_dataset.length);
};
bindToData(dataset, head);
function createIndex(dataQnt) {
d3.select("#buttons").selectAll("a").data([]).exit().remove();
// the chain select here pushes the datum onto the up and down buttons also
d3.select("#buttons").append("a").on("click", function () {
portion -= qntItem;
if (portion < 0)
portion = 0;
redraw(portion);
})
.attr("href", "#")
.html(" << ");
for (var i = 0, iLen = Math.floor(dataQnt / qntItem); dataQnt > 0 && i <= iLen; i++) {
d3.select("#buttons").append("a").on("click", (function (i) {
return function () {
portion = i * qntItem;
redraw(portion);
};
})(i))
.attr("href", "#")
.html(" " + (i + 1) + " ");
}
d3.select("#buttons").append("a").on("click", function (d) {
portion += qntItem;
if (dataQnt - portion < 0)
portion -= qntItem;
redraw(portion);
})
.attr("href", "#")
.html(" >> ");
}
redraw(0);
})
});
}, false);
function applyEffects() {
| on("scroll", function onScroll(event){
var scrollPos = $(document).scrollTop();
$('nav.navbar a').each(function () {
var currLink = $(this);
var refElement = $(currLink.attr("href"));
if (refElement.position().top <= scrollPos && refElement.position().top + refElement.height() > scrollPos) {
$('nav.navbar a').removeClass("refactive");
currLink.addClass("refactive");
} else{
currLink.removeClass("refactive");
}
});
});
}
applyEffects(); | $(document). |
visit_ast.rs | //! The Rust AST Visitor. Extracts useful information and massages it into a form
//! usable for `clean`.
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_hir::Node;
use rustc_middle::middle::privacy::AccessLevel;
use rustc_middle::ty::TyCtxt;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::{self, Span};
use std::mem;
use crate::clean::{self, AttributesExt, NestedAttributesExt};
use crate::core;
use crate::doctree::*;
// FIXME: Should this be replaced with tcx.def_path_str?
fn def_id_to_path(tcx: TyCtxt<'_>, did: DefId) -> Vec<String> {
let crate_name = tcx.crate_name(did.krate).to_string();
let relative = tcx.def_path(did).data.into_iter().filter_map(|elem| {
// extern blocks have an empty name
let s = elem.data.to_string();
if !s.is_empty() { Some(s) } else { None }
});
std::iter::once(crate_name).chain(relative).collect()
}
// Also, is there some reason that this doesn't use the 'visit'
// framework from syntax?.
crate struct RustdocVisitor<'a, 'tcx> {
cx: &'a mut core::DocContext<'tcx>,
view_item_stack: FxHashSet<hir::HirId>,
inlining: bool,
/// Are the current module and all of its parents public?
inside_public_path: bool,
exact_paths: FxHashMap<DefId, Vec<String>>,
}
impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
crate fn new(cx: &'a mut core::DocContext<'tcx>) -> RustdocVisitor<'a, 'tcx> {
// If the root is re-exported, terminate all recursion.
let mut stack = FxHashSet::default();
stack.insert(hir::CRATE_HIR_ID);
RustdocVisitor {
cx,
view_item_stack: stack,
inlining: false,
inside_public_path: true,
exact_paths: FxHashMap::default(),
}
}
fn store_path(&mut self, did: DefId) {
let tcx = self.cx.tcx;
self.exact_paths.entry(did).or_insert_with(|| def_id_to_path(tcx, did));
}
crate fn visit(mut self, krate: &'tcx hir::Crate<'_>) -> Module<'tcx> {
let mut top_level_module = self.visit_mod_contents(
krate.item.span,
&Spanned { span: rustc_span::DUMMY_SP, node: hir::VisibilityKind::Public },
hir::CRATE_HIR_ID,
&krate.item.module,
None,
);
top_level_module.is_crate = true;
// Attach the crate's exported macros to the top-level module.
// In the case of macros 2.0 (`pub macro`), and for built-in `derive`s or attributes as
// well (_e.g._, `Copy`), these are wrongly bundled in there too, so we need to fix that by
// moving them back to their correct locations.
'exported_macros: for def in krate.exported_macros {
// The `def` of a macro in `exported_macros` should correspond to either:
// - a `#[macro_export] macro_rules!` macro,
// - a built-in `derive` (or attribute) macro such as the ones in `::core`,
// - a `pub macro`.
// Only the last two need to be fixed, thus:
if def.ast.macro_rules {
top_level_module.macros.push((def, None));
continue 'exported_macros;
}
let tcx = self.cx.tcx;
// Note: this is not the same as `.parent_module()`. Indeed, the latter looks
// for the closest module _ancestor_, which is not necessarily a direct parent
// (since a direct parent isn't necessarily a module, c.f. #77828).
let macro_parent_def_id = {
use rustc_middle::ty::DefIdTree;
tcx.parent(tcx.hir().local_def_id(def.hir_id).to_def_id()).unwrap()
};
let macro_parent_path = tcx.def_path(macro_parent_def_id);
// HACK: rustdoc has no way to lookup `doctree::Module`s by their HirId. Instead,
// lookup the module by its name, by looking at each path segment one at a time.
let mut cur_mod = &mut top_level_module;
for path_segment in macro_parent_path.data {
// Path segments may refer to a module (in which case they belong to the type
// namespace), which is _necessary_ for the macro to be accessible outside it
// (no "associated macros" as of yet). Else we bail with an outer `continue`.
let path_segment_ty_ns = match path_segment.data {
rustc_hir::definitions::DefPathData::TypeNs(symbol) => symbol,
_ => continue 'exported_macros,
};
// Descend into the child module that matches this path segment (if any).
match cur_mod.mods.iter_mut().find(|child| child.name == Some(path_segment_ty_ns)) {
Some(child_mod) => cur_mod = &mut *child_mod,
None => continue 'exported_macros,
}
}
let cur_mod_def_id = tcx.hir().local_def_id(cur_mod.id).to_def_id();
assert_eq!(cur_mod_def_id, macro_parent_def_id);
cur_mod.macros.push((def, None));
}
self.cx.renderinfo.get_mut().exact_paths = self.exact_paths;
top_level_module
}
fn visit_mod_contents(
&mut self,
span: Span,
vis: &'tcx hir::Visibility<'_>,
id: hir::HirId,
m: &'tcx hir::Mod<'tcx>,
name: Option<Symbol>,
) -> Module<'tcx> |
/// Tries to resolve the target of a `crate use` statement and inlines the
/// target if it is defined locally and would not be documented otherwise,
/// or when it is specifically requested with `please_inline`.
/// (the latter is the case when the import is marked `doc(inline)`)
///
/// Cross-crate inlining occurs later on during crate cleaning
/// and follows different rules.
///
/// Returns `true` if the target has been inlined.
fn maybe_inline_local(
&mut self,
id: hir::HirId,
res: Res,
renamed: Option<Symbol>,
glob: bool,
om: &mut Module<'tcx>,
please_inline: bool,
) -> bool {
fn inherits_doc_hidden(cx: &core::DocContext<'_>, mut node: hir::HirId) -> bool {
while let Some(id) = cx.tcx.hir().get_enclosing_scope(node) {
node = id;
if cx.tcx.hir().attrs(node).lists(sym::doc).has_word(sym::hidden) {
return true;
}
if node == hir::CRATE_HIR_ID {
break;
}
}
false
}
debug!("maybe_inline_local res: {:?}", res);
let tcx = self.cx.tcx;
let res_did = if let Some(did) = res.opt_def_id() {
did
} else {
return false;
};
let use_attrs = tcx.hir().attrs(id);
// Don't inline `doc(hidden)` imports so they can be stripped at a later stage.
let is_no_inline = use_attrs.lists(sym::doc).has_word(sym::no_inline)
|| use_attrs.lists(sym::doc).has_word(sym::hidden);
// For cross-crate impl inlining we need to know whether items are
// reachable in documentation -- a previously nonreachable item can be
// made reachable by cross-crate inlining which we're checking here.
// (this is done here because we need to know this upfront).
if !res_did.is_local() && !is_no_inline {
let attrs = clean::inline::load_attrs(self.cx, res_did);
let self_is_hidden = attrs.lists(sym::doc).has_word(sym::hidden);
if !self_is_hidden {
if let Res::Def(kind, did) = res {
if kind == DefKind::Mod {
crate::visit_lib::LibEmbargoVisitor::new(self.cx).visit_mod(did)
} else {
// All items need to be handled here in case someone wishes to link
// to them with intra-doc links
self.cx
.renderinfo
.get_mut()
.access_levels
.map
.insert(did, AccessLevel::Public);
}
}
}
return false;
}
let res_hir_id = match res_did.as_local() {
Some(n) => tcx.hir().local_def_id_to_hir_id(n),
None => return false,
};
let is_private = !self.cx.renderinfo.borrow().access_levels.is_public(res_did);
let is_hidden = inherits_doc_hidden(self.cx, res_hir_id);
// Only inline if requested or if the item would otherwise be stripped.
if (!please_inline && !is_private && !is_hidden) || is_no_inline {
return false;
}
if !self.view_item_stack.insert(res_hir_id) {
return false;
}
let ret = match tcx.hir().get(res_hir_id) {
Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => {
let prev = mem::replace(&mut self.inlining, true);
for i in m.item_ids {
let i = self.cx.tcx.hir().expect_item(i.id);
self.visit_item(i, None, om);
}
self.inlining = prev;
true
}
Node::Item(it) if !glob => {
let prev = mem::replace(&mut self.inlining, true);
self.visit_item(it, renamed, om);
self.inlining = prev;
true
}
Node::ForeignItem(it) if !glob => {
let prev = mem::replace(&mut self.inlining, true);
self.visit_foreign_item(it, renamed, om);
self.inlining = prev;
true
}
Node::MacroDef(def) if !glob => {
om.macros.push((def, renamed));
true
}
_ => false,
};
self.view_item_stack.remove(&res_hir_id);
ret
}
fn visit_item(
&mut self,
item: &'tcx hir::Item<'_>,
renamed: Option<Symbol>,
om: &mut Module<'tcx>,
) {
debug!("visiting item {:?}", item);
let name = renamed.unwrap_or(item.ident.name);
if item.vis.node.is_pub() {
let def_id = self.cx.tcx.hir().local_def_id(item.hir_id);
self.store_path(def_id.to_def_id());
}
match item.kind {
hir::ItemKind::ForeignMod { items, .. } => {
for item in items {
let item = self.cx.tcx.hir().foreign_item(item.id);
self.visit_foreign_item(item, None, om);
}
}
// If we're inlining, skip private items.
_ if self.inlining && !item.vis.node.is_pub() => {}
hir::ItemKind::GlobalAsm(..) => {}
hir::ItemKind::Use(_, hir::UseKind::ListStem) => {}
hir::ItemKind::Use(ref path, kind) => {
let is_glob = kind == hir::UseKind::Glob;
// Struct and variant constructors and proc macro stubs always show up alongside
// their definitions, we've already processed them so just discard these.
if let Res::Def(DefKind::Ctor(..), _) | Res::SelfCtor(..) = path.res {
return;
}
// If there was a private module in the current path then don't bother inlining
// anything as it will probably be stripped anyway.
if item.vis.node.is_pub() && self.inside_public_path {
let please_inline = item.attrs.iter().any(|item| match item.meta_item_list() {
Some(ref list) if item.has_name(sym::doc) => {
list.iter().any(|i| i.has_name(sym::inline))
}
_ => false,
});
let ident = if is_glob { None } else { Some(name) };
if self.maybe_inline_local(
item.hir_id,
path.res,
ident,
is_glob,
om,
please_inline,
) {
return;
}
}
om.imports.push(Import {
name,
id: item.hir_id,
vis: &item.vis,
attrs: &item.attrs,
path,
glob: is_glob,
span: item.span,
});
}
hir::ItemKind::Mod(ref m) => {
om.mods.push(self.visit_mod_contents(
item.span,
&item.vis,
item.hir_id,
m,
Some(name),
));
}
hir::ItemKind::Fn(..)
| hir::ItemKind::ExternCrate(..)
| hir::ItemKind::Enum(..)
| hir::ItemKind::Struct(..)
| hir::ItemKind::Union(..)
| hir::ItemKind::TyAlias(..)
| hir::ItemKind::OpaqueTy(..)
| hir::ItemKind::Static(..)
| hir::ItemKind::Trait(..)
| hir::ItemKind::TraitAlias(..) => om.items.push((item, renamed)),
hir::ItemKind::Const(..) => {
// Underscore constants do not correspond to a nameable item and
// so are never useful in documentation.
if name != kw::Underscore {
om.items.push((item, renamed));
}
}
hir::ItemKind::Impl(ref impl_) => {
// Don't duplicate impls when inlining or if it's implementing a trait, we'll pick
// them up regardless of where they're located.
if !self.inlining && impl_.of_trait.is_none() {
om.items.push((item, None));
}
}
}
}
fn visit_foreign_item(
&mut self,
item: &'tcx hir::ForeignItem<'_>,
renamed: Option<Symbol>,
om: &mut Module<'tcx>,
) {
// If inlining we only want to include public functions.
if !self.inlining || item.vis.node.is_pub() {
om.foreigns.push((item, renamed));
}
}
}
| {
let mut om = Module::new(name);
om.where_outer = span;
om.where_inner = m.inner;
om.id = id;
// Keep track of if there were any private modules in the path.
let orig_inside_public_path = self.inside_public_path;
self.inside_public_path &= vis.node.is_pub();
for i in m.item_ids {
let item = self.cx.tcx.hir().expect_item(i.id);
self.visit_item(item, None, &mut om);
}
self.inside_public_path = orig_inside_public_path;
om
} |
project_template.py | #!/usr/bin/env python
# Import modules
import matplotlib.colors
import matplotlib.pyplot as plt
import numpy as np
import sklearn
from sklearn.preprocessing import LabelEncoder
import pickle
from sensor_stick.srv import GetNormals
from sensor_stick.features import compute_color_histograms
from sensor_stick.features import compute_normal_histograms
from visualization_msgs.msg import Marker
from sensor_stick.marker_tools import *
from sensor_stick.msg import DetectedObjectsArray
from sensor_stick.msg import DetectedObject
from sensor_stick.pcl_helper import *
import rospy
import tf
from geometry_msgs.msg import Pose
from std_msgs.msg import Float64
from std_msgs.msg import Int32
from std_msgs.msg import String
from pr2_robot.srv import *
from rospy_message_converter import message_converter
import yaml
# Helper function to get surface normals
def get_normals(cloud):
get_normals_prox = rospy.ServiceProxy('/feature_extractor/get_normals', GetNormals)
return get_normals_prox(cloud).cluster
#Helper function to convert RGB to HSV
def rgb_to_hsv(rgb_list):
rgb_normalized = [1.0*rgb_list[0]/255, 1.0*rgb_list[1]/255, 1.0*rgb_list[2]/255]
hsv_normalized = matplotlib.colors.rgb_to_hsv([[rgb_normalized]])[0][0]
return hsv_normalized
bins_range=(0, 256)
nbins = 32
#Helper function to compute color histograms
def compute_color_histograms(cloud, using_hsv=False):
# Compute histograms for the clusters
point_colors_list = []
# Step through each point in the point cloud
for point in pc2.read_points(cloud, skip_nans=True):
rgb_list = float_to_rgb(point[3])
if using_hsv:
point_colors_list.append(rgb_to_hsv(rgb_list) * 255)
else:
point_colors_list.append(rgb_list)
# Populate lists with color values
channel_1_vals = []
channel_2_vals = []
channel_3_vals = []
for color in point_colors_list:
channel_1_vals.append(color[0])
channel_2_vals.append(color[1])
channel_3_vals.append(color[2])
# Compute histograms
# Compute the histogram of the HSV channels separately
h_hist = np.histogram(channel_1_vals, bins=nbins, range=bins_range)
s_hist = np.histogram(channel_2_vals, bins=nbins, range=bins_range)
v_hist = np.histogram(channel_3_vals, bins=nbins, range=bins_range)
# Concatenate the histograms into a single feature vector
hist_features = np.concatenate((h_hist[0], s_hist[0], v_hist[0])).astype(np.float64)
# Normalize the result
normed_features = hist_features / np.sum(hist_features)
return normed_features
#Helper function to compute normal histograms
def compute_normal_histograms(normal_cloud):
norm_x_vals = []
norm_y_vals = []
norm_z_vals = []
for norm_component in pc2.read_points(normal_cloud,
field_names = ('normal_x', 'normal_y', 'normal_z'),
skip_nans=True):
norm_x_vals.append(norm_component[0])
norm_y_vals.append(norm_component[1])
norm_z_vals.append(norm_component[2])
# TODO: Compute histograms of normal values (just like with color)
x_hist = np.histogram(norm_x_vals, bins=nbins, range =bins_range)
y_hist = np.histogram(norm_y_vals, bins=nbins, range =bins_range)
z_hist = np.histogram(norm_z_vals, bins=nbins, range =bins_range)
# TODO: Concatenate and normalize the histograms
hist_features = np.concatenate((x_hist[0], y_hist[0], z_hist[0])).astype(np.float64)
normed_features = hist_features/ np.sum(hist_features)
return normed_features
# Helper function to create a yaml friendly dictionary from ROS messages
def make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose):
yaml_dict = {}
yaml_dict["test_scene_num"] = test_scene_num.data
yaml_dict["arm_name"] = arm_name.data
yaml_dict["object_name"] = object_name.data
yaml_dict["pick_pose"] = message_converter.convert_ros_message_to_dictionary(pick_pose)
yaml_dict["place_pose"] = message_converter.convert_ros_message_to_dictionary(place_pose)
print type(yaml_dict["arm_name"]), type(yaml_dict["pick_pose"])
return yaml_dict
# Helper function to output to yaml file
def send_to_yaml(yaml_filename, dict_list):
data_dict = {"object_list": dict_list}
with open(yaml_filename, 'w+') as outfile:
yaml.dump(data_dict, outfile, default_flow_style=False)
print "done yaml"
# Callback function for your Point Cloud Subscriber
def pcl_callback(pcl_msg):
# Convert ROS msg to PCL data
pcl_data=ros_to_pcl(pcl_msg)
# Voxel Grid filter
# Create a VoxelGrid filter object for our input point cloud
vox = pcl_data.make_voxel_grid_filter()
# Choose a voxel (also known as leaf) size
# Note: this (1) is a poor choice of leaf size
# Experiment and find the appropriate size!
LEAF_SIZE = 0.008
# Set the voxel (or leaf) size
vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
# Call the filter function to obtain the resultant downsampled point cloud
cloud_filtered = vox.filter()
# Much like the previous filters, we start by creating a filter object:
cloud_filter = cloud_filtered.make_statistical_outlier_filter()
# Set the number of neighboring points to analyze for any given point
cloud_filter.set_mean_k(50)
# Set threshold scale factor
x = 1.0
# Any point with a mean distance larger than global (mean distance+x*std_dev) will be considered outlier
cloud_filter.set_std_dev_mul_thresh(x)
# Finally call the filter function for magic
cloud_filtered = cloud_filter.filter()
# PassThrough filter
# Create a PassThrough filter object.
passthrough1 = cloud_filtered.make_passthrough_filter()
# Assign axis and range to the passthrough filter object.
filter_axis1 = 'z'
passthrough1.set_filter_field_name(filter_axis1)
axis_min1 = 0.6
axis_max1 = 1.1
passthrough1.set_filter_limits(axis_min1, axis_max1)
# Finally use the filter function to obtain the resultant point cloud.
cloud_p1_filtered = passthrough1.filter()
# Create a PassThrough filter object.
passthrough2 = cloud_p1_filtered.make_passthrough_filter()
# Assign axis and range to the passthrough filter object.
filter_axis2 = 'y'
passthrough2.set_filter_field_name(filter_axis2)
axis_min2 = -0.55
axis_max2 = 0.55
passthrough2.set_filter_limits(axis_min2, axis_max2)
cloud_p_filtered = passthrough2.filter()
# RANSAC plane segmentation
# Create the segmentation object
seg = cloud_p_filtered.make_segmenter()
# Set the model you wish to fit
seg.set_model_type(pcl.SACMODEL_PLANE)
seg.set_method_type(pcl.SAC_RANSAC)
# Max distance for a point to be considered fitting the model
# Experiment with different values for max_distance
# for segmenting the table
max_distance = 0.03
seg.set_distance_threshold(max_distance)
# Call the segment function to obtain set of inlier indices and model coefficients
inliers, coefficients = seg.segment()
# Extract inliers
extracted_inliers = cloud_p_filtered.extract(inliers, negative=False)
# Extract outliers
extracted_outliers = cloud_p_filtered.extract(inliers, negative=True)
# Euclidean Clustering
white_cloud = XYZRGB_to_XYZ(extracted_outliers) # Apply function to convert XYZRGB to XYZ
tree = white_cloud.make_kdtree()
# Create a cluster extraction object
ec = white_cloud.make_EuclideanClusterExtraction()
# Set tolerances for distance threshold
# as well as minimum and maximum cluster size (in points)
# NOTE: These are poor choices of clustering parameters
# Your task is to experiment and find values that work for segmenting objects.
ec.set_ClusterTolerance(0.01)
ec.set_MinClusterSize(50)
ec.set_MaxClusterSize(3000)
# Search the k-d tree for clusters
ec.set_SearchMethod(tree)
# Extract indices for each of the discovered clusters
cluster_indices = ec.Extract()
# Create Cluster-Mask Point Cloud to visualize each cluster separately
#Assign a color corresponding to each segmented object in scene
cluster_color = get_color_list(len(cluster_indices))
color_cluster_point_list = []
for j, indices in enumerate(cluster_indices):
for i, indice in enumerate(indices):
color_cluster_point_list.append([white_cloud[indice][0],
white_cloud[indice][1],
white_cloud[indice][2],
rgb_to_float(cluster_color[j])])
#Create new cloud containing all clusters, each with unique color
cluster_cloud = pcl.PointCloud_PointXYZRGB()
cluster_cloud.from_list(color_cluster_point_list)
# Convert PCL data to ROS messages
ros_cluster_cloud = pcl_to_ros(cluster_cloud)
ros_cloud_objects = pcl_to_ros(extracted_outliers)
ros_cloud_table = pcl_to_ros(extracted_inliers)
# Publish ROS messages
pcl_cluster_cloud_pub.publish(ros_cluster_cloud)
pcl_objects_pub.publish(ros_cloud_objects)
pcl_table_pub.publish(ros_cloud_table)
# Classify the clusters! (loop through each detected cluster one at a time)
detected_objects_labels = []
detected_objects = []
labeled_features =[]
for index, pts_list in enumerate(cluster_indices):
# Grab the points for the cluster
pcl_cluster = extracted_outliers.extract(pts_list)
ros_cluster = pcl_to_ros(pcl_cluster)
# Compute the associated feature vector
# Extract histogram features
chists = compute_color_histograms(ros_cluster, using_hsv=True)
normals = get_normals(ros_cluster)
nhists = compute_normal_histograms(normals)
feature = np.concatenate((chists, nhists)).astype(np.float64)
#detected_objects.append([feature])
# Make the prediction
prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
label = encoder.inverse_transform(prediction)[0]
detected_objects_labels.append(label)
# Publish a label into RViz
label_pos = list(white_cloud[pts_list[0]])
label_pos[2] += .4
object_markers_pub.publish(make_label(label,label_pos, index))
# Add the detected object to the list of detected objects.
do = DetectedObject()
do.label = label
do.cloud = ros_cluster
detected_objects.append(do)
# Publish the list of detected objects
rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
detected_objects_pub.publish(detected_objects)
# Suggested location for where to invoke your pr2_mover() function within pcl_callback()
# Could add some logic to determine whether or not your object detections are robust
# before calling pr2_mover()
try:
pr2_mover(detected_objects)
except rospy.ROSInterruptException:
pass
# function to load parameters and request PickPlace service
def | (detected):
# TODO: Initialize variables
test_scene_num = Int32()
object_name = String()
arm_name = String()
pick_pose = Pose()
place_pose = Pose()
dict_list = []
yaml_filename = 'output_3.yaml' #Change for different worlds
test_scene_num.data = 3 #Change for different worlds
labels = []
centroids = []
# TODO: Get/Read parameters
object_list_param = rospy.get_param('/object_list')
dropbox_param = rospy.get_param('/dropbox')
# TODO: Parse parameters into individual variables
for obj in detected:
#print obj.label
labels.append(obj.label)
points_arr = ros_to_pcl(obj.cloud).to_array()
centroids.append(np.mean(points_arr, axis=0)[:3])
# TODO: Rotate PR2 in place to capture side tables for the collision map
# TODO: Loop through the pick list
for i in range(0, len(object_list_param)):
object_name.data = object_list_param[i]['name']
object_group = object_list_param[i]['group']
for j in range(0,len(labels)):
if object_name.data == labels[j]:
pick_pose.position.x = np.asscalar(centroids[j][0])
pick_pose.position.y = np.asscalar(centroids[j][1])
pick_pose.position.z = np.asscalar(centroids[j][2])
#print pick_pose
# TODO: Get the PointCloud for a given object and obtain it's centroid
# TODO: Create 'place_pose' for the object
for j in range(0, len(dropbox_param)):
if object_group == dropbox_param[j]['group']:
place_pose.position.x = dropbox_param[j]['position'][0]
place_pose.position.y = dropbox_param[j]['position'][1]
place_pose.position.z = dropbox_param[j]['position'][2]
# TODO: Assign the arm to be used for pick_place
if object_group =='green':
arm_name.data = 'right'
elif object_group == 'red':
arm_name.data = 'left'
# TODO: Create a list of dictionaries (made with make_yaml_dict()) for later output to yaml format
print "Test_num:",type(test_scene_num),"Arm_name:", type(arm_name),"Ob_name:", type(object_name),"Pick_pose:", type(pick_pose),"Place_pose:", type(place_pose)
yaml_dict = make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose)
dict_list.append(yaml_dict)
# Wait for 'pick_place_routine' service to come up
rospy.wait_for_service('pick_place_routine')
#try:
#pick_place_routine = rospy.ServiceProxy('pick_place_routine', PickPlace)
# TODO: Insert your message variables to be sent as a service request
#resp = pick_place_routine(test_scene_num, object_name, arm_name, pick_pose, place_pose)
#print ("Response: ",resp.success)
#except rospy.ServiceException, e:
#print "Service call failed: %s"%e
# TODO: Output your request parameters into output yaml file
send_to_yaml(yaml_filename, dict_list)
if __name__ == '__main__':
# TODO: ROS node initialization
rospy.init_node('clustering', anonymous=True)
# TODO: Create Subscribers
pcl_sub = rospy.Subscriber("/pr2/world/points", pc2.PointCloud2, pcl_callback, queue_size=1)
# TODO: Create Publishers
detected_objects_pub = rospy.Publisher("/detected_objects", DetectedObjectsArray, queue_size=1)
object_markers_pub = rospy.Publisher("/object_markers", Marker, queue_size=1)
pcl_objects_pub = rospy.Publisher("/pcl_objects", PointCloud2, queue_size=1)
pcl_table_pub = rospy.Publisher("/pcl_table", PointCloud2, queue_size=1)
pcl_cluster_cloud_pub = rospy.Publisher("/pcl_clusters", PointCloud2, queue_size=1)
# Initialize color_list
get_color_list.color_list = []
# Load Model From disk
model = pickle.load(open('model.sav', 'rb'))
clf = model['classifier']
encoder = LabelEncoder()
encoder.classes_ = model['classes']
scaler = model['scaler']
# TODO: Spin while node is not shutdown
while not rospy.is_shutdown():
rospy.spin()
| pr2_mover |
util.py | import os
import uuid
from contextlib import contextmanager
from datetime import datetime
from xml.etree import cElementTree as ElementTree
from casexml.apps.phone.restore_caching import RestorePayloadPathCache
from corehq.apps.receiverwrapper.util import submit_form_locally
from corehq.form_processor.tests.utils import FormProcessorTestUtils
from corehq.util.test_utils import unit_testing_only
from dimagi.utils.dates import utcnow_sans_milliseconds
from lxml import etree
from casexml.apps.case.xml import V1, V2, NS_VERSION_MAP
from casexml.apps.phone.restore import RestoreConfig, RestoreParams
from six.moves import range
TEST_DOMAIN_NAME = 'test-domain'
class _RestoreCaseBlock(object):
|
def bootstrap_case_from_xml(test_class, filename, case_id_override=None, domain=None):
starttime = utcnow_sans_milliseconds()
file_path = os.path.join(os.path.dirname(__file__), "data", filename)
with open(file_path, "r") as f:
xml_data = f.read()
updated_xml, uid, case_id = _replace_ids_in_xform_xml(
xml_data,
case_id_override=case_id_override,
)
domain = domain or 'test-domain'
result = submit_form_locally(updated_xml, domain=domain)
test_class.assertLessEqual(starttime, result.case.server_modified_on)
test_class.assertGreaterEqual(datetime.utcnow(), result.case.server_modified_on)
test_class.assertEqual(case_id, result.case.case_id)
return result.xform, result.case
def _replace_ids_in_xform_xml(xml_data, case_id_override=None):
# from our test forms, replace the UIDs so we don't get id conflicts
uid, case_id = (uuid.uuid4().hex for i in range(2))
if case_id_override:
case_id = case_id_override
xml_data = xml_data.replace("REPLACE_UID", uid)
xml_data = xml_data.replace("REPLACE_CASEID", case_id)
return xml_data, uid, case_id
def check_xml_line_by_line(test_case, expected, actual):
"""Does what it's called, hopefully parameters are self-explanatory"""
# this is totally wacky, but elementtree strips needless
# whitespace that mindom will preserve in the original string
parser = etree.XMLParser(remove_blank_text=True)
parsed_expected = etree.tostring(etree.XML(expected, parser), pretty_print=True).decode('utf-8')
parsed_actual = etree.tostring(etree.XML(actual, parser), pretty_print=True).decode('utf-8')
if parsed_expected == parsed_actual:
return
try:
expected_lines = parsed_expected.split("\n")
actual_lines = parsed_actual.split("\n")
test_case.assertEqual(
len(expected_lines),
len(actual_lines),
"Parsed xml files are different lengths\n" +
"Expected: \n%s\nActual:\n%s" % (parsed_expected, parsed_actual))
for i in range(len(expected_lines)):
test_case.assertEqual(expected_lines[i], actual_lines[i])
except AssertionError:
import logging
logging.error("Failure in xml comparison\nExpected:\n%s\nActual:\n%s" % (parsed_expected, parsed_actual))
raise
def get_case_xmlns(version):
return NS_VERSION_MAP.get(version, 'http://openrosa.org/http/response')
def extract_caseblocks_from_xml(payload_string, version=V2):
parsed_payload = ElementTree.fromstring(payload_string)
xml_blocks = parsed_payload.findall('{{{0}}}case'.format(get_case_xmlns(version)))
return [_RestoreCaseBlock(b, version) for b in xml_blocks]
@contextmanager
def _cached_restore(testcase, user, restore_id="", version=V2,
purge_restore_cache=False):
"""DEPRECATED use <MockDevice>.sync().cases"""
assert not hasattr(testcase, 'restore_config'), testcase
assert not hasattr(testcase, 'payload_string'), testcase
if restore_id and purge_restore_cache:
RestorePayloadPathCache(
domain=user.domain,
user_id=user.user_id,
sync_log_id=restore_id,
device_id=None,
).invalidate()
testcase.restore_config = RestoreConfig(
project=user.project,
restore_user=user, params=RestoreParams(restore_id, version=version),
**getattr(testcase, 'restore_options', {})
)
testcase.payload_string = testcase.restore_config.get_payload().as_string()
try:
yield
finally:
del testcase.restore_config, testcase.payload_string
def deprecated_check_user_has_case(testcase, user, case_blocks, should_have=True,
line_by_line=True, restore_id="", version=V2,
purge_restore_cache=False, return_single=False):
"""DEPRECATED use <MockDevice>.sync().cases"""
try:
restore_config = testcase.restore_config
payload_string = testcase.payload_string
except AttributeError:
with _cached_restore(testcase, user, restore_id, version, purge_restore_cache):
restore_config = testcase.restore_config
payload_string = testcase.payload_string
return _check_payload_has_cases(
testcase=testcase,
payload_string=payload_string,
username=user.username,
case_blocks=case_blocks,
should_have=should_have,
line_by_line=line_by_line,
version=version,
return_single=return_single,
restore_config=restore_config,
)
def _check_payload_has_cases(testcase, payload_string, username, case_blocks, should_have=True,
line_by_line=True, version=V2, return_single=False, restore_config=None):
"""DEPRECATED use <MockDevice>.sync().cases"""
if not isinstance(case_blocks, list):
case_blocks = [case_blocks]
return_single = True
XMLNS = NS_VERSION_MAP.get(version, 'http://openrosa.org/http/response')
blocks_from_restore = extract_caseblocks_from_xml(payload_string, version)
def check_block(case_block):
case_block.set('xmlns', XMLNS)
case_block = _RestoreCaseBlock(ElementTree.fromstring(ElementTree.tostring(case_block)), version=version)
case_id = case_block.get_case_id()
n = 0
def extra_info():
return "\n%s\n%s" % (case_block.to_string(), [b.to_string() for b in blocks_from_restore])
match = None
for block in blocks_from_restore:
if block.get_case_id() == case_id:
if should_have:
if line_by_line:
check_xml_line_by_line(
testcase,
case_block.to_string(),
block.to_string(),
)
match = block
n += 1
if n == 2:
testcase.fail(
"Block for case_id '%s' appears twice"
" in ota restore for user '%s':%s" % (case_id, username, extra_info())
)
else:
testcase.fail(
"User '%s' gets case '%s' "
"but shouldn't:%s" % (username, case_id, extra_info())
)
if not n and should_have:
testcase.fail("Block for case_id '%s' doesn't appear in ota restore for user '%s':%s"
% (case_id, username, extra_info()))
return match
matches = [check_block(case_block) for case_block in case_blocks]
return restore_config, matches[0] if return_single else matches
@unit_testing_only
def delete_all_cases():
FormProcessorTestUtils.delete_all_cases()
@unit_testing_only
def delete_all_xforms():
FormProcessorTestUtils.delete_all_xforms()
@unit_testing_only
def delete_all_sync_logs():
FormProcessorTestUtils.delete_all_sync_logs()
@unit_testing_only
def delete_all_ledgers():
FormProcessorTestUtils.delete_all_ledgers()
| """
Little shim class for working with XML case blocks in a restore payload
NOTE the recommended way to inspect case restore payloads is to
use <MockDevice>.sync().cases, so don't use this in tests.
"""
def __init__(self, xml_element, version=V2):
self.xml_element = xml_element
self.version = version
def to_string(self):
return ElementTree.tostring(self.xml_element)
def get_case_id(self):
if self.version == V1:
return self.xml_element.findtext('{{{0}}}case_id'.format(get_case_xmlns(self.version)))
else:
return self.xml_element.get('case_id')
def get_case_name(self):
assert self.version == V2, 'get_case_name not yet supported for legacy V1 casexml'
# note: there has to be a better way to work with namespaced xpath.... right?!?!
return self.xml_element.findtext('{{{0}}}create/{{{0}}}case_name'.format(get_case_xmlns(self.version))) |
ListDataSourcesCommand.ts | import { QuickSightClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../QuickSightClient.ts";
import { ListDataSourcesRequest, ListDataSourcesResponse } from "../models/models_0.ts";
import {
deserializeAws_restJson1ListDataSourcesCommand,
serializeAws_restJson1ListDataSourcesCommand,
} from "../protocols/Aws_restJson1.ts";
import { getSerdePlugin } from "../../middleware-serde/mod.ts";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "../../protocol-http/mod.ts";
import { Command as $Command } from "../../smithy-client/mod.ts";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
SerdeContext as __SerdeContext,
} from "../../types/mod.ts";
export type ListDataSourcesCommandInput = ListDataSourcesRequest;
export type ListDataSourcesCommandOutput = ListDataSourcesResponse & __MetadataBearer;
/**
* <p>Lists data sources in current AWS Region that belong to this AWS account.</p>
*/
export class ListDataSourcesCommand extends $Command<
ListDataSourcesCommandInput,
ListDataSourcesCommandOutput,
QuickSightClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: ListDataSourcesCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: QuickSightClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<ListDataSourcesCommandInput, ListDataSourcesCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack); | const clientName = "QuickSightClient";
const commandName = "ListDataSourcesCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: ListDataSourcesRequest.filterSensitiveLog,
outputFilterSensitiveLog: ListDataSourcesResponse.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: ListDataSourcesCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_restJson1ListDataSourcesCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<ListDataSourcesCommandOutput> {
return deserializeAws_restJson1ListDataSourcesCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
} |
const { logger } = configuration; |
devopsproject.go | /*
Copyright 2020 The KubeSphere Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by lister-gen. DO NOT EDIT.
package v1alpha3
import (
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/client-go/tools/cache"
v1alpha3 "kubesphere.io/api/devops/v1alpha3"
)
// DevOpsProjectLister helps list DevOpsProjects.
// All objects returned here must be treated as read-only.
type DevOpsProjectLister interface {
// List lists all DevOpsProjects in the indexer.
// Objects returned here must be treated as read-only.
List(selector labels.Selector) (ret []*v1alpha3.DevOpsProject, err error)
// Get retrieves the DevOpsProject from the index for a given name.
// Objects returned here must be treated as read-only.
Get(name string) (*v1alpha3.DevOpsProject, error)
DevOpsProjectListerExpansion
}
// devOpsProjectLister implements the DevOpsProjectLister interface.
type devOpsProjectLister struct {
indexer cache.Indexer
}
// NewDevOpsProjectLister returns a new DevOpsProjectLister.
func | (indexer cache.Indexer) DevOpsProjectLister {
return &devOpsProjectLister{indexer: indexer}
}
// List lists all DevOpsProjects in the indexer.
func (s *devOpsProjectLister) List(selector labels.Selector) (ret []*v1alpha3.DevOpsProject, err error) {
err = cache.ListAll(s.indexer, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha3.DevOpsProject))
})
return ret, err
}
// Get retrieves the DevOpsProject from the index for a given name.
func (s *devOpsProjectLister) Get(name string) (*v1alpha3.DevOpsProject, error) {
obj, exists, err := s.indexer.GetByKey(name)
if err != nil {
return nil, err
}
if !exists {
return nil, errors.NewNotFound(v1alpha3.Resource("devopsproject"), name)
}
return obj.(*v1alpha3.DevOpsProject), nil
}
| NewDevOpsProjectLister |
mod.rs | /// NOTE: These syscalls only support wasm_32 for now because they take u32 offset
use libc::{c_int, c_long, getenv};
use std::ffi::CString;
use std::mem;
use std::os::raw::c_char;
use crate::env::call_malloc;
use crate::utils::{copy_cstr_into_wasm, read_string_from_wasm};
use wasmer_runtime_core::vm::Ctx;
extern "C" {
#[link_name = "_putenv"]
pub fn putenv(s: *const c_char) -> c_int;
}
// #[no_mangle]
/// emscripten: _getenv // (name: *const char) -> *const c_char;
pub fn _getenv(ctx: &mut Ctx, name: u32) -> u32 {
debug!("emscripten::_getenv");
let name_string = read_string_from_wasm(ctx.memory(0), name);
debug!("=> name({:?})", name_string);
let c_str = unsafe { getenv(name_string.as_ptr() as *const libc::c_char) };
if c_str.is_null() |
unsafe { copy_cstr_into_wasm(ctx, c_str as *const c_char) }
}
/// emscripten: _setenv // (name: *const char, name: *const value, overwrite: int);
pub fn _setenv(ctx: &mut Ctx, name: u32, value: u32, _overwrite: u32) -> c_int {
debug!("emscripten::_setenv");
// setenv does not exist on windows, so we hack it with _putenv
let name = read_string_from_wasm(ctx.memory(0), name);
let value = read_string_from_wasm(ctx.memory(0), value);
let putenv_string = format!("{}={}", name, value);
let putenv_cstring = CString::new(putenv_string).unwrap();
let putenv_raw_ptr = putenv_cstring.as_ptr();
debug!("=> name({:?})", name);
debug!("=> value({:?})", value);
unsafe { putenv(putenv_raw_ptr) }
}
/// emscripten: _putenv // (name: *const char);
pub fn _putenv(ctx: &mut Ctx, name: c_int) -> c_int {
debug!("emscripten::_putenv");
let name_addr = emscripten_memory_pointer!(ctx.memory(0), name) as *const c_char;
debug!("=> name({:?})", unsafe {
std::ffi::CStr::from_ptr(name_addr)
});
unsafe { putenv(name_addr) }
}
/// emscripten: _unsetenv // (name: *const char);
pub fn _unsetenv(ctx: &mut Ctx, name: u32) -> c_int {
debug!("emscripten::_unsetenv");
let name = read_string_from_wasm(ctx.memory(0), name);
// no unsetenv on windows, so use putenv with an empty value
let unsetenv_string = format!("{}=", name);
let unsetenv_cstring = CString::new(unsetenv_string).unwrap();
let unsetenv_raw_ptr = unsetenv_cstring.as_ptr();
debug!("=> name({:?})", name);
unsafe { putenv(unsetenv_raw_ptr) }
}
#[allow(clippy::cast_ptr_alignment)]
pub fn _getpwnam(ctx: &mut Ctx, name_ptr: c_int) -> c_int {
debug!("emscripten::_getpwnam {}", name_ptr);
#[cfg(not(feature = "debug"))]
let _ = name_ptr;
#[repr(C)]
struct GuestPasswd {
pw_name: u32,
pw_passwd: u32,
pw_uid: u32,
pw_gid: u32,
pw_gecos: u32,
pw_dir: u32,
pw_shell: u32,
}
// stub this in windows as it is not valid
unsafe {
let passwd_struct_offset = call_malloc(ctx, mem::size_of::<GuestPasswd>() as _);
let passwd_struct_ptr =
emscripten_memory_pointer!(ctx.memory(0), passwd_struct_offset) as *mut GuestPasswd;
(*passwd_struct_ptr).pw_name = 0;
(*passwd_struct_ptr).pw_passwd = 0;
(*passwd_struct_ptr).pw_gecos = 0;
(*passwd_struct_ptr).pw_dir = 0;
(*passwd_struct_ptr).pw_shell = 0;
(*passwd_struct_ptr).pw_uid = 0;
(*passwd_struct_ptr).pw_gid = 0;
passwd_struct_offset as c_int
}
}
#[allow(clippy::cast_ptr_alignment)]
pub fn _getgrnam(ctx: &mut Ctx, name_ptr: c_int) -> c_int {
debug!("emscripten::_getgrnam {}", name_ptr);
#[cfg(not(feature = "debug"))]
let _ = name_ptr;
#[repr(C)]
struct GuestGroup {
gr_name: u32,
gr_passwd: u32,
gr_gid: u32,
gr_mem: u32,
}
// stub the group struct as it is not supported on windows
unsafe {
let group_struct_offset = call_malloc(ctx, mem::size_of::<GuestGroup>() as _);
let group_struct_ptr =
emscripten_memory_pointer!(ctx.memory(0), group_struct_offset) as *mut GuestGroup;
(*group_struct_ptr).gr_name = 0;
(*group_struct_ptr).gr_passwd = 0;
(*group_struct_ptr).gr_gid = 0;
(*group_struct_ptr).gr_mem = 0;
group_struct_offset as c_int
}
}
pub fn _sysconf(_ctx: &mut Ctx, name: c_int) -> c_long {
debug!("emscripten::_sysconf {}", name);
#[cfg(not(feature = "debug"))]
let _ = name;
// stub because sysconf is not valid on windows
0
}
| {
return 0;
} |
result.test.ts | import {
Err,
None,
Ok,
Option,
Result,
ResultErrType,
ResultErrTypes,
ResultOkType,
ResultOkTypes,
Some,
} from '../src';
import { eq } from './util';
test('Err<E> | Ok<T> should be Result<T, E>', () => {
const r1 = Err(0);
const r2 = new Ok('');
const r = Math.random() ? r1 : r2;
expect(Result.isResult(r1)).toEqual(true);
expect(Result.isResult(r2)).toEqual(true);
expect(Result.isResult(Some(3))).toEqual(false);
eq<typeof r, Result<string, number>>(true);
});
test('Type can be narrowed using ok & err', () => {
const r1 = Ok(0) as Result<number, string>;
if (r1.ok) {
eq<Ok<number>, typeof r1>(true);
} else {
eq<Err<string>, typeof r1>(true);
}
if (r1.err) {
eq<Err<string>, typeof r1>(true);
} else {
eq<Ok<number>, typeof r1>(true);
}
});
test('map', () => {
const r = new Err(0) as Result<string, number>;
const r2 = r.map(Symbol);
eq<typeof r2, Result<symbol, number>>(true);
});
test('andThen', () => {
const result = new Ok('Ok') as Result<string, boolean>;
const then = result.andThen(() => new Err('broke') as Result<boolean, string>);
expect(then).toMatchResult(new Err('broke'));
function takesResult(result: Result<boolean, string | boolean>): void {}
takesResult(then);
});
test('mapErr', () => {
const r = new Err(0) as Result<string, number>;
const r2 = r.mapErr(Symbol);
eq<typeof r2, Result<string, symbol>>(true);
});
test('Iterable', () => {
const r1 = new Ok([true, false]) as Result<boolean[], number>;
const r1Iter = r1[Symbol.iterator]();
eq<Iterator<boolean>, typeof r1Iter>(true);
const r2 = new Ok(32) as Result<number, string>;
const r2Iter = r2[Symbol.iterator]();
eq<Iterator<never>, typeof r2Iter>(true);
});
test('ResultOkType', () => {
type a = ResultOkType<Ok<string>>;
eq<string, a>(true);
type b = ResultOkType<Err<string>>;
eq<never, b>(true);
type c = ResultOkType<Result<string, number>>;
eq<string, c>(true);
});
test('ResultErrType', () => {
type a = ResultErrType<Ok<string>>;
eq<never, a>(true);
type b = ResultErrType<Err<string>>;
eq<string, b>(true);
type c = ResultErrType<Result<string, number>>;
eq<number, c>(true);
});
test('ResultOkTypes & ResultErrTypes', () => {
type a = ResultOkTypes<
[Ok<string>, Err<string>, Result<symbol, number>, Result<never, string>, Ok<32> | Err<boolean>]
>;
eq<[string, never, symbol, never, 32], a>(true);
type b = ResultErrTypes<
[Ok<string>, Err<string>, Result<symbol, number>, Result<never, symbol>, Ok<boolean> | Err<32>]
>;
eq<[never, string, number, symbol, 32], b>(true);
});
test('Result.all', () => {
const ok0 = Ok(3);
const ok1 = new Ok(true);
const ok2 = new Ok(8 as const) as Result<8, boolean>;
const err0 = Err(Symbol());
const err1 = new Err(Error());
const err2 = new Err(9 as const) as Result<boolean, 9>;
const all0 = Result.all();
expect(all0).toMatchResult(Ok([]));
eq<typeof all0, Result<[], never>>(true);
const all1 = Result.all(ok0, ok1);
expect(all1).toMatchResult(Ok([3, true]));
eq<typeof all1, Result<[number, boolean], never>>(true);
const all3 = Result.all(err0, err1);
expect(all3).toMatchResult(Err(err0.val));
eq<typeof all3, Result<[never, never], symbol | Error>>(true);
const all4 = Result.all(...([] as Result<string, number>[]));
eq<typeof all4, Result<string[], number>>(true);
const all5 = Result.all(ok0, ok1, ok2, err2);
expect(all5).toMatchResult(Err(9));
eq<typeof all5, Result<[number, boolean, 8, boolean], boolean | 9>>(true);
});
test('Result.any', () => {
const ok0 = new Ok(3);
const ok1 = new Ok(true);
const ok2 = new Ok(8 as const) as Result<8, boolean>;
const err0 = new Err(Symbol());
const err1 = new Err(Error());
const err2 = new Err(9 as const) as Result<boolean, 9>;
const any0 = Result.any();
expect(any0).toMatchResult(Err([]));
eq<typeof any0, Result<never, []>>(true);
const any1 = Result.any(ok0, ok1);
expect(any1).toMatchResult(Ok(3));
eq<typeof any1, Result<number | boolean, [never, never]>>(true);
const any3 = Result.any(err0, err1);
expect(any3).toMatchResult(Err([err0.val, err1.val]));
eq<typeof any3, Result<never, [symbol, Error]>>(true);
const any4 = Result.any(...([] as Result<string, number>[]));
eq<typeof any4, Result<string, number[]>>(true);
const any5 = Result.any(err0, err1, err2, ok2);
expect(any5).toMatchResult(Ok(8));
eq<typeof any5, Result<boolean | 8, [symbol, Error, 9, boolean]>>(true);
});
test('Result.wrap', () => {
const a = Result.wrap(() => 1);
expect(a).toMatchResult(Ok(1));
eq<typeof a, Result<number, unknown>>(true);
class CustomError {
readonly message = 'hi';
}
const err = new CustomError();
const b = Result.wrap<number, CustomError>(() => {
throw err;
});
expect(b).toMatchResult(Err(err));
eq<typeof b, Result<number, CustomError>>(true);
});
test('Result.wrapAsync', async () => {
const a = await Result.wrapAsync(async () => 1);
expect(a).toMatchResult(Ok(1));
eq<typeof a, Result<number, unknown>>(true);
class CustomError {
readonly message = 'hi';
}
const err = new CustomError();
const b = await Result.wrapAsync<number, CustomError>(async () => {
throw err;
});
expect(b).toMatchResult(Err(err));
eq<typeof b, Result<number, CustomError>>(true);
const c = await Result.wrapAsync<number, string>(() => {
throw 'thrown before promise';
return Promise.resolve(3);
});
expect(c).toMatchResult(Err('thrown before promise'));
eq<typeof c, Result<number, string>>(true);
});
test('safeUnwrap', () => {
const ok1 = new Ok(3).safeUnwrap();
expect(ok1).toEqual(3);
eq<typeof ok1, number>(true);
const err = new Err('hi');
const result = new Ok(1) as Result<number, string>;
expect(() => {
// @ts-expect-error
err.safeUnwrap();
}).toThrowError();
// @ts-expect-error
result.safeUnwrap();
if (result.ok) {
const val = result.safeUnwrap();
eq<typeof val, number>(true);
expect(val).toEqual(1);
} else {
// @ts-expect-error
result.safeUnwrap();
}
});
test('Issue #24', () => {
const getStatus = (payload: boolean): Result<boolean, Error> => {
if (payload) {
return Ok(payload);
}
return Err(new Error('Payload is false'));
};
const processStatus = (): Result<boolean, Error> => {
return getStatus(true)
.andThen((result) => Ok(result))
.map((data) => data);
};
});
test('To option', () => {
const result = new Ok('hello') as Result<string, number>;
const option = result.toOption();
eq<typeof option, Option<string>>(true);
expect(option).toEqual(Some('hello'));
const result2: Result<string, number> = new Err(32);
const option2 = result2.toOption();
expect(option2).toEqual(None); | }); |
|
index.js | import createPayOrder from './pay/createPayOrder.js';
import getOrderStatus from './pay/getOrderStatus.js'; | global.PUB_PARAMS = param;
}
export {
configParams,
createPayOrder,
getOrderStatus
} |
function configParams(param) { |
BaseResolver.ts | import { Query, Resolver } from '@nestjs/graphql'
import { Type } from '@nestjs/common'
export function BaseResolver<T extends Type<unknown>>(classRef: T): any {
@Resolver({ isAbstract: true })
abstract class BaseResolverHost {
@Query((type) => classRef, { name: `find${classRef.name}` })
async find(): Promise<T[]> {
return [] // ???
}
} | return BaseResolverHost
} |
|
array.go | package document
import (
"bytes"
"encoding/json"
"errors"
"io"
"reflect"
)
// ErrValueNotFound must be returned by Array implementations, when calling the GetByIndex method and
// the index wasn't found in the array.
var ErrValueNotFound = errors.New("value not found")
// An Array contains a set of values.
type Array interface {
// Iterate goes through all the values of the array and calls the given function by passing each one of them.
// If the given function returns an error, the iteration stops.
Iterate(fn func(i int, value Value) error) error
// GetByIndex returns a value by index of the array.
GetByIndex(i int) (Value, error)
}
// ArrayLength returns the length of an array.
func ArrayLength(a Array) (int, error) {
if vb, ok := a.(ValueBuffer); ok {
return len(vb), nil
}
var len int
err := a.Iterate(func(_ int, _ Value) error {
len++
return nil
})
return len, err
}
// ValueBuffer is an array that holds values in memory.
type ValueBuffer []Value
// NewValueBuffer creates a buffer of values.
func NewValueBuffer(values ...Value) ValueBuffer {
return ValueBuffer(values)
}
// Iterate over all the values of the buffer. It implements the Array interface.
func (vb ValueBuffer) Iterate(fn func(i int, value Value) error) error {
for i, v := range vb {
err := fn(i, v)
if err != nil |
}
return nil
}
// GetByIndex returns a value set at the given index. If the index is out of range it returns an error.
func (vb ValueBuffer) GetByIndex(i int) (Value, error) {
if i >= len(vb) {
return Value{}, ErrValueNotFound
}
return vb[i], nil
}
// Append a value to the buffer and return a new buffer.
func (vb ValueBuffer) Append(v Value) ValueBuffer {
return append(vb, v)
}
// ScanArray copies all the values of a to the buffer.
func (vb *ValueBuffer) ScanArray(a Array) error {
return a.Iterate(func(i int, v Value) error {
*vb = append(*vb, v)
return nil
})
}
// Copy deep copies all the values from the given array.
// If a value is a document or an array, it will be stored as a FieldBuffer or ValueBuffer respectively.
func (vb *ValueBuffer) Copy(a Array) error {
err := vb.ScanArray(a)
if err != nil {
return err
}
for _, v := range *vb {
switch v.Type {
case DocumentValue:
var buf FieldBuffer
err = buf.Copy(v.V.(Document))
if err != nil {
return err
}
*vb = vb.Append(NewDocumentValue(&buf))
case ArrayValue:
var buf ValueBuffer
err = buf.Copy(v.V.(Array))
if err != nil {
return err
}
*vb = vb.Append(NewArrayValue(&buf))
}
}
return nil
}
// Replace the value of the index by v.
func (vb *ValueBuffer) Replace(index int, v Value) error {
if len(*vb) <= index {
return ErrFieldNotFound
}
(*vb)[index] = v
return nil
}
// UnmarshalJSON implements the json.Unmarshaler interface.
func (vb *ValueBuffer) UnmarshalJSON(data []byte) error {
dec := json.NewDecoder(bytes.NewReader(data))
t, err := dec.Token()
if err == io.EOF {
return err
}
return parseJSONArray(dec, t, vb)
}
type sliceArray struct {
ref reflect.Value
}
var _ Array = (*sliceArray)(nil)
func (s sliceArray) Iterate(fn func(i int, v Value) error) error {
l := s.ref.Len()
for i := 0; i < l; i++ {
f := s.ref.Index(i)
v, err := NewValue(f.Interface())
if err != nil {
if err.(*ErrUnsupportedType) != nil {
continue
}
return err
}
err = fn(i, v)
if err != nil {
return err
}
}
return nil
}
func (s sliceArray) GetByIndex(i int) (Value, error) {
if i >= s.ref.Len() {
return Value{}, ErrFieldNotFound
}
v := s.ref.Index(i)
if !v.IsValid() {
return Value{}, ErrFieldNotFound
}
return NewValue(v.Interface())
}
| {
return err
} |
using_golang.go | package main
import (
"fmt"
"ioutil"
"log"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethclient"
)
func main() | {
INFURA_PROJECT_ID := string(ioutil.ReadFile("../.infura-key"))
client, err := ethclient.Dial("https://rinkeby.infura.io/v3/" + INFURA_PROJECT_ID)
if err != nil {
log.Fatal(err)
}
fmt.Println("connected via Infura")
tx := types.NewTransaction
} |
|
book.rest-api.ts | import { bookRepository } from 'dals';
import { authorizationMiddleware } from 'pods/security';
import {
mapBookListFromModelToApi,
mapBookFromModelToApi,
mapBookFromApiToModel,
} from './book.mappers';
import { paginateBookList } from './book.helpers';
export const booksApi = Router();
booksApi
.get('/', authorizationMiddleware(), async (req, res, next) => {
try {
const page = Number(req.query.page);
const pageSize = Number(req.query.pageSize);
const bookList = await bookRepository.getBookList();
const paginatedBookList = paginateBookList(bookList, page, pageSize);
res.send(mapBookListFromModelToApi(paginatedBookList));
} catch (error) {
next(error);
}
})
.get('/:id', authorizationMiddleware(), async (req, res, next) => {
try {
const { id } = req.params;
const book = await bookRepository.getBook(id);
res.send(mapBookFromModelToApi(book));
} catch (error) {
next(error);
}
})
.post('/', authorizationMiddleware(['admin']), async (req, res, next) => {
try {
const modelBook = mapBookFromApiToModel(req.body);
const newBook = await bookRepository.saveBook(modelBook);
res.status(201).send(mapBookFromModelToApi(newBook));
} catch (error) {
next(error);
}
})
.put('/:id', authorizationMiddleware(['admin']), async (req, res, next) => {
try {
const { id } = req.params;
const modelBook = mapBookFromApiToModel({ ...req.body, id });
await bookRepository.saveBook(modelBook);
res.sendStatus(204);
} catch (error) {
next(error);
}
})
.delete(
'/:id',
authorizationMiddleware(['admin']),
async (req, res, next) => {
try {
const { id } = req.params;
const isDeleted = await bookRepository.deleteBook(id);
res.sendStatus(isDeleted ? 204 : 404);
} catch (error) {
next(error);
}
}
); | import { Router } from 'express'; |
|
PropField.js | import {SocketHandle} from '../../sockets/SocketHandle';
import {Control} from 'rete-react-render-plugin';
import classNames from 'classnames';
import {paramCase} from 'change-case';
import getPropLabel from '../../../../utils/getPropLabel';
import React from 'react';
import useReactTooltip from '../../../../hooks/useReactTooltip';
import useLearningModeState from '../../../../hooks/persistent/useLearningModeState';
import getInfoText from '../../../../utils/getInfoText';
export default function | ({prop, node, hideLeft, hideRight, bindSocket, bindControl}) {
const [learningMode] = useLearningModeState();
const advanced = !!node.data['editor:advanced'];
const input = node.inputs.get(prop.key);
const output = node.outputs.get(prop.key);
const control = node.controls.get(prop.key) || (input?.showControl() && input.control);
useReactTooltip();
if(prop.advanced && !advanced) {
if(
(!input || !input.connections.length) &&
(!output || !output.connections.length) &&
(!control || control.getValue() === control.getDefaultValue())
) {
return null;
}
}
const leftSocket = input && !hideLeft && (
<SocketHandle
type="input"
socket={input.socket}
propKey={prop.key}
io={input}
innerRef={bindSocket}
/>
);
const rightSocket = output && !hideRight && (
<SocketHandle
type="output"
propKey={prop.key}
socket={output.socket}
io={output}
innerRef={bindSocket}
/>
);
const controlField = control && (
<Control
className={input ? 'input-control' : 'control'}
control={control}
innerRef={bindControl}
/>
);
const tooltip = learningMode ? getInfoText(prop.info) : null;
const tooltipDelay = 100;
return (
<div
className={classNames('prop', 'key-' + paramCase(prop.key), !advanced && prop.advanced && 'advanced')}
data-tip={tooltip}
data-delay-show={tooltipDelay}>
<div className="input">
{leftSocket}
{controlField || (leftSocket && (
<div className="input-title">
{getPropLabel(prop)}
</div>
))}
</div>
<div className="output">
{!input && (rightSocket && (
<div className="output-title">
{getPropLabel(prop)}
</div>
))}
{rightSocket}
</div>
</div>
);
} | PropField |
device_query.py | '''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.EGL import _types as _cs
# End users want this...
from OpenGL.raw.EGL._types import *
from OpenGL.raw.EGL import _errors
from OpenGL.constant import Constant as _C
import ctypes | return _p.createFunction( function,_p.PLATFORM.EGL,'EGL_EXT_device_query',error_checker=_errors._error_checker)
EGL_BAD_DEVICE_EXT=_C('EGL_BAD_DEVICE_EXT',0x322B)
EGL_DEVICE_EXT=_C('EGL_DEVICE_EXT',0x322C)
# EGL_NO_DEVICE_EXT=_C('EGL_NO_DEVICE_EXT',((EGLDeviceEXT)(0)))
@_f
@_p.types(_cs.EGLBoolean,_cs.EGLDeviceEXT,_cs.EGLint,arrays.EGLAttribArray)
def eglQueryDeviceAttribEXT(device,attribute,value):pass
@_f
@_p.types(ctypes.c_char_p,_cs.EGLDeviceEXT,_cs.EGLint)
def eglQueryDeviceStringEXT(device,name):pass
@_f
@_p.types(_cs.EGLBoolean,_cs.EGLDisplay,_cs.EGLint,arrays.EGLAttribArray)
def eglQueryDisplayAttribEXT(dpy,attribute,value):pass | _EXTENSION_NAME = 'EGL_EXT_device_query'
def _f( function ): |
HtmlTsInputChoiceValidatorMulti.ts | import AbstractHtmlTsInputValidator from "./Core/AbstractHtmlTsInputValidator";
import HtmlTsInputDictionary from "../HtmlTsInputDictionary";
export interface HtmlTsInputChoiceValidatorMultiType extends HtmlTsInputValidatorBaseTypes<string[]> {
// 自動でテスト
isNotNull?: boolean;
// 手動でテスト
minSelect?: number;
maxSelect?: number;
}
class HtmlTsInputChoiceValidatorMulti extends AbstractHtmlTsInputValidator<string[]>{
protected keys: string[] = [
"isNotNull",
];
protected map: { [p: string]: HtmlTsInputTextValidatorMapType<string[]> } = {
isNotNull: {
func: HtmlTsValidateArray.isNotNull,
wordKey: "choiceIsNotNull",
},
};
protected isTest: { [p: string]: boolean } = {
"isNotNull": false,
};
protected params: HtmlTsInputChoiceValidatorMultiType;
constructor(params: HtmlTsInputChoiceValidatorMultiType) {
super(params);
if (this.params === undefined) return;
if (this.params.isNotNull !== undefined) this.isTest["isNotNull"] = this.params.isNotNull;
}
validate(value: string[]): HtmlTsInputValidatorResult {
const result = super.validate(value);
if (this.params === undefined) return result;
if (this.params.minSelect !== undefined) {
result.append(
HtmlTsValidateArray.minSelect(value, this.params.minSelect),
HtmlTsInputDictionary.get("choiceMinSelect").replace("%s", this.params.minSelect.toString())
)
}
if (this.params.maxSelect !== undefined) {
result.append(
HtmlTsValidateArray.maxSelect(value, this.params.maxSelect),
HtmlTsInputDictionary.get("choiceMaxSelect").replace("%s", this.params.maxSelect.toString())
)
}
return result;
}
}
export default HtmlTsInputChoiceValidatorMulti; | import HtmlTsInputValidatorResult from "./Core/HtmlTsInputValidatorResult";
import {HtmlTsInputTextValidatorMapType, HtmlTsInputValidatorBaseTypes} from "./Core/HtmlTsInputValidatorTypes";
import HtmlTsValidateArray from "../../Validate/HtmlTsValidateArray"; |
|
main.rs | use bracket_lib::prelude::*;
const SCREEN_WIDTH: i32 = 80;
const SCREEN_HEIGHT: i32 = 50;
const FRAME_DURATION: f32 = 75.0;
enum GameMode {
Menu,
Playing,
End,
}
struct State {
mode: GameMode,
player: Player,
frame_time: f32,
obstacle: Obstacle,
score: i32,
}
impl State {
fn new() -> Self {
State {
mode: GameMode::Menu,
player: Player::new(5, 25),
frame_time: 0.0,
obstacle: Obstacle::new(SCREEN_WIDTH, 0),
score: 0,
}
}
fn menu(&mut self, ctx: &mut BTerm) {
ctx.cls();
ctx.print_centered(5, "Welcome to Flappy Dragon");
ctx.print_centered(8, "(P) Play");
ctx.print_centered(9, "(Q) Quit");
if let Some(key) = ctx.key {
match key {
VirtualKeyCode::P => self.restart(),
VirtualKeyCode::Q => ctx.quitting = true,
_ => {}
}
}
}
fn | (&mut self, ctx: &mut BTerm) {
ctx.cls_bg(NAVY);
self.frame_time += ctx.frame_time_ms;
if self.frame_time > FRAME_DURATION {
self.frame_time = 0.0;
self.player.gravity_and_move();
}
if let Some(VirtualKeyCode::Space) = ctx.key {
self.player.flap();
}
self.player.render(ctx);
ctx.print(0, 0, "Press SPACE to flap.");
ctx.print(0, 1, &format!("Score: {}", self.score));
self.obstacle.render(ctx, self.player.x);
if self.player.x > self.obstacle.x {
self.score += 1;
self.obstacle = Obstacle::new(self.player.x + SCREEN_WIDTH, self.score);
}
if self.player.y > SCREEN_HEIGHT || self.obstacle.hit_obstacle(&self.player) {
self.mode = GameMode::End;
}
if self.player.y > SCREEN_HEIGHT {
self.mode = GameMode::End;
}
}
fn restart(&mut self) {
self.mode = GameMode::Playing;
self.player = Player::new(5, 25);
self.obstacle = Obstacle::new(SCREEN_WIDTH, 0);
self.score = 0;
self.frame_time = 0.0;
}
fn end(&mut self, ctx: &mut BTerm) {
ctx.cls();
ctx.print_centered(5, "Game over!");
ctx.print_centered(8, "(P) Play");
ctx.print_centered(9, "(Q) Quit");
if let Some(key) = ctx.key {
match key {
VirtualKeyCode::P => self.restart(),
VirtualKeyCode::Q => ctx.quitting = true,
_ => {}
}
}
}
}
impl GameState for State {
fn tick(&mut self, ctx: &mut BTerm) {
match self.mode {
GameMode::Menu => self.menu(ctx),
GameMode::Playing => self.play(ctx),
GameMode::End => self.end(ctx),
}
}
}
struct Player {
x: i32,
y: i32,
velocity: f32,
}
impl Player {
fn new(x: i32, y: i32) -> Self {
Player {
x,
y,
velocity: 0.0,
}
}
fn render(&mut self, ctx: &mut BTerm) {
ctx.set(0, self.y, YELLOW, BLACK, to_cp437('@'));
}
fn gravity_and_move(&mut self) {
if self.velocity < 2.0 {
self.velocity += 0.2;
}
self.y += self.velocity as i32;
self.x += 1;
if self.y < 0 {
self.y = 0
}
}
fn flap(&mut self) {
self.velocity = -2.0;
}
}
struct Obstacle {
x: i32,
gap_y: i32,
size: i32,
}
impl Obstacle {
fn new(x: i32, score: i32) -> Self {
let mut random = RandomNumberGenerator::new();
Obstacle {
x,
gap_y: random.range(10, 40),
size: i32::max(2, 20 - score),
}
}
fn render(&mut self, ctx: &mut BTerm, player_x: i32) {
let screen_x = self.x - player_x;
let half_size = self.size / 2;
for y in 0..self.gap_y - half_size {
ctx.set(screen_x, y, RED, BLACK, to_cp437('/'))
}
for y in self.gap_y + half_size..SCREEN_HEIGHT {
ctx.set(screen_x, y, RED, BLACK, to_cp437('/'))
}
}
fn hit_obstacle(&self, player: &Player) -> bool {
let half_size = self.size / 2;
let does_x_match = player.x == self.x;
let player_above_gap = player.y < self.gap_y - half_size;
let player_below_gap = player.y > self.gap_y + half_size;
does_x_match && (player_above_gap || player_below_gap)
}
}
fn main() -> BError {
let context = BTermBuilder::simple80x50()
.with_title("Flappy Dragon")
.build()?;
main_loop(context, State::new())
}
| play |
launch.rs | // This is a simplified rust-reimplementation of the gst-launch-<version>
// cli tool. It has no own parameters and simply parses the cli arguments
// as launch syntax.
// When the parsing succeeded, the pipeline is run until the stream ends or an error happens.
extern crate gstreamer as gst;
use gst::prelude::*;
use std::env;
use std::process;
#[path = "../examples-common.rs"]
mod examples_common;
fn example_main() {
// Get a string containing the passed pipeline launch syntax
let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");
gst::init().unwrap();
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
// In comparision to the launch_glib_main example, this is using the advanced launch syntax
// parsing API of GStreamer. The function returns a Result, handing us the pipeline if
// parsing and creating succeeded, and hands us detailed error information if something
// went wrong. The error is passed as gst::ParseError. In this example, we separately
// handle the NoSuchElement error, that GStreamer uses to notify us about elements
// used within the launch syntax, that are not available (not installed).
// Especially GUIs should probably handle this case, to tell users that they need to
// install the corresponding gstreamer plugins.
let mut context = gst::ParseContext::new();
let pipeline =
match gst::parse_launch_full(&pipeline_str, Some(&mut context), gst::ParseFlags::NONE) {
Ok(pipeline) => pipeline,
Err(err) => |
};
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
fn main() {
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}
| {
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
println!("Missing element(s): {:?}", context.get_missing_elements());
} else {
println!("Failed to parse pipeline: {}", err);
}
process::exit(-1)
} |
rpc_pubsub_service.rs | //! The `pubsub` module implements a threaded subscription service on client RPC request
use crate::{
rpc_pubsub::{RpcSafePubSub, RpcSafePubSubImpl, MAX_ACTIVE_SUBSCRIPTIONS},
rpc_subscriptions::RpcSubscriptions,
};
use jsonrpc_pubsub::{PubSubHandler, Session};
use jsonrpc_ws_server::{RequestContext, ServerBuilder};
use std::{
net::SocketAddr,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
thread::{self, sleep, Builder, JoinHandle},
time::Duration,
};
#[derive(Debug, Clone)]
pub struct PubSubConfig {
pub enable_vote_subscription: bool,
// See the corresponding fields in
// https://github.com/paritytech/ws-rs/blob/be4d47575bae55c60d9f51b47480d355492a94fc/src/lib.rs#L131
// for a complete description of each field in this struct
pub max_connections: usize,
pub max_fragment_size: usize,
pub max_in_buffer_capacity: usize,
pub max_out_buffer_capacity: usize,
pub max_active_subscriptions: usize,
}
impl Default for PubSubConfig {
fn default() -> Self {
Self {
enable_vote_subscription: false,
max_connections: 1000, // Arbitrary, default of 100 is too low
max_fragment_size: 50 * 1024, // 50KB
max_in_buffer_capacity: 50 * 1024, // 50KB
max_out_buffer_capacity: 15 * 1024 * 1024, // max account size (10MB), then 5MB extra for base64 encoding overhead/etc
max_active_subscriptions: MAX_ACTIVE_SUBSCRIPTIONS,
}
}
}
pub struct PubSubService {
thread_hdl: JoinHandle<()>,
}
impl PubSubService {
pub fn new(
pubsub_config: PubSubConfig,
subscriptions: &Arc<RpcSubscriptions>,
pubsub_addr: SocketAddr,
exit: &Arc<AtomicBool>,
) -> Self {
info!("rpc_pubsub bound to {:?}", pubsub_addr);
let rpc = RpcSafePubSubImpl::new(
subscriptions.clone(),
pubsub_config.max_active_subscriptions,
);
let exit_ = exit.clone();
let thread_hdl = Builder::new()
.name("solana-pubsub".to_string())
.spawn(move || { | io.extend_with(rpc.to_delegate());
let server = ServerBuilder::with_meta_extractor(io, |context: &RequestContext| {
info!("New pubsub connection");
let session = Arc::new(Session::new(context.sender()));
session.on_drop(|| {
info!("Pubsub connection dropped");
});
session
})
.max_connections(pubsub_config.max_connections)
.max_payload(pubsub_config.max_fragment_size)
.max_in_buffer_capacity(pubsub_config.max_in_buffer_capacity)
.max_out_buffer_capacity(pubsub_config.max_out_buffer_capacity)
.start(&pubsub_addr);
if let Err(e) = server {
warn!(
"Pubsub service unavailable error: {:?}. \n\
Also, check that port {} is not already in use by another application",
e,
pubsub_addr.port()
);
return;
}
while !exit_.load(Ordering::Relaxed) {
sleep(Duration::from_millis(100));
}
server.unwrap().close();
})
.unwrap();
Self { thread_hdl }
}
pub fn close(self) -> thread::Result<()> {
self.join()
}
pub fn join(self) -> thread::Result<()> {
self.thread_hdl.join()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::optimistically_confirmed_bank_tracker::OptimisticallyConfirmedBank;
use solana_runtime::{
bank::Bank,
bank_forks::BankForks,
commitment::BlockCommitmentCache,
genesis_utils::{create_genesis_config, GenesisConfigInfo},
};
use std::{
net::{IpAddr, Ipv4Addr},
sync::RwLock,
};
#[test]
fn test_pubsub_new() {
let pubsub_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0);
let exit = Arc::new(AtomicBool::new(false));
let GenesisConfigInfo { genesis_config, .. } = create_genesis_config(10_000);
let bank = Bank::new(&genesis_config);
let bank_forks = Arc::new(RwLock::new(BankForks::new(bank)));
let optimistically_confirmed_bank =
OptimisticallyConfirmedBank::locked_from_bank_forks_root(&bank_forks);
let subscriptions = Arc::new(RpcSubscriptions::new(
&exit,
bank_forks,
Arc::new(RwLock::new(BlockCommitmentCache::new_for_tests())),
optimistically_confirmed_bank,
));
let pubsub_service =
PubSubService::new(PubSubConfig::default(), &subscriptions, pubsub_addr, &exit);
let thread = pubsub_service.thread_hdl.thread();
assert_eq!(thread.name().unwrap(), "solana-pubsub");
}
} | let mut io = PubSubHandler::default(); |
tray.rs | use winapi;
use crate::misc;
use std::env;
use std::mem::{ transmute as tm, zeroed };
use std::ptr::null_mut;
use winapi::um::shellapi::*;
use winapi::um::winuser::*;
use winapi::shared::minwindef::*;
use winapi::shared::windef::*;
use winapi::um::libloaderapi::*;
use misc::fatal;
struct Tray;
impl Tray {
const INTERACTION: UINT = WM_APP + 1;
const BUTTON_ABOUT: UINT = WM_APP + 2;
const BUTTON_CLOSE_GURA: UINT = WM_APP + 3;
unsafe fn hinstance() -> HINSTANCE {
GetModuleHandleA(null_mut())
}
unsafe fn hwnd(hinstance: HINSTANCE) -> HWND {
// Create a dummy window class with a custom callback.
let dummy_window_class = (&mut {
let mut dummy_window_class: WNDCLASSA = zeroed();
dummy_window_class.hInstance = tm(hinstance);
dummy_window_class.lpfnWndProc = Some(__raw_callback__);
dummy_window_class.lpszClassName = "dummy\0".as_ptr() as *mut i8;
dummy_window_class
}) as *mut WNDCLASSA;
// Register the dummy window class.
RegisterClassA(dummy_window_class);
// Spawn the dummy window.
let hwnd = CreateWindowExA(
0,
"dummy\0".as_ptr() as *mut i8,
"dummy\0".as_ptr() as *mut i8,
0,
0, 0,
0, 0,
null_mut() as _,
null_mut() as _,
std::mem::transmute(hinstance),
null_mut() as _);
if hwnd.is_null() {
fatal("Unexpected runtime error.", "Failed to create a dummy window.");
}
ShowWindow(hwnd, SW_HIDE);
SetWindowLongPtrA(hwnd, GWL_STYLE, 0);
SetWindowLongPtrA(
hwnd,
GWL_EXSTYLE,
(WS_EX_TOOLWINDOW & !WS_EX_APPWINDOW) as isize);
hwnd
}
unsafe fn callback(
hwnd: HWND,
umsg: UINT,
wparam: WPARAM,
lparam: LPARAM,
) -> LRESULT {
match umsg {
WM_CREATE => {
let data = (&mut {
let mut data: NOTIFYICONDATAA = zeroed();
data.uID = 1;
data.uFlags = NIF_ICON | NIF_MESSAGE | NIF_TIP;
// Associate the tray icon with this window.
data.hWnd = tm(hwnd);
// Set the message that Windows will send when our tray
// icon is interacted with.
data.uCallbackMessage = Tray::INTERACTION;
// Extract the programs icon for use in the tray.
ExtractIconExA(
env::current_exe()
.unwrap()
.to_str()
.unwrap()
.as_ptr() as *mut i8,
0,
null_mut(),
&mut data.hIcon as *mut HICON,
1);
// Instead of using a `*mut i8` like the other APIs,
// this one uses `[i8; 128]`, so we need to copy our
// `&'static str` to an `[i8; 128]` to pass it.
//
// Gotta love the consistency of Windows :^)
data.szTip = {
let mut sz_tip: [i8; 128] = [0; 128];
"Desktop Sharkie\0"
.bytes()
.zip(sz_tip.iter_mut())
.for_each(|(byte, ptr)| *ptr = tm(byte));
sz_tip
};
data
}) as *mut NOTIFYICONDATAA;
Shell_NotifyIconA(NIM_ADD, data);
},
WM_CLOSE => {
let data = (&mut {
let mut data: NOTIFYICONDATAA = zeroed();
data.uID = 1;
data.hWnd = tm(hwnd);
data
}) as *mut NOTIFYICONDATAA;
Shell_NotifyIconA(NIM_DELETE, data);
std::process::exit(0);
},
WM_COMMAND => match wparam as UINT {
Tray::BUTTON_ABOUT => {
todo!();
},
Tray::BUTTON_CLOSE_GURA => {
SendMessageA(hwnd, WM_CLOSE, 0, 0);
},
_ => (),
},
Tray::INTERACTION => match lparam as UINT {
WM_RBUTTONUP => {
ShowWindow(hwnd, SW_SHOW);
SetForegroundWindow(hwnd);
SetFocus(hwnd);
let hmenu = {
let hmenu = CreatePopupMenu();
/*InsertMenuA(
hmenu,
0,
MF_BYPOSITION | MF_STRING,
Tray::BUTTON_ABOUT as usize,
"About...\0".as_ptr() as *mut i8);*/
InsertMenuA(
hmenu,
1,
MF_BYPOSITION | MF_STRING,
Tray::BUTTON_CLOSE_GURA as usize,
"Close Goomba Roomba\0".as_ptr() as *mut i8);
hmenu
};
let cursor_pos = {
let mut cursor_pos: POINT = zeroed();
GetCursorPos(&mut cursor_pos as *mut POINT);
cursor_pos
};
TrackPopupMenu(
hmenu,
TPM_LEFTALIGN,
cursor_pos.x,
cursor_pos.y,
0,
hwnd,
null_mut());
DestroyMenu(hmenu);
ShowWindow(hwnd, SW_HIDE);
},
_ => (),
},
_ => return DefWindowProcA(hwnd, umsg, wparam, lparam),
}
(&0 as *const i32) as LRESULT
}
}
pub fn configure_tray() |
unsafe extern "system" fn __raw_callback__(
hwnd: HWND,
umsg: UINT,
wparam: WPARAM,
lparam: LPARAM,
) -> LRESULT {
Tray::callback(hwnd, umsg, wparam, lparam)
}
| {
unsafe {
let hinstance = Tray::hinstance();
Tray::hwnd(hinstance);
}
} |
0152.maximum-product-subarray.152.ts | function | (nums: number[]): number {
let result = nums[0];
let max = nums[0];
let min = nums[0];
for (let i = 1; i < nums.length; i++) {
const a = max * nums[i];
const b = min * nums[i];
result = Math.max(
a,
b,
nums[i],
result
);
max = Math.max(a, b, nums[i]);
min = Math.min(a, b, nums[i]);
}
return result;
}
| maxProduct |
primo.py | # -*- coding: utf-8 -*-
def run():
|
def is_prime(number):
if number < 2:
return False
elif number == 2:
return True
elif number > 2 and number % 2 == 0:
return False
else:
for i in range(3, number):
print(i)
if number % i == 0:
return False
return True
if __name__ == '__main__':
run() | number = int(raw_input('Ingresa un numero: '))
state = is_prime(number)
if state == True:
print('Es primo')
else:
print('No es primo') |
utils.js | /**
* 模块名:共通脚本
* 程序名: 通用工具函数
* Copyright(c) 2013-2015 liuhuisheng [ [email protected] ]
**/
var utils = {};
/**
* 格式化字符串
* 用法:
.formatString("{0}-{1}","a","b");
*/
utils.formatString = function () {
for (var i = 1; i < arguments.length; i++) {
var exp = new RegExp('\\{' + (i - 1) + '\\}', 'gm');
arguments[0] = arguments[0].replace(exp, arguments[i]);
}
return arguments[0];
};
/**
* 格式化时间显示方式
* 用法:format="yyyy-MM-dd hh:mm:ss";
*/
utils.formatDate = function (v, format) {
if (!v) return "";
var d = v;
if (typeof v === 'string') {
if (v.indexOf("/Date(") > -1)
d = new Date(parseInt(v.replace("/Date(", "").replace(")/", ""), 10));
else
d = new Date(Date.parse(v.replace(/-/g, "/").replace("T", " ").split(".")[0]));//.split(".")[0] 用来处理出现毫秒的情况,截取掉.xxx,否则会出错
}
var o = {
"M+": d.getMonth() + 1, //month
"d+": d.getDate(), //day
"h+": d.getHours(), //hour
"m+": d.getMinutes(), //minute
"s+": d.getSeconds(), //second
"q+": Math.floor((d.getMonth() + 3) / 3), //quarter
"S": d.getMilliseconds() //millisecond
};
if (/(y+)/.test(format)) {
format = format.replace(RegExp.$1, (d.getFullYear() + "").substr(4 - RegExp.$1.length));
}
for (var k in o) {
if (new RegExp("(" + k + ")").test(format)) {
format = format.replace(RegExp.$1, RegExp.$1.length == 1 ? o[k] : ("00" + o[k]).substr(("" + o[k]).length));
}
}
return format;
};
/**
* 格式化数字显示方式
* 用法
* formatNumber(12345.999,'#,##0.00');
* formatNumber(12345.999,'#,##0.##');
* formatNumber(123,'000000');
*/
utils.formatNumber = function (v, pattern) {
if (v == null)
return v;
var strarr = v ? v.toString().split('.') : ['0'];
var fmtarr = pattern ? pattern.split('.') : [''];
var retstr = '';
// 整数部分
var str = strarr[0];
var fmt = fmtarr[0];
var i = str.length - 1;
var comma = false;
for (var f = fmt.length - 1; f >= 0; f--) {
switch (fmt.substr(f, 1)) {
case '#':
if (i >= 0) retstr = str.substr(i--, 1) + retstr;
break;
case '0':
if (i >= 0) retstr = str.substr(i--, 1) + retstr;
else retstr = '0' + retstr;
break;
case ',':
comma = true;
retstr = ',' + retstr;
break;
}
}
if (i >= 0) {
if (comma) {
var l = str.length;
for (; i >= 0; i--) {
retstr = str.substr(i, 1) + retstr;
if (i > 0 && ((l - i) % 3) == 0) retstr = ',' + retstr;
}
}
else retstr = str.substr(0, i + 1) + retstr;
}
retstr = retstr + '.';
// 处理小数部分
str = strarr.length > 1 ? strarr[1] : '';
fmt = fmtarr.length > 1 ? fmtarr[1] : '';
i = 0; | case '#':
if (i < str.length) retstr += str.substr(i++, 1);
break;
case '0':
if (i < str.length) retstr += str.substr(i++, 1);
else retstr += '0';
break;
}
}
return retstr.replace(/^,+/, '').replace(/\.$/, '');
};
/**
* json格式转树状结构
* @param {json} json数据
* @param {String} id的字符串
* @param {String} 父id的字符串
* @param {String} children的字符串
* @return {Array} 数组
*/
utils.toTreeData = function (a, idStr, pidStr, childrenStr) {
var r = [], hash = {},len = (a||[]).length;
for (var i=0; i < len; i++) {
hash[a[i][idStr]] = a[i];
}
for (var j=0; j < len; j++) {
var aVal = a[j], hashVP = hash[aVal[pidStr]];
if (hashVP) {
!hashVP[childrenStr] && (hashVP[childrenStr] = []);
hashVP[childrenStr].push(aVal);
} else {
r.push(aVal);
}
}
return r;
};
utils.eachTreeRow = function(treeData,eachHandler) {
for (var i in treeData) {
if (eachHandler(treeData[i]) == false) break;
if (treeData[i].children)
utils.eachTreeRow(treeData[i].children, eachHandler);
}
};
utils.isInChild = function (treeData,pid,id) {
var isChild = false;
utils.eachTreeRow(treeData, function (curNode) {
if (curNode.id == pid) {
utils.eachTreeRow([curNode], function (row) {
if (row.id == id) {
isChild = true;
return false;
}
});
return false;
}
});
return isChild;
};
utils.fnValueToText = function (list, value, text) {
var map = {};
for (var i in list) {
map[list[i][value || 'value']] = list[i][text || 'text'];
}
var fnConvert = function (v, r) {
return map[v];
};
return fnConvert;
};
utils.compareObject = function (v1, v2) {
var countProps = function (obj) {
var count = 0;
for (k in obj) if (obj.hasOwnProperty(k)) count++;
return count;
};
if (typeof (v1) !== typeof (v2)) {
return false;
}
if (typeof (v1) === "function") {
return v1.toString() === v2.toString();
}
if (v1 instanceof Object && v2 instanceof Object) {
if (countProps(v1) !== countProps(v2)) {
return false;
}
var r = true;
for (k in v1) {
r = utils.compareObject(v1[k], v2[k]);
if (!r) {
return false;
}
}
return true;
} else {
return v1 === v2;
}
};
utils.minusArray = function (arr1, arr2) {
var arr = [];
for (var i in arr1) {
var b = true;
for (var j in arr2) {
if (utils.compareObject(arr1[i],arr2[j])) {
b = false;
break;
}
}
if (b) {
arr.push(arr1[i]);
}
}
return arr;
};
utils.diffrence = function (obj1, obj2, reserve,ignore) {
var obj = {}, reserve = reserve || [], ignore = ignore || [], reserveMap = {}, ignoreMap = {};
for (var i in reserve)
reserveMap[reserve[i]] = true;
for (var i in ignore)
ignoreMap[ignore[i]] = true;
for (var k in obj1) {
if (!ignoreMap[k] && (obj1[k] != obj2[k] || reserveMap[k])) {
obj[k] = obj1[k];
}
}
return obj;
};
utils.filterProperties = function (obj, props, ignore) {
var ret;
if (obj instanceof Array || Object.prototype.toString.call(obj) === "[object Array]") {
ret = [];
for (var k in obj) {
ret.push(utils.filterProperties(obj[k], props, ignore));
}
}
else if (typeof obj === 'object') {
ret = {};
if (ignore) {
var map = {};
for (var k in props)
map[props[k]] = true;
for (var i in obj) {
if (!map[i]) ret[i] = obj[i];
}
}
else {
for (var i in props) {
var arr = props[i].split(" as ");
ret[arr[1] || arr[0]] = obj[arr[0]];
}
}
}
else {
ret = obj;
}
return ret;
};
utils.copyProperty = function (obj, sourcePropertyName, newPropertyName, overWrite) {
if (obj instanceof Array || Object.prototype.toString.call(obj) === "[object Array]") {
for (var k in obj)
utils.copyProperty(obj[k], sourcePropertyName, newPropertyName);
}
else if (typeof obj === 'object') {
if (sourcePropertyName instanceof Array || Object.prototype.toString.call(sourcePropertyName) === "[object Array]") {
for (var i in sourcePropertyName) {
utils.copyProperty(obj, sourcePropertyName[i], newPropertyName[i]);
}
}
else if (typeof sourcePropertyName === 'string') {
if ((obj[newPropertyName] && overWrite) || (!obj[newPropertyName]))
obj[newPropertyName] = obj[sourcePropertyName];
}
}
return obj;
};
utils.clearIframe = function (context) {
var frame = $('iframe', context).add(parent.$('iframe', context));
if (frame.length > 0) {
frame[0].contentWindow.document.write('');
frame[0].contentWindow.close();
frame.remove();
if ($.browser.msie) {
CollectGarbage();
}
}
};
utils.getThisIframe = function () {
if (!parent) return null;
var iframes = parent.document.getElementsByTagName('iframe');
if (iframes.length == 0) return null;
for (var i = 0; i < iframes.length; ++i) {
var iframe = iframes[i];
if (iframe.contentWindow === self) {
return iframe;
}
}
return null;
}
utils.functionComment = function(fn){
return fn.toString().replace(/^.*\r?\n?.*\/\*|\*\/([.\r\n]*).+?$/gm,'');
};
utils.uuid = (function () { var a = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz".split(""); return function (b, f) { var h = a, e = [], d = Math.random; f = f || h.length; if (b) { for (var c = 0; c < b; c++) { e[c] = h[0 | d() * f]; } } else { var g; e[8] = e[13] = e[18] = e[23] = "-"; e[14] = "4"; for (var c = 0; c < 36; c++) { if (!e[c]) { g = 0 | d() * 16; e[c] = h[(c == 19) ? (g & 3) | 8 : g & 15]; } } } return e.join("").toLowerCase(); }; })();
utils.getRequest = function (name, url) {
var url = url|| window.location.href;
var theRequest = new Object();
if (url.indexOf("?") != -1) {
var str = url.split("?")[1];
strs = str.split("&");
for (var i = 0; i < strs.length; i++) {
theRequest[strs[i].split("=")[0]] = unescape(strs[i].split("=")[1]);
}
}
return theRequest[name];
}; | for (var f = 0; f < fmt.length; f++) {
switch (fmt.substr(f, 1)) { |
currency-mask.directive.ts | import { AfterViewInit, Directive, DoCheck, ElementRef, forwardRef, HostListener, Inject, Input, KeyValueDiffer, KeyValueDiffers, OnInit, Optional } from "@angular/core";
import { AbstractControl, ControlValueAccessor, NG_VALIDATORS, NG_VALUE_ACCESSOR, Validator } from "@angular/forms";
import { CurrencyMaskConfig, CURRENCY_MASK_CONFIG } from "./currency-mask.config";
import { InputHandler } from "./input.handler";
export const CURRENCYMASKDIRECTIVE_VALUE_ACCESSOR: any = {
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => CurrencyMaskDirective),
multi: true
};
@Directive({
selector: "[currencyMask]",
providers: [
CURRENCYMASKDIRECTIVE_VALUE_ACCESSOR,
{ provide: NG_VALIDATORS, useExisting: CurrencyMaskDirective, multi: true }
]
})
export class CurrencyMaskDirective implements AfterViewInit, ControlValueAccessor, DoCheck, OnInit, Validator {
@Input() max: number;
@Input() min: number;
@Input() options: any = {};
inputHandler: InputHandler;
keyValueDiffer: KeyValueDiffer<any, any>;
optionsTemplate = {
align: "right",
allowNegative: true,
decimal: ".",
precision: 2,
prefix: "$ ",
suffix: "",
thousands: ","
};
constructor(@Optional() @Inject(CURRENCY_MASK_CONFIG) private currencyMaskConfig: CurrencyMaskConfig, private elementRef: ElementRef, private keyValueDiffers: KeyValueDiffers) { | this.keyValueDiffer = keyValueDiffers.find({}).create();
}
ngAfterViewInit() {
this.elementRef.nativeElement.style.textAlign = this.options.align ? this.options.align : this.optionsTemplate.align;
}
ngDoCheck() {
if (this.keyValueDiffer.diff(this.options)) {
this.elementRef.nativeElement.style.textAlign = this.options.align ? this.options.align : this.optionsTemplate.align;
this.inputHandler.updateOptions((<any>Object).assign({}, this.optionsTemplate, this.options));
}
}
ngOnInit() {
this.inputHandler = new InputHandler(this.elementRef.nativeElement, (<any>Object).assign({}, this.optionsTemplate, this.options));
}
@HostListener("blur", ["$event"])
handleBlur(event: any) {
this.inputHandler.getOnModelTouched().apply(event);
}
@HostListener("click", ["$event"])
handleClick(event: any) {
this.inputHandler.handleClick(event, this.isChromeAndroid());
}
@HostListener("cut", ["$event"])
handleCut(event: any) {
if (!this.isChromeAndroid()) {
this.inputHandler.handleCut(event);
}
}
@HostListener("input", ["$event"])
handleInput(event: any) {
if (this.isChromeAndroid()) {
this.inputHandler.handleInput(event);
}
}
@HostListener("keydown", ["$event"])
handleKeydown(event: any) {
if (!this.isChromeAndroid()) {
this.inputHandler.handleKeydown(event);
}
}
@HostListener("keypress", ["$event"])
handleKeypress(event: any) {
if (!this.isChromeAndroid()) {
this.inputHandler.handleKeypress(event);
}
}
@HostListener("keyup", ["$event"])
handleKeyup(event: any) {
if (!this.isChromeAndroid()) {
this.inputHandler.handleKeyup(event);
}
}
@HostListener("paste", ["$event"])
handlePaste(event: any) {
if (!this.isChromeAndroid()) {
this.inputHandler.handlePaste(event);
}
}
isChromeAndroid(): boolean {
return /chrome/i.test(navigator.userAgent) && /android/i.test(navigator.userAgent);
}
registerOnChange(callbackFunction: Function): void {
this.inputHandler.setOnModelChange(callbackFunction);
}
registerOnTouched(callbackFunction: Function): void {
this.inputHandler.setOnModelTouched(callbackFunction);
}
setDisabledState(value: boolean): void {
this.elementRef.nativeElement.disabled = value;
}
validate(abstractControl: AbstractControl): { [key: string]: any; } {
let result: any = {};
if (abstractControl.value > this.max) {
result.max = true;
}
if (abstractControl.value < this.min) {
result.min = true;
}
return result != {} ? result : null;
}
writeValue(value: number): void {
this.inputHandler.setValue(value);
}
} | if (currencyMaskConfig) {
this.optionsTemplate = currencyMaskConfig;
}
|
reader_test.go | package sparkey
import (
"testing"
)
var readerTestData = struct {
ct CompressionType
blockSize int
key string
val string
}{
Snappy,
16,
"abc",
"12345",
}
func TestReader(t *testing.T) | {
setup()
defer teardown()
ct := Snappy
blockSize := 16
key := "abc"
val := "12345"
// write a log file
lw, _ := CreateLog(testFilename, ct, blockSize)
lw.Put(key, val)
lw.Close()
// write a hash file
WriteHash(testHashFilename, testFilename, Auto)
// create a reader for that file
re, err := NewReader(testFilename, testHashFilename)
if err != nil {
t.Fatalf("creating new Reader: %v", err)
}
// verify headers and stuff
if re.LogFilename != testFilename {
t.Errorf("LogFilename is %v, want %v", re.LogFilename, testFilename)
}
if re.Compression != ct {
t.Errorf("Compression is %v, want %v", re.Compression, ct)
}
if re.CompressionBlockSize != blockSize {
t.Errorf("CompressionBlockSize is %v, want %v", re.CompressionBlockSize, blockSize)
}
if re.MaxKeyLen != uint64(len(key)) {
t.Errorf("MaxKeyLen is %v, want %v", re.MaxKeyLen, len(key))
}
if re.MaxValueLen != uint64(len(val)) {
t.Errorf("MaxValueLen is %v, want %v", re.MaxValueLen, len(val))
}
// make an iterator to get the data and verify state
it, err := re.Iter()
if err != nil {
t.Fatalf("creating iterator: %v", err)
}
if is, _ := it.State(); is != New {
t.Errorf("iterator state is %v, want %v", is, New)
}
// iterate to the first record and verify iterator state
if err := it.Next(); err != nil {
t.Fatalf("Iterator.Next: %v", err)
}
if is, _ := it.State(); is != Active {
t.Errorf("iterator state is %v, want %v", is, Active)
}
// verify correct data is read
if it.Key != key {
t.Errorf("read key %v, want %v", it.Key, key)
}
if it.Value != val {
t.Errorf("read value %v, want %v", it.Value, val)
}
// iterate to end and verify state
if err := it.Next(); err != nil {
t.Fatalf("Iterator.Next: %v", err)
}
if is, _ := it.State(); is != Closed {
t.Errorf("iterator state is %v, want %v", is, Closed)
}
} |
|
mintPayments.js | // @ts-check
import { Far } from '@endo/marshal';
import { AmountMath } from '@agoric/ertp';
import '../../exported.js';
/**
* This is a very simple contract that creates a new issuer and mints payments
* from it, in order to give an example of how that can be done. This contract
* sends new tokens to anyone who has an invitation.
*
* The expectation is that most contracts that want to do something similar
* would use the ability to mint new payments internally rather than sharing
* that ability widely as this one does.
*
* To pay others in tokens, the creator of the instance can make
* invitations for them, which when used to make an offer, will payout
* the specified amount of tokens.
*
* @type {ContractStartFn<{getTokenIssuer: unknown}, {makeInvitation: unknown, getTokenIssuer: unknown}>} | */
const start = async zcf => {
// Create the internal token mint for a fungible digital asset. Note
// that 'Tokens' is both the keyword and the allegedName.
const zcfMint = await zcf.makeZCFMint('Tokens');
// AWAIT
// Now that ZCF has saved the issuer, brand, and local AmountMath, they
// can be accessed synchronously.
const { issuer, brand } = zcfMint.getIssuerRecord();
const mintPayment = value => seat => {
const amount = AmountMath.make(brand, value);
// Synchronously mint and allocate amount to seat.
zcfMint.mintGains(harden({ Token: amount }), seat);
// Exit the seat so that the user gets a payout.
seat.exit();
// Since the user is getting the payout through Zoe, we can
// return anything here. Let's return some helpful instructions.
return 'Offer completed. You should receive a payment from Zoe';
};
const creatorFacet = Far('creatorFacet', {
// The creator of the instance can send invitations to anyone
// they wish to.
makeInvitation: (value = 1000n) =>
zcf.makeInvitation(mintPayment(value), 'mint a payment'),
getTokenIssuer: () => issuer,
});
const publicFacet = Far('publicFacet', {
// Make the token issuer public. Note that only the mint can
// make new digital assets. The issuer is ok to make public.
getTokenIssuer: () => issuer,
});
// Return the creatorFacet to the creator, so they can make
// invitations for others to get payments of tokens. Publish the
// publicFacet.
return harden({ creatorFacet, publicFacet });
};
harden(start);
export { start }; | |
contract_with_abi.rs | //! Test suite for the Web and headless browsers.
#![cfg(target_arch = "wasm32")]
use wasm_bindgen::prelude::*;
use wasm_bindgen_test::*;
use ethers::{
contract::abigen,
prelude::{ContractFactory, LocalWallet, Provider, SignerMiddleware},
providers::Ws,
};
use std::sync::Arc;
wasm_bindgen_test_configure!(run_in_browser);
// Generate the type-safe contract bindings by providing the ABI
// definition in human readable format
abigen!(
SimpleContract,
"../contract_abi.json",
event_derives(serde::Deserialize, serde::Serialize)
);
#[wasm_bindgen_test]
async fn connect_and_deploy() | {
console_log!("starting");
// a private key of a launched ganache `yarn ganache`
let wallet: LocalWallet = ethers_wasm::KEY.parse().unwrap();
let provider = Provider::new(Ws::connect("ws://localhost:8545").await.unwrap());
let client = Arc::new(SignerMiddleware::new(provider, wallet));
let bytecode = hex::decode(ethers_wasm::utils::SIMPLECONTRACT_BIN).unwrap();
let factory = ContractFactory::new(SIMPLECONTRACT_ABI.clone(), bytecode.into(), client.clone());
let contract = factory
.deploy("initial value".to_string())
.unwrap()
.send()
.await
.unwrap();
let addr = contract.address();
console_log!("deployed to {}", addr);
let contract = SimpleContract::new(addr, client.clone());
let _receipt = contract
.set_value("hi".to_owned())
.send()
.await
.unwrap()
.await
.unwrap();
// get all events
let logs = contract
.value_changed_filter()
.from_block(0u64)
.query()
.await
.unwrap();
let value = contract.get_value().call().await.unwrap();
console_log!(
"Value: {}. Logs: {:?}",
value,
JsValue::from_serde(&logs).unwrap()
);
} |
|
comdlg32.go | // Copyright 2010 The win Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package win
import (
"syscall"
"unsafe"
)
// Common error codes
const (
CDERR_DIALOGFAILURE = 0xFFFF
CDERR_FINDRESFAILURE = 0x0006
CDERR_INITIALIZATION = 0x0002
CDERR_LOADRESFAILURE = 0x0007
CDERR_LOADSTRFAILURE = 0x0005
CDERR_LOCKRESFAILURE = 0x0008
CDERR_MEMALLOCFAILURE = 0x0009
CDERR_MEMLOCKFAILURE = 0x000A
CDERR_NOHINSTANCE = 0x0004
CDERR_NOHOOK = 0x000B
CDERR_NOTEMPLATE = 0x0003
CDERR_REGISTERMSGFAIL = 0x000C
CDERR_STRUCTSIZE = 0x0001
)
// PrintDlg specific error codes
const (
PDERR_CREATEICFAILURE = 0x100A
PDERR_DEFAULTDIFFERENT = 0x100C
PDERR_DNDMMISMATCH = 0x1009
PDERR_GETDEVMODEFAIL = 0x1005
PDERR_INITFAILURE = 0x1006
PDERR_LOADDRVFAILURE = 0x1004
PDERR_NODEFAULTPRN = 0x1008
PDERR_NODEVICES = 0x1007
PDERR_PARSEFAILURE = 0x1002
PDERR_PRINTERNOTFOUND = 0x100B
PDERR_RETDEFFAILURE = 0x1003
PDERR_SETUPFAILURE = 0x1001
)
// ChooseFont specific error codes
const (
CFERR_MAXLESSTHANMIN = 0x2002
CFERR_NOFONTS = 0x2001
)
// GetOpenFileName and GetSaveFileName specific error codes
const (
FNERR_BUFFERTOOSMALL = 0x3003
FNERR_INVALIDFILENAME = 0x3002
FNERR_SUBCLASSFAILURE = 0x3001
)
// FindText and ReplaceText specific error codes
const (
FRERR_BUFFERLENGTHZERO = 0x4001
)
// GetOpenFileName and GetSaveFileName flags
const (
OFN_ALLOWMULTISELECT = 0x00000200
OFN_CREATEPROMPT = 0x00002000
OFN_DONTADDTORECENT = 0x02000000
OFN_ENABLEHOOK = 0x00000020
OFN_ENABLEINCLUDENOTIFY = 0x00400000
OFN_ENABLESIZING = 0x00800000
OFN_ENABLETEMPLATE = 0x00000040
OFN_ENABLETEMPLATEHANDLE = 0x00000080
OFN_EXPLORER = 0x00080000
OFN_EXTENSIONDIFFERENT = 0x00000400
OFN_FILEMUSTEXIST = 0x00001000
OFN_FORCESHOWHIDDEN = 0x10000000
OFN_HIDEREADONLY = 0x00000004
OFN_LONGNAMES = 0x00200000
OFN_NOCHANGEDIR = 0x00000008
OFN_NODEREFERENCELINKS = 0x00100000
OFN_NOLONGNAMES = 0x00040000
OFN_NONETWORKBUTTON = 0x00020000
OFN_NOREADONLYRETURN = 0x00008000
OFN_NOTESTFILECREATE = 0x00010000
OFN_NOVALIDATE = 0x00000100
OFN_OVERWRITEPROMPT = 0x00000002
OFN_PATHMUSTEXIST = 0x00000800
OFN_READONLY = 0x00000001
OFN_SHAREAWARE = 0x00004000
OFN_SHOWHELP = 0x00000010
)
// GetOpenFileName and GetSaveFileName extended flags
const (
OFN_EX_NOPLACESBAR = 0x00000001
)
// PrintDlg[Ex] result actions
const (
PD_RESULT_APPLY = 2
PD_RESULT_CANCEL = 0
PD_RESULT_PRINT = 1
)
// PrintDlg[Ex] flags
const (
PD_ALLPAGES = 0x00000000
PD_COLLATE = 0x00000010
PD_CURRENTPAGE = 0x00400000
PD_DISABLEPRINTTOFILE = 0x00080000
PD_ENABLEPRINTTEMPLATE = 0x00004000
PD_ENABLEPRINTTEMPLATEHANDLE = 0x00010000
PD_EXCLUSIONFLAGS = 0x01000000 | PD_NOWARNING = 0x00000080
PD_PAGENUMS = 0x00000002
PD_PRINTTOFILE = 0x00000020
PD_RETURNDC = 0x00000100
PD_RETURNDEFAULT = 0x00000400
PD_RETURNIC = 0x00000200
PD_SELECTION = 0x00000001
PD_USEDEVMODECOPIES = 0x00040000
PD_USEDEVMODECOPIESANDCOLLATE = 0x00040000
PD_USELARGETEMPLATE = 0x10000000
)
// PrintDlgEx exclusion flags
const (
PD_EXCL_COPIESANDCOLLATE = DM_COPIES | DM_COLLATE
)
const START_PAGE_GENERAL = 0xffffffff
type (
LPOFNHOOKPROC uintptr
HPROPSHEETPAGE HANDLE
LPUNKNOWN uintptr
)
type OPENFILENAME struct {
LStructSize uint32
HwndOwner HWND
HInstance HINSTANCE
LpstrFilter *uint16
LpstrCustomFilter *uint16
NMaxCustFilter uint32
NFilterIndex uint32
LpstrFile *uint16
NMaxFile uint32
LpstrFileTitle *uint16
NMaxFileTitle uint32
LpstrInitialDir *uint16
LpstrTitle *uint16
Flags uint32
NFileOffset uint16
NFileExtension uint16
LpstrDefExt *uint16
LCustData uintptr
LpfnHook LPOFNHOOKPROC
LpTemplateName *uint16
PvReserved unsafe.Pointer
DwReserved uint32
FlagsEx uint32
}
type PRINTPAGERANGE struct {
NFromPage uint32
NToPage uint32
}
type DEVNAMES struct {
WDriverOffset uint16
WDeviceOffset uint16
WOutputOffset uint16
WDefault uint16
}
type PRINTDLGEX struct {
LStructSize uint32
HwndOwner HWND
HDevMode HGLOBAL
HDevNames HGLOBAL
HDC HDC
Flags uint32
Flags2 uint32
ExclusionFlags uint32
NPageRanges uint32
NMaxPageRanges uint32
LpPageRanges *PRINTPAGERANGE
NMinPage uint32
NMaxPage uint32
NCopies uint32
HInstance HINSTANCE
LpPrintTemplateName *uint16
LpCallback LPUNKNOWN
NPropertyPages uint32
LphPropertyPages *HPROPSHEETPAGE
NStartPage uint32
DwResultAction uint32
}
var (
// Library
libcomdlg32 uintptr
// Functions
commDlgExtendedError uintptr
getOpenFileName uintptr
getSaveFileName uintptr
printDlgEx uintptr
)
func init() {
// Library
libcomdlg32 = MustLoadLibrary("comdlg32.dll")
// Functions
commDlgExtendedError = MustGetProcAddress(libcomdlg32, "CommDlgExtendedError")
getOpenFileName = MustGetProcAddress(libcomdlg32, "GetOpenFileNameW")
getSaveFileName = MustGetProcAddress(libcomdlg32, "GetSaveFileNameW")
printDlgEx = MustGetProcAddress(libcomdlg32, "PrintDlgExW")
}
func CommDlgExtendedError() uint32 {
ret, _, _ := syscall.Syscall(commDlgExtendedError, 0,
0,
0,
0)
return uint32(ret)
}
func GetOpenFileName(lpofn *OPENFILENAME) bool {
ret, _, _ := syscall.Syscall(getOpenFileName, 1,
uintptr(unsafe.Pointer(lpofn)),
0,
0)
return ret != 0
}
func GetSaveFileName(lpofn *OPENFILENAME) bool {
ret, _, _ := syscall.Syscall(getSaveFileName, 1,
uintptr(unsafe.Pointer(lpofn)),
0,
0)
return ret != 0
}
func PrintDlgEx(lppd *PRINTDLGEX) HRESULT {
ret, _, _ := syscall.Syscall(printDlgEx, 1,
uintptr(unsafe.Pointer(lppd)),
0,
0)
return HRESULT(ret)
} | PD_HIDEPRINTTOFILE = 0x00100000
PD_NOCURRENTPAGE = 0x00800000
PD_NOPAGENUMS = 0x00000008
PD_NOSELECTION = 0x00000004 |
Zad6.py | """
Sprawdz czy istnieje permutacja danego slowa bedaca palindromem.
"""
# Wersja 1
def znajdz_permutacje(napis, start, koniec, wynik=[]):
if start >= koniec:
if "".join(napis) not in wynik:
wynik.append("".join(napis))
else:
for i in range(start, koniec):
napis[start], napis[i] = napis[i], napis[start]
znajdz_permutacje(napis, start + 1, koniec, wynik)
napis[start], napis[i] = napis[i], napis[start]
return wynik
def czy_palindrom(slowo):
for i in range(len(slowo) // 2):
if slowo[i] != slowo[-i - 1]:
|
return True
def czy_istnieje_permutacja_bedaca_palindromem_v1(slowo):
permutacje = znajdz_permutacje(list(slowo), 0, len(slowo))
wynik = []
for p in permutacje:
if czy_palindrom(p):
wynik.append(p)
return wynik
# testy poprawnosci
slowo = "adamm"
wynik = ["madam", "amdma"]
assert sorted(czy_istnieje_permutacja_bedaca_palindromem_v1(slowo)) == sorted(wynik)
| return False |
client.py | import hashlib
import random
import lycanthropy.sql.interface
import lycanthropy.crypto
import jwt
def decodeToken(token,config):
rawData = jwt.decode(
token,
config['secret'],
algorithms=['HS256']
)
return rawData
def monitoringToken(user,config,remote,identity):
userData = lycanthropy.sql.interface.filterUser({'username':user})[0]
token = jwt.encode({
'user':user,
'_wolfmon':identity,
'campaigns':userData['campaigns'],
'roles':userData['roles'],
'_host':remote
},
config['secret'],
algorithm='HS256'
).decode('utf-8')
return token
def apiToken(user,config,remote):
userData = lycanthropy.sql.interface.filterUser({'username':user})[0]
token = jwt.encode({
'user':user,
'campaigns':userData['campaigns'],
'roles':userData['roles'],
'_host':remote
},
config['secret'],
algorithm='HS256'
).decode('utf-8')
return token
def getCampaignAccess(user,config,token,remote,wolfmon):
decoded = decodeToken(token,config)
if decoded['user'] == user and decoded['_host'] == remote and wolfmon == decoded['_wolfmon']:
userData = lycanthropy.sql.interface.filterUser({'username': user})[0]
return userData['campaigns'].split(',')
else:
return 'error'
def verifyToken(user,config,token,remote):
decoded = decodeToken(token,config)
if decoded['user'] == user and decoded['_host'] == remote:
return True
else:
return False
def verifyAuth(user,password):
userData = lycanthropy.sql.interface.filterUser({'username':user})[0]
print(userData)
if userData == []:
return False
else:
reconstruct = mkHash(password,userData['password'].split('.')[0])
print(reconstruct)
if reconstruct == userData['password']:
return True
else:
return False
def mkHash(password,salt):
passHmac = hashlib.pbkdf2_hmac('sha256',password.encode('utf-8'),salt.encode('utf-8'),100000)
return '{}.{}'.format(salt,passHmac.hex())
def | ():
alpha = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
strOut = []
for i in range(32):
strOut.append(
alpha[random.randint(
0,
len(alpha)-1
)]
)
return "".join(strOut)
def mkUser(user,password):
pwdSalt = mkSalt()
passObj = mkHash(password,pwdSalt)
return passObj
| mkSalt |
services_compliance_usa2p.go | /*
* Twilio - Messaging
*
* This is the public Twilio REST API.
*
* API version: 1.22.0
* Contact: [email protected]
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package openapi
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"github.com/twilio/twilio-go/client"
)
// Optional parameters for the method 'CreateUsAppToPerson'
type CreateUsAppToPersonParams struct {
// A2P Brand Registration SID
BrandRegistrationSid *string `json:"BrandRegistrationSid,omitempty"`
// A short description of what this SMS campaign does.
Description *string `json:"Description,omitempty"`
// Indicates that this SMS campaign will send messages that contain links.
HasEmbeddedLinks *bool `json:"HasEmbeddedLinks,omitempty"`
// Indicates that this SMS campaign will send messages that contain phone numbers.
HasEmbeddedPhone *bool `json:"HasEmbeddedPhone,omitempty"`
// Message samples, at least 2 and up to 5 sample messages, <=1024 chars each.
MessageSamples *[]string `json:"MessageSamples,omitempty"`
// A2P Campaign Use Case. Examples: [ 2FA, EMERGENCY, MARKETING..]
UsAppToPersonUsecase *string `json:"UsAppToPersonUsecase,omitempty"`
}
func (params *CreateUsAppToPersonParams) SetBrandRegistrationSid(BrandRegistrationSid string) *CreateUsAppToPersonParams {
params.BrandRegistrationSid = &BrandRegistrationSid
return params
}
func (params *CreateUsAppToPersonParams) SetDescription(Description string) *CreateUsAppToPersonParams {
params.Description = &Description
return params
}
func (params *CreateUsAppToPersonParams) SetHasEmbeddedLinks(HasEmbeddedLinks bool) *CreateUsAppToPersonParams {
params.HasEmbeddedLinks = &HasEmbeddedLinks
return params
}
func (params *CreateUsAppToPersonParams) SetHasEmbeddedPhone(HasEmbeddedPhone bool) *CreateUsAppToPersonParams {
params.HasEmbeddedPhone = &HasEmbeddedPhone
return params
}
func (params *CreateUsAppToPersonParams) SetMessageSamples(MessageSamples []string) *CreateUsAppToPersonParams {
params.MessageSamples = &MessageSamples
return params
}
func (params *CreateUsAppToPersonParams) SetUsAppToPersonUsecase(UsAppToPersonUsecase string) *CreateUsAppToPersonParams {
params.UsAppToPersonUsecase = &UsAppToPersonUsecase
return params
}
func (c *ApiService) CreateUsAppToPerson(MessagingServiceSid string, params *CreateUsAppToPersonParams) (*MessagingV1UsAppToPerson, error) {
path := "/v1/Services/{MessagingServiceSid}/Compliance/Usa2p"
path = strings.Replace(path, "{"+"MessagingServiceSid"+"}", MessagingServiceSid, -1)
data := url.Values{}
headers := make(map[string]interface{})
if params != nil && params.BrandRegistrationSid != nil {
data.Set("BrandRegistrationSid", *params.BrandRegistrationSid)
}
if params != nil && params.Description != nil {
data.Set("Description", *params.Description)
}
if params != nil && params.HasEmbeddedLinks != nil {
data.Set("HasEmbeddedLinks", fmt.Sprint(*params.HasEmbeddedLinks))
}
if params != nil && params.HasEmbeddedPhone != nil {
data.Set("HasEmbeddedPhone", fmt.Sprint(*params.HasEmbeddedPhone))
}
if params != nil && params.MessageSamples != nil {
for _, item := range *params.MessageSamples {
data.Add("MessageSamples", item)
}
}
if params != nil && params.UsAppToPersonUsecase != nil {
data.Set("UsAppToPersonUsecase", *params.UsAppToPersonUsecase)
}
resp, err := c.requestHandler.Post(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &MessagingV1UsAppToPerson{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
func (c *ApiService) DeleteUsAppToPerson(MessagingServiceSid string, Sid string) error {
path := "/v1/Services/{MessagingServiceSid}/Compliance/Usa2p/{Sid}"
path = strings.Replace(path, "{"+"MessagingServiceSid"+"}", MessagingServiceSid, -1)
path = strings.Replace(path, "{"+"Sid"+"}", Sid, -1)
data := url.Values{}
headers := make(map[string]interface{})
resp, err := c.requestHandler.Delete(c.baseURL+path, data, headers)
if err != nil {
return err
}
defer resp.Body.Close()
return nil
}
func (c *ApiService) FetchUsAppToPerson(MessagingServiceSid string, Sid string) (*MessagingV1UsAppToPerson, error) {
path := "/v1/Services/{MessagingServiceSid}/Compliance/Usa2p/{Sid}"
path = strings.Replace(path, "{"+"MessagingServiceSid"+"}", MessagingServiceSid, -1)
path = strings.Replace(path, "{"+"Sid"+"}", Sid, -1)
data := url.Values{}
headers := make(map[string]interface{})
resp, err := c.requestHandler.Get(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &MessagingV1UsAppToPerson{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
// Optional parameters for the method 'ListUsAppToPerson'
type ListUsAppToPersonParams struct {
// How many resources to return in each list page. The default is 50, and the maximum is 1000.
PageSize *int `json:"PageSize,omitempty"`
// Max number of records to return.
Limit *int `json:"limit,omitempty"`
}
func (params *ListUsAppToPersonParams) SetPageSize(PageSize int) *ListUsAppToPersonParams {
params.PageSize = &PageSize
return params
}
func (params *ListUsAppToPersonParams) SetLimit(Limit int) *ListUsAppToPersonParams {
params.Limit = &Limit
return params
}
// Retrieve a single page of UsAppToPerson records from the API. Request is executed immediately.
func (c *ApiService) PageUsAppToPerson(MessagingServiceSid string, params *ListUsAppToPersonParams, pageToken, pageNumber string) (*ListUsAppToPersonResponse, error) { | path = strings.Replace(path, "{"+"MessagingServiceSid"+"}", MessagingServiceSid, -1)
data := url.Values{}
headers := make(map[string]interface{})
if params != nil && params.PageSize != nil {
data.Set("PageSize", fmt.Sprint(*params.PageSize))
}
if pageToken != "" {
data.Set("PageToken", pageToken)
}
if pageNumber != "" {
data.Set("Page", pageNumber)
}
resp, err := c.requestHandler.Get(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &ListUsAppToPersonResponse{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
// Lists UsAppToPerson records from the API as a list. Unlike stream, this operation is eager and loads 'limit' records into memory before returning.
func (c *ApiService) ListUsAppToPerson(MessagingServiceSid string, params *ListUsAppToPersonParams) ([]MessagingV1UsAppToPerson, error) {
if params == nil {
params = &ListUsAppToPersonParams{}
}
params.SetPageSize(client.ReadLimits(params.PageSize, params.Limit))
response, err := c.PageUsAppToPerson(MessagingServiceSid, params, "", "")
if err != nil {
return nil, err
}
curRecord := 0
var records []MessagingV1UsAppToPerson
for response != nil {
records = append(records, response.Compliance...)
var record interface{}
if record, err = client.GetNext(c.baseURL, response, &curRecord, params.Limit, c.getNextListUsAppToPersonResponse); record == nil || err != nil {
return records, err
}
response = record.(*ListUsAppToPersonResponse)
}
return records, err
}
// Streams UsAppToPerson records from the API as a channel stream. This operation lazily loads records as efficiently as possible until the limit is reached.
func (c *ApiService) StreamUsAppToPerson(MessagingServiceSid string, params *ListUsAppToPersonParams) (chan MessagingV1UsAppToPerson, error) {
if params == nil {
params = &ListUsAppToPersonParams{}
}
params.SetPageSize(client.ReadLimits(params.PageSize, params.Limit))
response, err := c.PageUsAppToPerson(MessagingServiceSid, params, "", "")
if err != nil {
return nil, err
}
curRecord := 0
//set buffer size of the channel to 1
channel := make(chan MessagingV1UsAppToPerson, 1)
go func() {
for response != nil {
for item := range response.Compliance {
channel <- response.Compliance[item]
}
var record interface{}
if record, err = client.GetNext(c.baseURL, response, &curRecord, params.Limit, c.getNextListUsAppToPersonResponse); record == nil || err != nil {
close(channel)
return
}
response = record.(*ListUsAppToPersonResponse)
}
close(channel)
}()
return channel, err
}
func (c *ApiService) getNextListUsAppToPersonResponse(nextPageUrl string) (interface{}, error) {
if nextPageUrl == "" {
return nil, nil
}
resp, err := c.requestHandler.Get(nextPageUrl, nil, nil)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &ListUsAppToPersonResponse{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, nil
} | path := "/v1/Services/{MessagingServiceSid}/Compliance/Usa2p"
|
player_inventory.go | package inventory
type PlayerInventory struct {
PlayerId string `json:"playerId"`
CommonWildcards int `json:"wcCommon"`
UncommonWildcards int `json:"wcUncommon"`
RareWildcards int `json:"wcRare"`
MythicWildcards int `json:"wcMythic"`
Gold int `json:"gold"`
Gems int `json:"gems"`
DraftTokens int `json:"draftTokens"` | Boosters string `json:"boosters"`
VanityItems struct {
Pets []struct {
Name string `json:"name"`
Mods []Mod `json:"mods"`
} `json:"pets"`
Avatars []struct {
Name string `json:"name"`
Mods []string `json:"mods"`
} `json:"avatars"`
CardBacks []struct {
Name string `json:"name"`
Mods []string `json:"mods"`
} `json:"cardBacks"`
} `json:"vanityItems"`
Vouchers string `json:"vouchers"`
VanitySelections struct {
AvatarSelection string `json:"avatarSelection"`
CardBackSelection string `json:"cardBackSelection"`
PetSelection string `json:"petSelection"`
PetModSelections []string `json:"petModSelections"`
} `json:"vanitySelections"`
}
type Mod struct {
Type string `json:"type"`
Value string `json:"value"`
} | SealedTokens int `json:"sealedTokens"`
WildcardTackPosition int `json:"wcTrackPosition"`
VaultProgress float64 `json:"vaultProgress"` |
send_license_usage.py | import posthoganalytics
import requests
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from ee.clickhouse.client import sync_execute
from ee.models.license import License
from posthog.models import User
def | ():
license = License.objects.first_valid()
if not license:
return
try:
date_from = (timezone.now() - relativedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
date_to = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
events_count = sync_execute(
"select count(1) from events where timestamp >= %(date_from)s and timestamp < %(date_to)s",
{"date_from": date_from, "date_to": date_to},
)[0][0]
response = requests.post(
"https://license.posthog.com/licenses/usage",
data={"date": date_from.strftime("%Y-%m-%d"), "key": license.key, "events_count": events_count,},
)
response.raise_for_status()
if not response.ok:
posthoganalytics.capture(
User.objects.first().distinct_id, # type: ignore
"send license usage data error",
{
"error": response.content,
"status_code": response.status_code,
"date": date_from.strftime("%Y-%m-%d"),
"events_count": events_count,
},
)
except Exception as err:
posthoganalytics.capture(
User.objects.first().distinct_id, # type: ignore
"send license usage data error",
{"error": str(err), "date": date_from.strftime("%Y-%m-%d")},
)
| send_license_usage |
test_migrations.py | # encoding: utf-8
# STANDARD LIB
from unittest import skipIf
# THIRD PARTY
from django.apps.registry import apps # Apps
from django.conf import settings
from django.db import connection, models
from django.db.migrations.state import ProjectState
from django.test import override_settings
from google.appengine.api import datastore
from google.appengine.runtime import DeadlineExceededError
# DJANGAE
from djangae.contrib import sleuth
from djangae.db.migrations import operations
from djangae.db.migrations.mapper_library import (
_get_range,
_mid_key,
_mid_string,
_next_string,
shard_query,
ShardedTaskMarker,
start_mapping,
)
from djangae.test import TestCase
# Workaround for https://code.djangoproject.com/ticket/28188
def return_a_string():
return "squirrel"
class TestModel(models.Model):
name = models.CharField(max_length=100)
class Meta:
app_label = "djangae"
class OtherModel(models.Model):
name = models.CharField(max_length=100)
class Meta:
app_label = "djangae"
class OtherAppModel(models.Model):
name = models.CharField(max_length=100)
class Meta:
app_label = "testapp"
class UniqueException(Exception):
""" An exception which we can explicity throw and catch. """
pass
def tickle_entity(entity):
entity['is_tickled'] = True
datastore.Put(entity)
def tickle_entity_volitle(entity):
""" Like `tickle_entity`, but raises DeadlineExceededError every 3rd call. """
call_count = getattr(tickle_entity_volitle, "call_count", 1)
tickle_entity_volitle.call_count = call_count + 1
if call_count % 3 == 0:
raise DeadlineExceededError()
else:
tickle_entity(entity)
def flush_task_markers():
""" Delete all ShardedTaskMarker objects from the DB.
Useful to call in setUp(), as Django doesn't wipe this kind because there's
no model for it.
"""
namespaces = set()
namespaces.add(settings.DATABASES['default'].get('NAMESPACE', ''))
namespaces.add(settings.DATABASES.get('ns1', {}).get('NAMESPACE', ''))
for namespace in namespaces:
query = datastore.Query(
ShardedTaskMarker.KIND,
namespace=namespace,
keys_only=True
).Run()
datastore.Delete([x for x in query])
class MigrationOperationTests(TestCase):
multi_db = True
def setUp(self):
# We need to clean out the migration task markers from the Datastore between each test, as
# the standard flush only cleans out models
|
def start_operation(self, operation, detonate=True):
# Make a from_state and a to_state to pass to the operation, these can just be the
# current state of the models
from_state = ProjectState.from_apps(apps)
to_state = from_state.clone()
schema_editor = connection.schema_editor()
app_label = TestModel._meta.app_label
# If we just start the operation then it will hang forever waiting for its mapper task to
# complete, so we won't even be able to call process_task_queues(). So to avoid that we
# detonate the _wait_until_task_finished method. Then tasks can be processed after that.
if detonate:
with sleuth.detonate(
"djangae.tests.test_migrations.operations.%s._wait_until_task_finished" % operation.__class__.__name__,
UniqueException
):
try:
operation.database_forwards(app_label, schema_editor, from_state, to_state)
except UniqueException:
pass
else:
operation.database_forwards(app_label, schema_editor, from_state, to_state)
def get_entities(self, model=TestModel, namespace=None):
namespace = namespace or settings.DATABASES['default'].get('NAMESPACE', '')
query = datastore.Query(
model._meta.db_table,
namespace=namespace,
)
return [x for x in query.Run()]
def test_run_operation_creates_and_updates_task_marker(self):
""" If we run one of our custom operations, then it should create the task marker in the DB
and defer a task, then set the marker to 'is_finished' when done.
"""
TestModel.objects.create()
operation = operations.AddFieldData(
"testmodel", "new_field", models.CharField(max_length=100, default="squirrel")
)
self.start_operation(operation)
# Now check that the task marker has been created.
# Usefully, calling database_forwards() on the operation will have caused it to set the
# `identifier` attribute on itself, meaning we can now just call _get_task_marker()
task_marker = datastore.Get(
[ShardedTaskMarker.get_key(operation.identifier, operation.namespace)]
)[0]
if task_marker is None:
self.fail("Migration operation did not create its task marker")
self.assertFalse(task_marker.get("is_finished"))
self.assertNumTasksEquals(1)
self.process_task_queues()
# Now check that the task marker has been marked as finished
task_marker = datastore.Get(
[ShardedTaskMarker.get_key(operation.identifier, operation.namespace)]
)[0]
self.assertTrue(task_marker["is_finished"])
self.assertNumTasksEquals(0)
def test_starting_operation_twice_does_not_trigger_task_twice(self):
""" If we run an operation, and then try to run it again before the task has finished
processing, then it should not trigger a second task.
"""
TestModel.objects.create()
operation = operations.AddFieldData(
"testmodel", "new_field", models.CharField(max_length=100, default="squirrel")
)
self.start_operation(operation)
task_marker = datastore.Get(
ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
)
self.assertFalse(task_marker["is_finished"])
# We expect there to be a task queued for processing the operation
self.assertNumTasksEquals(1)
# Now try to run it again
self.start_operation(operation)
# We expect there to still be the same number of tasks
self.assertNumTasksEquals(1)
def test_running_finished_operation_does_not_trigger_new_task(self):
""" If we re-trigger an operation which has already been run and finished, it should simply
return without starting a new task or updating the task marker.
"""
TestModel.objects.create()
operation = operations.AddFieldData(
"testmodel", "new_field", models.CharField(max_length=100, default="squirrel")
)
# Run the operation and check that it finishes
with sleuth.watch("djangae.db.migrations.operations.AddFieldData._start_task") as start:
self.start_operation(operation)
self.assertTrue(start.called)
task_marker = datastore.Get(
ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
)
self.assertFalse(task_marker["is_finished"])
self.assertNumTasksEquals(1)
self.process_task_queues()
task_marker = datastore.Get(
ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
)
self.assertTrue(task_marker["is_finished"])
# Run the operation again. It should see that's it's finished and just return immediately.
self.assertNumTasksEquals(0)
with sleuth.watch("djangae.db.migrations.operations.AddFieldData._start_task") as start:
self.start_operation(operation, detonate=False)
self.assertFalse(start.called)
self.assertNumTasksEquals(0)
task_marker = datastore.Get(
ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
)
self.assertTrue(task_marker["is_finished"])
def test_queue_option(self):
""" The `queue` kwarg should determine the task queue that the operation runs on. """
for x in xrange(3):
TestModel.objects.create()
operation = operations.AddFieldData(
"testmodel", "new_field", models.CharField(max_length=100, default=return_a_string),
queue="another",
# Ensure that we trigger a re-defer, so that we test that the correct queue is used for
# subsequent tasks, not just the first one
entities_per_task=1,
shard_count=1
)
self.start_operation(operation)
# The task(s) should not be in the default queue, but in the "another" queue instead
self.assertEqual(self.get_task_count("default"), 0)
self.assertTrue(self.get_task_count("another") > 0)
# And if we only run the tasks on the "another" queue, the whole operation should complete.
self.process_task_queues("another")
# And the entities should be updated
entities = self.get_entities()
self.assertTrue(all(entity['new_field'] == 'squirrel' for entity in entities))
def test_default_queue_setting(self):
""" If no `queue` kwarg is passed then the DJANGAE_MIGRATION_DEFAULT_QUEUE setting should
be used to determine the task queue.
"""
for x in xrange(2):
TestModel.objects.create()
operation = operations.AddFieldData(
"testmodel", "new_field", models.CharField(max_length=100, default="squirrel"),
)
# Check that starting the operation with a different setting correctly affects the queue.
# Note that here we don't check that *all* tasks go on the correct queue, just the first
# one. We test that more thoroughly in `test_queue_option` above.
with override_settings(DJANGAE_MIGRATION_DEFAULT_QUEUE="another"):
self.start_operation(operation)
self.assertEqual(self.get_task_count("default"), 0)
self.assertTrue(self.get_task_count("another") > 0)
self.flush_task_queues()
flush_task_markers()
# santity checks:
assert getattr(settings, "DJANGAE_MIGRATION_DEFAULT_QUEUE", None) is None
assert self.get_task_count() == 0
# Trigger the operation without that setting. The task(s) should go on the default queue.
self.start_operation(operation)
self.assertTrue(self.get_task_count("default") > 0)
def test_uid_allows_separate_identical_operations_to_be_run(self):
""" By passing the 'uid' kwarg to an operation, we should allow it to be run, even if an
otherwise idential operation has already been run.
"""
operation1 = operations.AddFieldData(
"testmodel", "new_field", models.BooleanField(default=True)
)
operation2 = operations.AddFieldData(
"testmodel", "new_field", models.BooleanField(default=True)
)
operation3 = operations.AddFieldData(
"testmodel", "new_field", models.BooleanField(default=True), uid="x"
)
# Create a model instance and run the first operation on it
instance = TestModel.objects.create()
self.start_operation(operation1)
self.process_task_queues()
# Check that the migration ran successfully
entity = self.get_entities()[0]
self.assertTrue(entity["new_field"])
# Now create another entity and make sure that the second migration (which is idential)
# does NOT run on it
instance.delete()
instance = TestModel.objects.create()
self.start_operation(operation2)
self.process_task_queues()
entity = self.get_entities()[0]
self.assertIsNone(entity.get("new_field"))
# Now run the third operation, which is identical but has a uid, so SHOULD be run
self.start_operation(operation3)
self.process_task_queues()
entity = self.get_entities()[0]
self.assertTrue(entity["new_field"])
def test_addfielddata(self):
""" Test the AddFieldData operation. """
for x in xrange(2):
TestModel.objects.create()
# Just for sanity, check that none of the entities have the new field value yet
entities = self.get_entities()
self.assertFalse(any(entity.get("new_field") for entity in entities))
operation = operations.AddFieldData(
"testmodel", "new_field", models.CharField(max_length=100, default="squirrel")
)
self.start_operation(operation)
self.process_task_queues()
# The entities should now all have the 'new_field' actually mapped over
entities = self.get_entities()
self.assertTrue(all(entity['new_field'] == 'squirrel' for entity in entities))
def test_removefielddata(self):
""" Test the RemoveFieldData operation. """
for x in xrange(2):
TestModel.objects.create(name="name_%s" % x)
# Just for sanity, check that all of the entities have `name` value
entities = self.get_entities()
self.assertTrue(all(entity["name"] for entity in entities))
operation = operations.RemoveFieldData(
"testmodel", "name", models.CharField(max_length=100)
)
self.start_operation(operation)
self.process_task_queues()
# The entities should now all have the 'name' value removed
entities = self.get_entities()
self.assertFalse(any(entity.get("name") for entity in entities))
def test_copyfielddata(self):
""" Test the CopyFieldData operation. """
for x in xrange(2):
TestModel.objects.create(name="name_%s" % x)
# Just for sanity, check that none of the entities have the new "new_field" value
entities = self.get_entities()
self.assertFalse(any(entity.get("new_field") for entity in entities))
operation = operations.CopyFieldData(
"testmodel", "name", "new_field"
)
self.start_operation(operation)
self.process_task_queues()
# The entities should now all have the "new_field" value
entities = self.get_entities()
self.assertTrue(all(entity["new_field"] == entity["name"] for entity in entities))
def test_deletemodeldata(self):
""" Test the DeleteModelData operation. """
for x in xrange(2):
TestModel.objects.create()
# Just for sanity, check that the entities exist!
entities = self.get_entities()
self.assertEqual(len(entities), 2)
operation = operations.DeleteModelData("testmodel")
self.start_operation(operation)
self.process_task_queues()
# The entities should now all be gone
entities = self.get_entities()
self.assertEqual(len(entities), 0)
def test_copymodeldata_overwrite(self):
""" Test the CopyModelData operation with overwrite_existing=True. """
# Create the TestModel instances, with OtherModel instances with matching PKs
for x in xrange(2):
instance = TestModel.objects.create(name="name_which_will_be_copied")
OtherModel.objects.create(name="original_name", id=instance.pk)
# Just for sanity, check that the entities exist
testmodel_entities = self.get_entities()
othermodel_entities = self.get_entities(model=OtherModel)
self.assertEqual(len(testmodel_entities), 2)
self.assertEqual(len(othermodel_entities), 2)
operation = operations.CopyModelData(
"testmodel", "djangae", "othermodel", overwrite_existing=True
)
self.start_operation(operation)
self.process_task_queues()
# The OtherModel entities should now all have a name lof "name_which_will_be_copied"
othermodel_entities = self.get_entities(model=OtherModel)
self.assertTrue(all(
entity["name"] == "name_which_will_be_copied" for entity in othermodel_entities
))
def test_copymodeldata_no_overwrite(self):
""" Test the CopyModelData operation with overwrite_existing=False. """
# Create the TestModel instances, with OtherModel instances with matching PKs only for
# odd PKs
for x in xrange(1, 5):
TestModel.objects.create(id=x, name="name_which_will_be_copied")
if x % 2:
OtherModel.objects.create(id=x, name="original_name")
# Just for sanity, check that the entities exist
testmodel_entities = self.get_entities()
othermodel_entities = self.get_entities(model=OtherModel)
self.assertEqual(len(testmodel_entities), 4)
self.assertEqual(len(othermodel_entities), 2)
operation = operations.CopyModelData(
"testmodel", "djangae", "othermodel", overwrite_existing=False
)
self.start_operation(operation)
self.process_task_queues()
# We now expect there to be 4 OtherModel entities, but only the ones which didn't exist
# already (i.e. the ones with even PKs) should have the name copied from the TestModel
othermodel_entities = self.get_entities(model=OtherModel)
self.assertEqual(len(othermodel_entities), 4)
for entity in othermodel_entities:
if entity.key().id() % 2:
self.assertEqual(entity["name"], "original_name")
else:
self.assertEqual(entity["name"], "name_which_will_be_copied")
@skipIf("ns1" not in settings.DATABASES, "This test is designed for the Djangae testapp settings")
def test_copymodeldatatonamespace_overwrite(self):
""" Test the CopyModelDataToNamespace operation with overwrite_existing=True. """
ns1 = settings.DATABASES["ns1"]["NAMESPACE"]
# Create instances, with copies in the other namespace with matching IDs
for x in xrange(2):
instance = TestModel.objects.create(name="name_which_will_be_copied")
instance.save(using="ns1")
# Just for sanity, check that the entities exist
entities = self.get_entities()
ns1_entities = self.get_entities(namespace=ns1)
self.assertEqual(len(entities), 2)
self.assertEqual(len(ns1_entities), 2)
operation = operations.CopyModelDataToNamespace(
"testmodel", ns1, overwrite_existing=True
)
self.start_operation(operation)
self.process_task_queues()
# The entities in ns1 should now all have a name lof "name_which_will_be_copied"
ns1_entities = self.get_entities(namespace=ns1)
self.assertTrue(all(
entity["name"] == "name_which_will_be_copied" for entity in ns1_entities
))
@skipIf("ns1" not in settings.DATABASES, "This test is designed for the Djangae testapp settings")
def test_copymodeldatatonamespace_no_overwrite(self):
""" Test the CopyModelDataToNamespace operation with overwrite_existing=False. """
ns1 = settings.DATABASES["ns1"]["NAMESPACE"]
# Create the TestModel instances, with OtherModel instances with matching PKs only for
# odd PKs
for x in xrange(1, 5):
TestModel.objects.create(id=x, name="name_which_will_be_copied")
if x % 2:
ns1_instance = TestModel(id=x, name="original_name")
ns1_instance.save(using="ns1")
# Just for sanity, check that the entities exist
entities = self.get_entities()
ns1_entities = self.get_entities(namespace=ns1)
self.assertEqual(len(entities), 4)
self.assertEqual(len(ns1_entities), 2)
operation = operations.CopyModelDataToNamespace(
"testmodel", ns1, overwrite_existing=False
)
self.start_operation(operation)
self.process_task_queues()
# We now expect there to be 4 entities in the new namespace, but only the ones which didn't
# exist already (i.e. the ones with even PKs) should have their `name` updated
ns1_entities = self.get_entities(namespace=ns1)
self.assertEqual(len(ns1_entities), 4)
for entity in ns1_entities:
if entity.key().id() % 2:
self.assertEqual(entity["name"], "original_name")
else:
self.assertEqual(entity["name"], "name_which_will_be_copied")
@skipIf(
"ns1" not in settings.DATABASES or "testapp" not in settings.INSTALLED_APPS,
"This test is designed for the Djangae testapp settings"
)
def test_copymodeldatatonamespace_new_app_label(self):
""" Test the CopyModelDataToNamespace operation with new data being saved to a new model in
a new app as well as in a new namespace.
"""
ns1 = settings.DATABASES["ns1"]["NAMESPACE"]
for x in xrange(2):
TestModel.objects.create(name="name_which_will_be_copied")
# Just for sanity, check that the entities exist
entities = self.get_entities()
new_entities = self.get_entities(model=OtherAppModel, namespace=ns1)
self.assertEqual(len(entities), 2)
self.assertEqual(len(new_entities), 0)
operation = operations.CopyModelDataToNamespace(
"testmodel", ns1, to_app_label="testapp", to_model_name="otherappmodel"
)
self.start_operation(operation)
self.process_task_queues()
# The entities in ns1 should now all have a name lof "name_which_will_be_copied"
new_entities = self.get_entities(model=OtherAppModel, namespace=ns1)
self.assertEqual(len(new_entities), 2)
self.assertTrue(all(
entity["name"] == "name_which_will_be_copied" for entity in new_entities
))
def test_mapfunctiononentities(self):
""" Test the MapFunctionOnEntities operation. """
for x in xrange(2):
TestModel.objects.create()
# Test that our entities have not had our function called on them
entities = self.get_entities()
self.assertFalse(any(entity.get("is_tickled") for entity in entities))
operation = operations.MapFunctionOnEntities("testmodel", tickle_entity)
self.start_operation(operation)
self.process_task_queues()
entities = self.get_entities()
self.assertEqual(len(entities), 2)
self.assertTrue(all(entity.get("is_tickled") for entity in entities))
class MidStringTestCase(TestCase):
""" Tests for the _mid_string function in the mapper_library. """
def test_handles_args_in_either_order(self):
""" It shouldn't matter whether we pass the "higher" string as the first or second param. """
low = "aaaaa"
high = "zzzzz"
mid1 = _mid_string(low, high)
mid2 = _mid_string(low, high)
self.assertEqual(mid1, mid2)
self.assertTrue(low < mid1 < high)
def test_basic_behaviour(self):
""" Test finding the midpoint between two string in an obvious case. """
start = "a"
end = "c"
self.assertEqual(_mid_string(start, end), "b")
def test_slightly_less_basic_behaviour(self):
start = "aaaaaaaaaaaa"
end = "z"
mid_low_apprx = "l"
mid_high_apprx = "n"
result = _mid_string(start, end)
self.assertTrue(mid_low_apprx < result < mid_high_apprx)
def test_handles_strings_of_different_lengths(self):
""" Strings of different lengths should return another of a length mid way between """
start = "aaa"
end = "zzzzzzzzzzzzz"
mid = _mid_string(start, end)
self.assertTrue(start < mid < end)
def test_handles_unicode(self):
""" It should be able to do comparisions on non-ascii strings. """
start = u"aaa£¢$›😇"
end = u"zzz🤡"
mid = _mid_string(start, end)
self.assertTrue(start < mid < end)
def test_does_not_return_string_starting_with_double_underscore(self):
""" A string that starts with a double underscore is not a valid Datastore key and so
should not be returned.
"""
# The true mid point between this start and end combination is a double underscore
start = "^^"
end = "``"
result = _mid_string(start, end)
self.assertNotEqual(result, "__")
class MidKeyTestCase(TestCase):
""" Tests for the `_mid_key` function. """
def test_mixed_integers_and_strings_not_allowed(self):
""" Finding the mid point between keys of different types is not currently supported and
should therefore raise an error.
"""
key1 = datastore.Key.from_path("my_kind", 1)
key2 = datastore.Key.from_path("my_kind", "1")
self.assertRaises(NotImplementedError, _mid_key, key1, key2)
def test_mid_integer_key(self):
""" Given 2 keys with integer `id_or_name` values, the returned key should have an
`id_or_name` which is an integer somewhere between the two.
"""
key1 = datastore.Key.from_path("my_kind", 1)
key2 = datastore.Key.from_path("my_kind", 100)
result = _mid_key(key1, key2)
self.assertEqual(result.kind(), key1.kind())
self.assertEqual(result.namespace(), key1.namespace())
self.assertTrue(1 < result.id_or_name() < 100)
def test_mid_string_key(self):
""" Given 2 keys with string `id_or_name` values, the returned key should have an
`id_or_name` which is a string somewhere between the two.
"""
key1 = datastore.Key.from_path("my_kind", "1")
key2 = datastore.Key.from_path("my_kind", "100")
result = _mid_key(key1, key2)
self.assertEqual(result.kind(), key1.kind())
self.assertEqual(result.namespace(), key1.namespace())
self.assertTrue("1" < result.id_or_name() < "100")
class NextStringTestCase(TestCase):
""" Tests for the _next_string function in the mapper_library. """
def test_basic_behaviour(self):
try:
unichr(65536)
# Python wide-unicode build (Linux) UTF-32
highest_unicode_char = unichr(0x10ffff)
except ValueError:
# Python narrow build (OSX)
# Python 2 using 16 bit unicode, so the highest possible character is (2**16) - 1
highest_unicode_char = unichr(2 ** 16 - 1)
checks = (
# Pairs of (input, expected_output)
("a", "b"),
("aaaa", "aaab"),
# unichr((2 ** 32) - 1) is the last possible unicode character
(highest_unicode_char, highest_unicode_char + unichr(1)),
(u"aaa" + highest_unicode_char, u"aaa" + highest_unicode_char + unichr(1)),
)
for input_text, expected_output in checks:
self.assertEqual(_next_string(input_text), expected_output)
class GetKeyRangeTestCase(TestCase):
""" Tests for the `_get_range` function. """
def test_integer_range(self):
""" Given 2 integer-based keys, it should return the range that the IDs span. """
key1 = datastore.Key.from_path("my_kind", 4012809128)
key2 = datastore.Key.from_path("my_kind", 9524773032)
self.assertEqual(_get_range(key1, key2), 9524773032 - 4012809128)
def test_string_range(self):
""" Given 2 string-based keys, it should return a representation of the range that the two
keys span.
"""
key1 = datastore.Key.from_path("my_kind", "a")
key2 = datastore.Key.from_path("my_kind", "b")
# The difference between "a" and "b" is 1 character
self.assertEqual(_get_range(key1, key2), unichr(1))
def test_mixed_keys_cause_exception(self):
""" Trying to get a range between 2 keys when one is an integer and the other is a string
should cause an explosion.
"""
key1 = datastore.Key.from_path("my_kind", "a")
key2 = datastore.Key.from_path("my_kind", 12345)
self.assertRaises(Exception, _get_range, key1, key2)
class ShardQueryTestCase(TestCase):
""" Tests for the `shard_query` function. """
def test_query_sharding(self):
ns1 = settings.DATABASES["default"]["NAMESPACE"]
for x in xrange(1, 21):
TestModel.objects.create(pk=x)
qry = datastore.Query(TestModel._meta.db_table, namespace=ns1)
shards = shard_query(qry, 1)
self.assertEqual(1, len(shards))
shards = shard_query(qry, 20)
self.assertEqual(20, len(shards))
shards = shard_query(qry, 50)
# We can't create 50 shards if there are only 20 objects
self.assertEqual(20, len(shards))
class MapperLibraryTestCase(TestCase):
""" Tests which check the behaviour of the mapper library directly. """
def setUp(self):
# We need to clean out the migration task markers from the Datastore between each test, as
# the standard flush only cleans out models
super(MapperLibraryTestCase, self).setUp()
flush_task_markers()
def _get_testmodel_query(self, db="default"):
namespace = settings.DATABASES[db].get('NAMESPACE', '')
return datastore.Query(
TestModel._meta.db_table,
namespace=namespace
)
def _get_taskmarker_query(self, namespace=""):
return datastore.Query("ShardedTaskMarker", namespace=namespace)
def test_basic_processing(self):
""" Test that calling `start_mapping` with some sensible parameters will do the right
processing.
"""
objs = []
for x in xrange(2):
objs.append(TestModel(name="Test-%s" % x))
TestModel.objects.bulk_create(objs)
start_mapping("my_lovely_mapper", self._get_testmodel_query(), tickle_entity)
self.process_task_queues()
# And check that every entity has been tickled
self.assertTrue(all(e['is_tickled'] for e in self._get_testmodel_query().Run()))
def test_cannot_start_same_mapping_twice(self):
""" Calling `start_mapping` with the same parameters twice then it should NOT create 2
mappers.
"""
objs = []
for x in xrange(2):
objs.append(TestModel(name="Test-%s" % x))
TestModel.objects.bulk_create(objs)
assert self._get_taskmarker_query().Count() == 0 # Sanity
marker = start_mapping("my_test_mapper", self._get_testmodel_query(), tickle_entity)
task_count = self.get_task_count()
assert marker # Sanity
assert task_count # Sanity
# Now try to defer the same mapper again
marker = start_mapping("my_test_mapper", self._get_testmodel_query(), tickle_entity)
# That shouldn't have worked, so the number of tasks should remain unchanged
self.assertEqual(self.get_task_count(), task_count)
# And it should not have returned a marker
self.assertIsNone(marker)
def test_can_start_same_mapping_in_2_different_namespaces(self):
""" Calling `start_mapping` with the same parameters but with different namespaces on the
query should work and correctly defer 2 processing tasks.
"""
dbs = ("default", "ns1")
# Create some objects in 2 different namespaces
for db in dbs:
objs = []
for x in xrange(2):
objs.append(TestModel(name="Test-%s" % x))
TestModel.objects.using(db).bulk_create(objs)
# Start the same mapper twice but in 2 different namespaces, and check that they both work
current_task_count = self.get_task_count()
markers = set()
for db in dbs:
marker = start_mapping("my_test_mapper", self._get_testmodel_query(db), tickle_entity)
self.assertIsNotNone(marker)
self.assertFalse(marker in markers)
markers.add(marker)
new_task_count = self.get_task_count()
self.assertTrue(new_task_count > current_task_count)
current_task_count = new_task_count
def test_mapper_will_continue_after_deadline_exceeded_error(self):
""" If DeadlineExceededError is encountered when processing one of the entities, the mapper
should redefer and continue.
"""
objs = []
for x in xrange(8):
objs.append(TestModel(name="Test-%s" % x))
TestModel.objects.bulk_create(objs)
identifier = "my_test_mapper"
query = self._get_testmodel_query()
# Reset the call_count on tickle_entity_volitle. We can't use sleuth.watch because a
# wrapped function can't be pickled
tickle_entity_volitle.call_count = 0
# Run the mapper and run all the tasks
start_mapping(
identifier, query, tickle_entity_volitle, shard_count=1,
)
self.process_task_queues()
# Check that the tickle_entity_volitle function was called more times than there are
# entities (because some calls should have failed and been retried)
# self.assertTrue(tickle_entity_volitle.call_count > TestModel.objects.count())
# And check that every entity has been tickled
self.assertTrue(all(e['is_tickled'] for e in self._get_testmodel_query().Run()))
| super(MigrationOperationTests, self).setUp()
flush_task_markers() |
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .core import new_cluster | # Copyright 1999-2018 Alibaba Group Holding Ltd.
# |
napi_define_class.rs | use crate::env::Env;
use crate::ffi::*;
use crate::function::create_function_template;
use deno_core::v8;
#[no_mangle]
pub unsafe extern "C" fn napi_define_class(
env: napi_env,
utf8name: *const c_char,
length: usize,
constructor: napi_callback,
callback_data: *mut c_void,
property_count: usize,
properties: *const napi_property_descriptor,
result: *mut napi_value,
) -> napi_status {
let mut env = &mut *(env as *mut Env);
let name = std::ffi::CStr::from_ptr(utf8name).to_str().unwrap();
let tpl: v8::Local<v8::FunctionTemplate> = std::mem::transmute(
create_function_template(env, Some(name), constructor, callback_data),
);
let napi_properties = std::slice::from_raw_parts(properties, property_count);
for p in napi_properties {
let name_str = CStr::from_ptr(p.utf8name).to_str().unwrap();
let name = v8::String::new(env.scope, name_str).unwrap();
if !(p.method as *const c_void).is_null() {
let function: v8::Local<v8::FunctionTemplate> = std::mem::transmute(
create_function_template(env, Some(name_str), p.method, p.data),
);
let proto = tpl.prototype_template(env.scope);
proto.set(name.into(), function.into());
} else if !(p.getter as *const c_void).is_null()
|| !(p.setter as *const c_void).is_null()
{
let getter: Option<v8::Local<v8::FunctionTemplate>> =
if !(p.getter as *const c_void).is_null() {
Some(std::mem::transmute(create_function_template(
env,
Some(name_str),
p.getter,
p.data,
)))
} else {
None
};
let setter: Option<v8::Local<v8::FunctionTemplate>> =
if !(p.setter as *const c_void).is_null() {
Some(std::mem::transmute(create_function_template(
env,
Some(name_str),
p.setter,
p.data,
)))
} else {
None
};
let proto = tpl.prototype_template(env.scope);
let base_name = CStr::from_ptr(p.utf8name).to_str().unwrap();
let getter_name =
v8::String::new(env.scope, format!("get_{}", base_name).as_str())
.unwrap();
let setter_name =
v8::String::new(env.scope, format!("set_{}", base_name).as_str())
.unwrap();
// TODO: use set_accessor & set_accessor_with_setter
match (getter, setter) {
(Some(getter), None) => {
proto.set(getter_name.into(), getter.into());
}
(Some(getter), Some(setter)) => {
proto.set(getter_name.into(), getter.into());
proto.set(setter_name.into(), setter.into());
}
(None, Some(setter)) => {
proto.set(setter_name.into(), setter.into());
}
(None, None) => unreachable!(),
}
} else {
let proto = tpl.prototype_template(env.scope); | proto.set(name.into(), std::mem::transmute(p.value));
}
}
let value: v8::Local<v8::Value> = tpl.get_function(env.scope).unwrap().into();
*result = std::mem::transmute(value);
napi_ok
} | |
mod.rs | //! Cretonne DSL classes.
//!
//! This module defines the classes that are used to define Cretonne
//! instructions and other entitties.
pub mod types;
/// Convert the string `s` to CamelCase.
fn _camel_case(s: &str) -> String {
let mut output_chars = String::with_capacity(s.len());
let mut capitalize = true;
for curr_char in s.chars() {
if curr_char == '_' {
capitalize = true;
} else {
if capitalize | else {
output_chars.push(curr_char);
}
capitalize = false;
}
}
output_chars
}
/// Check if `x` is a power of two.
fn _is_power_of_two(x: u8) -> bool {
x > 0 && x & (x - 1) == 0
}
/// Compute the next power of two that is greater than `x`.
fn _next_power_of_two(x: u8) -> u8 {
let mut s = 1;
let mut res = x;
while res & (res + 1) != 0 {
res |= res >> s;
s *= 2;
}
res + 1
}
#[cfg(test)]
mod tests {
use super::_camel_case as camel_case;
use super::_is_power_of_two as is_power_of_two;
use super::_next_power_of_two as next_power_of_two;
#[test]
fn camel_case_works() {
assert_eq!(camel_case("x"), "X");
assert_eq!(camel_case("camel_case"), "CamelCase");
}
#[test]
fn is_power_of_two_works() {
assert_eq!(is_power_of_two(1), true);
assert_eq!(is_power_of_two(2), true);
assert_eq!(is_power_of_two(4), true);
assert_eq!(is_power_of_two(8), true);
assert_eq!(is_power_of_two(3), false);
assert_eq!(is_power_of_two(7), false);
}
#[test]
fn next_power_of_two_works() {
assert_eq!(next_power_of_two(0), 1);
assert_eq!(next_power_of_two(1), 2);
assert_eq!(next_power_of_two(2), 4);
assert_eq!(next_power_of_two(3), 4);
assert_eq!(next_power_of_two(4), 8);
}
}
| {
output_chars.extend(curr_char.to_uppercase());
} |
models.py | import logging
import uuid
from django.conf import settings
from django.core.validators import RegexValidator
from django.contrib.gis.db import models
from django.contrib.gis.geos import Polygon
from django.contrib.auth import get_user_model
from django.contrib.postgres.search import SearchVectorField
from django.contrib.postgres.indexes import GinIndex, BTreeIndex
from django.utils import timezone
from django.utils.html import mark_safe
from django.utils.translation import gettext_lazy as _
from django.urls import reverse
from djmoney.money import Money
from djmoney.models.fields import MoneyField
from .pricing import ProductPriceCalculator
from .helpers import RandomFileName, send_geoshop_email
LOGGER = logging.getLogger(__name__)
# Get the UserModel
UserModel = get_user_model()
class AbstractIdentity(models.Model):
"""
Common properties for identities, addresses and temporary users
"""
first_name = models.CharField(_('first_name'), max_length=50, blank=True)
last_name = models.CharField(_('last_name'), max_length=150, blank=True)
email = models.EmailField(_('email'), max_length=254, blank=True)
street = models.CharField(_('street'), max_length=100, blank=True)
street2 = models.CharField(_('street2'), max_length=100, blank=True)
postcode = models.CharField(_('postcode'), max_length=10, blank=True)
city = models.CharField(_('city'), max_length=50, blank=True)
country = models.CharField(_('country'), max_length=50, blank=True)
company_name = models.CharField(
_('company_name'), max_length=250, blank=True)
phone = models.CharField(_('phone'), max_length=50, blank=True)
class Meta:
abstract = True
def __str__(self):
if self.company_name:
return '%s %s (%s)' % (self.last_name, self.first_name, self.company_name)
return '%s %s' % (self.last_name, self.first_name)
class Contact(AbstractIdentity):
"""
Address book of contacts linked to an user that stores addresses
previously filled by the user.
"""
belongs_to = models.ForeignKey(
UserModel, on_delete=models.CASCADE, verbose_name=_('belongs_to'))
sap_id = models.BigIntegerField(_('sap_id'), null=True, blank=True)
subscribed = models.BooleanField(_('subscribed'), default=False)
is_active = models.BooleanField(_('is_active'), default=True)
class Meta:
db_table = 'contact'
verbose_name = _('contact')
class Copyright(models.Model):
description = models.TextField(blank=True)
class Meta:
db_table = 'copyright'
verbose_name = _('copyright')
def __str__(self):
return self.description
class Document(models.Model):
"""
Named links to more informations on metadata
"""
name = models.CharField(_('name'), max_length=80)
link = models.URLField(
_('link'),
help_text=_('Please complete the above URL'),
default='https://sitn.ne.ch',
max_length=2000
)
class Meta:
db_table = 'document'
verbose_name = _('document')
def __str__(self):
return '%s (%s)' % (self.name, self.link.split("/")[-1])
class DataFormat(models.Model):
name = models.CharField(_('name'), max_length=100, blank=True)
class Meta:
db_table = 'data_format'
verbose_name = _('data_format')
def __str__(self):
return self.name
class OrderType(models.Model):
name = models.CharField(_('name'), max_length=30, blank=True)
class Meta:
db_table = 'order_type'
verbose_name = _('order type')
verbose_name_plural = _('order types')
def __str__(self):
return self.name
class Identity(AbstractIdentity):
"""
All users have an Identity but not all identities are users.
"""
user = models.OneToOneField(
UserModel, on_delete=models.SET_NULL, verbose_name=_('user'), blank=True, null=True)
sap_id = models.BigIntegerField(_('sap_id'), null=True, blank=True)
ide_id = models.CharField(_('ide_number'), max_length=15, null=True, blank=True, validators=[
RegexValidator(
regex=r'^CHE-([0-9]{3}\.){2}[0-9]{3}$',
message=_('IDE number is not valid'),
),
])
contract_accepted = models.DateField(_('contract_accepted'), null=True, blank=True)
is_public = models.BooleanField(_('is_public'), default=False)
subscribed = models.BooleanField(_('subscribed'), default=False)
birthday = models.DateField(_('birthday'), null=True, blank=True)
class Meta:
db_table = 'identity'
verbose_name = _('identity')
class Metadata(models.Model):
"""
Describes one or more Products. Every metadata can have one or more contact persons
"""
id_name = models.CharField(_('id_name'), max_length=50, unique=True)
name = models.CharField(_('name'), max_length=300, blank=True)
description_short = models.CharField(_('description_short'), max_length=500, blank=True)
description_long = models.TextField(_('description_long'), blank=True)
datasource = models.CharField(_('datasource'), max_length=260, blank=True, null=True)
scale = models.CharField(_('scale'), max_length=500, blank=True)
geocat_link = models.CharField(_('geocat_link'), max_length=2000, blank=True)
legend_link = models.CharField(_('legend_link'), max_length=2000, blank=True)
image_link = models.CharField(_('image_link'), max_length=250, default=settings.DEFAULT_METADATA_IMAGE_URL, blank=True)
copyright = models.ForeignKey(
Copyright, models.SET_NULL, verbose_name=_('copyright'), blank=True, null=True)
documents = models.ManyToManyField(Document, verbose_name=_('documents'), blank=True)
contact_persons = models.ManyToManyField(
Identity,
verbose_name=_('contact_persons'),
related_name='contact_persons',
through='MetadataContact')
modified_date = models.DateTimeField(auto_now=True)
modified_user = models.ForeignKey(
UserModel,
models.PROTECT,
verbose_name=_('modified_user'),
related_name='modified_user')
class Meta:
db_table = 'metadata'
verbose_name = _('metadata')
def __str__(self):
return self.id_name
def get_legend_link(self):
if self.legend_link is None or self.legend_link == '':
return None
# When legend_link is 0, returns legend from mapserver
if self.legend_link == '0':
return settings.AUTO_LEGEND_URL + self.id_name
# When legend_link is intra, returns legend from intranet mapserver
if self.legend_link == 'intra':
return settings.INTRA_LEGEND_URL + self.id_name
if self.legend_link.startswith('http'):
return self.legend_link
return settings.MEDIA_URL + self.legend_link
def legend_tag(self):
if self.get_legend_link():
return mark_safe('<img src="%s" />' % self.get_legend_link())
legend_tag.short_description = _('legend')
def image_tag(self):
if self.image_link is None or self.image_link == '':
return mark_safe('<img src="%s%s" />' % (settings.MEDIA_URL, 'no_image.jpg'))
return mark_safe('<img src="%s%s" />' % (settings.MEDIA_URL, self.image_link))
image_tag.short_description = _('image')
class MetadataContact(models.Model):
"""
Links Metadata with the persons to contact (Identity) depending on the role they play for metadata.
"""
metadata = models.ForeignKey(Metadata, models.CASCADE, verbose_name=_('metadata'))
contact_person = models.ForeignKey(
Identity, models.CASCADE, verbose_name=_('contact_person'), limit_choices_to={'is_public': True})
metadata_role = models.CharField(_('role'), max_length=150, default='Gestionnaire')
class Meta:
db_table = 'metadata_contact_persons'
verbose_name = _('metadata_contact')
def __str__(self):
return '%s - %s (%s)' % (self.contact_person, self.metadata, self.metadata_role)
class Pricing(models.Model):
"""
Pricing for free products, single tax products or area priced products.
For free products, set base_fee and unit_price both to 0.
For unique price set base_fee to desired amount and unit_price to 0.
For price based on area, provide unit_price
For prices based on a PricingGeometry, create the pricing layer and
link it to pricing_layer field.
"""
class PricingType(models.TextChoices):
FREE = 'FREE', _('Free')
SINGLE = 'SINGLE', _('Single')
BY_NUMBER_OBJECTS = 'BY_NUMBER_OBJECTS', _('By number of objects')
BY_AREA = 'BY_AREA', _('By area')
FROM_PRICING_LAYER = 'FROM_PRICING_LAYER', _('From a pricing layer')
FROM_CHILDREN_OF_GROUP = 'FROM_CHILDREN_OF_GROUP', _('From children products of this group')
MANUAL = 'MANUAL', _('Manual')
name = models.CharField(_('name'), max_length=100, null=True, blank=True)
pricing_type = models.CharField(
_('pricing_type'), max_length=30, choices=PricingType.choices)
base_fee = MoneyField(
_('base_fee'), max_digits=14, decimal_places=2, default_currency='CHF', null=True, blank=True)
min_price = MoneyField(
_('min_price'), max_digits=14, decimal_places=2, default_currency='CHF', null=True, blank=True)
max_price = MoneyField(
_('max_price'), max_digits=14, decimal_places=2, default_currency='CHF', null=True, blank=True)
unit_price = MoneyField(
_('unit_price'), max_digits=14, decimal_places=2, default_currency='CHF', null=True, blank=True)
class Meta:
db_table = 'pricing'
verbose_name = _('pricing')
def get_price(self, polygon):
"""
Returns the price of a product given a polygon
"""
price = ProductPriceCalculator.get_price(
pricing_instance=self,
polygon=polygon
)
if price is None:
return None, None
if self.min_price and price < self.min_price:
return self.min_price, self.base_fee
# if max_price is reached, will force customer ask for a quote
if self.max_price and price > self.max_price:
return None, None
return price, self.base_fee
def __str__(self):
return '%s + %s CHF de taxe' % (self.name, self.base_fee)
class PricingGeometry(models.Model):
"""
Areas defining prices must be grouped by name.
"""
name = models.CharField(_('name'), max_length=300, null=True)
unit_price = MoneyField(
_('price'), max_digits=14, decimal_places=2, default_currency='CHF', null=True)
geom = models.GeometryField(_('geom'), srid=settings.DEFAULT_SRID)
pricing = models.ForeignKey(Pricing, models.CASCADE, verbose_name=_('pricing'), null=True)
class Meta:
db_table = 'pricing_layer'
verbose_name = _('pricing_layer')
indexes = (BTreeIndex(fields=('name',)),)
def __str__(self):
return self.name
class Product(models.Model):
"""
A product is mostly a table or a raster. It can also be a group of products.
Products with a PUBLISHED status are available in catalogue.
A product with a status PUBLISHED_ONLY_IN_GROUP cannot be found on
catalogue but can be ordered by the group he belongs to.
Example:
PFP3_categorie_1 and PFP3_categorie_2 have a PUBLISHED_ONLY_IN_GROUP status:
they cannot be found as is in the catalogue, but they belong to another
product (with group_id property): PFP3 that has a PUBLISHED status.
"""
class ProductStatus(models.TextChoices):
DRAFT = 'DRAFT', _('Draft')
PUBLISHED = 'PUBLISHED', _('Published')
PUBLISHED_ONLY_IN_GROUP = 'PUBLISHED_ONLY_IN_GROUP', _('Published only in group')
DEPRECATED = 'DEPRECATED', _('Deprecated')
metadata = models.ForeignKey(
Metadata, models.SET_NULL, verbose_name=_('metadata'), blank=True, null=True)
label = models.CharField(_('label'), max_length=250, unique=True)
status = models.CharField(
_('status'), max_length=30, choices=ProductStatus.choices, default=ProductStatus.DRAFT)
group = models.ForeignKey(
'self', models.SET_NULL, verbose_name=_('group'), blank=True, null=True, related_name='products')
provider = models.ForeignKey(
UserModel, models.PROTECT, verbose_name=_('provider'), null=True,
limit_choices_to={
'groups__name': 'extract'
})
pricing = models.ForeignKey(Pricing, models.PROTECT, verbose_name=_('pricing'))
free_when_subscribed = models.BooleanField(_('free_when_subscribed'), default=False)
order = models.BigIntegerField(_('order_index'), blank=True, null=True)
thumbnail_link = models.CharField(
_('thumbnail_link'), max_length=250, default=settings.DEFAULT_PRODUCT_THUMBNAIL_URL)
ts = SearchVectorField(null=True)
geom = models.PolygonField(_('geom'), srid=settings.DEFAULT_SRID, default=Polygon.from_bbox(
(2519900, 1186430, 2578200, 1227030)
))
class Meta:
db_table = 'product'
verbose_name = _('product')
ordering = ['order']
# https://www.postgresql.org/docs/10/gin-intro.html
indexes = [GinIndex(fields=["ts"])]
def __str__(self):
return self.label
def thumbnail_tag(self):
if self.thumbnail_link is None or self.thumbnail_link == '':
return mark_safe('<img src="%s%s" />' % (settings.MEDIA_URL, 'no_image.jpg'))
return mark_safe('<img src="%s%s" />' % (settings.MEDIA_URL, self.thumbnail_link))
thumbnail_tag.short_description = _('thumbnail')
class Order(models.Model):
"""
processing_fee should default to the maximum of base fees in the order but can then be edited mannually
"""
class OrderStatus(models.TextChoices):
DRAFT = 'DRAFT', _('Draft')
PENDING = 'PENDING', _('Pending')
QUOTE_DONE = 'QUOTE_DONE', _('Quote done')
READY = 'READY', _('Ready')
IN_EXTRACT = 'IN_EXTRACT', _('In extract')
PARTIALLY_DELIVERED = 'PARTIALLY_DELIVERED', _('Partially delivered')
PROCESSED = 'PROCESSED', _('Processed')
ARCHIVED = 'ARCHIVED', _('Archived')
REJECTED = 'REJECTED', _('Rejected')
title = models.CharField(_('title'), max_length=255, validators=[
RegexValidator(
regex=r'^[^<>%$"\(\)\n\r]*$',
message=_('Title contains forbidden characters'),
),
])
description = models.TextField(_('description'), blank=True)
processing_fee = MoneyField(
_('processing_fee'), max_digits=14, decimal_places=2, default_currency='CHF', blank=True, null=True)
total_without_vat = MoneyField(
_('total_without_vat'), max_digits=14, decimal_places=2, default_currency='CHF', blank=True, null=True)
part_vat = MoneyField(
_('part_vat'), max_digits=14, decimal_places=2, default_currency='CHF', blank=True, null=True)
total_with_vat = MoneyField(
_('total_with_vat'), max_digits=14, decimal_places=2, default_currency='CHF', blank=True, null=True)
geom = models.PolygonField(_('geom'), srid=settings.DEFAULT_SRID)
client = models.ForeignKey(UserModel, models.PROTECT, verbose_name=_('client'), blank=True)
invoice_contact = models.ForeignKey(
Contact,
models.PROTECT,
verbose_name=_('invoice_contact'),
related_name='invoice_contact',
blank=True,
null=True
)
invoice_reference = models.CharField(_('invoice_reference'), max_length=255, blank=True)
email_deliver = models.EmailField(_('email_deliver'), max_length=254, blank=True, null=True)
order_type = models.ForeignKey(OrderType, models.PROTECT, verbose_name=_('order_type'))
status = models.CharField(
_('status'), max_length=20, choices=OrderStatus.choices, default=OrderStatus.DRAFT)
date_ordered = models.DateTimeField(_('date_ordered'), blank=True, null=True)
date_downloaded = models.DateTimeField(_('date_downloaded'), blank=True, null=True)
date_processed = models.DateTimeField(_('date_processed'), blank=True, null=True)
extract_result = models.FileField(upload_to='extract', null=True, blank=True)
download_guid = models.UUIDField(_('download_guid'), null=True, blank=True)
class Meta:
db_table = 'order'
ordering = ['-date_ordered']
verbose_name = _('order')
def _reset_prices(self):
self.processing_fee = None
self.total_without_vat = None
self.part_vat = None
self.total_with_vat = None
def set_price(self):
"""
Sets price information if all items have prices
"""
self._reset_prices()
items = self.items.all()
if items == []:
return False
self.total_without_vat = Money(0, 'CHF')
self.processing_fee = Money(0, 'CHF')
for item in items:
if item.base_fee is None:
self._reset_prices()
return False
if item.base_fee > self.processing_fee:
self.processing_fee = item.base_fee
self.total_without_vat += item.price
self.total_without_vat += self.processing_fee
self.part_vat = self.total_without_vat * settings.VAT
self.total_with_vat = self.total_without_vat + self.part_vat
return True
def quote_done(self):
"""Admins confirmation they have given a manual price"""
price_is_set = self.set_price()
if price_is_set:
self.status = self.OrderStatus.QUOTE_DONE
self.save()
send_geoshop_email(
_('Geoshop - Quote has been done'),
recipient=self.email_deliver or self.client.identity,
template_name='email_quote_done',
template_data={
'order_id': self.id,
'first_name': self.client.identity.first_name,
'last_name': self.client.identity.last_name
}
)
return price_is_set
def _expand_product_groups(self):
"""
When a product is a group of products, the group is deleted from cart and
is replaced with one OrderItem for each product inside the group.
"""
items = self.items.all()
for item in items:
# if product is a group (if product has children)
if item.product.products.exists():
for product in item.product.products.all():
# only pick products that intersect current order geom
if product.geom.intersects(self.geom):
new_item = OrderItem(
order=self,
product=product,
data_format=item.data_format
)
# If the data format for the group is not available for the item,
# pick the first possible
if item.data_format not in item.available_formats:
new_item.data_format = product.product_formats.all().first().data_format
new_item.set_price()
new_item.save()
item.delete()
def confirm(self):
"""Customer's confirmations he wants to proceed with the order"""
self._expand_product_groups()
items = self.items.all()
has_all_prices_calculated = True
for item in items:
if item.price_status == OrderItem.PricingStatus.PENDING:
item.ask_price()
has_all_prices_calculated = has_all_prices_calculated and False
else:
item.status = OrderItem.OrderItemStatus.IN_EXTRACT
if has_all_prices_calculated:
self.date_ordered = timezone.now()
self.download_guid = uuid.uuid4()
self.status = Order.OrderStatus.READY
else:
self.status = Order.OrderStatus.PENDING
def next_status_on_extract_input(self):
"""Controls status when Extract uploads a file or cancel an order item"""
previous_accepted_status = [
Order.OrderStatus.READY,
Order.OrderStatus.IN_EXTRACT,
Order.OrderStatus.PARTIALLY_DELIVERED
]
if self.status not in previous_accepted_status:
raise Exception("Order has an inappropriate status after input")
items_statuses = set(self.items.all().values_list('status', flat=True))
if OrderItem.OrderItemStatus.IN_EXTRACT in items_statuses:
if OrderItem.OrderItemStatus.PROCESSED in items_statuses:
self.status = Order.OrderStatus.PARTIALLY_DELIVERED
else:
self.status = Order.OrderStatus.READY
else:
if OrderItem.OrderItemStatus.PROCESSED in items_statuses:
self.status = Order.OrderStatus.PROCESSED
self.date_processed = timezone.now()
send_geoshop_email(
_('Geoshop - Download ready'),
recipient=self.email_deliver or self.client.identity,
template_name='email_download_ready',
template_data={
'order_id': self.id,
'download_guid': self.download_guid,
'front_url': '{}://{}{}'.format(
settings.FRONT_PROTOCOL,
settings.FRONT_URL,
settings.FRONT_HREF
),
'first_name': self.client.identity.first_name,
'last_name': self.client.identity.last_name,
}
)
else:
self.status = Order.OrderStatus.REJECTED
return self.status
@property
def geom_srid(self):
return self.geom.srid
@property
def geom_area(self):
return self.geom.area
def __str__(self):
return '%s - %s' % (self.id, self.title)
class OrderItem(models.Model):
"""
Cart item.
"""
class PricingStatus(models.TextChoices):
PENDING = 'PENDING', _('Pending')
CALCULATED = 'CALCULATED', _('Calculated')
IMPORTED = 'IMPORTED', _('Imported') # from old database
class OrderItemStatus(models.TextChoices):
PENDING = 'PENDING', _('Pending')
IN_EXTRACT = 'IN_EXTRACT', _('In extract')
PROCESSED = 'PROCESSED', _('Processed')
ARCHIVED = 'ARCHIVED', _('Archived')
REJECTED = 'REJECTED', _('Rejected')
order = models.ForeignKey(
Order, models.CASCADE, related_name='items', verbose_name=_('order'), blank=True, null=True)
product = models.ForeignKey(
Product, models.PROTECT, verbose_name=_('product'), blank=True, null=True)
data_format = models.ForeignKey(
DataFormat, models.PROTECT, verbose_name=_('data_format'), blank=True, null=True)
srid = models.IntegerField(_('srid'), default=settings.DEFAULT_SRID)
last_download = models.DateTimeField(_('last_download'), blank=True, null=True)
price_status = models.CharField(
_('price_status'), max_length=20, choices=PricingStatus.choices, default=PricingStatus.PENDING)
status = models.CharField(
_('status'), max_length=20, choices=OrderItemStatus.choices, default=OrderItemStatus.PENDING)
_price = MoneyField(
_('price'), max_digits=14, decimal_places=2, default_currency='CHF', null=True, blank=True)
_base_fee = MoneyField(
_('base_fee'), max_digits=14, decimal_places=2, default_currency='CHF', null=True, blank=True)
extract_result = models.FileField(upload_to=RandomFileName('extract'), null=True, blank=True)
comment = models.TextField(_('comment'), null=True, blank=True)
class Meta:
db_table = 'order_item'
verbose_name = _('order_item')
@property
def available_formats(self):
queryset = ProductFormat.objects.filter(
product=self.product).values_list('data_format__name', flat=True)
return list(queryset)
def _get_price_values(self, price_value):
if self.price_status == OrderItem.PricingStatus.PENDING:
LOGGER.info("You are trying to get a pricing value but pricing status is still PENDING")
return None
return price_value
@property
def price(self):
return self._get_price_values(self._price)
@property
def base_fee(self):
return self._get_price_values(self._base_fee)
def set_price(self, price=None, base_fee=None):
"""
Sets price and updates price status
"""
self._price = None
self._base_fee = None
self.price_status = OrderItem.PricingStatus.PENDING
# prices are 0 when user or invoice_contact is subscribed to the product
if self.product.free_when_subscribed:
if self.order.client.identity.subscribed or (
self.order.invoice_contact is not None and self.order.invoice_contact.subscribed):
self._price = Money(0, 'CHF')
self._base_fee = Money(0, 'CHF')
self.price_status = OrderItem.PricingStatus.CALCULATED
return
# prices are 0 when order is for public authorities or academic purposes
if self.order.order_type.name in ( 'Communal', 'Cantonal', 'Fédéral', 'Académique'):
self._price = Money(0, 'CHF')
self._base_fee = Money(0, 'CHF')
self.price_status = OrderItem.PricingStatus.CALCULATED
return
if self.product.pricing.pricing_type != Pricing.PricingType.MANUAL:
if self.product.pricing.pricing_type == Pricing.PricingType.FROM_CHILDREN_OF_GROUP:
self._price = Money(0, 'CHF')
self._base_fee = Money(0, 'CHF')
for product in self.product.products.all():
if product.geom.intersects(self.order.geom):
price, base_fee = product.pricing.get_price(self.order.geom)
if price:
self._price += price
if base_fee:
self._base_fee = base_fee if base_fee > self._base_fee else self._base_fee
else:
self._price, self._base_fee = self.product.pricing.get_price(self.order.geom)
if self._price is not None:
self.price_status = OrderItem.PricingStatus.CALCULATED
return
else:
if price is not None:
self._price = price
self._base_fee = base_fee
self.price_status = OrderItem.PricingStatus.CALCULATED
return
self.price_status = OrderItem.PricingStatus.PENDING
return
def ask_price(self):
if self.product.pricing.pricing_type == Pricing.PricingType.MANUAL:
send_geoshop_email(
_('Geoshop - Quote requested'),
template_name='email_admin',
template_data={
'messages': [_('A new quote has been requested:')],
'details': {
_('order'): self.order.id,
_('product'): self.product.label,
_('link'): reverse("admin:api_order_change", args=[self.order.id])
}
}
)
class ProductField(models.Model):
"""
Describes fields and their types of products.
"""
class ProductFieldType(models.TextChoices):
REAL = 'REAL', 'Real'
DATE = 'DATE', 'Date'
CHAR = 'CHAR', 'Character'
VARCHAR = 'VARCHAR', 'Varying character'
INT = 'INT', 'Integer'
BIGINT = 'BIGINT', 'Big integer'
FLOAT = 'FLOAT', 'Floating number'
db_name = models.CharField(_('db_name'), max_length=50, blank=True)
export_name = models.CharField(_('export_name'), max_length=50, blank=True)
field_type = models.CharField(
_('field_type'), max_length=10, choices=ProductFieldType.choices, blank=True)
field_length = models.SmallIntegerField(_('field_length'), )
product = models.ForeignKey(Product, verbose_name=_('product'), on_delete=models.CASCADE)
class Meta:
db_ | class ProductFormat(models.Model):
product = models.ForeignKey(
Product, models.CASCADE, verbose_name=_('product'), related_name='product_formats')
data_format = models.ForeignKey(DataFormat, models.CASCADE, verbose_name=_('data_format'))
# extraction manuelle ou automatique
is_manual = models.BooleanField(_('is_manual'), default=False)
class Meta:
db_table = 'product_format'
unique_together = (('product', 'data_format'),)
verbose_name = _('product_format')
class UserChange(AbstractIdentity):
"""
Stores temporary data in order to proceed user profile change requests.
"""
client = models.ForeignKey(UserModel, models.CASCADE, verbose_name=_('client'))
ide_id = models.CharField(_('ide_number'), max_length=15, null=True, blank=True, validators=[
RegexValidator(
regex=r'^CHE-([0-9]{3}\.){2}[0-9]{3}$',
message=_('IDE number is not valid'),
),
])
class Meta:
db_table = 'user_change'
verbose_name = _('user_change')
| table = 'product_field'
verbose_name = _('product_field')
|
lib.rs | //! This crate provides ragel language support for the [tree-sitter][] parsing library.
//!
//! Typically, you will use the [language][language func] function to add this language to a
//! tree-sitter [Parser][], and then use the parser to parse some code:
//!
//! ```
//! let code = "";
//! let mut parser = tree_sitter::Parser::new();
//! parser.set_language(tree_sitter_ragel::language()).expect("Error loading ragel grammar");
//! let tree = parser.parse(code, None).unwrap();
//! ```
//!
//! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
//! [language func]: fn.language.html
//! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html
//! [tree-sitter]: https://tree-sitter.github.io/
use tree_sitter::Language;
extern "C" {
fn tree_sitter_ragel() -> Language;
}
/// Get the tree-sitter [Language][] for this grammar.
///
/// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
pub fn language() -> Language |
/// The content of the [`node-types.json`][] file for this grammar.
///
/// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types
pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json");
// Uncomment these to include any queries that this grammar contains
// pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm");
// pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm");
// pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm");
// pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm");
#[cfg(test)]
mod tests {
#[test]
fn test_can_load_grammar() {
let mut parser = tree_sitter::Parser::new();
parser
.set_language(super::language())
.expect("Error loading ragel language");
}
}
| {
unsafe { tree_sitter_ragel() }
} |
manage-tour.module_20190912221428.ts | import { NgModule } from '@angular/core';
import { ThemeModule } from '../../@theme/theme.module';
import { SharedModule } from '../../shared/share.module';
import { Ng2SmartTableModule } from 'ng2-smart-table';
import { CommonModule } from '@angular/common';
import { AddTourComponent } from './add-tour/add-tour.component';
import { HttpModule } from '@angular/http';
import { Ng2CompleterModule } from 'ng2-completer';
import { ManageTourComponent, ButtonViewComponent } from './manage-tour.component';
import { NgxCurrencyModule } from 'ngx-currency';
import { NbButtonModule, NbCardModule, NbSelectModule, NbInputModule } from '@nebular/theme';
import { ReactiveFormsModule } from '@angular/forms';
import { FormsModule } from '@angular/forms';
@NgModule({
imports: [
ReactiveFormsModule,
FormsModule,
ThemeModule,
SharedModule,
Ng2SmartTableModule,
CommonModule,
HttpModule,
Ng2CompleterModule,
NgxCurrencyModule,
NbButtonModule,
NbCardModule,
NbSelectModule,
NbInputModule,
],
exports: [ManageTourComponent],
declarations: [ManageTourComponent, AddTourComponent, ButtonViewComponent],
entryComponents: [ManageTourComponent, AddTourComponent, ButtonViewComponent],
})
export class | {}
| ManageTourModule |
tag_postproc.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# author: Vadász Noémi
# created: 2019/03/28
# feldolgozza a google spreadsheetsben annotált, előtte emtsv-vel elemzett korpuszfájlt
# bemenet
# csv (google spreadsheetsből importált)
# token, összes elemzés, tő, részletes címke, tag, helyes, javított tő, tokenizálás, javított token, megjegyzés
# tokenenként annyi sor, ahány különböző emmorph elemzés (címke+tő kombináció) van az anas oszlopban
# a kézzel kiválasztott címke+tő kombinációnál a 6. (helyes) oszlopban X szerepel
# ha nem üres a 6. oszlop, akkor a 2. oszlopba annak tartalma megy (lemma javítása)
# ha nem üres a 7. oszlop, akkor a tokenizálás javítását szolgáló parancsokat végre kell hajtani
# ha nem üres a 8. oszlop, akkor a 0. oszlopba annak tartalma megy (token javítása)
# kimenet | # form, anas, lemma, xpostag
# soronként egy token
# mondatok között üres sor
import csv
import sys
# 0: string
# 1: anas
# 2: lemma
# 3: hfstana
# 4: tag
# 5: helyes
# 6: javított tő
# 7: tokenizálás
# 8: javított token
# 9: megjegyzés
def read_file():
"""
stdin-ről olvas
első sor: header
feldolgozza a tokenizálás javítását szolgáló parancsokat, ennek megfelelően tárolja el a sorokat
"""
empty_line = dict()
empty_line['string'] = ''
lines = list()
newtoken = dict()
reader = csv.reader(sys.stdin)
next(reader)
for line in reader:
# új token
if line[0] or line[7] == 'token beszúr':
if newtoken:
lines.append(newtoken)
newtoken = dict()
if line[7] not in ('token össze', 'token töröl'):
if line[7] == 'token beszúr':
newtoken['anas'] = '[]'
newtoken['anas'] = line[1]
# jó token
if not line[8]:
newtoken['string'] = line[0]
# hibás token
else:
newtoken['string'] = line[8]
# jó tő
if not line[6]:
newtoken['lemma'] = line[2]
# hibás tő
else:
newtoken['lemma'] = line[6]
# jó vagy javított címke
if line[5]:
# jó címke
if line[5] == 'X':
newtoken['hfstana'] = line[4]
# javított címke
else:
newtoken['hfstana'] = line[5]
# összetokenizálás
else:
# összetokenizálás első sora
if line[6] and line[7]:
newtoken['string'] = line[8]
newtoken['lemma'] = line[6]
newtoken['anas'] = line[1]
# jó címke
if line[5] == 'X':
newtoken['hfstana'] = line[4]
# javított címke
else:
newtoken['hfstana'] = line[5]
# alternatív címkék
else:
# alternatív címke és tő megadva
if 'X' in line[5]:
newtoken['lemma'] = line[2]
newtoken['hfstana'] = line[4]
# széttokenizálás
elif line[7] == 'token szét':
lines.append(newtoken)
newtoken = dict()
newtoken['anas'] = '[]'
newtoken['string'] = line[8]
newtoken['lemma'] = line[6]
newtoken['hfstana'] = line[5]
# mondat széttokenizálása
elif all(cell == '' for cell in line) or line[7] == 'mondat szét':
lines.append(newtoken)
lines.append(empty_line)
newtoken = dict()
lines.append(newtoken)
return lines
def print_file(lines):
"""
stdout-ra ír
xtsv kimenet
"""
print('form\tanas\tlemma\txpostag')
for line in lines:
if len(line) > 1:
print(line['string'], line['anas'], line['lemma'], line['hfstana'], sep='\t')
else:
print(line['string'])
print('')
def main():
lines = read_file()
print_file(lines)
if __name__ == "__main__":
main() | # xtsv |
dependency_watchdog_test.go | // Copyright (c) 2021 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file | // you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package kubeapiserver_test
import (
. "github.com/gardener/gardener/pkg/operation/botanist/component/kubeapiserver"
restarterapi "github.com/gardener/dependency-watchdog/pkg/restarter/api"
scalerapi "github.com/gardener/dependency-watchdog/pkg/scaler/api"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
autoscalingv1 "k8s.io/api/autoscaling/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/utils/pointer"
)
var _ = Describe("DependencyWatchdog", func() {
Describe("#DependencyWatchdogEndpointConfiguration", func() {
It("should compute the correct configuration", func() {
config, err := DependencyWatchdogEndpointConfiguration()
Expect(config).To(Equal(map[string]restarterapi.Service{
"kube-apiserver": {
Dependants: []restarterapi.DependantPods{
{
Name: "controlplane",
Selector: &metav1.LabelSelector{
MatchExpressions: []metav1.LabelSelectorRequirement{
{
Key: "gardener.cloud/role",
Operator: "In",
Values: []string{"controlplane"},
},
{
Key: "role",
Operator: "NotIn",
Values: []string{"main", "apiserver"},
},
},
},
},
},
},
}))
Expect(err).NotTo(HaveOccurred())
})
})
Describe("#DependencyWatchdogProbeConfiguration", func() {
It("should compute the correct configuration", func() {
config, err := DependencyWatchdogProbeConfiguration()
Expect(config).To(ConsistOf(scalerapi.ProbeDependants{
Name: "shoot-kube-apiserver",
Probe: &scalerapi.ProbeConfig{
External: &scalerapi.ProbeDetails{KubeconfigSecretName: "dependency-watchdog-external-probe"},
Internal: &scalerapi.ProbeDetails{KubeconfigSecretName: "dependency-watchdog-internal-probe"},
PeriodSeconds: pointer.Int32(30),
},
DependantScales: []*scalerapi.DependantScaleDetails{
{
ScaleRef: autoscalingv1.CrossVersionObjectReference{
APIVersion: "apps/v1",
Kind: "Deployment",
Name: "kube-controller-manager",
},
ScaleUpDelaySeconds: pointer.Int32(120),
},
{
ScaleRef: autoscalingv1.CrossVersionObjectReference{
APIVersion: "apps/v1",
Kind: "Deployment",
Name: "machine-controller-manager",
},
ScaleUpDelaySeconds: pointer.Int32(60),
ScaleRefDependsOn: []autoscalingv1.CrossVersionObjectReference{
{
APIVersion: "apps/v1",
Kind: "Deployment",
Name: "kube-controller-manager",
},
},
},
{
ScaleRef: autoscalingv1.CrossVersionObjectReference{
APIVersion: "apps/v1",
Kind: "Deployment",
Name: "cluster-autoscaler",
},
ScaleRefDependsOn: []autoscalingv1.CrossVersionObjectReference{
{
APIVersion: "apps/v1",
Kind: "Deployment",
Name: "machine-controller-manager",
},
},
},
},
}))
Expect(err).NotTo(HaveOccurred())
})
})
}) | //
// Licensed under the Apache License, Version 2.0 (the "License"); |
conf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# issho documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 9 13:47:02 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath(".."))
import issho
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"issho"
copyright = u"2019, Michael Bilow"
author = u"Michael Bilow"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = issho.__version__
# The full version, including alpha/beta/rc tags.
release = issho.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# -- Options for HTMLHelp output ---------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "isshodoc"
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto, manual, or own class]).
latex_documents = [
(master_doc, "issho.tex", u"issho Documentation", u"Michael Bilow", "manual")
]
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "issho", u"issho Documentation", [author], 1)]
# -- Options for Texinfo output ----------------------------------------
| # (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"issho",
u"issho Documentation",
author,
"issho",
"Simple connections & command execution with a remote host.",
"issho Documentation",
)
] | # Grouping the document tree into Texinfo files. List of tuples |
AppInitCreators.ts | import * as A from "../Actions";
import * as D from "../../Domain";
import * as T from "../ActionTypes";
import { Message } from "../../components/messages/Properties";
import { createDefaultAction, createGenericAction } from "./BaseCreators";
export function createAppInitAction(): A.IDefaultAction {
return createDefaultAction(T.APP_INIT);
}
| export function createAppConfiguredAction(): A.IDefaultAction {
return createDefaultAction(T.APP_CONFIGURED);
}
export function createConfigReceivedAction(config: D.IConfig) {
return createGenericAction<D.IConfig>(T.CONFIG_RECEIVED, config);
} | |
index.tsx | import React from 'react';
import { withTheme } from 'styled-components';
import { mergeStyles, Theme } from '@lapidist/styles';
import { BoxProps } from '../box';
import { panelHeadingStyles } from './styles';
import { Heading, HeadingSize } from '../heading';
export * from './styles';
export type PanelHeadingPropType = BoxProps;
export interface PanelHeadingProps {
readonly kind: string;
readonly theme: Theme;
readonly size?: HeadingSize;
}
const BasePanelHeading: React.FC<PanelHeadingPropType & PanelHeadingProps> = ({
as,
styles,
kind,
theme,
children,
size,
...restProps
}) => (
<Heading
as={as}
size={size}
styles={mergeStyles(panelHeadingStyles({ kind, theme }), styles)}
{...restProps}
>
{children}
</Heading>
);
export const PanelHeading = withTheme(BasePanelHeading); | PanelHeading.displayName = 'PanelHeading'; | |
server.go | package main
import (
"log"
"net/http"
"fmt"
"flag"
"time"
"strings"
"bytes"
"encoding/json"
"code.google.com/p/go.net/websocket"
"runtime"
)
type NoSuchClientError struct {
uuid string
}
func (e *NoSuchClientError) Error() string {
return fmt.Sprintf("No such client connected: %s", e.uuid)
}
type WebFSHandler struct {
}
func quietPanicRecover() {
if r := recover(); r != nil {
fmt.Println("Recovered from panic", r)
}
}
func (self *WebFSHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
parts := strings.Split(r.URL.Path, "/")
id := parts[0]
defer quietPanicRecover()
req, err := NewClientRequest(id)
if err != nil {
http.Error(w, err.Error(), http.StatusGone)
return
}
// First send request line
requestLine := fmt.Sprintf("%s %s", r.Method, "/" + strings.Join(parts[1:], "/"))
fmt.Println(requestLine)
req.ch <- []byte(requestLine)
// Then send headers
var headerBuffer bytes.Buffer
for h, v := range r.Header {
headerBuffer.Write([]byte(fmt.Sprintf("%s: %s\n", h, v)))
}
req.ch <- headerBuffer.Bytes()
// Send body
for {
buffer := make([]byte, BUFFER_SIZE)
n, _ := r.Body.Read(buffer)
if n == 0 {
break
}
req.ch <- buffer[:n]
}
req.ch <- DELIMITERBUFFER
statusCodeBuffer, ok := <-req.ch
if !ok {
http.Error(w, "Connection closed", http.StatusInternalServerError)
return
}
statusCode := BytesToInt(statusCodeBuffer)
headersBuffer, ok := <-req.ch
if !ok {
http.Error(w, "Connection close", http.StatusInternalServerError)
return
}
headers := strings.Split(string(headersBuffer), "\n")
for _, header := range headers {
headerParts := strings.Split(header, ": ")
w.Header().Set(headerParts[0], headerParts[1])
}
w.WriteHeader(statusCode)
for {
buffer, ok := <-req.ch
if !ok {
w.Write([]byte("Connection closed"))
break
}
if IsDelimiter(buffer) {
break
}
_, err := w.Write(buffer)
if err != nil {
fmt.Println("Got error", err)
break
}
}
}
var clients map[string]*Client = make(map[string]*Client)
type Client struct {
currentRequestId byte
writeChannel chan []byte
pendingRequests []*ClientRequest
}
func (c *Client) close() {
for i := range(c.pendingRequests) {
if c.pendingRequests[i] != nil {
c.pendingRequests[i].close()
}
}
close(c.writeChannel)
}
func NewClient(uuid string) *Client {
client := &Client {
writeChannel: make(chan []byte),
pendingRequests: make([]*ClientRequest, 255),
}
clients[uuid] = client
return client
}
type ClientRequest struct {
requestId byte
// Reusing channel for reading and writing
ch chan[] byte
}
func (cr *ClientRequest) close() {
close(cr.ch)
}
func addRequestId(requestId byte, buffer []byte) []byte {
newBuffer := make([]byte, len(buffer)+1)
newBuffer[0] = requestId
for i := range buffer {
newBuffer[i+1] = buffer[i]
}
return newBuffer
}
func NewClientRequest(uuid string) (*ClientRequest, error) {
client, ok := clients[uuid]
if !ok {
return nil, &NoSuchClientError{uuid}
}
client.currentRequestId = (client.currentRequestId + 1) % 255
requestId := client.currentRequestId
req := &ClientRequest {
requestId: requestId,
ch: make(chan []byte),
}
client.pendingRequests[requestId] = req
go func() {
defer quietPanicRecover()
// Listen on req.ch and move messages over (after
// adding requestId to the write channel
// stop after the delimiter, no more reading will need
// to happen
for {
buffer, ok := <-req.ch
if !ok {
break
}
clients[uuid].writeChannel <- addRequestId(requestId, buffer)
if IsDelimiter(buffer) {
break
}
}
}()
return req, nil
}
func socketServer(ws *websocket.Conn) {
defer ws.Close()
buffer := make([]byte, BUFFER_SIZE)
n, err := ws.Read(buffer)
var hello HelloMessage
err = json.Unmarshal(buffer[:n], &hello)
if err != nil {
fmt.Println("Could not parse welcome message.")
return
}
fmt.Println("Client", hello.UUID, "connected")
client := NewClient(hello.UUID)
closeSocket := func() {
client, ok := clients[hello.UUID];
if ok {
fmt.Println("Client disconnected", hello.UUID)
client.close()
delete(clients, hello.UUID)
} // else was already closed before
}
defer closeSocket()
// Read frame from socket and forward it to request channel
go func() {
defer quietPanicRecover()
for {
requestId, buffer, err := ReadFrame(ws)
if err != nil {
//fmt.Println("Read error", err)
closeSocket()
return
}
req := client.pendingRequests[requestId]
if req == nil {
fmt.Println("Got response for non-existent request", requestId, string(buffer))
continue
}
req.ch <- buffer
}
}()
for {
writeBuffer, request_ok := <-client.writeChannel
if !request_ok {
return
}
err = WriteFrame(ws, writeBuffer[0], writeBuffer[1:])
if err != nil {
fmt.Println("Got error", err)
return
}
}
}
func PrintStats() {
var memStats runtime.MemStats
for {
runtime.ReadMemStats(&memStats)
clientCount := 0
for _ = range clients {
clientCount++
}
fmt.Printf("Clients: %d Goroutines: %d Memory: %dK\n", clientCount, runtime.NumGoroutine(), memStats.Alloc / 1024)
time.Sleep(10e9) // Every 10 seconds
}
}
func ParseServerFlags(args []string) (ip string, port int, sslCrt string, sslKey string) {
var stats bool
config := ParseConfig()
flagSet := flag.NewFlagSet("zed", flag.ExitOnError)
flagSet.StringVar(&ip, "h", config.Server.Ip, "IP to bind to")
flagSet.IntVar(&port, "p", config.Server.Port, "Port to listen on")
flagSet.StringVar(&sslCrt, "sslcrt", config.Server.Sslcert, "Path to SSL certificate")
flagSet.StringVar(&sslKey, "sslkey", config.Server.Sslkey, "Path to SSL key")
flagSet.BoolVar(&stats, "stats", false, "Whether to print go-routine count and memory usage stats periodically.")
flagSet.Parse(args)
if stats {
go PrintStats()
}
flagSet.Parse(args)
return
}
func RunServer(ip string, port int, sslCrt string, sslKey string, withSignaling bool) | {
http.Handle("/fs/", http.StripPrefix("/fs/", &WebFSHandler{}))
http.Handle("/clientsocket", websocket.Handler(socketServer))
if withSignaling {
http.Handle("/signalsocket", websocket.Handler(HandleSignalSocket))
http.HandleFunc("/signal", HandleSignal)
}
if sslCrt != "" {
fmt.Printf("Zed server now running on wss://%s:%d\n", ip, port)
log.Fatal(http.ListenAndServeTLS(fmt.Sprintf("%s:%d", ip, port), sslCrt, sslKey, nil))
} else {
fmt.Printf("Zed server now running on ws://%s:%d\n", ip, port)
log.Fatal(http.ListenAndServe(fmt.Sprintf("%s:%d", ip, port), nil))
}
} |
|
municipality.service.ts | import { Injectable, Body } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { Vereda } from '../entities/Vereda';
import { Municipio } from '../entities/Municipio';
import { CreateTownDto } from './dto/createTown.dto';
import { UpdateTownDto } from './dto/updateTown.dto';
@Injectable()
export class MunicipalityService {
constructor(
@InjectRepository(Municipio) private readonly municipalityRepository: Repository<Municipio>,
@InjectRepository(Vereda) private readonly townRepository: Repository<Vereda>
) { }
async createTown(body: CreateTownDto) {
try {
await this.townRepository.save(body)
return { success: 'OK' }
} catch (error) {
if (error.code = 'ER_NO_REFERENCED_ROW_2')
return { error: 'NOT_EXIST_MUNICIPALITY', detail: 'El municipio no existe!' }
return { error }
}
}
async createMunicipality(body: CreateTownDto) {
try {
await this.municipalityRepository.save(body)
return { success: 'OK' }
} catch (error) {
return { error }
}
}
async updateTown(body: UpdateTownDto) {
const exist = await this.townRepository.findOne({ select: ["nombre"], where: { id: body.id } })
if (!exist)
return { error: 'TOWN_NOT_EXIST', detail: 'La vereda no existe' }
await this.townRepository.update(body.id, body)
return { success: 'OK' }
}
async getTownMunicipality(idMunicipio: number) {
return await this.municipalityRepository.find({ relations: ["veredas"], where: { id: idMunicipio } })
}
async getMunicipalityByTown(idTown: number) {
return await this.municipalityRepository.createQueryBuilder()
.select(['Municipio.nombre'])
.innerJoin('Municipio.veredas', 'vereda')
.where('vereda.id=:idTown', { idTown })
.getOne()
} | .addSelect(['Organizacion.nombre', 'Organizacion.descripcion', 'Organizacion.contacto', 'Organizacion.temaCapacitacion', 'Organizacion.temaEmpresarial'])
.leftJoin('Municipio.veredas', 'Vereda')
.leftJoin('Vereda.organizacions', 'Organizacion')
.groupBy('Organizacion.id')
.getRawMany();
}
} | async quantityOrganizationsMunicipality() {
return await this.municipalityRepository.createQueryBuilder()
.select(['Municipio.nombre'])
.addSelect('count(Organizacion.id)', 'countOrganizacion') |
eval_context.go | package alerting
import (
"context"
"fmt"
"time"
"github.com/xformation/sdp/pkg/bus"
"github.com/xformation/sdp/pkg/log"
m "github.com/xformation/sdp/pkg/models"
"github.com/xformation/sdp/pkg/setting"
)
type EvalContext struct {
Firing bool
IsTestRun bool
EvalMatches []*EvalMatch
Logs []*ResultLogEntry
Error error
ConditionEvals string
StartTime time.Time
EndTime time.Time
Rule *Rule
log log.Logger
dashboardRef *m.DashboardRef
ImagePublicUrl string
ImageOnDiskPath string
NoDataFound bool
PrevAlertState m.AlertStateType
Ctx context.Context
}
func NewEvalContext(alertCtx context.Context, rule *Rule) *EvalContext |
type StateDescription struct {
Color string
Text string
Data string
}
func (c *EvalContext) GetStateModel() *StateDescription {
switch c.Rule.State {
case m.AlertStateOK:
return &StateDescription{
Color: "#36a64f",
Text: "OK",
}
case m.AlertStateNoData:
return &StateDescription{
Color: "#888888",
Text: "No Data",
}
case m.AlertStateAlerting:
return &StateDescription{
Color: "#D63232",
Text: "Alerting",
}
default:
panic("Unknown rule state " + c.Rule.State)
}
}
func (c *EvalContext) ShouldUpdateAlertState() bool {
return c.Rule.State != c.PrevAlertState
}
func (a *EvalContext) GetDurationMs() float64 {
return float64(a.EndTime.Nanosecond()-a.StartTime.Nanosecond()) / float64(1000000)
}
func (c *EvalContext) GetNotificationTitle() string {
return "[" + c.GetStateModel().Text + "] " + c.Rule.Name
}
func (c *EvalContext) GetDashboardUID() (*m.DashboardRef, error) {
if c.dashboardRef != nil {
return c.dashboardRef, nil
}
uidQuery := &m.GetDashboardRefByIdQuery{Id: c.Rule.DashboardId}
if err := bus.Dispatch(uidQuery); err != nil {
return nil, err
}
c.dashboardRef = uidQuery.Result
return c.dashboardRef, nil
}
const urlFormat = "%s?fullscreen=true&edit=true&tab=alert&panelId=%d&orgId=%d"
func (c *EvalContext) GetRuleUrl() (string, error) {
if c.IsTestRun {
return setting.AppUrl, nil
}
if ref, err := c.GetDashboardUID(); err != nil {
return "", err
} else {
return fmt.Sprintf(urlFormat, m.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelId, c.Rule.OrgId), nil
}
}
| {
return &EvalContext{
Ctx: alertCtx,
StartTime: time.Now(),
Rule: rule,
Logs: make([]*ResultLogEntry, 0),
EvalMatches: make([]*EvalMatch, 0),
log: log.New("alerting.evalContext"),
PrevAlertState: rule.State,
}
} |
chord-list.constant.js | export const chordList = [
{
id: "1",
name: "F",
startString: 1,
structure: {
strings: {
sixthString: [1],
fifthString: [1, 3],
fourthString: [1, 3],
thirdString: [1, 2],
secondStrind: [1],
firstString: [1]
}
}
},
{
id: "2",
name: "F7",
startString: 1,
structure: {
strings: {
sixthString: [1],
fifthString: [1, 3],
fourthString: [1],
thirdString: [1, 2],
secondStrind: [1],
firstString: [1]
}
}
},
{
id: "3",
name: "Fm",
startString: 1,
structure: {
strings: {
sixthString: [1],
fifthString: [1, 3],
fourthString: [1, 3],
thirdString: [1],
secondStrind: [1],
firstString: [1]
}
}
},
{
id: "4",
name: "C",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [3],
fourthString: [2],
thirdString: [],
secondStrind: [1],
firstString: []
}
}
},
{
id: "5",
name: "C7",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [3],
fourthString: [2],
thirdString: [3],
secondStrind: [1],
firstString: []
}
}
},
{
id: "6",
name: "Cm",
startString: 2,
structure: {
strings: {
sixthString: [2],
fifthString: [2],
fourthString: [2, 4],
thirdString: [2, 4],
secondStrind: [2, 3],
firstString: [2]
}
}
},
{
id: "7",
name: "D",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [],
fourthString: [],
thirdString: [2],
secondStrind: [3],
firstString: [2]
}
}
},
{
id: "8",
name: "D7",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [],
fourthString: [],
thirdString: [2],
secondStrind: [1],
firstString: [2]
}
}
},
{
id: "9",
name: "Dm",
startString: 1,
structure: {
strings: { | sixthString: [],
fifthString: [],
fourthString: [],
thirdString: [2],
secondStrind: [3],
firstString: [1]
}
}
},
{
id: "10",
name: "E",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [2],
fourthString: [2],
thirdString: [1],
secondStrind: [],
firstString: []
}
}
},
{
id: "11",
name: "E7",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [2],
fourthString: [],
thirdString: [1],
secondStrind: [],
firstString: []
}
}
},
{
id: "11",
name: "Em",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [2],
fourthString: [2],
thirdString: [],
secondStrind: [],
firstString: []
}
}
},
{
id: "12",
name: "G",
startString: 1,
structure: {
strings: {
sixthString: [3],
fifthString: [2],
fourthString: [],
thirdString: [],
secondStrind: [],
firstString: [3]
}
}
},
{
id: "13",
name: "G7",
startString: 1,
structure: {
strings: {
sixthString: [3],
fifthString: [2],
fourthString: [],
thirdString: [],
secondStrind: [],
firstString: [1]
}
}
},
{
id: "14",
name: "Gm",
startString: 2,
structure: {
strings: {
sixthString: [2, 5],
fifthString: [2, 5],
fourthString: [2],
thirdString: [2],
secondStrind: [2],
firstString: [2]
}
}
},
{
id: "15",
name: "A",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [],
fourthString: [2],
thirdString: [2],
secondStrind: [2],
firstString: []
}
}
},
{
id: "16",
name: "A7",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [],
fourthString: [2],
thirdString: [],
secondStrind: [2],
firstString: []
}
}
},
{
id: "17",
name: "Am",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [],
fourthString: [2],
thirdString: [2],
secondStrind: [1],
firstString: []
}
}
},
{
id: "18",
name: "B",
startString: 2,
structure: {
strings: {
sixthString: [2],
fifthString: [2],
fourthString: [2, 4],
thirdString: [2, 4],
secondStrind: [2, 4],
firstString: [2]
}
}
},
{
id: "19",
name: "Bm",
startString: 2,
structure: {
strings: {
sixthString: [2],
fifthString: [2],
fourthString: [2, 4],
thirdString: [2, 4],
secondStrind: [2, 3],
firstString: [2]
}
}
},
{
id: "20",
name: "B7",
startString: 1,
structure: {
strings: {
sixthString: [],
fifthString: [2],
fourthString: [1],
thirdString: [2],
secondStrind: [],
firstString: [2]
}
}
}
]; | |
mod.rs | pub mod sub_struct;
use sub_struct::TestStructSub;
use flexpiler::common::rustc::Format;
use flexpiler::Error;
pub struct TestStruct {
pub a_i32: i32,
pub a_usize: usize,
pub a_string: String,
pub a_sub: TestStructSub,
}
impl flexpiler::identity::Trait for TestStruct {
fn | () -> String {
return std::string::String::from("TestStruct");
}
}
pub struct TestStructFlexpilerDeserializer {
pub a_i32_option: Option<i32>,
pub a_usize_option: Option<usize>,
pub a_string_option: Option<String>,
pub a_sub_option: Option<TestStructSub>,
}
impl Default for TestStructFlexpilerDeserializer {
fn default() -> Self {
TestStructFlexpilerDeserializer {
a_i32_option: None,
a_usize_option: None,
a_string_option: None,
a_sub_option: None,
}
}
}
impl std::convert::TryInto<TestStruct> for TestStructFlexpilerDeserializer {
type Error = flexpiler::Error<flexpiler::common::rustc::error::Source>;
fn try_into(self) -> Result<TestStruct, Self::Error> {
use flexpiler::deserializer::Trait as DeserializerTrait;
use flexpiler::deserializer::context::Trait as DeserializerContextTrait;
use flexpiler::error::Trait as ErrorTrait;
use flexpiler::error::propagation::Trait as ErrorPropagationTrait;
let a_i32 = match self.a_i32_option {
Some(value) => value,
None => {
let missing_field = flexpiler::common::rustc::error::MissingStructField {
struct_declaration_found: String::from("ManualSubTestStruct"),
field_declaration_expected: String::from("a_i32"),
};
let error = flexpiler::Error::gen(missing_field)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::Trait<TestStruct, flexpiler::common::rustc::Format>>::context());
return Err(error);
}
};
let a_usize = match self.a_usize_option {
Some(value) => value,
None => {
let missing_field = flexpiler::common::rustc::error::MissingStructField {
struct_declaration_found: String::from("ManualSubTestStruct"),
field_declaration_expected: String::from("a_usize"),
};
let error = flexpiler::Error::gen(missing_field)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::Trait<TestStruct, flexpiler::common::rustc::Format>>::context());
return Err(error);
}
};
let a_string = match self.a_string_option {
Some(value) => value,
None => {
let missing_field = flexpiler::common::rustc::error::MissingStructField {
struct_declaration_found: String::from("ManualSubTestStruct"),
field_declaration_expected: String::from("a_string"),
};
let error = flexpiler::Error::gen(missing_field)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::Trait<TestStruct, flexpiler::common::rustc::Format>>::context());
return Err(error);
}
};
let a_sub = match self.a_sub_option {
Some(value) => value,
None => {
let missing_field = flexpiler::common::rustc::error::MissingStructField {
struct_declaration_found: String::from("ManualSubTestStruct"),
field_declaration_expected: String::from("a_sub"),
};
let error = flexpiler::Error::gen(missing_field)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::Trait<TestStruct, flexpiler::common::rustc::Format>>::context());
return Err(error);
}
};
Ok(TestStruct {
a_i32,
a_usize,
a_string,
a_sub,
})
}
}
impl flexpiler::deserializer::Trait<
TestStruct,
flexpiler::common::rustc::Format,
> for TestStructFlexpilerDeserializer {
fn deserialize<ReaderType>(reader_mut_ref: &mut ReaderType)
-> flexpiler::deserializer::Result<
TestStruct,
flexpiler::common::rustc::deserializer::Context,
flexpiler::Error<flexpiler::common::rustc::error::Source>
>
where ReaderType: flexpiler::reader::Trait {
use flexpiler::deserializer::Trait as DeserializerTrait;
use flexpiler::deserializer::context::Trait as DeserializerContextTrait;
use flexpiler::error::Trait as ErrorTrait;
use flexpiler::error::propagation::Trait as ErrorPropagationTrait;
use flexpiler::identity::Trait;
use flexpiler::parser::Parse;
let mut deserializer = TestStructFlexpilerDeserializer::default();
let (identifier_data, identifier_finish) = match flexpiler::common::rustc::block::Identifier::parse(reader_mut_ref) {
Ok(flexpiler::common::rustc::block::identifier::Result::NoDataFound { finish }) => {
return flexpiler::deserializer::Result::NoDataFound {
context: finish.into()
};
},
Ok(flexpiler::common::rustc::block::identifier::Result::DataFound { data, finish }) => {
(data, finish)
},
Err(parser_error) => {
let error = flexpiler::Error::gen(parser_error)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::Trait<TestStruct, flexpiler::common::rustc::Format>>::context());
return flexpiler::deserializer::Result::Err(error);
}
};
let mut context: flexpiler::common::rustc::deserializer::Context = identifier_finish.into();
if context == flexpiler::common::rustc::deserializer::Context::Freestanding {
context = match flexpiler::common::rustc::block::ContextDenominator::parse(reader_mut_ref) {
Ok(result) => {
result.finish.into()
},
Err(parser_error) => {
let error = flexpiler::Error::gen(parser_error)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::Trait<TestStruct, flexpiler::common::rustc::Format>>::context());
return flexpiler::deserializer::Result::Err(error);
},
}
}
match context {
flexpiler::common::rustc::deserializer::Context::DataStart => {},
_ => {
let unexpected_context = flexpiler::common::rustc::error::UnexpectedContext {
context_found: context,
context_expected: flexpiler::error::ExpectedEntries::from(vec![
flexpiler::common::rustc::deserializer::Context::DataStart,
]),
};
let error = flexpiler::Error::gen(unexpected_context)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::Trait<TestStruct, flexpiler::common::rustc::Format>>::context());
return flexpiler::deserializer::Result::Err(error);
},
}
if identifier_data.as_str() != "TestStruct" {
let incompatible_struct_declaration = flexpiler::common::rustc::error::IncompatibleStructDeclaration {
struct_declaration_expected: String::from("TestStruct"),
struct_declaration_found: identifier_data,
};
let error = flexpiler::Error::gen(incompatible_struct_declaration)
.propagate(TestStructFlexpilerDeserializer::context());
return flexpiler::deserializer::Result::Err(error);
}
loop {
let declaration = match flexpiler::parser::parse::<
flexpiler::common::rustc::block::DeclarationOrDataEnd,
ReaderType
> (reader_mut_ref) {
Err(parser_error) => {
let error = flexpiler::error::Error::gen(parser_error)
.propagate(TestStructFlexpilerDeserializer::context());
return flexpiler::deserializer::Result::Err(error);
},
Ok(flexpiler::common::rustc::block::declaration_or_data_end::Result::DataEnd()) => {
break;
},
Ok(flexpiler::common::rustc::block::declaration_or_data_end::Result::Declaration(declaration)) => {
declaration
},
};
let mut context = match declaration.as_str() {
"a_usize" => {
let result = flexpiler::common::rustc::deserializer::PrimitiveUSize::deserialize(reader_mut_ref);
match result {
flexpiler::deserializer::Result::DataFound {data, context} => {
deserializer.a_usize_option = Some(data);
context
},
flexpiler::deserializer::Result::NoDataFound { context } => {
let unexpected_no_content = flexpiler::error::source::common::UnexpectedNoContent {
definition_expected: <usize as flexpiler::identity::Trait>::definition(),
};
let error_source_common: flexpiler::error::source::Common = unexpected_no_content.into();
let error = flexpiler::Error::gen(error_source_common)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::FieldTrait<TestStruct, flexpiler::common::rustc::Format>>::context_field("a_usize"));
return flexpiler::deserializer::Result::Err(error);
},
flexpiler::deserializer::Result::Err(error) => {
let error = error.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::FieldTrait<TestStruct, flexpiler::common::rustc::Format>>::context_field("a_usize"));
return flexpiler::deserializer::Result::Err(error);
}
}
},
"a_string" => {
match <std::string::String as flexpiler::Deserialization<flexpiler::common::rustc::Format>>::Deserializer::deserialize(reader_mut_ref) {
flexpiler::deserializer::Result::DataFound { data, context } => {
deserializer.a_string_option = Some(data);
context
},
flexpiler::deserializer::Result::NoDataFound { context } => {
let unexpected_no_content = flexpiler::error::source::common::UnexpectedNoContent {
definition_expected: <std::string::String as flexpiler::identity::Trait>::definition(),
};
let error_source_common: flexpiler::error::source::Common = unexpected_no_content.into();
let error = flexpiler::Error::gen(error_source_common)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::FieldTrait<TestStruct, flexpiler::common::rustc::Format>>::context_field("a_string"));
return flexpiler::deserializer::Result::Err(error);
}
flexpiler::deserializer::Result::Err(error) => {
let error = error
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::FieldTrait<TestStruct, flexpiler::common::rustc::Format>>::context_field("a_string"));
return flexpiler::deserializer::Result::Err(error);
}
}
},
"a_i32" => {
let result = flexpiler::common::rustc::deserializer::PrimitiveI32::deserialize(reader_mut_ref);
match result {
flexpiler::deserializer::Result::DataFound{ data, context } => {
deserializer.a_i32_option = Some(data);
context
}
flexpiler::deserializer::Result::NoDataFound { context } => {
let unexpected_no_content = flexpiler::error::source::common::UnexpectedNoContent {
definition_expected: <i32 as flexpiler::identity::Trait>::definition(),
};
let error_source_common: flexpiler::error::source::Common = unexpected_no_content.into();
let error = flexpiler::Error::gen(error_source_common)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::FieldTrait<TestStruct, flexpiler::common::rustc::Format>>::context_field("a_i32"));
return flexpiler::deserializer::Result::Err(error);
},
flexpiler::deserializer::Result::Err(error) => {
let error = error.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::FieldTrait<TestStruct, flexpiler::common::rustc::Format>>::context_field("a_i32"));
return flexpiler::deserializer::Result::Err(error);
}
}
},
"a_sub" => {
let result = <TestStructSub as flexpiler::Deserialization<flexpiler::common::rustc::Format>>::Deserializer::deserialize(reader_mut_ref);
match result {
flexpiler::deserializer::Result::DataFound{ data, context } => {
deserializer.a_sub_option = Some(data);
context
},
flexpiler::deserializer::Result::NoDataFound{ context } => {
let unexpected_no_content = flexpiler::error::source::common::UnexpectedNoContent {
definition_expected: <TestStructSub as flexpiler::identity::Trait>::definition(),
};
let error_source_common: flexpiler::error::source::Common = unexpected_no_content.into();
let error = flexpiler::Error::gen(error_source_common)
.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::FieldTrait<TestStruct, flexpiler::common::rustc::Format>>::context_field("a_sub"));
return flexpiler::deserializer::Result::Err(error);
},
flexpiler::deserializer::Result::Err(error) => {
let error = error.propagate(<TestStructFlexpilerDeserializer as flexpiler::deserializer::context::FieldTrait<TestStruct, flexpiler::common::rustc::Format>>::context_field("a_sub"));
return flexpiler::deserializer::Result::Err(error);
},
}
},
_ => {
let unrecognized_field = flexpiler::common::rustc::error::UnrecognizedFieldDeclaration {
field_declaration_found: declaration,
field_declaration_expected_entries: flexpiler::error::ExpectedEntries::from(vec![
String::from("a_i32"),
String::from("a_usize"),
String::from("a_string"),
String::from("a_sub"),
]),
};
let error = flexpiler::Error::gen(unrecognized_field)
.propagate(TestStructFlexpilerDeserializer::context());
return flexpiler::deserializer::Result::Err(error);
}
};
if context == flexpiler::common::rustc::deserializer::Context::Freestanding {
match flexpiler::common::rustc::block::ContextDenominator::parse(reader_mut_ref) {
Ok(result) => {
context = result.finish.into();
},
Err(parser_error) => {
let error = flexpiler::Error::gen(parser_error)
.propagate(TestStructFlexpilerDeserializer::context());
return flexpiler::deserializer::Result::Err(error);
}
}
}
match context {
flexpiler::common::rustc::deserializer::Context::DataEnd => {
break;
},
flexpiler::common::rustc::deserializer::Context::Separator => {
//do nothing
},
_ => {
let unexpected_entry_finish = flexpiler::common::rustc::error::UnexpectedEntryFinishContext {
entry_declaration: declaration,
context_found: context,
context_expected: flexpiler::error::ExpectedEntries::from(vec![
flexpiler::common::rustc::deserializer::Context::DataEnd,
flexpiler::common::rustc::deserializer::Context::Separator,
])
};
let error = flexpiler::Error::gen(unexpected_entry_finish)
.propagate(TestStructFlexpilerDeserializer::context());
return flexpiler::deserializer::Result::Err(error);
}
}
}
return match <TestStructFlexpilerDeserializer as std::convert::TryInto<TestStruct>>::try_into(deserializer) {
Ok(data) => {
flexpiler::deserializer::Result::DataFound {
data,
context: flexpiler::common::rustc::deserializer::Context::Freestanding,
}
},
Err(error) => {
flexpiler::deserializer::Result::Err(error)
}
}
}
}
impl flexpiler::Deserialization<flexpiler::common::rustc::Format> for TestStruct {
type Deserializer = TestStructFlexpilerDeserializer;
}
#[test]
fn basic_deserialization_successful() {
use flexpiler::Deserialize;
use flexpiler::common::reader;
let mut reader = reader::String::from(
"TestStruct{ a_sub: TestStructSub{ a_usize: 60, }, a_string: \"Hello\", a_i32: -34, a_usize: 50 }"
);
let parse_result = TestStruct::deserialize(&mut reader);
let test_struct = match parse_result {
Ok(value) => value,
Err(error) => {
assert!(false, "simple_manual_test_struct_basic_serialisation_successful() test ended in a failed deserialization:\n{}", error);
return;
}
};
assert_eq!(test_struct.a_string, "Hello",
"simple_manual_test_struct_basic_serialisation_successful() deserialised value had unexpected string value {}, expected {}",
test_struct.a_string,
"Hello");
assert_eq!(test_struct.a_usize, 50,
"simple_manual_test_struct_basic_serialisation_successful() deserialised value had unexpected string value {}, expected {}",
test_struct.a_usize,
50);
assert_eq!(test_struct.a_i32, -34,
"simple_manual_test_struct_basic_serialisation_successful() deserialised value had unexpected string value {}, expected {}",
test_struct.a_i32,
-34);
assert_eq!(test_struct.a_sub.a_usize, 60,
"simple_manual_test_struct_basic_serialisation_successful() deserialised value had unexpected string value {}, expected {}",
test_struct.a_sub.a_usize,
60);
}
| definition |
__init__.py | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_uploads import UploadSet, configure_uploads, IMAGES
from flask_mail import Mail
from flask_bootstrap import Bootstrap
from config import config_options
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.session_protection = "strong"
login_manager.login_view = "auth.login"
photos = UploadSet("photos", IMAGES)
mail = Mail()
bootstrap = Bootstrap()
def create_app(config_name):
| app = Flask(__name__)
# Creating the app configurations
app.config.from_object(config_options[config_name])
# Initializing flask extensions
db.init_app(app)
login_manager.init_app(app)
mail.init_app(app)
bootstrap.init_app(app)
# Registering the main app Blueprint
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
# Registering auth blueprint
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix = "/authenticate")
# Configure UploadSet
configure_uploads(app, photos)
return app |
|
parser.py | import imaplib
import re
import email
import base64
import quopri
import sys
import time
from datetime import datetime
from email.header import decode_header
from core.imbox.utils import str_encode, str_decode
import operator as op
import configparser
import os
from email import header
# 获取配置
base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
conf = configparser.ConfigParser()
conf.read(os.path.join(base_dir, 'config.conf'))
attachment_path = conf.get("mailbox", "attachment_path")
illustrate_path = conf.get("mailbox", "illustrate_path")
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
def keys(self):
return self.__dict__.keys()
def __repr__(self):
return str(self.__dict__)
def decode_mail_header(value, default_charset='ascii'):
"""
Decode a header value into a unicode string.
"""
try:
headers = decode_header(value)
except email.errors.HeaderParseError:
return str_decode(str_encode(value, default_charset, 'replace'), default_charset)
else:
for index, (text, charset) in enumerate(headers):
try:
headers[index] = str_decode(text, charset or default_charset, 'replace')
except LookupError:
# if the charset is unknown, force default
headers[index] = str_decode(text, default_charset, 'replace')
return ''.join(headers)
def decode_mail_header_backup(s):
"""
Decode a header value into a unicode string.
"""
ret = []
for b, e in header.decode_header(re.sub(r'\n\s+', ' ', s)):
if e:
if e.lower() == 'gb2312':
e = 'gb18030'
b = b.decode(e)
elif isinstance(b, bytes):
b = b.decode('ascii')
ret.append(b)
return ''.join(ret)
def get_mail | , header_name):
"""
Retrieve all email addresses from one message header.
"""
headers = [h for h in message.get_all(header_name, [])]
addresses = email.utils.getaddresses(headers)
for index, (address_name, address_email) in enumerate(addresses):
addresses[index] = {'name': decode_mail_header(address_name),
'email': address_email}
return addresses
def decode_param(param):
name, v = param.split('=', 1)
values = v.split('\n')
value_results = []
for value in values:
match = re.search(r'=\?((?:\w|-)+)\?([QB])\?(.+)\?=', value)
if match:
encoding, type_, code = match.groups()
if type_ == 'Q':
value = quopri.decodestring(code)
elif type_ == 'B':
value = base64.decodebytes(code.encode())
value = str_encode(value, encoding)
value_results.append(value)
if value_results:
v = ''.join(value_results)
return name, v
def parse_attachment(message_part):
# Check again if this is a valid attachment
content_disposition = message_part.get("Content-Disposition", None)
if content_disposition is not None and not message_part.is_multipart():
dispositions = [
disposition.strip()
for disposition in content_disposition.split(";")
if disposition.strip()
]
if dispositions[0].lower() in ["attachment", "inline"]:
file_data = message_part.get_payload(decode=True)
attachment = {
'content-type': message_part.get_content_type(),
'size': len(file_data),
# 'content': io.BytesIO(file_data)
}
filename = decode_mail_header(str(message_part.get_param('name')))
if filename:
attachment['filename'] = filename
if not os.path.exists(attachment_path):
os.makedirs(attachment_path)
with open(attachment_path + "/" + filename, "wb") as fw:
fw.write(file_data)
fw.close()
for param in dispositions[1:]:
if param:
name, value = decode_param(param)
if 'file' in name:
attachment['filename'] = value[1:-1] if value.startswith('"') else value
if 'create-date' in name:
attachment['create-date'] = value
return attachment
return None
def decode_content(message):
content = message.get_payload(decode=True)
charset = message.get_content_charset('utf-8')
try:
return content.decode(charset, 'ignore')
except LookupError:
return content.decode(charset.replace("-", ""), 'ignore')
except AttributeError:
return content
def fetch_email_by_uid(uid, connection, parser_policy):
message, data = connection.uid('fetch', uid, '(BODY.PEEK[] FLAGS)')
raw_headers, raw_email = data[0]
email_object = parse_email(uid, raw_email, policy=parser_policy)
flags = parse_flags(raw_headers.decode())
email_object.__dict__['flags'] = flags
return email_object
def parse_flags(headers):
"""Copied from https://github.com/girishramnani/gmail/blob/master/gmail/message.py"""
if len(headers) == 0:
return []
if sys.version_info[0] == 3:
headers = bytes(headers, "ascii")
return list(imaplib.ParseFlags(headers))
def parse_email(uid, raw_email, policy=None):
parsed_email = {'uid': bytes.decode(uid)}
# parsed_email = {'uid': uid}
if isinstance(raw_email, bytes):
raw_email = str_encode(raw_email, 'utf-8', errors='ignore')
if policy is not None:
email_parse_kwargs = dict(policy=policy)
else:
email_parse_kwargs = {}
try:
email_message = email.message_from_string(
raw_email, **email_parse_kwargs)
except UnicodeEncodeError:
email_message = email.message_from_string(
raw_email.encode('utf-8'), **email_parse_kwargs)
maintype = email_message.get_content_maintype()
# parsed_email['raw_email'] = raw_email
body = {
"plain": [],
"html": []
}
attachments = []
if maintype in ('multipart', 'image'):
for part in email_message.walk():
content_type = part.get_content_type()
part_maintype = part.get_content_maintype()
content_disposition = part.get('Content-Disposition', None)
if content_disposition or not part_maintype == "text":
content = part.get_payload(decode=True)
else:
content = decode_content(part)
is_inline = content_disposition is None \
or content_disposition.startswith("inline")
if content_type == "text/plain" and is_inline:
body['plain'].append(re.sub('\\r|\\n', '', str(content)))
elif content_type == "text/html" and is_inline:
body['html'].append(re.sub('\\r|\\n', '', str(content)))
elif content_type == "image/jpeg" and is_inline: # 正文插图
filename = decode_mail_header(str(part.get_param('name')))
if not op.eq(filename, 'None'):
filename = filename[:-4]
if not os.path.exists(illustrate_path):
os.makedirs(illustrate_path)
with open(illustrate_path + "/" + filename, "wb") as fw:
fw.write(content)
fw.close()
elif content_disposition: # 附件
attachment = parse_attachment(part)
if attachment:
attachments.append(attachment)
elif maintype == 'text':
payload = decode_content(email_message)
body['plain'].append(payload)
parsed_email['attachments'] = attachments
parsed_email['body'] = body
email_dict = dict(email_message.items())
parsed_email['sent_from'] = get_mail_addresses(email_message, 'from')
parsed_email['sent_to'] = get_mail_addresses(email_message, 'to')
parsed_email['cc'] = get_mail_addresses(email_message, 'cc')
parsed_email['bcc'] = get_mail_addresses(email_message, 'bcc')
value_headers_keys = ['subject', 'date', 'message-id']
key_value_header_keys = ['received-spf',
'mime-version',
'x-spam-status',
'x-spam-score',
'content-type']
parsed_email['headers'] = []
for key, value in email_dict.items():
if key.lower() in value_headers_keys:
valid_key_name = key.lower().replace('-', '_')
parsed_email[valid_key_name] = decode_mail_header(value)
if key.lower() in key_value_header_keys:
parsed_email['headers'].append({'Name': key,
'Value': value})
if parsed_email.get('date'):
timetuple = email.utils.parsedate(parsed_email['date'])
parsed_date = datetime.fromtimestamp(time.mktime(timetuple)) if timetuple else None
parsed_email['parsed_date'] = parsed_date.strftime("%Y-%m-%d %H:%M:%S")
return Struct(**parsed_email) | _addresses(message |
blocktools.py | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The TMIcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for manipulating blocks and transactions."""
from .mininode import *
from .script import CScript, OP_TRUE, OP_CHECKSIG, OP_RETURN
# Create a block (with regtest difficulty)
def create_block(hashprev, coinbase, nTime=None):
block = CBlock()
if nTime is None:
import time
block.nTime = int(time.time()+600)
else:
block.nTime = nTime
block.hashPrevBlock = hashprev
block.nBits = 0x207fffff # Will break after a difficulty adjustment...
block.vtx.append(coinbase)
block.hashMerkleRoot = block.calc_merkle_root()
block.calc_sha256()
return block
# From BIP141
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
def get_witness_script(witness_root, witness_nonce):
witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root)+ser_uint256(witness_nonce)))
output_data = WITNESS_COMMITMENT_HEADER + ser_uint256(witness_commitment)
return CScript([OP_RETURN, output_data])
# According to BIP141, blocks with witness rules active must commit to the
# hash of all in-block transactions including witness.
def add_witness_commitment(block, nonce=0):
# First calculate the merkle root of the block's
# transactions, with witnesses.
witness_nonce = nonce
witness_root = block.calc_witness_merkle_root()
# witness_nonce should go to coinbase witness.
block.vtx[0].wit.vtxinwit = [CTxInWitness()]
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(witness_nonce)]
# witness commitment is the last OP_RETURN output in coinbase
block.vtx[0].vout.append(CTxOut(0, get_witness_script(witness_root, witness_nonce)))
block.vtx[0].rehash()
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
def serialize_script_num(value):
r = bytearray(0)
if value == 0:
return r
neg = value < 0
absvalue = -value if neg else value
while (absvalue):
r.append(int(absvalue & 0xff))
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return r
# Create a coinbase transaction, assuming no miner fees.
# If pubkey is passed in, the coinbase output will be a P2PK output;
# otherwise an anyone-can-spend output.
def create_coinbase(height, pubkey = None):
coinbase = CTransaction()
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
ser_string(serialize_script_num(height)), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = 50 * COIN
halvings = int(height/150) # regtest
coinbaseoutput.nValue >>= halvings
if (pubkey != None):
coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
else:
coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
coinbase.vout = [ coinbaseoutput ]
coinbase.calc_sha256()
return coinbase
# Create a transaction.
# If the scriptPubKey is not specified, make it anyone-can-spend.
def create_transaction(prevtx, n, sig, value, scriptPubKey=CScript()):
|
def get_legacy_sigopcount_block(block, fAccurate=True):
count = 0
for tx in block.vtx:
count += get_legacy_sigopcount_tx(tx, fAccurate)
return count
def get_legacy_sigopcount_tx(tx, fAccurate=True):
count = 0
for i in tx.vout:
count += i.scriptPubKey.GetSigOpCount(fAccurate)
for j in tx.vin:
# scriptSig might be of type bytes, so convert to CScript for the moment
count += CScript(j.scriptSig).GetSigOpCount(fAccurate)
return count
| tx = CTransaction()
assert(n < len(prevtx.vout))
tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
tx.vout.append(CTxOut(value, scriptPubKey))
tx.calc_sha256()
return tx |
xiaolin-wus-line-algorithm-1.go | package raster
import "math"
func ipart(x float64) float64 {
return math.Floor(x)
}
func round(x float64) float64 {
return ipart(x + .5)
}
func fpart(x float64) float64 |
func rfpart(x float64) float64 {
return 1 - fpart(x)
}
// AaLine plots anti-aliased line by Xiaolin Wu's line algorithm.
func (g *Grmap) AaLine(x1, y1, x2, y2 float64) {
// straight translation of WP pseudocode
dx := x2 - x1
dy := y2 - y1
ax := dx
if ax < 0 {
ax = -ax
}
ay := dy
if ay < 0 {
ay = -ay
}
// plot function set here to handle the two cases of slope
var plot func(int, int, float64)
if ax < ay {
x1, y1 = y1, x1
x2, y2 = y2, x2
dx, dy = dy, dx
plot = func(x, y int, c float64) {
g.SetPx(y, x, uint16(c*math.MaxUint16))
}
} else {
plot = func(x, y int, c float64) {
g.SetPx(x, y, uint16(c*math.MaxUint16))
}
}
if x2 < x1 {
x1, x2 = x2, x1
y1, y2 = y2, y1
}
gradient := dy / dx
// handle first endpoint
xend := round(x1)
yend := y1 + gradient*(xend-x1)
xgap := rfpart(x1 + .5)
xpxl1 := int(xend) // this will be used in the main loop
ypxl1 := int(ipart(yend))
plot(xpxl1, ypxl1, rfpart(yend)*xgap)
plot(xpxl1, ypxl1+1, fpart(yend)*xgap)
intery := yend + gradient // first y-intersection for the main loop
// handle second endpoint
xend = round(x2)
yend = y2 + gradient*(xend-x2)
xgap = fpart(x2 + 0.5)
xpxl2 := int(xend) // this will be used in the main loop
ypxl2 := int(ipart(yend))
plot(xpxl2, ypxl2, rfpart(yend)*xgap)
plot(xpxl2, ypxl2+1, fpart(yend)*xgap)
// main loop
for x := xpxl1 + 1; x <= xpxl2-1; x++ {
plot(x, int(ipart(intery)), rfpart(intery))
plot(x, int(ipart(intery))+1, fpart(intery))
intery = intery + gradient
}
}
| {
return x - ipart(x)
} |
FloatingSuggestions.styles.ts | import { getGlobalClassNames, getTheme } from '@fluentui/style-utilities';
import type { IBaseFloatingSuggestionsStyles, IBaseFloatingSuggestionsStylesProps } from './FloatingSuggestions.types';
const GlobalClassNames = {
root: 'ms-FloatingSuggestions',
callout: 'ms-FloatingSuggestions-callout',
};
export const getStyles = (props: IBaseFloatingSuggestionsStylesProps): IBaseFloatingSuggestionsStyles => {
const theme = getTheme();
if (!theme) {
throw new Error('theme is undefined or null in Editing item getStyles function.');
}
// const { semanticColors } = theme;
const classNames = getGlobalClassNames(GlobalClassNames, theme);
return {
root: [classNames.root, {}],
callout: [
classNames.callout, | ['.ms-FloatingSuggestionsItem-itemButton']: {
padding: '0px',
border: 'none',
},
['.ms-FloatingSuggestionsList']: {
minWidth: '260px',
},
},
},
],
};
}; | {
selectors: { |
hydrateNotification.ts | import {NotificationOptions, Notification} from '../../../src/client';
export function | (options: NotificationOptions, fullNote: Notification): void {
// Handle `id` and `date` seperately as these cannot be predicted if undefined
if (options.id) {
expect(fullNote.id).toEqual(options.id);
} else {
expect(fullNote.id).toMatch(/[0-9]{9}/);
}
if (options.date) {
expect(fullNote.date).toEqual(options.date);
} else {
expect(fullNote.date).toBeInstanceOf(Date);
}
const expectedValues = {
body: options.body,
title: options.title,
icon: options.icon || '',
customData: options.customData !== undefined ? options.customData : {},
expires: options.expires !== undefined && options.expires !== null ? new Date(options.expires) : null,
buttons: options.buttons ? options.buttons.map(btn => ({...btn, iconUrl: btn.iconUrl || ''})) : []
};
expect(fullNote).toMatchObject(expectedValues);
}
| assertHydratedCorrectly |
abil.go | package yanfei
import (
"fmt"
"github.com/genshinsim/gcsim/pkg/core"
"github.com/genshinsim/gcsim/pkg/shield"
)
// Standard attack function with seal handling
func (c *char) Attack(p map[string]int) (int, int) {
f, a := c.ActionFrames(core.ActionAttack, p)
travel, ok := p["travel"]
if !ok {
travel = 20
}
done := false
addSeal := func(a core.AttackCB) {
if done {
return
}
if c.Tags["seal"] < c.maxTags {
c.Tags["seal"]++
}
c.sealExpiry = c.Core.F + 600
c.Core.Log.Debugw("yanfei gained a seal from normal attack", "frame", c.Core.F, "event", core.LogCharacterEvent, "char", c.Index, "current_seals", c.Tags["seal"], "expiry", c.sealExpiry)
done = true
}
ai := core.AttackInfo{
ActorIndex: c.Index,
Abil: fmt.Sprintf("Normal %v", c.NormalCounter),
AttackTag: core.AttackTagNormal,
ICDTag: core.ICDTagNormalAttack,
ICDGroup: core.ICDGroupDefault,
Element: core.Pyro,
Durability: 25,
Mult: attack[c.NormalCounter][c.TalentLvlAttack()],
}
c.Core.Combat.QueueAttack(ai, core.NewDefCircHit(0.1, false, core.TargettableEnemy), 0, f+travel, addSeal)
c.AdvanceNormalIndex()
// return animation cd
return f, a
}
// Charge attack function - handles seal use
func (c *char) ChargeAttack(p map[string]int) (int, int) {
//check for seal stacks
if c.Core.F > c.sealExpiry {
c.Tags["seal"] = 0
}
stacks := c.Tags["seal"]
//a1
// When Yan Fei's Charged Attack consumes Scarlet Seals, each Scarlet Seal consumed will increase her Pyro DMG by 5% for 6 seconds. When this effect is repeatedly triggered it will overwrite the oldest bonus first.
// The Pyro DMG bonus from Proviso is applied before charged attack damage is calculated.
m := make([]float64, core.EndStatType)
m[core.PyroP] = float64(stacks) * 0.05
c.AddMod(core.CharStatMod{
Key: "yanfei-a1",
Amount: func() ([]float64, bool) {
return m, true
},
Expiry: c.Core.F + 360,
})
f, a := c.ActionFrames(core.ActionCharge, p)
ai := core.AttackInfo{
ActorIndex: c.Index,
Abil: "Charge Attack",
AttackTag: core.AttackTagExtra,
ICDTag: core.ICDTagNone,
ICDGroup: core.ICDGroupDefault,
// StrikeType: core.StrikeTypeBlunt,
Element: core.Pyro,
Durability: 25,
Mult: charge[stacks][c.TalentLvlAttack()],
}
// TODO: Not sure of snapshot timing
c.Core.Combat.QueueAttack(ai, core.NewDefCircHit(2, false, core.TargettableEnemy), 0, f)
c.Core.Log.Debugw("yanfei charge attack consumed seals", "frame", c.Core.F, "event", core.LogCharacterEvent, "char", c.Index, "current_seals", c.Tags["seal"], "expiry", c.sealExpiry)
// Clear the seals next frame just in case for some reason we call stam check late
c.AddTask(func() {
c.Tags["seal"] = 0
c.sealExpiry = c.Core.F - 1
}, "clear-seals", 1)
return f, a
}
// Yanfei skill - Straightforward as it has little interactions with the rest of her kit
// Summons flames that deal AoE Pyro DMG. Opponents hit by the flames will grant Yanfei the maximum number of Scarlet Seals.
func (c *char) Skill(p map[string]int) (int, int) {
f, a := c.ActionFrames(core.ActionSkill, p)
done := false
addSeal := func(a core.AttackCB) {
if done |
// Create max seals on hit
if c.Tags["seal"] < c.maxTags {
c.Tags["seal"] = c.maxTags
}
c.sealExpiry = c.Core.F + 600
c.Core.Log.Debugw("yanfei gained max seals", "frame", c.Core.F, "event", core.LogCharacterEvent, "char", c.Index, "current_seals", c.Tags["seal"], "expiry", c.sealExpiry)
done = true
}
ai := core.AttackInfo{
ActorIndex: c.Index,
Abil: "Signed Edict",
AttackTag: core.AttackTagElementalArt,
ICDTag: core.ICDTagNone,
ICDGroup: core.ICDGroupDefault,
// StrikeType: core.StrikeTypeBlunt,
Element: core.Pyro,
Durability: 25,
Mult: skill[c.TalentLvlSkill()],
}
// TODO: Not sure of snapshot timing
c.Core.Combat.QueueAttack(ai, core.NewDefCircHit(2, false, core.TargettableEnemy), 0, f, addSeal)
c.QueueParticle("yanfei", 3, core.Pyro, f+100)
c.SetCD(core.ActionSkill, 540)
return f, a
}
// Burst - Deals burst damage and adds status for charge attack bonus
func (c *char) Burst(p map[string]int) (int, int) {
f, a := c.ActionFrames(core.ActionBurst, p)
// +1 is to make sure the scarlet seal grant works correctly on the last frame
// TODO: Not 100% sure whether this adds a seal at the exact moment the burst ends or not
c.Core.Status.AddStatus("yanfeiburst", 15*60+1)
m := make([]float64, core.EndStatType)
m[core.DmgP] = burstBonus[c.TalentLvlBurst()]
c.AddPreDamageMod(core.PreDamageMod{
Key: "yanfei-burst",
Amount: func(atk *core.AttackEvent, t core.Target) ([]float64, bool) {
if atk.Info.AttackTag == core.AttackTagExtra {
return m, true
}
return nil, false
},
Expiry: c.Core.F + 15*60,
})
done := false
addSeal := func(a core.AttackCB) {
if done {
return
}
// Create max seals on hit
if c.Tags["seal"] < c.maxTags {
c.Tags["seal"] = c.maxTags
}
c.sealExpiry = c.Core.F + 600
c.Core.Log.Debugw("yanfei gained max seals", "frame", c.Core.F, "event", core.LogCharacterEvent, "char", c.Index, "current_seals", c.Tags["seal"], "expiry", c.sealExpiry)
done = true
}
ai := core.AttackInfo{
ActorIndex: c.Index,
Abil: "Done Deal",
AttackTag: core.AttackTagElementalBurst,
ICDTag: core.ICDTagNone,
ICDGroup: core.ICDGroupDefault,
// StrikeType: core.StrikeTypeBlunt,
Element: core.Pyro,
Durability: 50,
Mult: burst[c.TalentLvlBurst()],
}
c.Core.Combat.QueueAttack(ai, core.NewDefCircHit(2, false, core.TargettableEnemy), 0, f, addSeal)
c.AddTask(c.burstAddSealHook(), "burst-add-seals-task", 60)
c.c4()
c.SetCD(core.ActionBurst, 20*60)
c.ConsumeEnergy(8)
return f, a
}
// Handles C4 shield creation
// When Done Deal is used:
// Creates a shield that absorbs up to 45% of Yan Fei's Max HP for 15s
// This shield absorbs Pyro DMG 250% more effectively
func (c *char) c4() {
if c.Base.Cons >= 4 {
c.Core.Shields.Add(&shield.Tmpl{
Src: c.Core.F,
ShieldType: core.ShieldYanfeiC4,
Name: "Yanfei C4",
HP: c.HPMax * .45,
Ele: core.Pyro,
Expires: c.Core.F + 15*60,
})
}
}
// Recurring task to add seals every second while burst is up
func (c *char) burstAddSealHook() func() {
return func() {
if c.Core.Status.Duration("yanfeiburst") == 0 {
return
}
if c.Tags["seal"] < c.maxTags {
c.Tags["seal"]++
}
c.sealExpiry = c.Core.F + 600
c.Core.Log.Debugw("yanfei gained seal from burst", "frame", c.Core.F, "event", core.LogCharacterEvent, "char", c.Index, "current_seals", c.Tags["seal"], "expiry", c.sealExpiry)
c.AddTask(c.burstAddSealHook(), "burst-add-seals", 60)
}
}
| {
return
} |
getJWKs.go | package service
import (
"net/http"
"github.com/lestrrat-go/jwx/jwk"
"github.com/plgd-dev/cloud/pkg/log"
)
func (requestHandler *RequestHandler) getJWKs(w http.ResponseWriter, r *http.Request) {
resp := map[string]interface{}{
"keys": []jwk.Key{
requestHandler.idTokenJwkKey,
requestHandler.accessTokenJwkKey,
},
}
if err := jsonResponseWriter(w, resp); err != nil |
}
| {
log.Errorf("failed to write response: %v", err)
} |
convertion_test.rs |
extern crate numerus;
#[cfg(test)]
mod test {
use numerus::{
int_to_roman_upper,
int_to_roman_lower,
roman_to_int
};
#[test]
fn test_full_convetion_uppercase() {
for i in 1..10000 {
let tmp = int_to_roman_upper(i).unwrap();
assert_eq!(roman_to_int(&tmp).unwrap(), i);
}
}
#[test]
fn | () {
for i in 1..10000 {
let tmp = int_to_roman_lower(i).unwrap();
assert_eq!(roman_to_int(&tmp).unwrap(), i);
}
}
} | test_full_convetion_lowercase |
container_stop.go | package server
import (
"github.com/sirupsen/logrus"
"golang.org/x/net/context"
pb "k8s.io/kubernetes/pkg/kubelet/apis/cri/v1alpha1/runtime" | _, err := s.ContainerServer.ContainerStop(ctx, req.ContainerId, req.Timeout)
if err != nil {
return nil, err
}
resp := &pb.StopContainerResponse{}
logrus.Debugf("StopContainerResponse %s: %+v", req.ContainerId, resp)
return resp, nil
} | )
// StopContainer stops a running container with a grace period (i.e., timeout).
func (s *Server) StopContainer(ctx context.Context, req *pb.StopContainerRequest) (*pb.StopContainerResponse, error) { |
listWebAppHostKeys.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20190801
import (
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// Functions host level keys.
func | (ctx *pulumi.Context, args *ListWebAppHostKeysArgs, opts ...pulumi.InvokeOption) (*ListWebAppHostKeysResult, error) {
var rv ListWebAppHostKeysResult
err := ctx.Invoke("azure-native:web/v20190801:listWebAppHostKeys", args, &rv, opts...)
if err != nil {
return nil, err
}
return &rv, nil
}
type ListWebAppHostKeysArgs struct {
// Site name.
Name string `pulumi:"name"`
// Name of the resource group to which the resource belongs.
ResourceGroupName string `pulumi:"resourceGroupName"`
}
// Functions host level keys.
type ListWebAppHostKeysResult struct {
// Host level function keys.
FunctionKeys map[string]string `pulumi:"functionKeys"`
// Secret key.
MasterKey *string `pulumi:"masterKey"`
// System keys.
SystemKeys map[string]string `pulumi:"systemKeys"`
}
| ListWebAppHostKeys |
mod.rs | pub mod error;
pub mod index_store;
pub mod meta_store;
use std::convert::TryInto;
use std::path::Path;
use chrono::Utc;
use error::{IndexResolverError, Result};
use heed::Env;
use index_store::{IndexStore, MapIndexStore};
use meilisearch_error::ResponseError;
use meta_store::{HeedMetaStore, IndexMetaStore};
use milli::update::DocumentDeletionResult;
use serde::{Deserialize, Serialize};
use tokio::task::spawn_blocking;
use uuid::Uuid;
use crate::index::update_handler::UpdateHandler;
use crate::index::{error::Result as IndexResult, Index};
use crate::options::IndexerOpts;
use crate::tasks::batch::Batch;
use crate::tasks::task::{DocumentDeletion, Job, Task, TaskContent, TaskEvent, TaskId, TaskResult};
use crate::tasks::Pending;
use crate::tasks::TaskPerformer;
use crate::update_file_store::UpdateFileStore;
use self::meta_store::IndexMeta;
pub type HardStateIndexResolver = IndexResolver<HeedMetaStore, MapIndexStore>;
/// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400
/// bytes long
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
pub struct IndexUid(#[cfg_attr(test, proptest(regex("[a-zA-Z0-9_-]{1,400}")))] String);
pub fn create_index_resolver(
path: impl AsRef<Path>,
index_size: usize,
indexer_opts: &IndexerOpts,
meta_env: heed::Env,
file_store: UpdateFileStore,
) -> anyhow::Result<HardStateIndexResolver> {
let uuid_store = HeedMetaStore::new(meta_env)?;
let index_store = MapIndexStore::new(&path, index_size, indexer_opts)?;
Ok(IndexResolver::new(uuid_store, index_store, file_store))
}
impl IndexUid {
pub fn new(uid: String) -> Result<Self> {
if !uid
.chars()
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|| !(1..=400).contains(&uid.len())
{
Err(IndexResolverError::BadlyFormatted(uid))
} else {
Ok(Self(uid))
}
}
#[cfg(test)]
pub fn new_unchecked(s: impl AsRef<str>) -> Self {
Self(s.as_ref().to_string())
}
pub fn into_inner(self) -> String {
self.0
}
/// Return a reference over the inner str.
pub fn as_str(&self) -> &str {
&self.0
}
}
impl std::ops::Deref for IndexUid {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl TryInto<IndexUid> for String {
type Error = IndexResolverError;
fn try_into(self) -> Result<IndexUid> {
IndexUid::new(self)
}
}
#[async_trait::async_trait]
impl<U, I> TaskPerformer for IndexResolver<U, I>
where
U: IndexMetaStore + Send + Sync + 'static,
I: IndexStore + Send + Sync + 'static,
{
type Error = ResponseError;
async fn process(&self, mut batch: Batch) -> Batch {
// Until batching is implemented, all batch should contain only one update.
debug_assert_eq!(batch.len(), 1);
match batch.tasks.first_mut() {
Some(Pending::Task(task)) => {
task.events.push(TaskEvent::Processing(Utc::now()));
match self.process_task(task).await {
Ok(success) => {
task.events.push(TaskEvent::Succeded {
result: success,
timestamp: Utc::now(),
});
}
Err(err) => task.events.push(TaskEvent::Failed {
error: err.into(),
timestamp: Utc::now(),
}),
}
}
Some(Pending::Job(job)) => {
let job = std::mem::take(job);
self.process_job(job).await;
}
None => (),
}
batch
}
async fn finish(&self, batch: &Batch) {
for task in &batch.tasks {
if let Some(content_uuid) = task.get_content_uuid() {
if let Err(e) = self.file_store.delete(content_uuid).await {
log::error!("error deleting update file: {}", e);
}
}
}
}
}
pub struct IndexResolver<U, I> {
index_uuid_store: U,
index_store: I,
file_store: UpdateFileStore,
}
impl IndexResolver<HeedMetaStore, MapIndexStore> {
pub fn load_dump(
src: impl AsRef<Path>,
dst: impl AsRef<Path>,
index_db_size: usize,
env: Env,
indexer_opts: &IndexerOpts,
) -> anyhow::Result<()> {
HeedMetaStore::load_dump(&src, env)?;
let indexes_path = src.as_ref().join("indexes");
let indexes = indexes_path.read_dir()?;
let update_handler = UpdateHandler::new(indexer_opts)?;
for index in indexes {
Index::load_dump(&index?.path(), &dst, index_db_size, &update_handler)?;
}
Ok(())
}
}
impl<U, I> IndexResolver<U, I>
where
U: IndexMetaStore,
I: IndexStore,
{
pub fn new(index_uuid_store: U, index_store: I, file_store: UpdateFileStore) -> Self {
Self {
index_uuid_store,
index_store,
file_store,
}
}
async fn process_task(&self, task: &Task) -> Result<TaskResult> {
let index_uid = task.index_uid.clone();
match &task.content {
TaskContent::DocumentAddition {
content_uuid,
merge_strategy,
primary_key,
..
} => {
let primary_key = primary_key.clone();
let content_uuid = *content_uuid;
let method = *merge_strategy;
let index = self.get_or_create_index(index_uid, task.id).await?;
let file_store = self.file_store.clone();
let result = spawn_blocking(move || {
index.update_documents(method, content_uuid, primary_key, file_store)
})
.await??;
Ok(result.into())
}
TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => {
let ids = ids.clone();
let index = self.get_index(index_uid.into_inner()).await?;
let DocumentDeletionResult {
deleted_documents, ..
} = spawn_blocking(move || index.delete_documents(&ids)).await??;
Ok(TaskResult::DocumentDeletion { deleted_documents })
}
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => {
let index = self.get_index(index_uid.into_inner()).await?;
let deleted_documents = spawn_blocking(move || -> IndexResult<u64> {
let number_documents = index.stats()?.number_of_documents;
index.clear_documents()?;
Ok(number_documents)
})
.await??;
Ok(TaskResult::ClearAll { deleted_documents })
}
TaskContent::SettingsUpdate {
settings,
is_deletion,
} => {
let index = if *is_deletion {
self.get_index(index_uid.into_inner()).await?
} else {
self.get_or_create_index(index_uid, task.id).await?
};
let settings = settings.clone();
spawn_blocking(move || index.update_settings(&settings.check())).await??;
Ok(TaskResult::Other)
}
TaskContent::IndexDeletion => {
let index = self.delete_index(index_uid.into_inner()).await?;
let deleted_documents = spawn_blocking(move || -> IndexResult<u64> {
Ok(index.stats()?.number_of_documents)
})
.await??;
Ok(TaskResult::ClearAll { deleted_documents })
}
TaskContent::IndexCreation { primary_key } => {
let index = self.create_index(index_uid, task.id).await?;
if let Some(primary_key) = primary_key {
let primary_key = primary_key.clone();
spawn_blocking(move || index.update_primary_key(primary_key)).await??;
}
Ok(TaskResult::Other)
}
TaskContent::IndexUpdate { primary_key } => {
let index = self.get_index(index_uid.into_inner()).await?;
if let Some(primary_key) = primary_key {
let primary_key = primary_key.clone();
spawn_blocking(move || index.update_primary_key(primary_key)).await??;
}
Ok(TaskResult::Other)
}
}
}
async fn process_job(&self, job: Job) {
match job {
Job::Dump { ret, path } => {
log::trace!("The Dump task is getting executed");
if ret.send(self.dump(path).await).is_err() {
log::error!("The dump actor died.");
}
}
Job::Empty => log::error!("Tried to process an empty task."),
Job::Snapshot(job) => {
if let Err(e) = job.run().await {
log::error!("Error performing snapshot: {}", e);
}
}
}
}
pub async fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
for (_, index) in self.list().await? {
index.dump(&path)?;
}
self.index_uuid_store.dump(path.as_ref().to_owned()).await?;
Ok(())
}
async fn create_index(&self, uid: IndexUid, creation_task_id: TaskId) -> Result<Index> {
match self.index_uuid_store.get(uid.into_inner()).await? {
(uid, Some(_)) => Err(IndexResolverError::IndexAlreadyExists(uid)),
(uid, None) => {
let uuid = Uuid::new_v4();
let index = self.index_store.create(uuid).await?;
match self
.index_uuid_store
.insert(
uid,
IndexMeta {
uuid,
creation_task_id,
},
)
.await
{
Err(e) => {
match self.index_store.delete(uuid).await {
Ok(Some(index)) => {
index.close();
}
Ok(None) => (),
Err(e) => log::error!("Error while deleting index: {:?}", e),
}
Err(e)
}
Ok(()) => Ok(index),
}
}
} | match self.create_index(uid, task_id).await {
Ok(index) => Ok(index),
Err(IndexResolverError::IndexAlreadyExists(uid)) => self.get_index(uid).await,
Err(e) => Err(e),
}
}
pub async fn list(&self) -> Result<Vec<(String, Index)>> {
let uuids = self.index_uuid_store.list().await?;
let mut indexes = Vec::new();
for (name, IndexMeta { uuid, .. }) in uuids {
match self.index_store.get(uuid).await? {
Some(index) => indexes.push((name, index)),
None => {
// we found an unexisting index, we remove it from the uuid store
let _ = self.index_uuid_store.delete(name).await;
}
}
}
Ok(indexes)
}
pub async fn delete_index(&self, uid: String) -> Result<Index> {
match self.index_uuid_store.delete(uid.clone()).await? {
Some(IndexMeta { uuid, .. }) => match self.index_store.delete(uuid).await? {
Some(index) => {
index.clone().close();
Ok(index)
}
None => Err(IndexResolverError::UnexistingIndex(uid)),
},
None => Err(IndexResolverError::UnexistingIndex(uid)),
}
}
pub async fn get_index(&self, uid: String) -> Result<Index> {
match self.index_uuid_store.get(uid).await? {
(name, Some(IndexMeta { uuid, .. })) => {
match self.index_store.get(uuid).await? {
Some(index) => Ok(index),
None => {
// For some reason we got a uuid to an unexisting index, we return an error,
// and remove the uuid from the uuid store.
let _ = self.index_uuid_store.delete(name.clone()).await;
Err(IndexResolverError::UnexistingIndex(name))
}
}
}
(name, _) => Err(IndexResolverError::UnexistingIndex(name)),
}
}
pub async fn get_index_creation_task_id(&self, index_uid: String) -> Result<TaskId> {
let (uid, meta) = self.index_uuid_store.get(index_uid).await?;
meta.map(
|IndexMeta {
creation_task_id, ..
}| creation_task_id,
)
.ok_or(IndexResolverError::UnexistingIndex(uid))
}
}
#[cfg(test)]
mod test {
use std::collections::BTreeMap;
use super::*;
use futures::future::ok;
use milli::update::{DocumentAdditionResult, IndexDocumentsMethod};
use nelson::Mocker;
use proptest::prelude::*;
use crate::index::{
error::{IndexError, Result as IndexResult},
Checked, IndexMeta, IndexStats, Settings,
};
use index_store::MockIndexStore;
use meta_store::MockIndexMetaStore;
proptest! {
#[test]
fn test_process_task(
task in any::<Task>(),
index_exists in any::<bool>(),
index_op_fails in any::<bool>(),
any_int in any::<u64>(),
) {
actix_rt::System::new().block_on(async move {
let uuid = Uuid::new_v4();
let mut index_store = MockIndexStore::new();
let mocker = Mocker::default();
// Return arbitrary data from index call.
match &task.content {
TaskContent::DocumentAddition{primary_key, ..} => {
let result = move || if !index_op_fails {
Ok(DocumentAdditionResult { indexed_documents: any_int, number_of_documents: any_int })
} else {
// return this error because it's easy to generate...
Err(IndexError::DocumentNotFound("a doc".into()))
};
if primary_key.is_some() {
mocker.when::<String, IndexResult<IndexMeta>>("update_primary_key")
.then(move |_| Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None }));
}
mocker.when::<(IndexDocumentsMethod, Uuid, Option<String>, UpdateFileStore), IndexResult<DocumentAdditionResult>>("update_documents")
.then(move |(_, _, _, _)| result());
}
TaskContent::SettingsUpdate{..} => {
let result = move || if !index_op_fails {
Ok(())
} else {
// return this error because it's easy to generate...
Err(IndexError::DocumentNotFound("a doc".into()))
};
mocker.when::<&Settings<Checked>, IndexResult<()>>("update_settings")
.then(move |_| result());
}
TaskContent::DocumentDeletion(DocumentDeletion::Ids(_ids)) => {
let result = move || if !index_op_fails {
Ok(any_int as u64)
} else {
// return this error because it's easy to generate...
Err(IndexError::DocumentNotFound("a doc".into()))
};
mocker.when::<&[String], IndexResult<u64>>("delete_documents")
.then(move |_| result());
},
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => {
let result = move || if !index_op_fails {
Ok(())
} else {
// return this error because it's easy to generate...
Err(IndexError::DocumentNotFound("a doc".into()))
};
mocker.when::<(), IndexResult<()>>("clear_documents")
.then(move |_| result());
},
TaskContent::IndexDeletion => {
mocker.when::<(), ()>("close")
.times(index_exists as usize)
.then(move |_| ());
}
TaskContent::IndexUpdate { primary_key }
| TaskContent::IndexCreation { primary_key } => {
if primary_key.is_some() {
let result = move || if !index_op_fails {
Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None })
} else {
// return this error because it's easy to generate...
Err(IndexError::DocumentNotFound("a doc".into()))
};
mocker.when::<String, IndexResult<IndexMeta>>("update_primary_key")
.then(move |_| result());
}
}
}
mocker.when::<(), IndexResult<IndexStats>>("stats")
.then(|()| Ok(IndexStats { size: 0, number_of_documents: 0, is_indexing: Some(false), field_distribution: BTreeMap::new() }));
let index = Index::mock(mocker);
match &task.content {
// an unexisting index should trigger an index creation in the folllowing cases:
TaskContent::DocumentAddition { .. }
| TaskContent::SettingsUpdate { is_deletion: false, .. }
| TaskContent::IndexCreation { .. } if !index_exists => {
index_store
.expect_create()
.once()
.withf(move |&found| !index_exists || found == uuid)
.returning(move |_| Box::pin(ok(index.clone())));
},
TaskContent::IndexDeletion => {
index_store
.expect_delete()
// this is called only if the index.exists
.times(index_exists as usize)
.withf(move |&found| !index_exists || found == uuid)
.returning(move |_| Box::pin(ok(Some(index.clone()))));
}
// if index already exists, create index will return an error
TaskContent::IndexCreation { .. } if index_exists => (),
// The index exists and get should be called
_ if index_exists => {
index_store
.expect_get()
.once()
.withf(move |&found| found == uuid)
.returning(move |_| Box::pin(ok(Some(index.clone()))));
},
// the index doesn't exist and shouldn't be created, the uuidstore will return an error, and get_index will never be called.
_ => (),
}
let mut uuid_store = MockIndexMetaStore::new();
uuid_store
.expect_get()
.returning(move |uid| {
Box::pin(ok((uid, index_exists.then(|| crate::index_resolver::meta_store::IndexMeta {uuid, creation_task_id: 0 }))))
});
// we sould only be creating an index if the index doesn't alredy exist
uuid_store
.expect_insert()
.withf(move |_, _| !index_exists)
.returning(|_, _| Box::pin(ok(())));
uuid_store
.expect_delete()
.times(matches!(task.content, TaskContent::IndexDeletion) as usize)
.returning(move |_| Box::pin(ok(index_exists.then(|| crate::index_resolver::meta_store::IndexMeta { uuid, creation_task_id: 0}))));
let mocker = Mocker::default();
let update_file_store = UpdateFileStore::mock(mocker);
let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store);
let result = index_resolver.process_task(&task).await;
// Test for some expected output scenarios:
// Index creation and deletion cannot fail because of a failed index op, since they
// don't perform index ops.
if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None })
|| (index_exists && matches!(task.content, TaskContent::IndexCreation { .. }))
|| (!index_exists && matches!(task.content, TaskContent::IndexDeletion
| TaskContent::DocumentDeletion(_)
| TaskContent::SettingsUpdate { is_deletion: true, ..}
| TaskContent::IndexUpdate { .. } ))
{
assert!(result.is_err(), "{:?}", result);
} else {
assert!(result.is_ok(), "{:?}", result);
}
});
}
}
} | }
/// Get or create an index with name `uid`.
pub async fn get_or_create_index(&self, uid: IndexUid, task_id: TaskId) -> Result<Index> { |
main.go | package main
import (
"bufio"
"encoding/json"
"flag"
"fmt"
"net/http"
"os"
"strings"
"sync"
"github.com/acarl005/stripansi"
)
var wg sync.WaitGroup
func main() |
func isStdin() bool {
f, e := os.Stdin.Stat()
if e != nil {
return false
}
if f.Mode()&os.ModeNamedPipe == 0 {
return false
}
return true
}
type data struct {
Chat_id string `json:"chat_id"`
Text string `json:"text"`
}
func teleman(url string, line string,chatid string) {
data, _ := json.Marshal(data{Chat_id:chatid,Text: stripansi.Strip(line)})
http.Post(url, "application/json", strings.NewReader(string(data)))
wg.Done()
}
| {
var oneLine, verboseMode bool
var ApiToken, lines string
flag.StringVar(&ApiToken, "u", "", "Telegram ApiToken")
flag.BoolVar(&oneLine, "1", false, "Send message line-by-line")
flag.BoolVar(&verboseMode, "v", false, "Verbose mode")
flag.Parse()
apitokenEnv := os.Getenv("TELEGRAM_API_TOKEN")
TelegramEnv := "https://api.telegram.org/bot"+apitokenEnv+"/sendMessage"
chatid := os.Getenv("TELEGRAM_CHAT_ID")
if TelegramEnv != "" {
ApiToken = TelegramEnv
} else {
if ApiToken == "" {
if verboseMode {
fmt.Println("Telegram ApiToken not set!")
}
}
}
if !isStdin() {
os.Exit(1)
}
sc := bufio.NewScanner(os.Stdin)
for sc.Scan() {
line := sc.Text()
fmt.Println(line)
if oneLine {
if ApiToken != "" {
wg.Add(1)
go teleman(ApiToken, line,chatid)
}
} else {
lines += line
lines += "\n"
}
}
if !oneLine {
wg.Add(1)
go teleman(ApiToken, lines,chatid)
}
wg.Wait()
} |
nameshark_vcard.py | # The MIT License (MIT)
#
# Copyright (c) 2016 Francis T. O'Donovan
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Convert vCard-formatted string to the JSON format expected by Name Shark."""
# coding=utf-8
import base64
import json
import collections
import argparse
import vobject
NAMES = collections.namedtuple('Names', ['first_name', 'surname'])
def get_pp_names(fn_field):
"""
Use probablepeople to extract firstname/surname from vCard 'fn' field.
:param fn_field: the input vCard 'fn' field.
:return: a namedtuple containing the first name and surname.
>>> get_names('John Smith')
Extracting data for John Smith
Names(first_name='John', surname='Smith')
"""
first_name = None
surname = None
try:
import probablepeople as pp # not python 2.6 compatible
# Use probablepeople to tag the parts of the name.
full_name_dict = pp.tag(fn_field)[0]
if 'GivenName' in full_name_dict:
# If probablepeople has successfully extracted the first name,
# use it.
first_name = full_name_dict['GivenName']
if 'Surname' in full_name_dict:
# If probablepeople has successfully extracted the surname,
# use it. | except (ImportError, SyntaxError, TypeError) as error:
print(error)
return NAMES(first_name, surname)
def get_names(fn_field):
"""
Extract the first name and surname from a vCard 'fn' field.
:param fn_field: the input vCard 'fn' field.
:return: a namedtuple containing the first name and surname.
>>> get_names('John Smith')
Extracting data for John Smith
Names(first_name='John', surname='Smith')
"""
names = get_pp_names(fn_field)
first_name = names.first_name
surname = names.surname
try:
fn_field_split = fn_field.split(' ')
except (TypeError, AttributeError):
fn_field_split = ['']
if first_name is None:
# If we can't get first name from probablepeople, assume it's the
# first part of the string.
first_name = fn_field_split[0]
if first_name == surname:
first_name = ''
if surname is None:
# If we can't get surname from probablepeople, assume it's the
# second part of the string, if that exists.
surname = fn_field_split[1] if len(fn_field_split) > 1 else ''
print('Extracting data for ' + first_name + ' ' + surname)
return NAMES(first_name, surname)
def get_photo(photo):
"""
Extract the photo data (if it exists) from a vCard 'photo' field.
:param photo: the input vCard 'photo' field.
:return: a base64-encoded string containing the photo data.
"""
if photo is not None:
photo_data = base64.b64encode(photo)
photo_data = 'data:image/jpeg;base64,' + photo_data.decode('utf8')
else:
photo_data = ''
return photo_data
def extract_contact_from_component(component):
"""
Extract the contact info from a vCard component.
:param component: the input vCard component text.
:return: a dictionary containing the extracted contact info.
"""
names = get_names(component.getChildValue('fn'))
photo_data = get_photo(component.getChildValue('photo'))
if photo_data == '':
print(
'Warning: Missing photo for ' + names.first_name + ' ' +
names.surname + '...!',
)
return {
'first': names.first_name, 'last': names.surname,
'photoData': photo_data, 'details': '',
}
def extract_contacts_from_vcard(vcard):
"""
Extract the contact info from a vCard.
:param vcard: the vCard text to convert.
:return: a list containing the extracted contact info.
"""
contacts = []
for v_component in vobject.readComponents(vcard):
entry = extract_contact_from_component(v_component)
contacts.append(entry)
return contacts
def convert_to_nameshark(group_name, contacts):
"""
Convert a list containing contact info into JSON for Name Shark.
:param group_name: the Name Shark group to use.
:param contacts:
:return: the list containing contact info extracted from a vCard.
"""
shark = {'name': group_name, 'contacts': contacts}
return json.dumps(shark, sort_keys=True, indent=4)
def vcard_to_nameshark(vcard, group_name):
"""
Convert vCard-formatted string to the JSON format expected by Name Shark.
:param vcard: the vCard text to convert.
:param group_name: the Name Shark group to use.
:return: JSON version of vCard input.
"""
contacts = extract_contacts_from_vcard(vcard)
return convert_to_nameshark(group_name, contacts)
def main():
"""
The main nameshark_vcard module.
:return: None
"""
parser = argparse.ArgumentParser()
parser.add_argument('file', help='the input file')
parser.add_argument('group', help='the output group name')
args = parser.parse_args()
with open(args.file, 'r') as input_file:
text = input_file.read()
json_str = vcard_to_nameshark(text, args.group)
with open(args.group + '.json', 'w') as output_file:
output_file.write(json_str)
if __name__ == '__main__':
main() | surname = full_name_dict['Surname'] |
account.ts | import { UUID } from 'angular2-uuid';
export class | {
name: string;
bankId: string;
branchId: string;
accountId: string;
private _id: string;
constructor() {
this._id = UUID.UUID();
}
get id(): string {
return this._id;
}
}
| Account |
builtin_filters.py | import json
from pathlib import Path as P
from typing import Dict
import _jsonnet
import click
from commodore import __install_dir__
from commodore.config import Config
from commodore.component import Component
from .jsonnet import jsonnet_runner
def _output_dir(work_dir: P, instance: str, path):
"""Compute directory in which to apply filter"""
return work_dir / "compiled" / instance / path
def _builtin_filter_helm_namespace(
work_dir: P, inv, component: Component, instance: str, path, **kwargs
):
if "namespace" not in kwargs:
raise click.ClickException(
"Builtin filter 'helm_namespace': filter argument 'namespace' is required"
)
create_namespace = kwargs.get("create_namespace", "false")
# Transform create_namespace to string as jsonnet extvars can only be
# strings
if isinstance(create_namespace, bool):
create_namespace = "true" if create_namespace else "false"
exclude_objects = kwargs.get("exclude_objects", [])
exclude_objects = "|".join([json.dumps(e) for e in exclude_objects])
output_dir = _output_dir(work_dir, instance, path)
# pylint: disable=c-extension-no-member
jsonnet_runner(
work_dir,
inv,
component.name,
instance,
path,
_jsonnet.evaluate_file,
__install_dir__ / "filters" / "helm_namespace.jsonnet",
namespace=kwargs["namespace"],
create_namespace=create_namespace,
exclude_objects=exclude_objects,
chart_output_dir=str(output_dir),
)
_builtin_filters = {
"helm_namespace": _builtin_filter_helm_namespace,
}
class UnknownBuiltinFilter(ValueError):
def __init__(self, filtername):
super().__init__(f"Unknown builtin filter: {filtername}")
self.filtername = filtername
# pylint: disable=too-many-arguments
def run_builtin_filter(
config: Config,
inv: Dict,
component: Component,
instance: str,
filterid: str,
path: P,
**filterargs: str,
):
if filterid not in _builtin_filters:
raise UnknownBuiltinFilter(filterid)
_builtin_filters[filterid](
config.work_dir, inv, component, instance, path, **filterargs
)
# pylint: disable=unused-argument
def validate_builtin_filter(config: Config, c: Component, instance: str, fd: Dict):
if fd["filter"] not in _builtin_filters:
raise UnknownBuiltinFilter(fd["filter"])
if "filterargs" not in fd:
|
fpath = _output_dir(config.work_dir, instance, fd["path"])
if not fpath.exists():
raise ValueError("Builtin filter called on path which doesn't exist")
| raise KeyError("Builtin filter is missing required key 'filterargs'") |
RegionDifference.ts | import { AccessTypes } from '../../../data/AccessTypes';
import { DataAccess } from '../../../data/DataAccess';
import { DataTree } from '../../../data/DataTree';
import { DataTypes } from '../../../data/DataTypes';
import { InputManager } from '../../../io/InputManager';
import { OutputManager } from '../../../io/OutputManager';
import { NCurve } from '../../../math/geometry/curve/NCurve';
import { RegionCSGNode } from './RegionCSGNode';
export class RegionDifference extends RegionCSGNode {
get displayName (): string {
return 'Region Difference';
}
public registerInputs (manager: InputManager): void {
manager.add('a', 'First planar closed curve', DataTypes.CURVE, AccessTypes.ITEM);
manager.add('b', 'Second planar closed curves set', DataTypes.CURVE, AccessTypes.LIST);
manager.add('n', 'Non-polyline curve resolution', DataTypes.NUMBER, AccessTypes.ITEM).setDefault(new DataTree().add([64]));
}
public registerOutputs (manager: OutputManager): void {
manager.add('R', 'Curve difference result', DataTypes.CURVE, AccessTypes.LIST);
}
public solve (access: DataAccess): void {
const a = access.getData(0) as NCurve;
const b = access.getDataList(1) as NCurve[];
const resolution = access.getData(2) as number;
const curves = [a].concat(b.slice());
if (!this.validatePlanarClosedCurves(curves)) {
throw new Error('Non planar closed curve included in inputs');
}
const region = this.getCurvePaths(curves, resolution);
const clip = new clipper.Clipper();
region.paths.forEach((path: any, i: number) => {
if (i === 0) {
clip.AddPaths([path], clipper.PolyType.ptSubject, true);
} else {
clip.AddPaths([path], clipper.PolyType.ptClip, true);
}
});
const solution = new clipper.Paths();
clip.Execute(clipper.ClipType.ctDifference, solution, clipper.PolyFillType.pftNonZero, clipper.PolyFillType.pftNonZero);
const result = this.getSolutionPolylines(solution, region.plane);
access.setDataList(0, result);
}
} | import clipper from 'clipper-lib'; |
|
db.rs | use std::error::Error;
use std::path::PathBuf;
use std::fs::{File, read_to_string, remove_file};
use std::io;
use ftp::{FtpStream, FtpError};
use md5::Context;
use rusqlite::{Connection, NO_PARAMS};
use crate::Node;
use crate::NCBI_FTP_HOST;
use crate::NCBI_FTP_PATH;
/// Open the taxonomy database in this directory.
fn open_db(dir: &PathBuf) -> Result<Connection, Box<dyn Error>> {
let dbpath = dir.join("taxonomy.db");
let conn = Connection::open(dbpath)?;
debug!("Database opened.");
Ok(conn)
}
/// Get the Taxonomy IDs corresponding to this scientific names. The used
/// name class are "scientific name", "synonym" and "genbank synonym".
/// Either return all the IDs or an error.
pub fn get_taxids(dir: &PathBuf, names: Vec<String>) -> Result<Vec<i64>, Box<dyn Error>> {
let mut taxids = vec![];
let conn = open_db(dir)?;
let mut stmt = conn.prepare("
SELECT tax_id FROM names
WHERE name_class IN ('scientific name', 'synonym', 'genbank synonym')
AND name=?")?;
for name in names.iter() {
let mut rows = stmt.query(&[name])?;
if let Some(row) = rows.next() {
// Here, row.get has no reason to return an error
// so row.get_unwrap should be safe
let row = row?;
taxids.push(row.get(0));
} else {
return Err(From::from(format!("No such scientific name: {}", name)));
}
}
Ok(taxids)
}
/// Get the Nodes corresponding to the IDs. The Nodes are ordered in the same
/// way as the IDs. If an ID is invalid, an error is returned.
pub fn get_nodes(dir: &PathBuf, ids: Vec<i64>) -> Result<Vec<Node>, Box<dyn Error>> {
let mut nodes = vec![];
let conn = open_db(dir)?;
let mut stmt = conn.prepare("
SELECT
nodes.tax_id,
nodes.parent_tax_id,
nodes.rank,
divisions.division,
code.name as code,
mito.name as mito,
names.name_class,
names.name,
nodes.comment
from nodes
inner join divisions on nodes.division_id = divisions.id
inner join names on nodes.tax_id = names.tax_id
inner join geneticCodes code on nodes.genetic_code_id = code.id
inner join geneticCodes mito on nodes.mito_genetic_code_id = mito.id
where nodes.tax_id=?")?;
for id in ids.iter() {
let mut rows = stmt.query(&[id])?;
let mut node: Node = Default::default();
// Here, row.get has no reason to return an error
// so row.get_unwrap should be safe
if let Some(row) = rows.next() {
let row = row?;
node.tax_id = row.get(0);
node.parent_tax_id = row.get(1);
node.rank = row.get(2);
node.division = row.get(3);
node.genetic_code = row.get(4);
let mito_code: String = row.get(5);
if mito_code != "Unspecified" {
node.mito_genetic_code = row.get(5);
}
let comments: String = row.get(8);
if !comments.is_empty() {
node.comments = Some(comments);
}
node.names.entry(row.get(6))
.or_insert_with(|| vec![row.get(7)]);
} else {
return Err(From::from(format!("No such ID: {}", id)));
}
while let Some(row) = rows.next() {
let row = row?;
node.names.entry(row.get(6))
.and_modify(|n| n.push(row.get(7)))
.or_insert_with(|| vec![row.get(7)]);
}
nodes.push(node);
}
Ok(nodes)
}
/// Get the Node corresponding to this unique ID, then all Nodes in the path
/// to the root (the special node with taxonomy ID 1). The Nodes are ordered,
/// with the root last.
pub fn get_lineage(dir: &PathBuf, id: i64) -> Result<Vec<Node>, Box<dyn Error>> {
let conn = open_db(dir)?;
let mut id = id;
let mut ids = vec![id];
let mut stmt = conn.prepare("SELECT parent_tax_id FROM nodes WHERE tax_id=?")?;
loop {
let parent_id = stmt.query_row(&[id], |row| {row.get(0)})?;
ids.push(parent_id);
id = parent_id;
if id == 1 {
break;
}
}
let mut lineage = get_nodes(dir, ids)?;
lineage.reverse();
Ok(lineage)
}
/// Get the children of the Node corresponding to this unique ID. If
/// `species_only` is true, then stop when the children are species, else
/// continue until the children are tips.
/// Note that the ID given as argument is included in the results. Thus, the | let mut temp_ids = vec![id];
let conn = open_db(dir)?;
let mut stmt = conn.prepare("SELECT tax_id, rank FROM nodes WHERE parent_tax_id=?")?;
while let Some(id) = temp_ids.pop() {
ids.push(id);
let mut rows = stmt.query(&[id])?;
while let Some(result_row) = rows.next() {
let row = result_row?;
let rank: String = row.get(1);
if species_only && rank == "species" {
ids.push(row.get(0));
} else {
temp_ids.push(row.get(0))
}
}
}
let nodes = get_nodes(dir, ids)?;
Ok(nodes)
}
//-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-
// Database downloading
/// Download the latest release of `taxdmp.zip` and `taxdmp.zip.md5`
/// from the NCBI FTP servers.
pub fn download_taxdump(datadir: &PathBuf, email: String) -> Result<(), Box<dyn Error>> {
debug!("Contacting {}...", NCBI_FTP_HOST);
let mut conn = FtpStream::connect(NCBI_FTP_HOST)?;
conn.login("ftp", &email)?;
debug!("Connected and logged.");
conn.cwd(NCBI_FTP_PATH)?;
debug!("Retrieving MD5 sum file...");
let path = datadir.join("taxdmp.zip.md5");
let mut file = File::create(path)?;
let mut cursor = conn.simple_retr("taxdmp.zip.md5")?;
io::copy(&mut cursor, &mut file)?;
debug!("Retrieving dumps file...");
conn.retr("taxdmp.zip", |stream| {
let path = datadir.join("taxdmp.zip");
let mut file = match File::create(path) {
Err(e) => return Err(FtpError::ConnectionError(e)),
Ok(f) => f
};
io::copy(stream, &mut file).map_err(FtpError::ConnectionError)
})?;
conn.quit()?;
debug!("We're done. Ending connection.");
Ok(())
}
/// Check the integrity of `taxdmp.zip` using `taxdmp.zip.md5`.
pub fn check_integrity(datadir: &PathBuf) -> Result<(), Box<dyn Error>> {
let path = datadir.join("taxdmp.zip");
let mut file = File::open(path)?;
let mut hasher = Context::new();
debug!("Computing MD5 sum...");
io::copy(&mut file, &mut hasher)?;
let digest = format!("{:x}", hasher.compute());
let path = datadir.join("taxdmp.zip.md5");
let mut ref_digest = read_to_string(path)?;
ref_digest.truncate(32);
if digest != ref_digest {
warn!("Expected sum is: {}", ref_digest);
warn!("Computed sum is: {}", digest);
panic!("Fail to check integrity.");
} else {
Ok(())
}
}
/// Extract all files from taxdmp.zip in the same directory.
pub fn extract_dump(datadir: &PathBuf) -> Result<(), Box<dyn Error>> {
let path = datadir.join("taxdmp.zip");
let file = File::open(path)?;
let mut archive = zip::ZipArchive::new(file)?;
for i in 0..archive.len() {
let mut file = archive.by_index(i)?;
let outpath = datadir.join(file.sanitized_name());
debug!("Extracted {}", outpath.as_path().display());
let mut outfile = File::create(&outpath)?;
io::copy(&mut file, &mut outfile)?;
}
Ok(())
}
/// Remove the downloaded and extracted files.
pub fn remove_temp_files(datadir: &PathBuf) -> Result<(), Box<dyn Error>> {
remove_file(datadir.join("taxdmp.zip"))?;
remove_file(datadir.join("taxdmp.zip.md5"))?;
remove_file(datadir.join("citations.dmp"))?;
remove_file(datadir.join("delnodes.dmp"))?;
remove_file(datadir.join("division.dmp"))?;
remove_file(datadir.join("gc.prt"))?;
remove_file(datadir.join("gencode.dmp"))?;
remove_file(datadir.join("merged.dmp"))?;
remove_file(datadir.join("names.dmp"))?;
remove_file(datadir.join("nodes.dmp"))?;
remove_file(datadir.join("readme.txt"))?;
Ok(())
}
//-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-
// Database initialization and population
static CREATE_TABLES_STMT: &str = "
DROP TABLE IF EXISTS divisions;
DROP TABLE IF EXISTS geneticCodes;
DROP TABLE IF EXISTS nodes;
DROP TABLE IF EXISTS names;
CREATE TABLE IF NOT EXISTS divisions (
id INTEGER NOT NULL PRIMARY KEY,
division TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS geneticCodes (
id INTEGER NOT NULL PRIMARY KEY,
name TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS nodes (
tax_id INTEGER NOT NULL PRIMARY KEY,
parent_tax_id INTEGER,
rank TEXT NOT NULL,
division_id INTEGER NOT NULL,
genetic_code_id INTEGER NOT NULL,
mito_genetic_code_id INTEGER NOT NULL,
comment TEXT,
FOREIGN KEY(division_id) REFERENCES divisions(id)
FOREIGN KEY(genetic_code_id) REFERENCES geneticCodes(id)
FOREIGN KEY(mito_genetic_code_id) REFERENCES geneticCodes(id)
);
CREATE TABLE IF NOT EXISTS names (
id INTEGER NOT NULL PRIMARY KEY,
tax_id INTEGER NOT NULL,
name TEXT NOT NULL,
name_class TEXT NOT NULL
);";
/// Initialize a the database by running the CREATE TABLE statements.
pub fn init_db(datadir: &PathBuf) -> Result<(), Box<dyn Error>> {
let path = datadir.join("taxonomy.db");
let conn = Connection::open(path)?;
debug!("Database opened.");
conn.execute_batch(CREATE_TABLES_STMT)?;
debug!("Tables created.");
Ok(())
}
/// Read the names.dmp file and insert the records into the database. When
/// it's done, create the indexes on names and name classes.
pub fn insert_names(datadir: &PathBuf) -> Result<(), Box<dyn Error>> {
debug!("Inserting names...");
let dbpath = datadir.join("taxonomy.db");
let conn = Connection::open(dbpath)?;
debug!("Database opened.");
let dumppath = datadir.join("names.dmp");
let file = File::open(dumppath)?;
let mut rdr = csv::ReaderBuilder::new()
.has_headers(false)
.delimiter(b'|')
.from_reader(file);
let mut stmts: Vec<String> = vec![String::from("BEGIN;")];
debug!("Beginning to read records from names.dmp.");
for (i, result) in rdr.records().enumerate() {
if i > 1 && i%10000 == 0 {
stmts.push(String::from("COMMIT;"));
let stmt = &stmts.join("\n");
conn.execute_batch(stmt)?;
debug!("Read {} records so far.", i);
stmts.clear();
stmts.push(String::from("BEGIN;"));
}
let record = result?;
let taxid: i64 = record[0].trim().parse()?;
let name: String = record[1].parse()?;
let name_class: String = record[3].parse()?;
stmts.push(format!("INSERT INTO names(tax_id, name, name_class)
VALUES ({}, '{}', '{}');",
taxid.to_string(),
name.trim().replace("'", "''"),
name_class.trim().replace("'", "''")));
}
// There could left records in stmts
stmts.push(String::from("COMMIT;"));
let stmt = &stmts.join("\n");
conn.execute_batch(stmt)?;
debug!("Done inserting names.");
debug!("Creating names indexes.");
conn.execute("CREATE INDEX idx_names_tax_id ON names(tax_id);", NO_PARAMS)?;
conn.execute("CREATE INDEX idx_names_name ON names(name);", NO_PARAMS)?;
Ok(())
}
/// Read the division.dmp file and insert the records into the database.
pub fn insert_divisions(datadir: &PathBuf) -> Result<(), Box<dyn Error>> {
debug!("Inserting divisions...");
let dbpath = datadir.join("taxonomy.db");
let conn = Connection::open(dbpath)?;
debug!("Database opened.");
let dumppath = datadir.join("division.dmp");
let file = File::open(dumppath)?;
let mut rdr = csv::ReaderBuilder::new()
.has_headers(false)
.delimiter(b'|')
.from_reader(file);
let mut stmts: Vec<String> = vec![String::from("BEGIN;")];
debug!("Beginning to read records from divisions.dmp.");
for result in rdr.records() {
let record = result?;
let id: i64 = record[0].trim().parse()?;
let name: String = record[2].trim().parse()?;
stmts.push(format!("INSERT INTO divisions VALUES ({}, '{}');",
id,
name.replace("'", "''")));
}
stmts.push(String::from("COMMIT;"));
let stmt = &stmts.join("\n");
conn.execute_batch(stmt)?;
debug!("Done inserting divisions.");
Ok(())
}
/// Read the gencode.dmp file and insert the records into the database.
pub fn insert_genetic_codes(datadir: &PathBuf) -> Result<(), Box<dyn Error>> {
debug!("Inserting genetic codes...");
let dbpath = datadir.join("taxonomy.db");
let conn = Connection::open(dbpath)?;
debug!("Database opened.");
let dumppath = datadir.join("gencode.dmp");
let file = File::open(dumppath)?;
let mut rdr = csv::ReaderBuilder::new()
.has_headers(false)
.delimiter(b'|')
.from_reader(file);
let mut stmts: Vec<String> = vec![String::from("BEGIN;")];
debug!("Beginning to read records from gencode.dmp.");
for result in rdr.records() {
let record = result?;
let id: i64 = record[0].trim().parse()?;
let name: String = record[2].trim().parse()?;
stmts.push(format!("INSERT INTO geneticCodes VALUES ({}, '{}');",
id,
name.replace("'", "''")));
}
stmts.push(String::from("COMMIT;"));
let stmt = &stmts.join("\n");
conn.execute_batch(stmt)?;
debug!("Done inserting genetic codes.");
Ok(())
}
/// Read the nodes.dmp file and insert the records into the database. When
/// it's done, create the index on `parent_tax_id`.
pub fn insert_nodes(datadir: &PathBuf) -> Result<(), Box<dyn Error>> {
debug!("Inserting nodes...");
let dbpath = datadir.join("taxonomy.db");
let conn = Connection::open(dbpath)?;
debug!("Database opened.");
let dumppath = datadir.join("nodes.dmp");
let file = File::open(dumppath)?;
let mut rdr = csv::ReaderBuilder::new()
.has_headers(false)
.delimiter(b'|')
.from_reader(file);
let mut stmts: Vec<String> = vec![
String::from("BEGIN;"),
// Special case: the root
String::from("INSERT INTO nodes VALUES (1, 1, 'no rank', 8, 0, 0, '');")
];
debug!("Beginning to read records from nodes.dmp.");
let mut records = rdr.records().enumerate();
records.next(); // We burn the root row
for (i, result) in records {
if i > 0 && i%10000 == 0 {
stmts.push(String::from("COMMIT;"));
let stmt = &stmts.join("\n");
conn.execute_batch(stmt)?;
debug!("Read {} records so far.", i);
stmts.clear();
stmts.push(String::from("BEGIN;"));
}
let record = result?;
let taxid: i64 = record[0].trim().parse()?;
let parent_taxid: i64 = record[1].trim().parse()?;
let rank: String = record[2].trim().parse()?;
let division_id: i64 = record[4].trim().parse()?;
let genetic_code_id: i64 = record[6].trim().parse()?;
let mito_genetic_code_id: i64 = record[8].trim().parse()?;
let comments: String = record[12].trim().parse()?;
stmts.push(format!(
"INSERT INTO nodes VALUES ({}, {}, '{}', {}, {}, {}, '{}');",
taxid.to_string(),
parent_taxid.to_string(),
rank,
division_id.to_string(),
genetic_code_id.to_string(),
mito_genetic_code_id.to_string(),
comments
));
}
// There could left records in stmts
stmts.push(String::from("COMMIT;"));
let stmt = &stmts.join("\n");
conn.execute_batch(stmt)?;
debug!("Done inserting nodes.");
debug!("Creating nodes indexes.");
conn.execute("CREATE INDEX idx_nodes_parent_id ON nodes(parent_tax_id);", NO_PARAMS)?;
Ok(())
} | /// resulting vector contains at least one element.
pub fn get_children(dir: &PathBuf, id: i64, species_only: bool) -> Result<Vec<Node>, Box<dyn Error>> {
let mut ids: Vec<i64> = vec![]; |
source_code.rs | // Author: Mario Sieg
// Project: Corium
//
// Apache License
// Version 2.0, January 2004
// http://www.apache.org/licenses/
//
// TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
//
// 1. Definitions.
//
// "License" shall mean the terms and conditions for use, reproduction,
// and distribution as defined by Sections 1 through 9 of this document.
//
// "Licensor" shall mean the copyright owner or entity authorized by
// the copyright owner that is granting the License.
//
// "Legal Entity" shall mean the union of the acting entity and all
// other entities that control, are controlled by, or are under common
// control with that entity. For the purposes of this definition,
// "control" means (i) the power, direct or indirect, to cause the
// direction or management of such entity, whether by contract or
// otherwise, or (ii) ownership of fifty percent (50%) or more of the
// outstanding shares, or (iii) beneficial ownership of such entity.
//
// "You" (or "Your") shall mean an individual or Legal Entity
// exercising permissions granted by this License.
//
// "Source" form shall mean the preferred form for making modifications,
// including but not limited to software source code, documentation
// source, and configuration files.
//
// "Object" form shall mean any form resulting from mechanical
// transformation or translation of a Source form, including but
// not limited to compiled object code, generated documentation,
// and conversions to other media types.
//
// "Work" shall mean the work of authorship, whether in Source or
// Object form, made available under the License, as indicated by a
// copyright notice that is included in or attached to the work
// (an example is provided in the Appendix below).
//
// "Derivative Works" shall mean any work, whether in Source or Object
// form, that is based on (or derived from) the Work and for which the
// editorial revisions, annotations, elaborations, or other modifications
// represent, as a whole, an original work of authorship. For the purposes
// of this License, Derivative Works shall not include works that remain
// separable from, or merely link (or bind by name) to the interfaces of,
// the Work and Derivative Works thereof.
//
// "Contribution" shall mean any work of authorship, including
// the original version of the Work and any modifications or additions
// to that Work or Derivative Works thereof, that is intentionally
// submitted to Licensor for inclusion in the Work by the copyright owner
// or by an individual or Legal Entity authorized to submit on behalf of
// the copyright owner. For the purposes of this definition, "submitted"
// means any form of electronic, verbal, or written communication sent
// to the Licensor or its representatives, including but not limited to
// communication on electronic mailing lists, source code control systems,
// and issue tracking systems that are managed by, or on behalf of, the
// Licensor for the purpose of discussing and improving the Work, but
// excluding communication that is conspicuously marked or otherwise
// designated in writing by the copyright owner as "Not a Contribution."
//
// "Contributor" shall mean Licensor and any individual or Legal Entity
// on behalf of whom a Contribution has been received by Licensor and
// subsequently incorporated within the Work.
//
// 2. Grant of Copyright License. Subject to the terms and conditions of
// this License, each Contributor hereby grants to You a perpetual,
// worldwide, non-exclusive, no-charge, royalty-free, irrevocable
// copyright license to reproduce, prepare Derivative Works of,
// publicly display, publicly perform, sublicense, and distribute the
// Work and such Derivative Works in Source or Object form.
//
// 3. Grant of Patent License. Subject to the terms and conditions of
// this License, each Contributor hereby grants to You a perpetual,
// worldwide, non-exclusive, no-charge, royalty-free, irrevocable
// (except as stated in this section) patent license to make, have made,
// use, offer to sell, sell, import, and otherwise transfer the Work,
// where such license applies only to those patent claims licensable
// by such Contributor that are necessarily infringed by their
// Contribution(s) alone or by combination of their Contribution(s)
// with the Work to which such Contribution(s) was submitted. If You
// institute patent litigation against any entity (including a
// cross-claim or counterclaim in a lawsuit) alleging that the Work
// or a Contribution incorporated within the Work constitutes direct
// or contributory patent infringement, then any patent licenses
// granted to You under this License for that Work shall terminate
// as of the date such litigation is filed.
//
// 4. Redistribution. You may reproduce and distribute copies of the
// Work or Derivative Works thereof in any medium, with or without
// modifications, and in Source or Object form, provided that You
// meet the following conditions:
//
// (a) You must give any other recipients of the Work or
// Derivative Works a copy of this License; and
//
// (b) You must cause any modified files to carry prominent notices
// stating that You changed the files; and
//
// (c) You must retain, in the Source form of any Derivative Works
// that You distribute, all copyright, patent, trademark, and
// attribution notices from the Source form of the Work,
// excluding those notices that do not pertain to any part of
// the Derivative Works; and
//
// (d) If the Work includes a "NOTICE" text file as part of its
// distribution, then any Derivative Works that You distribute must
// include a readable copy of the attribution notices contained
// within such NOTICE file, excluding those notices that do not
// pertain to any part of the Derivative Works, in at least one
// of the following places: within a NOTICE text file distributed
// as part of the Derivative Works; within the Source form or
// documentation, if provided along with the Derivative Works; or,
// within a display generated by the Derivative Works, if and
// wherever such third-party notices normally appear. The contents
// of the NOTICE file are for informational purposes only and
// do not modify the License. You may add Your own attribution
// notices within Derivative Works that You distribute, alongside
// or as an addendum to the NOTICE text from the Work, provided
// that such additional attribution notices cannot be construed
// as modifying the License.
//
// You may add Your own copyright statement to Your modifications and
// may provide additional or different license terms and conditions
// for use, reproduction, or distribution of Your modifications, or
// for any such Derivative Works as a whole, provided Your use,
// reproduction, and distribution of the Work otherwise complies with
// the conditions stated in this License.
//
// 5. Submission of Contributions. Unless You explicitly state otherwise,
// any Contribution intentionally submitted for inclusion in the Work
// by You to the Licensor shall be under the terms and conditions of
// this License, without any additional terms or conditions.
// Notwithstanding the above, nothing herein shall supersede or modify
// the terms of any separate license agreement you may have executed
// with Licensor regarding such Contributions.
//
// 6. Trademarks. This License does not grant permission to use the trade
// names, trademarks, service marks, or product names of the Licensor,
// except as required for reasonable and customary use in describing the
// origin of the Work and reproducing the content of the NOTICE file.
//
// 7. Disclaimer of Warranty. Unless required by applicable law or
// agreed to in writing, Licensor provides the Work (and each
// Contributor provides its Contributions) on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied, including, without limitation, any warranties or conditions
// of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
// PARTICULAR PURPOSE. You are solely responsible for determining the
// appropriateness of using or redistributing the Work and assume any
// risks associated with Your exercise of permissions under this License.
//
// 8. Limitation of Liability. In no event and under no legal theory,
// whether in tort (including negligence), contract, or otherwise,
// unless required by applicable law (such as deliberate and grossly
// negligent acts) or agreed to in writing, shall any Contributor be
// liable to You for damages, including any direct, indirect, special,
// incidental, or consequential damages of any character arising as a
// result of this License or out of the use or inability to use the
// Work (including but not limited to damages for loss of goodwill,
// work stoppage, computer failure or malfunction, or any and all
// other commercial damages or losses), even if such Contributor
// has been advised of the possibility of such damages.
//
// 9. Accepting Warranty or Additional Liability. While redistributing
// the Work or Derivative Works thereof, You may choose to offer,
// and charge a fee for, acceptance of support, warranty, indemnity,
// or other liability obligations and/or rights consistent with this
// License. However, in accepting such obligations, You may act only
// on Your own behalf and on Your sole responsibility, not on behalf
// of any other Contributor, and only if You agree to indemnify,
// defend, and hold each Contributor harmless for any liability
// incurred by, or claims asserted against, such Contributor by reason
// of your accepting any such warranty or additional liability.
//
// END OF TERMS AND CONDITIONS
//
// APPENDIX: How to apply the Apache License to your work.
//
// To apply the Apache License to your work, attach the following
// boilerplate notice, with the fields enclosed by brackets "[]"
// replaced with your own identifying information. (Don't include
// the brackets!) The text should be enclosed in the appropriate
// comment syntax for the file format. We also recommend that a
// file or class name and description of purpose be included on the
// same "printed page" as the copyright notice for easier
// identification within third-party archives.
//
// Copyright 2021 Mario Sieg "pinsrq" <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[cfg(windows)]
const LINE_ENDING: &str = "\r\n";
#[cfg(not(windows))]
const LINE_ENDING: &str = "\n";
use std::convert::From;
use std::fmt::Debug;
use std::path::Path;
#[macro_export]
macro_rules! include_corium_source {
($file:expr $(,)?) => {
crate::core::source_code::SourceCode::from(include_str!($file))
};
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SourceCode(pub String);
impl SourceCode {
pub fn new(src: String) -> Self {
Self(src)
}
pub fn read<P: AsRef<Path> + Debug>(path: P) -> Self {
let source_code = std::fs::read_to_string(&path)
.unwrap_or_else(|_| panic!("Failed to read source file: {:?}", &path));
let mut src = Self(source_code);
src.post_process();
src
}
#[inline]
pub fn post_process(&mut self) {
self.0.push_str(LINE_ENDING);
}
pub fn dump_with_padding(&self) {
let str = self.0.clone();
let str = str.replace("\n", "\\n");
let str = str.replace("\r", "\\r");
let str = str.replace("\t", "\\t");
let str = str.replace("\\", "\\\\");
let str = str.replace("\0", "\\0");
println!("{}", str);
}
}
impl From<&str> for SourceCode {
fn | (x: &str) -> Self {
let mut src = Self(x.to_string());
src.post_process();
src
}
}
| from |
helpers.js | const hbs = require("hbs")
| return new Date().getFullYear()
})
hbs.registerHelper("title",(title)=>{
return `mark | ${title}`
}) | hbs.registerHelper("anio",()=>{ |
fonts_scanner.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# author: [email protected]
import os
from utils.get_logger import Log
def get_fonts_from_local():
log = Log()
logger = log.logger_generate('font_scanner')
# fonts_lists = []
for root, dirs, files in os.walk('../fonts'): | fonts_file_path = os.path.join(root, file)
if os.path.splitext(file)[1] == '.ttf' or os.path.splitext(file)[1] == '.otf':
# fonts_lists.append(os.path.join(root, file))
logger.info('Fonts file found: %s' % fonts_file_path)
yield fonts_file_path
else:
logger.info('Files which is not a fonts be ignored: %s' % file)
# logger.info('Fonts gonna to be uploaded are: %s' % fonts_lists)
# return fonts_lists
if __name__ == '__main__':
get_fonts_files = get_fonts_from_local()
for fonts_file in iter(get_fonts_files):
print(fonts_file) | logger.info('File found %s, dirs: %s' % (files, dirs))
for file in files:
logger.info('File found %s' % file) |
2115-find-all-possible-recipes-from-given-supplies.py | class Solution:
def findAllRecipes(self, recipes: List[str], ingredients: List[List[str]], supplies: List[str]) -> List[str]:
supplies, recipies = set(supplies), set(recipes)
indegree = {elem:0 for elem in recipies}
graph = defaultdict(list)
for i, recipie in enumerate(recipes):
for ingredient in ingredients[i]:
if ingredient not in supplies:
indegree[recipie] += 1
graph[ingredient].append(recipie)
queues = deque()
for key, value in indegree.items():
if value == 0:
queues.append(key)
result = []
while queues:
element = queues.popleft()
result.append(element) | for elem in graph[element]:
indegree[elem] -= 1
if indegree[elem] == 0:
queues.append(elem)
return result | |
lib.rs | #[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
pub mod schema;
pub mod models;
extern crate config;
extern crate shellexpand;
extern crate chrono;
use config::*;
use chrono::prelude::*;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use models::*;
use schema::timers;
embed_migrations!("../../migrations");
// establish_connection returns a SqliteConnection to the
// TICK_DATABASE_FILE environment variable
pub fn establish_connection() -> SqliteConnection {
let config_path = shellexpand::tilde("~/.config/tick/config.yaml").to_string();
let mut settings = Config::default();
settings.merge(File::with_name(&config_path))
.unwrap_or_else(|_| panic!("Error finding config file {:?}", &config_path));
let mut db_path = settings.get_str("database_path")
.expect("The key `database_path` was not found in the config file.");
db_path = shellexpand::tilde(&db_path).to_string();
let connection = SqliteConnection::establish(&db_path)
.unwrap_or_else(|_| panic!("Error connecting to {:?}", &db_path));
// Run the migrations for the database
let _ = embedded_migrations::run(&connection);
// Return the SqliteConnection
connection
}
// create_timer takes a conenction and a name and start_entry string and creates
// a new timer.
pub fn create_timer<'a>(conn: &SqliteConnection, name: &'a str, start_entry: &'a str) -> usize {
let new_timer = NewTimer {
name,
start_time: Local::now().timestamp() as i32,
start_entry,
running: 1,
};
diesel::insert_into(timers::table)
.values(&new_timer)
.execute(conn)
.expect("Error saving new timer")
}
// latest_timer is a private function which gets the latest running timer by
// timer_name or the latest timer by "running" being true.
fn latest_timer<'a>(conn: &'a SqliteConnection, timer_name: &'a str) -> Result<models::Timer, diesel::result::Error> {
use schema::timers::dsl::*;
if timer_name != "" {
timers.filter(name.like(&timer_name))
.filter(running.eq(1))
.first(conn)
} else {
timers.filter(running.eq(1))
.first(conn)
}
}
// stop_timer takes a connection and a name and end_entry string and stops a
// running timer.
pub fn | <'a>(conn: &'a SqliteConnection, timer_name: &'a str, timer_end_entry: &'a str) -> () {
use schema::timers::dsl::*;
let timer = latest_timer(&conn, &timer_name);
match timer {
Ok(t) => {
diesel::update(timers.find(&t.id))
.set((
running.eq(0),
end_time.eq(Local::now().timestamp() as i32),
end_entry.eq(&timer_end_entry)
))
.execute(conn)
.expect(&format!("Unable to stop timer {}", &t.id));
},
Err(_) => println!("Are you sure a timer is running? Better go catch it.")
}
}
// list_timers takes a connection and returns ascending order timers.
pub fn list_timers<'a>(conn: &'a SqliteConnection) -> Vec<models::Timer> {
use schema::timers::dsl::*;
timers.order(id.asc())
.load::<Timer>(conn)
.expect("Error loading timers table")
}
// check_timer takes a connection and returns descending order running timers.
pub fn check_timer<'a>(conn: &'a SqliteConnection) -> Vec<models::Timer> {
use schema::timers::dsl::*;
timers.filter(running.eq(1))
.order(id.desc())
.load::<Timer>(conn)
.expect("Error getting running timer")
}
// remove_timer takes a connection and an ID and deletes the timer matching the ID.
pub fn remove_timer<'a>(conn: &'a SqliteConnection, lookup_id: &'a i32) -> usize {
use schema::timers::dsl::*;
diesel::delete(timers.find(&lookup_id))
.execute(conn)
.expect(&format!("Unable to remove timer matching id {}", &lookup_id))
}
// parse_date takes a timestamp number and returns a date-formatted string.
pub fn parse_date<'a>(ts: i32) -> String {
let timestring = format!("{:?}", ts);
let dt: DateTime<Local> = Local.datetime_from_str(×tring, "%s").unwrap();
dt.format("%Y-%m-%d").to_string()
}
// parse_time takes a timestamp number and returns a time-formatted string.
pub fn parse_time<'a>(ts: i32) -> String {
let timestring = format!("{:?}", ts);
let dt: DateTime<Local> = Local.datetime_from_str(×tring, "%s").unwrap();
if ts == 0 {
format!("NOW")
} else {
dt.format("%H:%M:%S").to_string()
}
}
// get_duration takes a start and end timestamp number and returns the delta
// between the start and end timestamp as a time-formatted string.
pub fn get_duration<'a>(s: i32, e: i32) -> String {
let mut now: i32 = Local::now().timestamp() as i32;
if e > s {
now = e;
}
let delta = now - s;
format!(
"{hours:02}:{minutes:02}:{seconds:02}",
hours = delta / 60 / 60,
minutes = delta / 60 % 60,
seconds = delta % 60
)
}
| stop_timer |
infinispan.go | // Copyright 2020 Red Hat, Inc. and/or its affiliates
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package steps
import (
"fmt"
infinispan "github.com/infinispan/infinispan-operator/pkg/apis/infinispan/v1"
"github.com/cucumber/godog"
"github.com/cucumber/messages-go/v10"
"github.com/kiegroup/kogito-cloud-operator/test/framework"
)
/*
DataTable for Infinispan:
| username | developer |
| password | mypass |
*/
const (
// DataTable first column
infinispanUsernameKey = "username"
infinispanPasswordKey = "password"
externalInfinispanSecret = "external-infinispan-secret"
kogitoExternalInfinispanSecret = "kogito-external-infinispan-secret"
usernameSecretKey = "user"
passwordSecretKey = "pass"
)
var performanceInfinispanContainerSpec = infinispan.InfinispanContainerSpec{
ExtraJvmOpts: "-Xmx2G",
Memory: "3Gi",
CPU: "1",
}
func registerInfinispanSteps(s *godog.Suite, data *Data) {
s.Step(`^Infinispan instance "([^"]*)" is deployed with configuration:$`, data.infinispanInstanceIsDeployedWithConfiguration)
s.Step(`^Infinispan instance "([^"]*)" is deployed for performance within (\d+) minute\(s\) with configuration:$`, data.infinispanInstanceIsDeployedForPerformanceWithinMinutesWithConfiguration)
}
func (data *Data) infinispanInstanceIsDeployedWithConfiguration(name string, table *messages.PickleStepArgument_PickleTable) error {
if err := createInfinispanSecret(data.Namespace, externalInfinispanSecret, table); err != nil {
return err
}
infinispan := framework.GetInfinispanStub(data.Namespace, name, externalInfinispanSecret)
if err := framework.DeployInfinispanInstance(data.Namespace, infinispan); err != nil {
return err
}
return framework.WaitForPodsWithLabel(data.Namespace, "app", "infinispan-pod", 1, 3)
}
func (data *Data) infinispanInstanceIsDeployedForPerformanceWithinMinutesWithConfiguration(name string, timeOutInMin int, table *messages.PickleStepArgument_PickleTable) error {
if err := createInfinispanSecret(data.Namespace, externalInfinispanSecret, table); err != nil {
return err
}
infinispan := framework.GetInfinispanStub(data.Namespace, name, externalInfinispanSecret)
// Add performance-specific container spec
infinispan.Spec.Container = performanceInfinispanContainerSpec
if err := framework.DeployInfinispanInstance(data.Namespace, infinispan); err != nil {
return err
}
return framework.WaitForInfinispanPodsToBeRunningWithConfig(data.Namespace, performanceInfinispanContainerSpec, 1, timeOutInMin)
}
// Misc methods
func createInfinispanSecret(namespace, secretName string, table *messages.PickleStepArgument_PickleTable) error {
credentials := make(map[string]string)
credentials["operator"] = "supersecretoperatorpassword" // Credentials required by Infinispan operator
if username, password, err := getInfinispanCredentialsFromTable(table); err != nil | else if len(username) > 0 {
// User defined credentials
credentials[username] = password
}
return framework.CreateInfinispanSecret(namespace, secretName, credentials)
}
// Table parsing
func getInfinispanCredentialsFromTable(table *messages.PickleStepArgument_PickleTable) (username, password string, err error) {
if len(table.Rows) == 0 { // Using default configuration
return
}
if len(table.Rows[0].Cells) != 2 {
return "", "", fmt.Errorf("expected table to have exactly two columns")
}
for _, row := range table.Rows {
firstColumn := getFirstColumn(row)
switch firstColumn {
case infinispanUsernameKey:
username = getSecondColumn(row)
case infinispanPasswordKey:
password = getSecondColumn(row)
default:
return "", "", fmt.Errorf("Unrecognized configuration option: %s", firstColumn)
}
}
return
}
| {
return err
} |
device_power_test_suite.py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This test suite verifies device_power capability."""
from typing import Type
from gazoo_device.tests.functional_tests.utils import gdm_test_base
import retry
class DevicePowerTestSuite(gdm_test_base.GDMTestBase):
"""Test suite for the device_power capability."""
@classmethod
def is_applicable_to(cls, device_type: str,
device_class: Type[gdm_test_base.DeviceType],
device_name: str) -> bool:
"""Determine if this test suite can run on the given device."""
if not device_class.has_capabilities(["device_power"]):
return False
props = ["device_power.hub_name", "device_power.port_number"]
return cls.check_properties_set(device_name, props)
@classmethod
def requires_pairing(cls) -> bool:
"""Returns True if the device must be paired to run this test suite."""
return False
@retry.retry(tries=2, delay=30)
def | (self):
"""Verifies on() and off() methods work."""
original_mode = self.device.device_power.port_mode
try:
self.device.device_power.off()
self.assertEqual(
self.device.device_power.port_mode, "off",
f"{self.device.name} port {self.device.device_power.port_number} "
"should have been set to off")
self.device.device_power.on()
on_modes = ["on", "charge", "sync"]
self.assertIn(
self.device.device_power.port_mode, on_modes,
f"{self.device.name} port {self.device.device_power.port_number} "
f"should have been set to one of {on_modes}")
finally:
if original_mode == "off":
self.logger.info(
"Restoring device power back to its original mode 'off'")
self.device.device_power.off()
if __name__ == "__main__":
gdm_test_base.main()
| test_device_power_on_off |
num.rs | use std::str::{FromStr, from_utf8_unchecked};
pub fn parse_int<T: FromStr>(bytes: &[u8]) -> Option<T> |
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_int() {
assert_eq!(parse_int::<u16>(b"0"), Some(0));
assert_eq!(parse_int::<u8>(b"12"), Some(12));
assert_eq!(parse_int::<u16>(b"2018"), Some(2018));
assert_eq!(parse_int::<u32>(b"2018"), Some(2018));
assert_eq!(parse_int::<u32>(b"01d8"), None);
assert_eq!(parse_int::<u32>(b"-018"), None);
assert_eq!(parse_int::<i16>(b"0"), Some(0));
assert_eq!(parse_int::<i16>(b"-12"), Some(-12));
assert_eq!(parse_int::<i16>(b"2018"), Some(2018));
assert_eq!(parse_int::<i32>(b"-018"), Some(-18));
assert_eq!(parse_int::<i32>(b"-0d18"), None);
}
proptest! {
#[test]
fn test_parse_int_with_u8(v: u8) {
let input = format!("{}", v);
prop_assert_eq!(parse_int::<u8>(input.as_bytes()), Some(v));
}
#[test]
fn test_parse_int_with_u16(v: u16) {
let input = format!("{}", v);
prop_assert_eq!(parse_int::<u16>(input.as_bytes()), Some(v));
}
#[test]
fn test_parse_int_with_u32(v: u32) {
let input = format!("{}", v);
prop_assert_eq!(parse_int::<u32>(input.as_bytes()), Some(v));
}
#[test]
fn test_parse_int_with_u64(v: u64) {
let input = format!("{}", v);
prop_assert_eq!(parse_int::<u64>(input.as_bytes()), Some(v));
}
#[test]
fn test_parse_int_with_i8(v: i8) {
let input = format!("{}", v);
prop_assert_eq!(parse_int::<i8>(input.as_bytes()), Some(v));
}
#[test]
fn test_parse_int_with_i16(v: i16) {
let input = format!("{}", v);
prop_assert_eq!(parse_int::<i16>(input.as_bytes()), Some(v));
}
#[test]
fn test_parse_int_with_i32(v: i32) {
let input = format!("{}", v);
prop_assert_eq!(parse_int::<i32>(input.as_bytes()), Some(v));
}
#[test]
fn test_parse_int_with_i64(v: i64) {
let input = format!("{}", v);
prop_assert_eq!(parse_int::<i64>(input.as_bytes()), Some(v));
}
}
}
| {
// `unsafe` here should be okay because `from_str()` converts back
// to `&[u8]` and only cares about ASCII digits
let chars = unsafe { from_utf8_unchecked(bytes) };
<(T)>::from_str(chars).ok()
} |
etcd_test.go | // +build integration
package collector
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"os"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/coreos/etcd/clientv3"
"github.com/inconshreveable/log15"
)
func init() {
if v, ok := os.LookupEnv("ETCD_ENDPOINTS"); ok {
etcdEndpoints = strings.Split(v, ",")
}
}
var etcdEndpoints = []string{"http://etcd:2379"}
func Test_detectorLinkKey(t *testing.T) |
func Test_eventKey(t *testing.T) {
tests := []struct {
name string
namespace string
detectorID string
deviceID string
timestamp time.Time
want string
}{
{
name: "simple",
namespace: "ns",
detectorID: "detID",
deviceID: "devID",
timestamp: time.Unix(1563568110, 0),
want: "/ns/detectors/detID/1563568100/devID",
},
{
name: "simple",
namespace: "ns",
detectorID: "detID",
deviceID: "devID",
timestamp: time.Unix(1563568120, 0),
want: "/ns/detectors/detID/1563568100/devID",
},
{
name: "simple",
namespace: "ns",
detectorID: "detID",
deviceID: "devID",
timestamp: time.Unix(1563568180, 0),
want: "/ns/detectors/detID/1563568100/devID",
},
{
name: "simple",
namespace: "ns",
detectorID: "detID",
deviceID: "devID",
timestamp: time.Unix(1563568190, 0),
want: "/ns/detectors/detID/1563568100/devID",
},
{
name: "simple",
namespace: "ns",
detectorID: "detID",
deviceID: "devID",
timestamp: time.Unix(1563568200, 0),
want: "/ns/detectors/detID/1563568200/devID",
},
{
name: "simple",
namespace: "ns",
detectorID: "detID",
deviceID: "devID",
timestamp: time.Unix(1563568210, 0),
want: "/ns/detectors/detID/1563568200/devID",
},
{
name: "simple",
namespace: "ns",
detectorID: "detID",
deviceID: "devID",
timestamp: time.Unix(1563568220, 0),
want: "/ns/detectors/detID/1563568200/devID",
},
}
for _, test := range tests {
tt := test
t.Run(tt.name, func(t *testing.T) {
got := EventKey(tt.namespace, tt.detectorID, tt.deviceID, tt.timestamp)
// Need to remove non deterministic part of ulid.
sp := strings.Split(got, "/")
last := strings.Split(sp[len(sp)-1], ".")
sp[len(sp)-1] = last[1]
got = strings.Join(sp, "/")
if got != tt.want {
t.Errorf("detectorEventKey() = %v, want %v", got, tt.want)
}
})
}
}
func Test_etcdRepository_CreateDetectionEvent(t *testing.T) {
type tval struct {
Timestamp int64
}
prefix := "/integration_tests"
repo, err := NewEtcdRepository(prefix, etcdEndpoints, log15.New())
if err != nil {
t.Fatalf("failed to create etcd repository: %v", err)
}
r := repo.(*etcdRepository)
tests := []struct {
name string
detectorID string
deviceID string
timestamp time.Time
wantErr bool
}{
{
name: "basic",
detectorID: "xxx-1",
deviceID: "ddd-1",
timestamp: time.Unix(0, 1000),
},
{
name: "basic",
detectorID: "xxx-2",
deviceID: "ddd-2",
timestamp: time.Unix(0, 2000),
},
{
name: "basic",
detectorID: "xxx-3",
deviceID: "ddd-3",
timestamp: time.Unix(0, 3000),
},
}
for _, test := range tests {
tt := test
t.Run(tt.name, func(t *testing.T) {
if err := r.CreateDetectionEvent(context.Background(), tt.detectorID, tt.deviceID, tt.timestamp); (err != nil) != tt.wantErr {
t.Errorf("etcdRepository.CreateDetectionEvent() error = %v, wantErr %v", err, tt.wantErr)
}
value := &tval{
Timestamp: tt.timestamp.UnixNano(),
}
assertETCDKeyExists(t, EventKey(prefix, tt.detectorID, tt.deviceID, tt.timestamp), value, &tval{}, r.cli)
})
}
}
func Test_etcdRepository_save(t *testing.T) {
type tval struct {
Timestamp string
}
prefix := "/integration_tests"
repo, err := NewEtcdRepository(prefix, etcdEndpoints, log15.New())
if err != nil {
t.Fatalf("failed to create etcd repository: %v", err)
}
r := repo.(*etcdRepository)
tests := []struct {
name string
key string
value interface{}
wantErr bool
}{
{
name: "push",
key: "example-key-1",
value: &tval{time.Now().Format(time.RFC3339)},
wantErr: false,
},
{
name: "push",
key: "example-key-2",
value: &tval{time.Now().Format(time.RFC3339)},
wantErr: false,
},
{
name: "push",
key: "example-key-3",
value: &tval{time.Now().Format(time.RFC3339)},
wantErr: false,
},
}
for _, test := range tests {
tt := test
t.Run(tt.name, func(t *testing.T) {
if err := r.save(context.Background(), tt.key, tt.value); (err != nil) != tt.wantErr {
t.Errorf("etcdRepository.save() error = %v, wantErr %v", err, tt.wantErr)
}
assertETCDKeyExists(t, tt.key, tt.value, &tval{}, r.cli)
})
}
}
func assertETCDKeyExists(t *testing.T, key string, value, result interface{}, cli *clientv3.Client) {
t.Helper()
resp, err := cli.Get(context.Background(), key)
if err != nil {
t.Fatalf("failed to check key: %v", err)
}
if resp.Count != 1 {
t.Fatalf("have number of returned values not equal 1: %v", resp.Kvs)
}
rawValue := resp.Kvs[0].Value
val, err := base64.StdEncoding.DecodeString(string(rawValue))
if err != nil {
t.Fatalf("failed to decode base64: %v", err)
}
err = json.Unmarshal(val, result)
if err != nil {
fmt.Println(string(val))
t.Fatalf("failed to unmarshal value: %s", string(val))
}
assert.Equal(t, value, result)
}
func Test_etcdRepository_CreateDetectorLink(t *testing.T) {
r := &etcdRepository{}
assert.Panics(t, func() { _ = r.CreateDetectorLink(context.Background(), "a", "b", 5) })
}
func Test_etcdRepository_CreateActiveDetectors(t *testing.T) {
r := &etcdRepository{}
assert.Panics(t, func() { _ = r.CreateActiveDetectors(context.Background(), "a", "b", time.Time{}) })
}
func Test_etcdRepository_CreateBatteryVoltage(t *testing.T) {
r := &etcdRepository{}
assert.Panics(t, func() { _ = r.CreateBatteryVoltage(context.Background(), "a", 0, time.Time{}) })
}
func Test_etcdRepository_CreateDetectionCount(t *testing.T) {
r := &etcdRepository{}
assert.Panics(t, func() { _ = r.CreateDetectionCount(context.Background(), "a", "b", 1, time.Time{}) })
}
func Test_etcdRepository_CreateCoordinates(t *testing.T) {
r := &etcdRepository{}
assert.Panics(t, func() { _ = r.CreateCoordinates(context.Background(), "a", 1, 1) })
}
| {
tests := []struct {
name string
namespace string
destDetectorID string
srcDetectorID string
want string
}{
{
name: "simple",
namespace: "somenamespace",
destDetectorID: "dest-det-id",
srcDetectorID: "src-det-id",
want: "/somenamespace/links/dest-det-id/src-det-id",
},
}
for _, test := range tests {
tt := test
t.Run(test.name, func(t *testing.T) {
if got := detectorLinkKey(tt.namespace, tt.destDetectorID, tt.srcDetectorID); got != tt.want {
t.Errorf("detectorLinkPath() = %v, want %v", got, tt.want)
}
})
}
} |
build.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations |
use std::env;
fn main() {
let sdk_dir = env::var("SGX_SDK").unwrap_or_else(|_| "/opt/intel/sgxsdk".to_string());
println!("cargo:rustc-link-search=native={}/lib64", sdk_dir);
println!("cargo:rustc-link-lib=static=sgx_tcrypto");
} | // under the License.. |
cursor.rs | use crate::{values as val, St, StyleValues, UpdateStyleValues};
#[derive(Clone, Debug, Copy, PartialEq, Eq, Display, From)]
pub enum Cursor {
#[from]
Alias(val::Alias),
#[from]
AllScroll(val::AllScroll),
#[from]
Auto(val::Auto),
#[from]
Cell(val::Cell),
#[from]
ContextMenu(val::ContextMenu),
#[from]
ColResize(val::ColResize),
#[from]
Copy(val::Copy),
#[from]
Crosshair(val::Crosshair),
#[from]
Default(val::Default),
#[from]
EResize(val::EResize),
#[from]
EwResize(val::EwResize),
#[from]
Grab(val::Grab),
#[from]
Grabbing(val::Grabbing),
#[from]
Help(val::Help),
#[from]
Move(val::Move),
#[from]
NResize(val::NResize),
#[from]
NeResize(val::NeResize),
#[from]
NeswResize(val::NeswResize),
#[from]
NsResize(val::NsResize),
#[from]
NwResize(val::NwResize),
#[from]
NwseResize(val::NwseResize),
#[from]
NoDrop(val::NoDrop),
#[from]
None(val::None),
#[from]
NotAllowed(val::NotAllowed),
#[from]
Pointer(val::Pointer),
#[from]
Progress(val::Progress),
#[from]
RowResize(val::RowResize),
#[from]
SResize(val::SResize),
#[from]
SeResize(val::SeResize),
#[from]
SwResize(val::SwResize),
#[from]
Text(val::Text),
// TODO: Handle Url value
// Url(Vec<String>),
#[from]
VerticalText(val::VerticalText),
#[from]
WResize(val::WResize),
#[from]
Wait(val::Wait),
#[from]
ZoomIn(val::ZoomIn),
#[from]
ZoomOut(val::ZoomOut),
#[from]
Initial(val::Initial),
#[from]
Inherit(val::Inherit),
}
impl UpdateStyleValues for Cursor {
fn | (self, values: StyleValues) -> StyleValues {
values.add(St::Cursor, self)
}
}
| update_style_values |
xetex_stringpool.rs | #![allow(
dead_code,
mutable_transmutes,
non_camel_case_types,
non_snake_case,
non_upper_case_globals,
unused_assignments,
unused_mut
)]
use libc::strlen;
use crate::xetex_errors::overflow;
use crate::xetex_ini::{
buffer, init_pool_ptr, init_str_ptr, max_strings, pool_ptr, pool_size, str_pool, str_ptr,
str_start,
};
pub type size_t = u64;
pub type UnicodeScalar = i32;
pub type pool_pointer = i32;
pub type str_number = i32;
pub type packed_UTF16_code = u16;
/* tectonic/xetex-stringpool.c: preloaded "string pool" constants
Copyright 2017-2018 the Tectonic Project
Licensed under the MIT License.
*/
static mut string_constants: [*const i8; 3] = [
b"this marks the start of the stringpool\x00" as *const u8 as *const i8,
b"\x00" as *const u8 as *const i8,
0 as *const i8,
];
#[no_mangle]
pub unsafe extern "C" fn load_pool_strings(mut spare_size: i32) -> i32 {
let mut s: *const i8 = 0 as *const i8;
let mut i: i32 = 0i32;
let mut g: str_number = 0i32;
loop {
let fresh0 = i;
i = i + 1;
s = string_constants[fresh0 as usize];
if s.is_null() {
break;
}
let mut len = strlen(s);
let total_len = len;
if total_len >= spare_size as usize {
return 0i32;
}
loop {
let fresh1 = len;
len = len.wrapping_sub(1);
if !(fresh1 > 0) {
break;
}
let fresh2 = s;
s = s.offset(1);
let fresh3 = pool_ptr;
pool_ptr = pool_ptr + 1;
*str_pool.offset(fresh3 as isize) = *fresh2 as packed_UTF16_code
}
g = make_string()
/* Returns 0 on error. */
}
g
}
#[no_mangle]
pub unsafe extern "C" fn length(mut s: str_number) -> i32 {
if s as i64 >= 65536 {
*str_start.offset(((s + 1i32) as i64 - 65536) as isize)
- *str_start.offset((s as i64 - 65536) as isize)
} else if s >= 32i32 && s < 127i32 {
1
} else if s <= 127i32 {
3
} else if s < 256i32 {
4
} else {
8
}
}
#[no_mangle]
pub unsafe extern "C" fn make_string() -> str_number {
if str_ptr == max_strings {
overflow(
b"number of strings\x00" as *const u8 as *const i8,
max_strings - init_str_ptr,
);
}
str_ptr += 1;
*str_start.offset((str_ptr - 65536i32) as isize) = pool_ptr;
str_ptr - 1i32
}
#[no_mangle]
pub unsafe extern "C" fn | (mut s: str_number) {
let mut i: i32 = 0;
let mut j: pool_pointer = 0;
i = length(s);
if pool_ptr + i > pool_size {
overflow(
b"pool size\x00" as *const u8 as *const i8,
pool_size - init_pool_ptr,
);
}
j = *str_start.offset((s as i64 - 65536) as isize);
while i > 0i32 {
*str_pool.offset(pool_ptr as isize) = *str_pool.offset(j as isize);
pool_ptr += 1;
j += 1;
i -= 1
}
}
#[no_mangle]
pub unsafe extern "C" fn str_eq_buf(mut s: str_number, mut k: i32) -> bool {
let mut j: pool_pointer = 0;
j = *str_start.offset((s as i64 - 65536) as isize);
while j < *str_start.offset(((s + 1i32) as i64 - 65536) as isize) {
if *buffer.offset(k as isize) as i64 >= 65536 {
if *str_pool.offset(j as isize) as i64
!= 55296 + (*buffer.offset(k as isize) as i64 - 65536) / 1024 as i64
{
return false;
} else {
if *str_pool.offset((j + 1i32) as isize) as i64
!= 56320 + (*buffer.offset(k as isize) as i64 - 65536) % 1024 as i64
{
return false;
} else {
j += 1
}
}
} else if *str_pool.offset(j as isize) as i32 != *buffer.offset(k as isize) {
return false;
}
j += 1;
k += 1
}
true
}
#[no_mangle]
pub unsafe extern "C" fn str_eq_str(mut s: str_number, mut t: str_number) -> bool {
let mut j: pool_pointer = 0;
let mut k: pool_pointer = 0;
if length(s) != length(t) {
return false;
}
if length(s) == 1i32 {
if (s as i64) < 65536 {
if (t as i64) < 65536 {
if s != t {
return false;
}
} else if s
!= *str_pool.offset(*str_start.offset((t as i64 - 65536) as isize) as isize) as i32
{
return false;
}
} else if (t as i64) < 65536 {
if *str_pool.offset(*str_start.offset((s as i64 - 65536) as isize) as isize) as i32 != t
{
return false;
}
} else if *str_pool.offset(*str_start.offset((s as i64 - 65536) as isize) as isize) as i32
!= *str_pool.offset(*str_start.offset((t as i64 - 65536) as isize) as isize) as i32
{
return false;
}
} else {
j = *str_start.offset((s as i64 - 65536) as isize);
k = *str_start.offset((t as i64 - 65536) as isize);
while j < *str_start.offset(((s + 1i32) as i64 - 65536) as isize) {
if *str_pool.offset(j as isize) as i32 != *str_pool.offset(k as isize) as i32 {
return false;
}
j += 1;
k += 1
}
}
true
}
#[no_mangle]
pub unsafe extern "C" fn search_string(mut search: str_number) -> str_number {
let mut s: str_number = 0;
let mut len: i32 = 0;
len = length(search);
if len == 0i32 {
return (65536 + 1i32 as i64) as str_number;
} else {
s = search - 1i32;
while s as i64 > 65535 {
if length(s) == len {
if str_eq_str(s, search) {
return s;
}
}
s -= 1
}
}
0i32
}
/* tectonic/xetex-stringpool.h: preloaded "string pool" constants
Copyright 2017 the Tectonic Project
Licensed under the MIT License.
*/
#[no_mangle]
pub unsafe extern "C" fn slow_make_string() -> str_number {
let mut s: str_number = 0;
let mut t: str_number = 0;
t = make_string();
s = search_string(t);
if s > 0i32 {
str_ptr -= 1;
pool_ptr = *str_start.offset((str_ptr - 65536i32) as isize);
return s;
}
t
}
| append_str |
MOUNT_ARRAY_ALD_MOUNTED_ERROR.py | #!/usr/bin/env python3
import subprocess
import os
import sys
sys.path.append("../lib/")
import json_parser
import ibofos
import cli
import test_result
import MOUNT_ARRAY_BASIC_1
def clear_result():
if os.path.exists( __file__ + ".result"):
os.remove( __file__ + ".result")
def set_result(detail):
code = json_parser.get_response_code(detail)
result = test_result.expect_false(code)
with open(__file__ + ".result", "w") as result_file:
result_file.write(result + " (" + str(code) + ")" + "\n" + detail)
def | ():
clear_result()
MOUNT_ARRAY_BASIC_1.execute()
out = cli.mount_ibofos()
return out
if __name__ == "__main__":
out = execute()
set_result(out)
ibofos.kill_ibofos() | execute |
tracing.py | # Copyright (C) 2018 SignalFx, Inc. All rights reserved.
from bson import json_util as json
from opentracing.ext import tags
import pymongo.monitoring
from six import text_type
import opentracing
class CommandTracing(pymongo.monitoring.CommandListener):
| _scopes = {}
def __init__(self, tracer=None, span_tags=None):
try:
global_tracer = opentracing.global_tracer()
except AttributeError:
global_tracer = opentracing.tracer
self._tracer = tracer or global_tracer
self._span_tags = span_tags or {}
def started(self, event):
scope = self._tracer.start_active_span(event.command_name)
self._scopes[event.request_id] = scope
span = scope.span
span.set_tag(tags.DATABASE_TYPE, 'mongodb')
span.set_tag(tags.COMPONENT, 'PyMongo')
span.set_tag(tags.DATABASE_INSTANCE, event.database_name)
for tag, value in self._span_tags.items():
span.set_tag(tag, value)
if not event.command:
return
command_name, collection = next(iter(event.command.items()))
span.set_tag('command.name', command_name)
namespace = text_type('{}.{}').format(event.database_name, collection)
span.set_tag('namespace', namespace)
span.set_tag('command', json.dumps(event.command)[:512])
def succeeded(self, event):
scope = self._scopes.pop(event.request_id, None)
if scope is None:
return
span = scope.span
span.set_tag('event.reply', json.dumps(event.reply)[:512])
span.set_tag('reported_duration', event.duration_micros)
scope.close()
def failed(self, event):
scope = self._scopes.pop(event.request_id, None)
if scope is None:
return
span = scope.span
span.set_tag(tags.ERROR, True)
span.set_tag('event.failure', json.dumps(event.failure))
span.set_tag('reported_duration', event.duration_micros)
scope.close() |
|
models.py | #!/usr/bin/env python
#-*- encoding: UTF-8 -*-
###############################################
# Todos los derechos reservados a: #
# CreceLibre Consultores en Tecnologías Ltda. #
# #
# ©Milton Inostroza Aguilera #
# [email protected] #
# 2009 #
###############################################
from django.db import models
from AlyMoly.mantenedor.models import Producto, Promocion, Trabajador
class Turno(models.Model):
"""
estado:
1 --> abierto
2 --> cerrado
"""
fecha_apertura_sistema = models.DateTimeField()
fecha_cierre_sistema = models.DateTimeField(null=True, blank=True)
estado = models.IntegerField(default=1, blank=True)
trabajador = models.ForeignKey(Trabajador, blank=True)
monto_apertura_caja = models.IntegerField(default=0)
monto_cierre_calculado = models.IntegerField(default=0, blank=True)
monto_afecto = models.IntegerField(default=0, blank=True)
monto_exento = models.IntegerField(default=0, blank=True)
def monto_cierre_informado(self):
return self.boletadeposito.total
def estado_turno(self):
if self.estado == 1:
return "Abierto"
else:
return "Cerrado"
def save(self, force_insert=False, force_update=False):
"""
Al guardar un turno abierto se verifica que el trabajador ya no cuente con un
turno abierto anteriormente.
"""
if self.estado == 1 and len(Turno.objects.exclude(id=self.id).filter(trabajador__id=self.trabajador.id).filter(estado=1)) > 0:
raise Exception(u"Usted ya cuenta con un turno abierto.")
super(Turno, self).save(force_insert, force_update)
class BoletaDeposito(models.Model):
turno = models.OneToOneField(Turno, blank=True)
veintemil = models.PositiveIntegerField(default=0, blank=True)
diezmil = models.PositiveIntegerField(default=0, blank=True)
cincomil = models.PositiveIntegerField(default=0, blank=True)
dosmil = models.PositiveIntegerField(default=0, blank=True)
mil = models.PositiveIntegerField(default=0, blank=True)
quinientos = models.PositiveIntegerField(default=0, blank=True)
cien = models.PositiveIntegerField(default=0, blank=True)
cincuenta = models.PositiveIntegerField(default=0, blank=True)
diez = models.PositiveIntegerField(default=0, blank=True)
tarjetas = models.PositiveIntegerField(default=0, blank=True)
otros = models.PositiveIntegerField(default=0, blank=True)
total = models.PositiveIntegerField(default=0, blank=True)
class Venta(models.Model):
"" |
class LineaDetalle(models.Model):
cantidad = models.IntegerField()
precio_venta = models.IntegerField()
precio_venta_total = models.IntegerField()
producto = models.ForeignKey(Producto, null=True, blank=True)
promocion = models.ForeignKey(Promocion, null=True, blank=True)
venta = models.ForeignKey('Venta')
| "
medio_pago:
1 --> efectivo
2 --> otro
"""
fecha_venta = models.DateTimeField()
folio_boleta = models.PositiveIntegerField(null=True, blank=True)
monto_total = models.PositiveIntegerField()
monto_afecto = models.PositiveIntegerField()
monto_exento = models.PositiveIntegerField()
cantidad_productos = models.PositiveIntegerField()
medio_pago = models.PositiveIntegerField()
monto_pago = models.PositiveIntegerField(null=True)
turno = models.ForeignKey('Turno')
def __unicode__(self):
return u"%s-%s" % (self.id, self.folio_boleta)
|
kendo.culture.tzm-Tfng.min.js | /**
* Copyright 2015 Telerik AD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0 | * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
!function(e,define){define([],e)}(function(){return function(e){var t=e.kendo||(e.kendo={cultures:{}});t.cultures["tzm-Tfng"]={name:"tzm-Tfng",numberFormat:{pattern:["-n"],decimals:2,",":" ",".":",",groupSize:[3],percent:{pattern:["-n %","n %"],decimals:2,",":" ",".":",",groupSize:[3],symbol:"%"},currency:{pattern:["-n $","n $"],decimals:2,",":" ",".":",",groupSize:[3],symbol:"ⴷⵔ"}},calendars:{standard:{days:{names:["ⴰⵙⴰⵎⴰⵙ","ⴰⵢⵏⴰⵙ","ⴰⵙⵉⵏⴰⵙ","ⴰⴽⵕⴰⵙ","ⴰⴽⵡⴰⵙ","ⴰⵙⵉⵎⵡⴰⵙ","ⴰⵙⵉⴹⵢⴰⵙ"],namesAbbr:["ⵙⵎⵙ","ⵢⵏⵙ","ⵙⵏⵙ","ⴽⵕⵙ","ⴽⵡⵙ","ⵙⵎⵡ","ⵙⴹⵙ"],namesShort:["ⵙⵎ","ⵢⵏ","ⵙⵏ","ⴽⵕ","ⴽⵡ","ⵙⵡ","ⵙⴹ"]},months:{names:["ⵉⵏⵏⴰⵢⵔ","ⴱⵕⴰⵢⵕ","ⵎⴰⵕⵚ","ⵉⴱⵔⵉⵔ","ⵎⴰⵢⵢⵓ","ⵢⵓⵏⵢⵓ","ⵢⵓⵍⵢⵓⵣ","ⵖⵓⵛⵜ","ⵛⵓⵜⴰⵏⴱⵉⵔ","ⴽⵜⵓⴱⵕ","ⵏⵓⵡⴰⵏⴱⵉⵔ","ⴷⵓⵊⴰⵏⴱⵉⵔ",""],namesAbbr:["ⵏⵢⵔ","ⴱⵕⵢ","ⵎⵕⵚ","ⴱⵔⵔ","ⵎⵢⵢ","ⵢⵏⵢ","ⵢⵍⵢ","ⵖⵛⵜ","ⵛⵜⵏ","ⴽⵜⴱ","ⵏⵡⴱ","ⴷⵊⵏ",""]},AM:[""],PM:[""],patterns:{d:"dd-MM-yyyy",D:"dddd, dd MMMM, yyyy",F:"dddd, dd MMMM, yyyy H:mm:ss",g:"dd-MM-yyyy H:mm",G:"dd-MM-yyyy H:mm:ss",m:"dd MMMM",M:"dd MMMM",s:"yyyy'-'MM'-'dd'T'HH':'mm':'ss",t:"H:mm",T:"H:mm:ss",u:"yyyy'-'MM'-'dd HH':'mm':'ss'Z'",y:"MMMM, yyyy",Y:"MMMM, yyyy"},"/":"-",":":":",firstDay:1}}}}(this),window.kendo},"function"==typeof define&&define.amd?define:function(e,t){t()}); | *
* Unless required by applicable law or agreed to in writing, software |
App.js | import React, { PureComponent } from 'react';
import './App.css';
import 'bootstrap/dist/css/bootstrap.min.css';
// import AdSense from 'react-adsense-ad';
import { SocialIcon } from 'react-social-icons';
import Tab from 'react-bootstrap/Tab'
import Tabs from 'react-bootstrap/Tabs'
import './index.css'
import Resume from './containers/Resume';
import Websites from './containers/Websites';
import Animations from './containers/Animations';
class App extends PureComponent{
// Click Event.
btnClick(website) {
window.open(website);
}
render(){
const card_height = '300px';
const card_width = '300px';
return (
<div className="App">
<div style={{color:'white', textAlign: 'center', marginTop: '20px'}}>
<h1>
Maria E. Ramos Morales - Portfolio
</h1>
<p style={{margin:'0 auto', width:'50%'}}>
Software Engineer. BS and MS degrees in Computer Science. Experience with web developing and data science. Has interests in writing tutorials, web developing, and creating animations.
</p>
<div style={styles.iconsSpace}>
<SocialIcon url="https://medium.com/@mariaeramosmorales" style={styles.icon}/>
<SocialIcon url="https://www.linkedin.com/in/maria-ramos-morales-00194843/" style={styles.icon}/>
<SocialIcon url="https://github.com/meramos" style={styles.lastIcon}/>
</div>
</div>
<Tabs defaultActiveKey="resume" id="uncontrolled-tab-example" style={styles.tabStyle} >
<Tab eventKey="resume" title="My Resume" style={{height: '100%', background: 'rgb(0, 25, 49)'}}>
<Resume />
</Tab>
<Tab eventKey="websites" title="Websites" style={{height: '100%', background: 'rgb(0, 25, 49)'}}>
<Websites
card_height = {card_height}
card_width = {card_width}
/>
</Tab>
<Tab eventKey="animations" title="Animations">
<Animations
card_height = {card_height}
card_width = {card_width}
/>
</Tab>
{/* <Tab eventKey="writing" title="Writing">
LILILI
</Tab> */}
</Tabs>
</div>
);
}
}
const styles = {
icon: {
height:'40px',
width:'40px',
marginRight:'10px'
},
lastIcon: {
height:'40px',
width:'40px'
},
iconsSpace: {
backgroundColor:'white',
width:'20%',
margin:'0 auto', | display: 'flex',
justifyContent: 'center',
alignItems: 'center',
flexWrap: 'wrap',
// padding: '20px',
}
};
export default App; | marginTop:'10px',
marginBottom: '20px'
},
tabStyle: { |
vpcgw_cli.go | // This file was automatically generated. DO NOT EDIT.
// If you have any remark or suggestion do not hesitate to open an issue.
package vpcgw
import (
"context"
"reflect"
"github.com/scaleway/scaleway-cli/internal/core"
"github.com/scaleway/scaleway-sdk-go/api/vpcgw/v1"
"github.com/scaleway/scaleway-sdk-go/scw"
)
// always import dependencies
var (
_ = scw.RegionFrPar
)
func GetGeneratedCommands() *core.Commands {
return core.NewCommands(
vpcGwRoot(),
vpcGwGateway(),
vpcGwGatewayNetwork(),
vpcGwDHCP(),
vpcGwDHCPEntry(),
vpcGwPatRule(),
vpcGwIP(),
vpcGwGatewayType(),
vpcGwGatewayList(),
vpcGwGatewayGet(),
vpcGwGatewayCreate(),
vpcGwGatewayUpdate(),
vpcGwGatewayDelete(),
vpcGwGatewayUpgrade(),
vpcGwGatewayNetworkList(),
vpcGwGatewayNetworkGet(),
vpcGwGatewayNetworkCreate(),
vpcGwGatewayNetworkUpdate(),
vpcGwGatewayNetworkDelete(),
vpcGwDHCPList(),
vpcGwDHCPGet(),
vpcGwDHCPCreate(),
vpcGwDHCPUpdate(),
vpcGwDHCPDelete(),
vpcGwDHCPEntryList(),
vpcGwDHCPEntryGet(),
vpcGwDHCPEntryCreate(),
vpcGwDHCPEntryUpdate(),
vpcGwDHCPEntrySet(),
vpcGwDHCPEntryDelete(),
vpcGwPatRuleList(),
vpcGwPatRuleGet(),
vpcGwPatRuleCreate(),
vpcGwPatRuleUpdate(),
vpcGwPatRuleSet(),
vpcGwPatRuleDelete(),
vpcGwGatewayTypeList(),
vpcGwIPList(),
vpcGwIPGet(),
vpcGwIPCreate(),
vpcGwIPUpdate(),
vpcGwIPDelete(),
)
}
func vpcGwRoot() *core.Command {
return &core.Command{
Short: `VPC Public Gateway API`,
Long: ``,
Namespace: "vpc-gw",
}
}
func vpcGwGateway() *core.Command {
return &core.Command{
Short: `VPC Public Gateway management`,
Long: `The VPC Public Gateway is a building block for your infrastructure on Scaleway's shared public cloud. It provides a set of managed network services and features for Scaleway's Private Networks such as DHCP, NAT and routing.
`,
Namespace: "vpc-gw",
Resource: "gateway",
}
}
func vpcGwGatewayNetwork() *core.Command {
return &core.Command{
Short: `Gateway Networks management`,
Long: `A Gateway Network represents the connection of a Private Network to a VPC Public Gateway. It holds configuration options relative to this specific connection, such as the DHCP configuration.
`,
Namespace: "vpc-gw",
Resource: "gateway-network",
}
}
func vpcGwDHCP() *core.Command {
return &core.Command{
Short: `DHCP configuration management`,
Long: `DHCP configuration allows you to set parameters for assignment of IP addresses to devices on a Private Network attached to a VPC Public Gateway (subnet, lease time etc).
`,
Namespace: "vpc-gw",
Resource: "dhcp",
}
}
func vpcGwDHCPEntry() *core.Command |
func vpcGwPatRule() *core.Command {
return &core.Command{
Short: `PAT rules management`,
Long: `PAT (Port Address Translation) rules are global to a gateway. They define the forwarding of a public port to a specific instance on a Private Network.
`,
Namespace: "vpc-gw",
Resource: "pat-rule",
}
}
func vpcGwIP() *core.Command {
return &core.Command{
Short: `IP address management`,
Long: `A VPC Public Gateway has a public IP address, allowing it to reach the public internet, as well as forward (masquerade) traffic from member instances of attached Private Networks.
`,
Namespace: "vpc-gw",
Resource: "ip",
}
}
func vpcGwGatewayType() *core.Command {
return &core.Command{
Short: ``,
Long: `Gateways come in multiple shapes and size, which are described by the various gateway types.
`,
Namespace: "vpc-gw",
Resource: "gateway-type",
}
}
func vpcGwGatewayList() *core.Command {
return &core.Command{
Short: `List VPC Public Gateways`,
Long: `List VPC Public Gateways.`,
Namespace: "vpc-gw",
Resource: "gateway",
Verb: "list",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.ListGatewaysRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "order-by",
Short: `Order in which to return results`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"created_at_asc", "created_at_desc", "name_asc", "name_desc", "type_asc", "type_desc", "status_asc", "status_desc"},
},
{
Name: "project-id",
Short: `Include only gateways in this project`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "name",
Short: `Filter gateways including this name`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "tags.{index}",
Short: `Filter gateways with these tags`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "type",
Short: `Filter gateways of this type`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "status",
Short: `Filter gateways in this status (unknown for any)`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"unknown", "stopped", "allocating", "configuring", "running", "stopping", "failed", "deleting", "deleted", "locked"},
},
{
Name: "private-network-id",
Short: `Filter gateways attached to this private network`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "organization-id",
Short: `Include only gateways in this organization`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.ListGatewaysRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
resp, err := api.ListGateways(request, scw.WithAllPages())
if err != nil {
return nil, err
}
return resp.Gateways, nil
},
View: &core.View{Fields: []*core.ViewField{
{
FieldName: "ID",
},
{
FieldName: "Name",
},
{
FieldName: "Status",
},
{
FieldName: "Tags",
},
{
FieldName: "GatewayNetworks",
},
{
FieldName: "UpstreamDNSServers",
},
{
FieldName: "UpdatedAt",
},
{
FieldName: "CreatedAt",
},
{
FieldName: "Zone",
},
{
FieldName: "ProjectID",
},
{
FieldName: "OrganizationID",
},
}},
}
}
func vpcGwGatewayGet() *core.Command {
return &core.Command{
Short: `Get a VPC Public Gateway`,
Long: `Get a VPC Public Gateway.`,
Namespace: "vpc-gw",
Resource: "gateway",
Verb: "get",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.GetGatewayRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-id",
Short: `ID of the gateway to fetch`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.GetGatewayRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.GetGateway(request)
},
}
}
func vpcGwGatewayCreate() *core.Command {
return &core.Command{
Short: `Create a VPC Public Gateway`,
Long: `Create a VPC Public Gateway.`,
Namespace: "vpc-gw",
Resource: "gateway",
Verb: "create",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.CreateGatewayRequest{}),
ArgSpecs: core.ArgSpecs{
core.ProjectIDArgSpec(),
{
Name: "name",
Short: `Name of the gateway`,
Required: false,
Deprecated: false,
Positional: false,
Default: core.RandomValueGenerator("gw"),
},
{
Name: "tags.{index}",
Short: `Tags for the gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "type",
Short: `Gateway type`,
Required: false,
Deprecated: false,
Positional: false,
Default: core.DefaultValueSetter("VPC-GW-S"),
},
{
Name: "upstream-dns-servers.{index}",
Short: `Override the gateway's default recursive DNS servers, if DNS features are enabled`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "ip-id",
Short: `Attach an existing IP to the gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-smtp",
Short: `Allow SMTP traffic to pass through the gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.CreateGatewayRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.CreateGateway(request)
},
}
}
func vpcGwGatewayUpdate() *core.Command {
return &core.Command{
Short: `Update a VPC Public Gateway`,
Long: `Update a VPC Public Gateway.`,
Namespace: "vpc-gw",
Resource: "gateway",
Verb: "update",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.UpdateGatewayRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-id",
Short: `ID of the gateway to update`,
Required: true,
Deprecated: false,
Positional: true,
},
{
Name: "name",
Short: `Name fo the gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "tags.{index}",
Short: `Tags for the gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "upstream-dns-servers.{index}",
Short: `Override the gateway's default recursive DNS servers, if DNS features are enabled`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-bastion",
Short: `Enable SSH bastion on the gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "bastion-port",
Short: `Port of the SSH bastion`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-smtp",
Short: `Allow SMTP traffic to pass through the gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.UpdateGatewayRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.UpdateGateway(request)
},
}
}
func vpcGwGatewayDelete() *core.Command {
return &core.Command{
Short: `Delete a VPC Public Gateway`,
Long: `Delete a VPC Public Gateway.`,
Namespace: "vpc-gw",
Resource: "gateway",
Verb: "delete",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.DeleteGatewayRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-id",
Short: `ID of the gateway to delete`,
Required: true,
Deprecated: false,
Positional: true,
},
{
Name: "cleanup-dhcp",
Short: `Whether to cleanup attached DHCP configurations`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.DeleteGatewayRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
e = api.DeleteGateway(request)
if e != nil {
return nil, e
}
return &core.SuccessResult{
Resource: "gateway",
Verb: "delete",
}, nil
},
}
}
func vpcGwGatewayUpgrade() *core.Command {
return &core.Command{
Short: `Upgrade a VPC Public Gateway to the latest version`,
Long: `Upgrade a VPC Public Gateway to the latest version.`,
Namespace: "vpc-gw",
Resource: "gateway",
Verb: "upgrade",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.UpgradeGatewayRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-id",
Short: `ID of the gateway to upgrade`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.UpgradeGatewayRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.UpgradeGateway(request)
},
}
}
func vpcGwGatewayNetworkList() *core.Command {
return &core.Command{
Short: `List gateway connections to Private Networks`,
Long: `List gateway connections to Private Networks.`,
Namespace: "vpc-gw",
Resource: "gateway-network",
Verb: "list",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.ListGatewayNetworksRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "order-by",
Short: `Order in which to return results`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"created_at_asc", "created_at_desc", "status_asc", "status_desc"},
},
{
Name: "gateway-id",
Short: `Filter by gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "private-network-id",
Short: `Filter by private network`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-masquerade",
Short: `Filter by masquerade enablement`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dhcpid",
Short: `Filter by DHCP configuration`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "status",
Short: `Filter GatewayNetworks by this status (unknown for any)`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"unknown", "created", "attaching", "configuring", "ready", "detaching", "deleted"},
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.ListGatewayNetworksRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
resp, err := api.ListGatewayNetworks(request, scw.WithAllPages())
if err != nil {
return nil, err
}
return resp.GatewayNetworks, nil
},
View: &core.View{Fields: []*core.ViewField{
{
FieldName: "ID",
},
{
FieldName: "GatewayID",
},
{
FieldName: "PrivateNetworkID",
},
{
FieldName: "Status",
},
{
FieldName: "Address",
},
{
FieldName: "MacAddress",
},
{
FieldName: "EnableDHCP",
},
{
FieldName: "EnableMasquerade",
},
{
FieldName: "CreatedAt",
},
{
FieldName: "UpdatedAt",
},
{
FieldName: "Zone",
},
}},
}
}
func vpcGwGatewayNetworkGet() *core.Command {
return &core.Command{
Short: `Get a gateway connection to a Private Network`,
Long: `Get a gateway connection to a Private Network.`,
Namespace: "vpc-gw",
Resource: "gateway-network",
Verb: "get",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.GetGatewayNetworkRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-network-id",
Short: `ID of the GatewayNetwork to fetch`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.GetGatewayNetworkRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.GetGatewayNetwork(request)
},
}
}
func vpcGwGatewayNetworkCreate() *core.Command {
return &core.Command{
Short: `Attach a gateway to a Private Network`,
Long: `Attach a gateway to a Private Network.`,
Namespace: "vpc-gw",
Resource: "gateway-network",
Verb: "create",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.CreateGatewayNetworkRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-id",
Short: `Gateway to connect`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "private-network-id",
Short: `Private Network to connect`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-masquerade",
Short: `Whether to enable masquerade on this network`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dhcpid",
Short: `Existing configuration`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "address",
Short: `Static IP address in CIDR format to to use without DHCP`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-dhcp",
Short: `Whether to enable DHCP on this Private Network`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.CreateGatewayNetworkRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.CreateGatewayNetwork(request)
},
}
}
func vpcGwGatewayNetworkUpdate() *core.Command {
return &core.Command{
Short: `Update a gateway connection to a Private Network`,
Long: `Update a gateway connection to a Private Network.`,
Namespace: "vpc-gw",
Resource: "gateway-network",
Verb: "update",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.UpdateGatewayNetworkRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-network-id",
Short: `ID of the GatewayNetwork to update`,
Required: true,
Deprecated: false,
Positional: true,
},
{
Name: "enable-masquerade",
Short: `New masquerade enablement`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dhcpid",
Short: `New DHCP configuration`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-dhcp",
Short: `Whether to enable DHCP on the connected Private Network`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "address",
Short: `New static IP address`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.UpdateGatewayNetworkRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.UpdateGatewayNetwork(request)
},
}
}
func vpcGwGatewayNetworkDelete() *core.Command {
return &core.Command{
Short: `Detach a gateway from a Private Network`,
Long: `Detach a gateway from a Private Network.`,
Namespace: "vpc-gw",
Resource: "gateway-network",
Verb: "delete",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.DeleteGatewayNetworkRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-network-id",
Short: `GatewayNetwork to delete`,
Required: true,
Deprecated: false,
Positional: true,
},
{
Name: "cleanup-dhcp",
Short: `Whether to cleanup the attached DHCP configuration`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.DeleteGatewayNetworkRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
e = api.DeleteGatewayNetwork(request)
if e != nil {
return nil, e
}
return &core.SuccessResult{
Resource: "gateway-network",
Verb: "delete",
}, nil
},
}
}
func vpcGwDHCPList() *core.Command {
return &core.Command{
Short: `List DHCP configurations`,
Long: `List DHCP configurations.`,
Namespace: "vpc-gw",
Resource: "dhcp",
Verb: "list",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.ListDHCPsRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "order-by",
Short: `Order in which to return results`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"created_at_asc", "created_at_desc", "subnet_asc", "subnet_desc"},
},
{
Name: "project-id",
Short: `Include only DHCPs in this project`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "address",
Short: `Filter on gateway address`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "has-address",
Short: `Filter on subnets containing address`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "organization-id",
Short: `Include only DHCPs in this organization`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.ListDHCPsRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
resp, err := api.ListDHCPs(request, scw.WithAllPages())
if err != nil {
return nil, err
}
return resp.Dhcps, nil
},
View: &core.View{Fields: []*core.ViewField{
{
FieldName: "ID",
},
{
FieldName: "Subnet",
},
{
FieldName: "Address",
},
{
FieldName: "EnableDynamic",
},
{
FieldName: "PoolLow",
},
{
FieldName: "PoolHigh",
},
{
FieldName: "PushDefaultRoute",
},
{
FieldName: "PushDNSServer",
},
{
FieldName: "DNSLocalName",
},
{
FieldName: "DNSServersOverride",
},
{
FieldName: "DNSSearch",
},
{
FieldName: "ValidLifetime",
},
{
FieldName: "RenewTimer",
},
{
FieldName: "RebindTimer",
},
{
FieldName: "UpdatedAt",
},
{
FieldName: "CreatedAt",
},
{
FieldName: "Zone",
},
{
FieldName: "ProjectID",
},
{
FieldName: "OrganizationID",
},
}},
}
}
func vpcGwDHCPGet() *core.Command {
return &core.Command{
Short: `Get a DHCP configuration`,
Long: `Get a DHCP configuration.`,
Namespace: "vpc-gw",
Resource: "dhcp",
Verb: "get",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.GetDHCPRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "dhcpid",
Short: `ID of the DHCP config to fetch`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.GetDHCPRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.GetDHCP(request)
},
}
}
func vpcGwDHCPCreate() *core.Command {
return &core.Command{
Short: `Create a DHCP configuration`,
Long: `Create a DHCP configuration.`,
Namespace: "vpc-gw",
Resource: "dhcp",
Verb: "create",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.CreateDHCPRequest{}),
ArgSpecs: core.ArgSpecs{
core.ProjectIDArgSpec(),
{
Name: "subnet",
Short: `Subnet for the DHCP server`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "address",
Short: `Address of the DHCP server. This will be the gateway's address in the private network. Defaults to the first address of the subnet`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "pool-low",
Short: `Low IP (included) of the dynamic address pool`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "pool-high",
Short: `High IP (included) of the dynamic address pool`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-dynamic",
Short: `Whether to enable dynamic pooling of IPs`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "valid-lifetime.seconds",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "valid-lifetime.nanos",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "renew-timer.seconds",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "renew-timer.nanos",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "rebind-timer.seconds",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "rebind-timer.nanos",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "push-default-route",
Short: `Whether the gateway should push a default route to DHCP clients or only hand out IPs. Defaults to true`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "push-dns-server",
Short: `Whether the gateway should push custom DNS servers to clients`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dns-servers-override.{index}",
Short: `Override the DNS server list pushed to DHCP clients, instead of the gateway itself`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dns-search.{index}",
Short: `Additional DNS search paths`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dns-local-name",
Short: `TLD given to hosts in the Private Network`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.CreateDHCPRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.CreateDHCP(request)
},
}
}
func vpcGwDHCPUpdate() *core.Command {
return &core.Command{
Short: `Update a DHCP configuration`,
Long: `Update a DHCP configuration.`,
Namespace: "vpc-gw",
Resource: "dhcp",
Verb: "update",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.UpdateDHCPRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "dhcpid",
Short: `DHCP config to update`,
Required: true,
Deprecated: false,
Positional: true,
},
{
Name: "subnet",
Short: `Subnet for the DHCP server`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "address",
Short: `Address of the DHCP server. This will be the gateway's address in the private network`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "pool-low",
Short: `Low IP (included) of the dynamic address pool`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "pool-high",
Short: `High IP (included) of the dynamic address pool`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "enable-dynamic",
Short: `Whether to enable dynamic pooling of IPs`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "valid-lifetime.seconds",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "valid-lifetime.nanos",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "renew-timer.seconds",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "renew-timer.nanos",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "rebind-timer.seconds",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "rebind-timer.nanos",
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "push-default-route",
Short: `Whether the gateway should push a default route to DHCP clients or only hand out IPs`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "push-dns-server",
Short: `Whether the gateway should push custom DNS servers to clients`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dns-servers-override.{index}",
Short: `Override the DNS server list pushed to DHCP clients, instead of the gateway itself`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dns-search.{index}",
Short: `Additional DNS search paths`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dns-local-name",
Short: `TLD given to hosts in the Private Network`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.UpdateDHCPRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.UpdateDHCP(request)
},
}
}
func vpcGwDHCPDelete() *core.Command {
return &core.Command{
Short: `Delete a DHCP configuration`,
Long: `Delete a DHCP configuration.`,
Namespace: "vpc-gw",
Resource: "dhcp",
Verb: "delete",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.DeleteDHCPRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "dhcpid",
Short: `DHCP config id to delete`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.DeleteDHCPRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
e = api.DeleteDHCP(request)
if e != nil {
return nil, e
}
return &core.SuccessResult{
Resource: "dhcp",
Verb: "delete",
}, nil
},
}
}
func vpcGwDHCPEntryList() *core.Command {
return &core.Command{
Short: `List DHCP entries`,
Long: `List DHCP entries.`,
Namespace: "vpc-gw",
Resource: "dhcp-entry",
Verb: "list",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.ListDHCPEntriesRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "order-by",
Short: `Order in which to return results`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"created_at_asc", "created_at_desc", "ip_address_asc", "ip_address_desc", "hostname_asc", "hostname_desc"},
},
{
Name: "gateway-network-id",
Short: `Filter entries based on the gateway network they are on`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "mac-address",
Short: `Filter entries on their MAC address`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "ip-address",
Short: `Filter entries on their IP address`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "hostname",
Short: `Filter entries on their hostname substring`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "type",
Short: `Filter entries on their type`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"unknown", "reservation", "lease"},
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.ListDHCPEntriesRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
resp, err := api.ListDHCPEntries(request, scw.WithAllPages())
if err != nil {
return nil, err
}
return resp.DHCPEntries, nil
},
View: &core.View{Fields: []*core.ViewField{
{
FieldName: "ID",
},
{
FieldName: "GatewayNetworkID",
},
{
FieldName: "IPAddress",
},
{
FieldName: "MacAddress",
},
{
FieldName: "Hostname",
},
{
FieldName: "Type",
},
{
FieldName: "UpdatedAt",
},
{
FieldName: "CreatedAt",
},
{
FieldName: "Zone",
},
}},
}
}
func vpcGwDHCPEntryGet() *core.Command {
return &core.Command{
Short: `Get DHCP entries`,
Long: `Get DHCP entries.`,
Namespace: "vpc-gw",
Resource: "dhcp-entry",
Verb: "get",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.GetDHCPEntryRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "dhcp-entry-id",
Short: `ID of the DHCP entry to fetch`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.GetDHCPEntryRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.GetDHCPEntry(request)
},
}
}
func vpcGwDHCPEntryCreate() *core.Command {
return &core.Command{
Short: `Create a static DHCP reservation`,
Long: `Create a static DHCP reservation.`,
Namespace: "vpc-gw",
Resource: "dhcp-entry",
Verb: "create",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.CreateDHCPEntryRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-network-id",
Short: `GatewayNetwork on which to create a DHCP reservation`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "mac-address",
Short: `MAC address to give a static entry to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "ip-address",
Short: `IP address to give to the machine`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.CreateDHCPEntryRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.CreateDHCPEntry(request)
},
}
}
func vpcGwDHCPEntryUpdate() *core.Command {
return &core.Command{
Short: `Update a DHCP entry`,
Long: `Update a DHCP entry.`,
Namespace: "vpc-gw",
Resource: "dhcp-entry",
Verb: "update",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.UpdateDHCPEntryRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "dhcp-entry-id",
Short: `DHCP entry ID to update`,
Required: true,
Deprecated: false,
Positional: true,
},
{
Name: "ip-address",
Short: `New IP address to give to the machine`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.UpdateDHCPEntryRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.UpdateDHCPEntry(request)
},
}
}
func vpcGwDHCPEntrySet() *core.Command {
return &core.Command{
Short: `Set all DHCP reservations on a Gateway Network`,
Long: `Set the list of DHCP reservations attached to a Gateway Network. Reservations are identified by their MAC address, and will sync the current DHCP entry list to the given list, creating, updating or deleting DHCP entries.
`,
Namespace: "vpc-gw",
Resource: "dhcp-entry",
Verb: "set",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.SetDHCPEntriesRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-network-id",
Short: `Gateway Network on which to set DHCP reservation list`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dhcp-entries.{index}.mac-address",
Short: `MAC address to give a static entry to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "dhcp-entries.{index}.ip-address",
Short: `IP address to give to the machine`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.SetDHCPEntriesRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.SetDHCPEntries(request)
},
}
}
func vpcGwDHCPEntryDelete() *core.Command {
return &core.Command{
Short: `Delete a DHCP reservation`,
Long: `Delete a DHCP reservation.`,
Namespace: "vpc-gw",
Resource: "dhcp-entry",
Verb: "delete",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.DeleteDHCPEntryRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "dhcp-entry-id",
Short: `DHCP entry ID to delete`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.DeleteDHCPEntryRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
e = api.DeleteDHCPEntry(request)
if e != nil {
return nil, e
}
return &core.SuccessResult{
Resource: "dhcp-entry",
Verb: "delete",
}, nil
},
}
}
func vpcGwPatRuleList() *core.Command {
return &core.Command{
Short: `List PAT rules`,
Long: `List PAT rules.`,
Namespace: "vpc-gw",
Resource: "pat-rule",
Verb: "list",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.ListPATRulesRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "order-by",
Short: `Order in which to return results`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"created_at_asc", "created_at_desc", "public_port_asc", "public_port_desc"},
},
{
Name: "gateway-id",
Short: `Fetch rules for this gateway`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "private-ip",
Short: `Fetch rules targeting this private ip`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "protocol",
Short: `Fetch rules for this protocol`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"unknown", "both", "tcp", "udp"},
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.ListPATRulesRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
resp, err := api.ListPATRules(request, scw.WithAllPages())
if err != nil {
return nil, err
}
return resp.PatRules, nil
},
View: &core.View{Fields: []*core.ViewField{
{
FieldName: "ID",
},
{
FieldName: "GatewayID",
},
{
FieldName: "PublicPort",
},
{
FieldName: "PrivateIP",
},
{
FieldName: "PrivatePort",
},
{
FieldName: "Protocol",
},
{
FieldName: "UpdatedAt",
},
{
FieldName: "CreatedAt",
},
{
FieldName: "Zone",
},
}},
}
}
func vpcGwPatRuleGet() *core.Command {
return &core.Command{
Short: `Get a PAT rule`,
Long: `Get a PAT rule.`,
Namespace: "vpc-gw",
Resource: "pat-rule",
Verb: "get",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.GetPATRuleRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "pat-rule-id",
Short: `PAT rule to get`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.GetPATRuleRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.GetPATRule(request)
},
}
}
func vpcGwPatRuleCreate() *core.Command {
return &core.Command{
Short: `Create a PAT rule`,
Long: `Create a PAT rule.`,
Namespace: "vpc-gw",
Resource: "pat-rule",
Verb: "create",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.CreatePATRuleRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-id",
Short: `Gateway on which to attach the rule to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "public-port",
Short: `Public port to listen on`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "private-ip",
Short: `Private IP to forward data to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "private-port",
Short: `Private port to translate to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "protocol",
Short: `Protocol the rule should apply to`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"unknown", "both", "tcp", "udp"},
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.CreatePATRuleRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.CreatePATRule(request)
},
}
}
func vpcGwPatRuleUpdate() *core.Command {
return &core.Command{
Short: `Update a PAT rule`,
Long: `Update a PAT rule.`,
Namespace: "vpc-gw",
Resource: "pat-rule",
Verb: "update",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.UpdatePATRuleRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "pat-rule-id",
Short: `PAT rule to update`,
Required: true,
Deprecated: false,
Positional: true,
},
{
Name: "public-port",
Short: `Public port to listen on`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "private-ip",
Short: `Private IP to forward data to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "private-port",
Short: `Private port to translate to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "protocol",
Short: `Protocol the rule should apply to`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"unknown", "both", "tcp", "udp"},
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.UpdatePATRuleRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.UpdatePATRule(request)
},
}
}
func vpcGwPatRuleSet() *core.Command {
return &core.Command{
Short: `Set all PAT rules on a Gateway`,
Long: `Set the list of PAT rules attached to a Gateway. Rules are identified by their public port and protocol. This will sync the current PAT rule list with the givent list, creating, updating or deleting PAT rules.
`,
Namespace: "vpc-gw",
Resource: "pat-rule",
Verb: "set",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.SetPATRulesRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "gateway-id",
Short: `Gateway on which to set the PAT rules`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "pat-rules.{index}.public-port",
Short: `Public port to listen on`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "pat-rules.{index}.private-ip",
Short: `Private IP to forward data to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "pat-rules.{index}.private-port",
Short: `Private port to translate to`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "pat-rules.{index}.protocol",
Short: `Protocol the rule should apply to`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"unknown", "both", "tcp", "udp"},
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.SetPATRulesRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.SetPATRules(request)
},
}
}
func vpcGwPatRuleDelete() *core.Command {
return &core.Command{
Short: `Delete a PAT rule`,
Long: `Delete a PAT rule.`,
Namespace: "vpc-gw",
Resource: "pat-rule",
Verb: "delete",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.DeletePATRuleRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "pat-rule-id",
Short: `PAT rule to delete`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.DeletePATRuleRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
e = api.DeletePATRule(request)
if e != nil {
return nil, e
}
return &core.SuccessResult{
Resource: "pat-rule",
Verb: "delete",
}, nil
},
}
}
func vpcGwGatewayTypeList() *core.Command {
return &core.Command{
Short: `List VPC Public Gateway types`,
Long: `List VPC Public Gateway types.`,
Namespace: "vpc-gw",
Resource: "gateway-type",
Verb: "list",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.ListGatewayTypesRequest{}),
ArgSpecs: core.ArgSpecs{
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.ListGatewayTypesRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.ListGatewayTypes(request)
},
}
}
func vpcGwIPList() *core.Command {
return &core.Command{
Short: `List IPs`,
Long: `List IPs.`,
Namespace: "vpc-gw",
Resource: "ip",
Verb: "list",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.ListIPsRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "order-by",
Short: `Order in which to return results`,
Required: false,
Deprecated: false,
Positional: false,
EnumValues: []string{"created_at_asc", "created_at_desc", "ip_asc", "ip_desc", "reverse_asc", "reverse_desc"},
},
{
Name: "project-id",
Short: `Include only IPs in this project`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "tags.{index}",
Short: `Filter IPs with these tags`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "reverse",
Short: `Filter by reverse containing this string`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "is-free",
Short: `Filter whether the IP is attached to a gateway or not`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "organization-id",
Short: `Include only IPs in this organization`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.ListIPsRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
resp, err := api.ListIPs(request, scw.WithAllPages())
if err != nil {
return nil, err
}
return resp.IPs, nil
},
View: &core.View{Fields: []*core.ViewField{
{
FieldName: "ID",
},
{
FieldName: "Address",
},
{
FieldName: "Reverse",
},
{
FieldName: "Tags",
},
{
FieldName: "UpdatedAt",
},
{
FieldName: "CreatedAt",
},
{
FieldName: "Zone",
},
{
FieldName: "ProjectID",
},
{
FieldName: "OrganizationID",
},
}},
}
}
func vpcGwIPGet() *core.Command {
return &core.Command{
Short: `Get an IP`,
Long: `Get an IP.`,
Namespace: "vpc-gw",
Resource: "ip",
Verb: "get",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.GetIPRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "ip-id",
Short: `ID of the IP to get`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.GetIPRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.GetIP(request)
},
}
}
func vpcGwIPCreate() *core.Command {
return &core.Command{
Short: `Reserve an IP`,
Long: `Reserve an IP.`,
Namespace: "vpc-gw",
Resource: "ip",
Verb: "create",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.CreateIPRequest{}),
ArgSpecs: core.ArgSpecs{
core.ProjectIDArgSpec(),
{
Name: "tags.{index}",
Short: `Tags to give to the IP`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.CreateIPRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.CreateIP(request)
},
}
}
func vpcGwIPUpdate() *core.Command {
return &core.Command{
Short: `Update an IP`,
Long: `Update an IP.`,
Namespace: "vpc-gw",
Resource: "ip",
Verb: "update",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.UpdateIPRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "ip-id",
Short: `ID of the IP to update`,
Required: true,
Deprecated: false,
Positional: true,
},
{
Name: "tags.{index}",
Short: `Tags to give to the IP`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "reverse",
Short: `Reverse to set on the IP. Empty string to unset`,
Required: false,
Deprecated: false,
Positional: false,
},
{
Name: "gateway-id",
Short: `Gateway to attach the IP to. Empty string to detach`,
Required: false,
Deprecated: false,
Positional: false,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.UpdateIPRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
return api.UpdateIP(request)
},
}
}
func vpcGwIPDelete() *core.Command {
return &core.Command{
Short: `Delete an IP`,
Long: `Delete an IP.`,
Namespace: "vpc-gw",
Resource: "ip",
Verb: "delete",
// Deprecated: false,
ArgsType: reflect.TypeOf(vpcgw.DeleteIPRequest{}),
ArgSpecs: core.ArgSpecs{
{
Name: "ip-id",
Short: `ID of the IP to delete`,
Required: true,
Deprecated: false,
Positional: true,
},
core.ZoneArgSpec(scw.ZoneFrPar1, scw.ZoneFrPar2, scw.ZoneNlAms1, scw.ZonePlWaw1),
},
Run: func(ctx context.Context, args interface{}) (i interface{}, e error) {
request := args.(*vpcgw.DeleteIPRequest)
client := core.ExtractClient(ctx)
api := vpcgw.NewAPI(client)
e = api.DeleteIP(request)
if e != nil {
return nil, e
}
return &core.SuccessResult{
Resource: "ip",
Verb: "delete",
}, nil
},
}
}
| {
return &core.Command{
Short: `DHCP entries management`,
Long: `DHCP entries hold both dynamic DHCP leases (IP addresses dynamically assigned by the gateway to instances) and static user-created DHCP reservations.
`,
Namespace: "vpc-gw",
Resource: "dhcp-entry",
}
} |
blob_sql.go | // Copyright © 2022 Kaleido, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sqlcommon
import (
"context"
"database/sql"
sq "github.com/Masterminds/squirrel"
"github.com/hyperledger/firefly/internal/i18n"
"github.com/hyperledger/firefly/internal/log"
"github.com/hyperledger/firefly/pkg/database"
"github.com/hyperledger/firefly/pkg/fftypes"
)
var (
blobColumns = []string{
"hash",
"payload_ref",
"peer",
"created",
"size",
}
blobFilterFieldMap = map[string]string{
"payloadref": "payload_ref",
}
)
func (s *SQLCommon) InsertBlob(ctx context.Context, blob *fftypes.Blob) (err error) {
ctx, tx, autoCommit, err := s.beginOrUseTx(ctx)
if err != nil {
return err
}
defer s.rollbackTx(ctx, tx, autoCommit)
sequence, err := s.insertTx(ctx, tx,
sq.Insert("blobs").
Columns(blobColumns...).
Values(
blob.Hash,
blob.PayloadRef,
blob.Peer,
blob.Created,
blob.Size,
),
nil, // no change events for blobs
)
if err != nil {
return err
}
blob.Sequence = sequence
return s.commitTx(ctx, tx, autoCommit)
}
func (s *SQLCommon) blobResult(ctx context.Context, row *sql.Rows) (*fftypes.Blob, error) {
blob := fftypes.Blob{}
err := row.Scan(
&blob.Hash,
&blob.PayloadRef,
&blob.Peer,
&blob.Created,
&blob.Size,
&blob.Sequence,
)
if err != nil {
return nil, i18n.WrapError(ctx, err, i18n.MsgDBReadErr, "blobs")
}
return &blob, nil
}
func (s *SQLCommon) getBlobPred(ctx context.Context, desc string, pred interface{}) (message *fftypes.Blob, err error) {
cols := append([]string{}, blobColumns...)
cols = append(cols, sequenceColumn)
rows, _, err := s.query(ctx,
sq.Select(cols...).
From("blobs").
Where(pred).
Limit(1),
)
if err != nil {
return nil, err
}
defer rows.Close()
if !rows.Next() {
log.L(ctx).Debugf("Blob '%s' not found", desc)
return nil, nil
}
blob, err := s.blobResult(ctx, rows)
if err != nil {
return nil, err
}
return blob, nil
}
func (s *SQLCommon) GetBlobMatchingHash(ctx context.Context, hash *fftypes.Bytes32) (message *fftypes.Blob, err error) {
return s.getBlobPred(ctx, hash.String(), sq.Eq{
"hash": hash,
})
}
func (s *SQLCommon) GetBlobs(ctx context.Context, filter database.Filter) (message []*fftypes.Blob, res *database.FilterResult, err error) {
cols := append([]string{}, blobColumns...)
cols = append(cols, sequenceColumn)
query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(cols...).From("blobs"), filter, blobFilterFieldMap, []interface{}{"sequence"})
if err != nil {
return nil, nil, err
}
rows, tx, err := s.query(ctx, query)
if err != nil { | defer rows.Close()
blob := []*fftypes.Blob{}
for rows.Next() {
d, err := s.blobResult(ctx, rows)
if err != nil {
return nil, nil, err
}
blob = append(blob, d)
}
return blob, s.queryRes(ctx, tx, "blobs", fop, fi), err
}
func (s *SQLCommon) DeleteBlob(ctx context.Context, sequence int64) (err error) {
ctx, tx, autoCommit, err := s.beginOrUseTx(ctx)
if err != nil {
return err
}
defer s.rollbackTx(ctx, tx, autoCommit)
err = s.deleteTx(ctx, tx, sq.Delete("blobs").Where(sq.Eq{
sequenceColumn: sequence,
}), nil /* no change events for blobs */)
if err != nil {
return err
}
return s.commitTx(ctx, tx, autoCommit)
}
|
return nil, nil, err
}
|
two_stage_with_MetaEmbedding.py | import torch
import torch.nn as nn
import numpy as np
# from mmdet.core import bbox2result, bbox2roi, build_assigner, build_sampler
from ..builder import DETECTORS, build_backbone, build_head, build_neck
from .base import BaseDetector
from tqdm import tqdm
from mmdet.datasets import build_dataloader, build_dataset
from mmcv import Config
from mmdet.core import bbox2roi
from functools import partial
from torch.utils.data.dataloader import DataLoader
@DETECTORS.register_module()
class TwoStageDetectorMetaEmbedding(BaseDetector):
"""Base class for two-stage detectors.
Two-stage detectors typically consisting of a region proposal network and a
task-specific regression head.
"""
def __init__(self,
backbone,
neck=None,
rpn_head=None,
roi_head=None,
train_cfg=None,
test_cfg=None,
init_centroids=False,
pretrained=None):
super(TwoStageDetectorMetaEmbedding, self).__init__()
self.backbone = build_backbone(backbone)
self.device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
self.init_centroids = init_centroids
if neck is not None:
self.neck = build_neck(neck)
if rpn_head is not None:
rpn_train_cfg = train_cfg.rpn if train_cfg is not None else None
rpn_head_ = rpn_head.copy()
rpn_head_.update(train_cfg=rpn_train_cfg, test_cfg=test_cfg.rpn)
self.rpn_head = build_head(rpn_head_)
"""if self.init_centroids:
for p in self.parameters():
p.requires_grad = False"""
if roi_head is not None:
# update train and test cfg here for now
# TODO: refactor assigner & sampler
rcnn_train_cfg = train_cfg.rcnn if train_cfg is not None else None
roi_head.update(train_cfg=rcnn_train_cfg)
roi_head.update(test_cfg=test_cfg.rcnn)
self.roi_head = build_head(roi_head)
if self.init_centroids:
self.centroids = self.roi_head.loss_feat.centroids.data
else:
self.centroids = None
self.train_cfg = train_cfg
self.test_cfg = test_cfg
self.init_weights(pretrained=pretrained)
if roi_head["type"] == "MetaEmbedding_RoIHead":
# calculate init_centroids using training dataset
if self.train_cfg is not None:
if init_centroids:
cfg = Config.fromfile(
"/mmdetection/configs/faster_rcnn_meta/faster_rcnn_r50_c4_meta_smd_stage2.py")
dataset = build_dataset(cfg.centroids_cal)
# data = build_dataloader(dataset, samples_per_gpu=1, workers_per_gpu=0, num_gpus=1, shuffle=False)
# print(data[0])
self.roi_head.loss_feat.centroids.data = self.centroids_cal(dataset)
@property
def | (self):
return hasattr(self, 'rpn_head') and self.rpn_head is not None
@property
def with_roi_head(self):
return hasattr(self, 'roi_head') and self.roi_head is not None
def init_weights(self, pretrained=None):
super(TwoStageDetectorMetaEmbedding, self).init_weights(pretrained)
self.backbone.init_weights(pretrained=pretrained)
if self.with_neck:
if isinstance(self.neck, nn.Sequential):
for m in self.neck:
m.init_weights()
else:
self.neck.init_weights()
if self.with_rpn:
self.rpn_head.init_weights()
if self.with_roi_head:
self.roi_head.init_weights(pretrained)
def extract_feat(self, img):
"""Directly extract features from the backbone+neck
"""
x = self.backbone(img)
if self.with_neck:
x = self.neck(x)
return x
def forward_dummy(self, img):
"""Used for computing network flops.
See `mmdetection/tools/get_flops.py`
"""
outs = ()
# backbone
x = self.extract_feat(img)
# rpn
if self.with_rpn:
rpn_outs = self.rpn_head(x)
outs = outs + (rpn_outs, )
proposals = torch.randn(1000, 4).to(img.device)
# roi_head
roi_outs = self.roi_head.forward_dummy(x, proposals)
outs = outs + (roi_outs, )
return outs
def forward_train(self,
img,
img_metas,
gt_bboxes,
gt_labels,
gt_bboxes_ignore=None,
gt_masks=None,
proposals=None,
**kwargs):
"""
Args:
img (Tensor): of shape (N, C, H, W) encoding input images.
Typically these should be mean centered and std scaled.
img_metas (list[dict]): list of image info dict where each dict
has: 'img_shape', 'scale_factor', 'flip', and may also contain
'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
For details on the values of these keys see
`mmdet/datasets/pipelines/formatting.py:Collect`.
gt_bboxes (list[Tensor]): each item are the truth boxes for each
image in [tl_x, tl_y, br_x, br_y] format.
gt_labels (list[Tensor]): class indices corresponding to each box
gt_bboxes_ignore (None | list[Tensor]): specify which bounding
boxes can be ignored when computing the loss.
gt_masks (None | Tensor) : true segmentation masks for each box
used if the architecture supports a segmentation task.
proposals : override rpn proposals with custom proposals. Use when
`with_rpn` is False.
Returns:
dict[str, Tensor]: a dictionary of loss components
"""
x = self.extract_feat(img)
losses = dict()
# RPN forward and loss
if self.with_rpn:
proposal_cfg = self.train_cfg.get('rpn_proposal',
self.test_cfg.rpn)
rpn_losses, proposal_list = self.rpn_head.forward_train(
x,
img_metas,
gt_bboxes,
gt_labels=None,
gt_bboxes_ignore=gt_bboxes_ignore,
proposal_cfg=proposal_cfg)
losses.update(rpn_losses)
else:
proposal_list = proposals
"""roi_losses = self.roi_head.forward_train(x, img_metas, proposal_list,
gt_bboxes, gt_labels,
gt_bboxes_ignore, gt_masks,
**kwargs)"""
roi_losses = self.roi_head(x,
centroids=self.centroids,
img_metas=img_metas,
proposal_list=proposal_list,
gt_bboxes=gt_bboxes,
gt_labels=gt_labels,
gt_bboxes_ignore=gt_bboxes_ignore,
gt_masks=gt_masks,
test=False,
**kwargs)
losses.update(roi_losses)
return losses
async def async_simple_test(self,
img,
img_meta,
proposals=None,
rescale=False):
"""Async test without augmentation."""
assert self.with_bbox, 'Bbox head must be implemented.'
x = self.extract_feat(img)
if proposals is None:
proposal_list = await self.rpn_head.async_simple_test_rpn(
x, img_meta)
else:
proposal_list = proposals
return await self.roi_head.async_simple_test(
x, proposal_list, img_meta, rescale=rescale)
def simple_test(self, img, img_metas, proposals=None, rescale=False):
"""Test without augmentation."""
# assert self.with_bbox, 'Bbox head must be implemented.'
x = self.extract_feat(img)
if proposals is None:
proposal_list = self.rpn_head.simple_test_rpn(x, img_metas)
else:
proposal_list = proposals
return self.roi_head(x,
centroids=self.centroids,
proposal_list=proposal_list,
img_metas=img_metas,
test=True)
def aug_test(self, imgs, img_metas, rescale=False):
"""Test with augmentations.
If rescale is False, then returned bboxes and masks will fit the scale
of imgs[0].
"""
# recompute feats to save memory
x = self.extract_feats(imgs)
proposal_list = self.rpn_head.aug_test_rpn(x, img_metas)
return self.roi_head.aug_test(
x, proposal_list, img_metas, rescale=rescale)
def centroids_cal(self, data):
centroids = torch.zeros(self.roi_head.num_classes,
self.roi_head.feat_dim,
14,
14).cuda()
print('Calculating centroids.')
# Calculate initial centroids only on training data.
with torch.set_grad_enabled(False):
self.backbone.cuda()
self.rpn_head.cuda()
self.roi_head.cuda()
class_data_num = [0, 0, 0, 0, 0, 0]
# class_data_num = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for i in tqdm(range(len(data))):
"""imgs, gt_labels, gt_bboxes, img_metas = inputs["img"], \
inputs["gt_labels"], \
inputs["gt_bboxes"],\
inputs["img_metas"]"""
imgs, gt_labels, gt_bboxes, img_metas = \
torch.unsqueeze(data[i]['img'], 0).to(next(self.backbone.parameters()).device), \
[data[i]['gt_labels'].to(next(self.backbone.parameters()).device)], \
[data[i]['gt_bboxes'].to(next(self.backbone.parameters()).device)], \
[data[i]['img_metas']]
# Calculate Features of each training data
feats = self.backbone(imgs)
"""proposal_list = self.rpn_head.simple_test_rpn(feats, img_metas)
num_imgs = len(img_metas)
# if gt_bboxes_ignore is None:
gt_bboxes_ignore = [None for _ in range(num_imgs)]
sampling_results = []
for i in range(num_imgs):
assign_result = self.roi_head.std_roi_head.bbox_assigner.assign(
proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i],
gt_labels[i])
sampling_result = self.roi_head.std_roi_head.bbox_sampler.sample(
assign_result,
proposal_list[i],
gt_bboxes[i],
gt_labels[i],
feats=[lvl_feat[i][None] for lvl_feat in feats])
sampling_results.append(sampling_result)
rois = bbox2roi([res.bboxes for res in sampling_results])"""
rois = bbox2roi(gt_bboxes)
bbox_feats = self.roi_head.std_roi_head.bbox_roi_extractor(
feats[:self.roi_head.std_roi_head.bbox_roi_extractor.num_inputs], rois)
"""labels = self.roi_head.std_roi_head.bbox_head.get_targets(sampling_results, gt_bboxes,
gt_labels, self.train_cfg.rcnn)[0]
# Add all calculated features to center tensor
for i in range(len(labels)):
label = labels[i]
if label < self.roi_head.num_classes:
centroids[label] += bbox_feats[i]
class_data_num[label] += 1"""
for j in range(len(gt_labels[0])):
label = gt_labels[0][j]
centroids[label] += bbox_feats[j]
class_data_num[label] += 1
for i in range(len(class_data_num)):
if class_data_num[i] == 0:
class_data_num[i] = 1
# Average summed features with class count
centroids /= torch.tensor(class_data_num).float().unsqueeze(1).unsqueeze(2).\
unsqueeze(3).repeat(1, 1024, 14, 14).cuda()
return centroids
def class_count(data):
labels = np.array(data.dataset.labels)
class_data_num = []
for l in np.unique(labels):
class_data_num.append(len(labels[labels == l]))
return class_data_num | with_rpn |
test_custom.py | #!/usr/bin/env python
#coding=utf-8
from pyecharts import Bar, Line, Scatter, EffectScatter, Kline
def test_custom():
# custom_0
attr = ['A', 'B', 'C', 'D', 'E', 'F']
v1 = [10, 20, 30, 40, 50, 60]
v2 = [15, 25, 35, 45, 55, 65]
v3 = [38, 28, 58, 48, 78, 68]
bar = Bar("Line - Bar 示例")
bar.add("bar", attr, v1)
line = Line()
line.add("line", v2, v3)
bar.custom(line.get_series())
bar.show_config()
bar.render()
# custom_1
v1 = [10, 20, 30, 40, 50, 60]
v2 = [30, 30, 30, 30, 30, 30]
v3 = [50, 50, 50, 50, 50, 50]
v4 = [10, 10, 10, 10, 10, 10]
es = EffectScatter("Scatter - EffectScatter 示例")
es.add("es", v1, v2)
scatter = Scatter()
scatter.add("scatter", v1, v3)
es.custom(scatter.get_series())
es_1 = EffectScatter()
es_1.add("es_1", v1, v4, symbol='pin', effect_scale=5)
es.custom(es_1.get_series())
es.show_config()
es.render()
# custom_2
import random
v1 = [[2320.26, 2320.26, 2287.3, 2362.94],
[2300, 2291.3, 2288.26, 2308.38],
[2295.35, 2346.5, 2295.35, 2345.92],
[2347.22, 2358.98, 2337.35, 2363.8],
[2360.75, 2382.48, 2347.89, 2383.76], | [2432.68, 2334.48, 2427.7, 2441.73],
[2430.69, 2418.53, 2394.22, 2433.89],
[2416.62, 2432.4, 2414.4, 2443.03],
[2441.91, 2421.56, 2418.43, 2444.8],
[2420.26, 2382.91, 2373.53, 2427.07],
[2383.49, 2397.18, 2370.61, 2397.94],
[2378.82, 2325.95, 2309.17, 2378.82],
[2322.94, 2314.16, 2308.76, 2330.88],
[2320.62, 2325.82, 2315.01, 2338.78],
[2313.74, 2293.34, 2289.89, 2340.71],
[2297.77, 2313.22, 2292.03, 2324.63],
[2322.32, 2365.59, 2308.92, 2366.16],
[2364.54, 2359.51, 2330.86, 2369.65],
[2332.08, 2273.4, 2259.25, 2333.54],
[2274.81, 2326.31, 2270.1, 2328.14],
[2333.61, 2347.18, 2321.6, 2351.44],
[2340.44, 2324.29, 2304.27, 2352.02],
[2326.42, 2318.61, 2314.59, 2333.67],
[2314.68, 2310.59, 2296.58, 2320.96],
[2309.16, 2286.6, 2264.83, 2333.29],
[2282.17, 2263.97, 2253.25, 2286.33],
[2255.77, 2270.28, 2253.31, 2276.22]]
attr = ["2017/7/{}".format(i + 1) for i in range(31)]
kline = Kline("Kline - Line 示例")
kline.add("日K", attr, v1)
line_1 = Line()
line_1.add("line-1", attr, [random.randint(2400, 2500) for _ in range(31)])
line_2 = Line()
line_2.add("line-2", attr, [random.randint(2400, 2500) for _ in range(31)])
kline.custom(line_1.get_series())
kline.custom(line_2.get_series())
kline.show_config()
kline.render() | [2383.43, 2385.42, 2371.23, 2391.82],
[2377.41, 2419.02, 2369.57, 2421.15],
[2425.92, 2428.15, 2417.58, 2440.38],
[2411, 2433.13, 2403.3, 2437.42], |
test_helpers.py | import unittest
import importlib
from pbutils.streams import warn
# Try to import flask settings module:
settings = None
try:
pkg_root = __name__.split('.')[0]
settings_modname = '{}.settings'.format(pkg_root)
settings = importlib.import_module(settings_modname)
except ImportError as e:
warn('Unable to import {}: {}'.format(settings_modname, str(e)))
class BaseTest(unittest.TestCase):
if settings is not None:
base_url = 'http://{}'.format(settings.FLASK_SERVER_NAME)
else:
base_url = 'http://localhost:5000'
def setUp(self):
self.client = app.test_client()
self.client.testing = True
try:
self.reset_fixture()
except AttributeError as e:
if str(e) == 'reset_fixture':
print('{} has no method "reset_fixture()", skipping'.format(self.__class__))
else:
|
def make_url(cls, url):
return cls.base_url + url
def _test_status(self, url, method, data, status_code, content_type):
''' issue a <method> request on url and verify the expected status_code was found. return resp.json() '''
real_url = self.make_url(url)
req = getattr(self.client, method.lower())
args = {'follow_redirects': True} # not needed for this site, but...
if data:
if content_type == 'application/json':
args['data'] = json.dumps(data)
elif content_type == 'application/x-www-form-urlencoded':
args['data'] = data
args['content_type'] = content_type
resp = req(real_url, **args)
self.assertEqual(resp.status_code, status_code)
try:
return json.loads(str(resp.data.decode()))
except (TypeError, ValueError):
return resp.data.decode()
def _test_get_status(self, url, status_code=200):
return self._test_status(url, 'GET', None, status_code, None)
def _test_post_status(self, url, data, status_code=201, content_type='application/json'):
return self._test_status(url, 'POST', data, status_code, content_type)
def _test_put_status(self, url, data, status_code=204, content_type='application/json'):
return self._test_status(url, 'PUT', data, status_code, content_type)
def _test_delete_status(self, url, status_code=204):
return self._test_status(url, 'DELETE', None, status_code, None)
| raise |
location.rs | use crate::{
error::{BundleError, MrBundleResult},
ResourceBytes,
};
use holochain_util::ffs;
use std::path::{Path, PathBuf};
/// Where to find a Resource.
///
/// This representation, with named fields, is chosen so that in the yaml config
/// either "path", "url", or "bundled" can be specified due to this field
/// being flattened.
#[derive(Clone, Debug, Hash, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[allow(missing_docs)]
pub enum Location {
/// Expect file to be part of this bundle
Bundled(PathBuf),
/// Get file from local filesystem (not bundled)
Path(PathBuf),
/// Get file from URL
Url(String),
}
impl Location {
/// Make a relative Path absolute if possible, given the `root_dir`
pub fn normalize(&self, root_dir: Option<&PathBuf>) -> MrBundleResult<Location> {
if let Location::Path(path) = self {
if path.is_relative() {
if let Some(dir) = root_dir {
Ok(Location::Path(ffs::sync::canonicalize(dir.join(&path))?))
} else {
Err(BundleError::RelativeLocalPath(path.to_owned()).into())
}
} else {
Ok(self.clone())
}
} else {
Ok(self.clone())
}
}
}
pub(crate) async fn resolve_local(path: &Path) -> MrBundleResult<ResourceBytes> {
Ok(ffs::read(path).await?)
}
pub(crate) async fn resolve_remote(url: &str) -> MrBundleResult<ResourceBytes> {
Ok(reqwest::get(url)
.await?
.bytes()
.await?
.into_iter()
.collect())
}
#[cfg(test)]
mod tests {
use super::*;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
struct TunaSalad {
celery: Vec<Location>,
#[serde(flatten)]
mayo: Location,
}
/// Test that Location serializes in a convenient way suitable for
/// human-readable manifests, e.g. YAML
///
/// The YAML produced by this test looks like:
/// ---
/// celery:
/// - bundled: b
/// - path: p
/// url: "http://r.co"
#[test]
fn location_flattening() |
}
| {
use serde_yaml::Value;
let tuna = TunaSalad {
celery: vec![Location::Bundled("b".into()), Location::Path("p".into())],
mayo: Location::Url("http://r.co".into()),
};
let val = serde_yaml::to_value(&tuna).unwrap();
println!("yaml produced:\n{}", serde_yaml::to_string(&tuna).unwrap());
assert_eq!(val["celery"][0]["bundled"], Value::from("b"));
assert_eq!(val["celery"][1]["path"], Value::from("p"));
assert_eq!(val["url"], Value::from("http://r.co"));
} |
textWrap.ts | import { FitTextLineResult, TextSizeMeasurer, FitTextResult } from '../types/textWrap'
const overflowText = (
text: string,
maxWidth: number,
measureText: TextSizeMeasurer,
trimText: string = '...',
): FitTextLineResult => {
let shortenedText = ''
let charIndex = 0
let shortenedTextWidth = measureText(shortenedText).width
while (charIndex < text.length && shortenedTextWidth < maxWidth) {
shortenedText += text[charIndex]
shortenedTextWidth = measureText(shortenedText).width
charIndex += 1
}
// If text didn't fit the width, add on the trim text
if (charIndex < text.length) {
// Work backwards to determine how many chars to lose to make space for trim text
while (charIndex > 0 && shortenedTextWidth > maxWidth) {
charIndex -= 1
shortenedText = shortenedText.slice(0, charIndex) + trimText
shortenedTextWidth = measureText(shortenedText).width
}
}
return { text: shortenedText, width: shortenedTextWidth, numCharsOverflowed: text.length - charIndex }
}
export const fitText = (
text: string,
maxWidth: number,
maxHeight: number,
measureText: TextSizeMeasurer,
trimText: string = '...',
): FitTextResult => {
const textSizeMeasurement = measureText(text)
const initialTextWidth = textSizeMeasurement.width
// If all text fits on a single line, then simply return one line, no overflowing
if (initialTextWidth < maxWidth) {
return {
textLines: [{ text, width: initialTextWidth, numCharsOverflowed: 0 }],
width: initialTextWidth,
numCharsOverflowed: 0,
}
}
const numLines = Math.floor(maxHeight / textSizeMeasurement.height)
// If text doesn't fit line but we only have one line, then return one overflowed line
if (numLines === 1) {
const overflowedText = overflowText(text, maxWidth, measureText, trimText) | return {
textLines: [overflowedText],
width: overflowedText.width,
numCharsOverflowed: overflowedText.numCharsOverflowed,
}
}
// Begin incrementally wrapping text onto new lines
const words = text.split(' ').map(w => w.trim())
const fittedTextLineResults: FitTextLineResult[] = []
let wordIndex = 0
/* Iterate over the lines apart from the last one, since any remaining
* text post-wrapping will be overflowed.
*/
for (let lineIndex = 0; lineIndex < numLines - 1; lineIndex += 1) {
let lineText = ''
let lineWidth = 0
let currentWordsOnLine = 0
let noMoreWordsFit = false
while (!noMoreWordsFit && wordIndex < words.length) {
const prospectiveNewLine = `${lineText} ${words[wordIndex]}`
const newLineWidth = measureText(prospectiveNewLine).width
if (newLineWidth < maxWidth) {
wordIndex += 1
currentWordsOnLine += 1
lineWidth = newLineWidth
lineText = prospectiveNewLine
}
else {
noMoreWordsFit = true
}
}
/*
* This is the case for if the current word, on it's own, did not fit on a line.
* Therefore, it cannot be wrapped, and must be overflowed in the current line.
* If we didn't do this, then the above for-loop would continue trying to place
* it into every single line, failing every time.
*/
let numCharsOverflowed = 0
if (currentWordsOnLine === 0 && wordIndex < words.length) {
const overflowedText = overflowText(words[wordIndex], maxWidth, measureText, trimText)
lineText = overflowedText.text
lineWidth = overflowedText.width
numCharsOverflowed = overflowedText.numCharsOverflowed
wordIndex += 1
}
// Add the resulting line info to array
fittedTextLineResults.push({
text: lineText.trim(),
width: lineWidth,
numCharsOverflowed,
})
}
// Overflow any remaining text into the last line
const remainingText = wordIndex < words.length ? words.slice(wordIndex).join(' ').trim() : null
if (remainingText?.length > 0)
fittedTextLineResults.push(overflowText(remainingText, maxWidth, measureText, trimText))
return {
textLines: fittedTextLineResults,
width: Math.max(...fittedTextLineResults.map(l => l.width)),
numCharsOverflowed: fittedTextLineResults.reduce((acc, l) => acc + l.numCharsOverflowed, 0),
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.