lib.rs | #![allow(deprecated)]
use {
serde::{Deserialize, Serialize},
paychains_sdk::{
account::Account,
clock::Slot,
commitment_config::CommitmentLevel,
fee_calculator::FeeCalculator,
hash::Hash,
message::Message,
pubkey::Pubkey,
signature::Signature,
transaction::{self, Transaction, TransactionError},
},
};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum TransactionConfirmationStatus {
Processed,
Confirmed,
Finalized,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionStatus {
pub slot: Slot,
pub confirmations: Option<usize>, // None = rooted
pub err: Option<TransactionError>,
pub confirmation_status: Option<TransactionConfirmationStatus>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TransactionSimulationDetails {
pub logs: Vec<String>,
pub units_consumed: u64,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BanksTransactionResultWithSimulation {
pub result: Option<transaction::Result<()>>,
pub simulation_details: Option<TransactionSimulationDetails>,
}
#[tarpc::service]
pub trait Banks {
async fn send_transaction_with_context(transaction: Transaction);
#[deprecated(
since = "1.9.0",
note = "Please use `get_fee_for_message_with_commitment_and_context` instead"
)]
async fn get_fees_with_commitment_and_context(
commitment: CommitmentLevel,
) -> (FeeCalculator, Hash, Slot);
async fn get_transaction_status_with_context(signature: Signature)
-> Option<TransactionStatus>;
async fn get_slot_with_context(commitment: CommitmentLevel) -> Slot;
async fn get_block_height_with_context(commitment: CommitmentLevel) -> u64;
async fn process_transaction_with_preflight_and_commitment_and_context(
transaction: Transaction,
commitment: CommitmentLevel,
) -> BanksTransactionResultWithSimulation;
async fn process_transaction_with_commitment_and_context(
transaction: Transaction,
commitment: CommitmentLevel,
) -> Option<transaction::Result<()>>;
async fn get_account_with_commitment_and_context(
address: Pubkey,
commitment: CommitmentLevel,
) -> Option<Account>;
async fn get_latest_blockhash_with_context() -> Hash;
async fn get_latest_blockhash_with_commitment_and_context(
commitment: CommitmentLevel,
) -> Option<(Hash, u64)>;
async fn get_fee_for_message_with_commitment_and_context(
commitment: CommitmentLevel,
message: Message,
) -> Option<u64>;
}
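// Note: the #[tarpc::service] attribute above expands this trait into client/server
// glue, including the `BanksClient` type exercised in the test below.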
#[cfg(test)]
mod tests {
use {
super::*,
tarpc::{client, transport},
};
#[test]
    fn test_banks_client_new() {
        let (client_transport, _server_transport) = transport::channel::unbounded();
        BanksClient::new(client::Config::default(), client_transport);
    }
}
gameScript.js | var bomb = [],gameStatus=true,points=0;
function startGame()
{
document.getElementById("gameScore").innerHTML = `Game Score: ${points}`;
for(let i=0;i<10;i++)
{
let randVar = Math.floor(Math.random()*81)+1;
if(!bomb.includes(randVar))
{
bomb[i] = randVar;
}
else
i--;
}
}
function game(event)
{
if(gameStatus)
{
let curr_id = event.target.id;
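        // cell ids look like "cell_<n>"; drop the 5-character prefix to get the number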
curr_id = Number(curr_id.substr(5));
if(!bomb.includes(curr_id))
{
if(event.target.style.backgroundColor!="green")
{
points++;
event.target.style.backgroundColor = "green";
document.getElementById("gameScore").innerHTML = `Game Score: ${points}`;
}
if(points==71)
{
document.getElementById("resultDisplay").innerHTML = "Congratulation! You Won";
document.getElementById("resultDisplay").style.marginLeft = "40%";
document.getElementById("resultDisplay").style.color = "fuchsia";
gameStatus=false;
}
}
else
{
document.getElementById("resultDisplay").innerHTML = "Game Over";
document.getElementById("resultDisplay").style.marginLeft = "43.5%";
document.getElementById("resultDisplay").style.color = "red";
for(let p=0;p<10;p++)
{
document.getElementById(`cell_${bomb[p]}`).style.backgroundImage = "url('https://img.icons8.com/emoji/48/000000/bomb-emoji.png')";
document.getElementById(`cell_${bomb[p]}`).style.backgroundSize = "50px";
document.getElementById(`cell_${bomb[p]}`).style.backgroundColor = "red";
}
gameStatus=false;
}
}
}
function resetGame()
{
document.getElementById("gameScore").innerHTML = "";
document.getElementById("resultDisplay").innerHTML = "";
for(let i=1;i<=81;i++)
{
document.getElementById(`cell_${i}`).style.backgroundColor = "aqua";
document.getElementById(`cell_${i}`).style.backgroundImage = "none";
}
gameStatus=true;
points=0;
startGame();
}
doc.go | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go_gapic. DO NOT EDIT.
// Package automl is an auto-generated package for the
// Cloud AutoML API.
//
// Train high-quality custom machine learning models with minimum effort and
// machine learning expertise.
//
// Use of Context
//
// The ctx passed to NewClient is used for authentication requests and
// for creating the underlying connection, but is not used for subsequent calls.
// Individual methods on the client use the ctx given to them.
//
// To close the open connection, use the Close() method.
//
// For information about setting deadlines, reusing contexts, and more
// please visit pkg.go.dev/cloud.google.com/go.
package automl // import "cloud.google.com/go/automl/apiv1"
import (
"context"
"os"
"runtime"
"strconv"
"strings"
"unicode"
"google.golang.org/api/option"
"google.golang.org/grpc/metadata"
)
// For more information on implementing a client constructor hook, see
// https://github.com/googleapis/google-cloud-go/wiki/Customizing-constructors.
type clientHookParams struct{}
type clientHook func(context.Context, clientHookParams) ([]option.ClientOption, error)
const versionClient = "20201116"
func insertMetadata(ctx context.Context, mds ...metadata.MD) context.Context {
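	// merge each provided metadata map into a copy of the context's existing
	// outgoing metadata, so headers already set by callers are preserved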
out, _ := metadata.FromOutgoingContext(ctx)
out = out.Copy()
for _, md := range mds {
for k, v := range md {
out[k] = append(out[k], v...)
}
}
return metadata.NewOutgoingContext(ctx, out)
}
func checkDisableDeadlines() (bool, error) {
raw, ok := os.LookupEnv("GOOGLE_API_GO_EXPERIMENTAL_DISABLE_DEFAULT_DEADLINE")
if !ok {
return false, nil
}
b, err := strconv.ParseBool(raw)
return b, err
}
// DefaultAuthScopes reports the default set of authentication scopes to use with this package.
func DefaultAuthScopes() []string {
return []string{
"https://www.googleapis.com/auth/cloud-platform",
}
}
// versionGo returns the Go runtime version. The returned string
// has no whitespace, suitable for reporting in header.
func versionGo() string {
const develPrefix = "devel +"
s := runtime.Version()
if strings.HasPrefix(s, develPrefix) {
s = s[len(develPrefix):]
if p := strings.IndexFunc(s, unicode.IsSpace); p >= 0 {
s = s[:p]
}
return s
}
notSemverRune := func(r rune) bool {
return !strings.ContainsRune("0123456789.", r)
}
if strings.HasPrefix(s, "go1") {
s = s[2:]
var prerelease string
if p := strings.IndexFunc(s, notSemverRune); p >= 0 {
s, prerelease = s[:p], s[p:]
}
if strings.HasSuffix(s, ".") {
s += "0"
} else if strings.Count(s, ".") < 2 {
s += ".0"
}
if prerelease != "" {
s += "-" + prerelease
}
return s
}
return "UNKNOWN"
}
103.0-BDP-cascade-invert.py | # %% [markdown]
# #
import itertools
import os
import time
from itertools import chain
import colorcet as cc
import matplotlib as mpl
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import pandas as pd
import seaborn as sns
from anytree import LevelOrderGroupIter, Node, RenderTree
from joblib import Parallel, delayed
from mpl_toolkits.axes_grid1 import make_axes_locatable
from sklearn.decomposition import PCA
from graspy.plot import heatmap, pairplot
from src.data import load_metagraph
from src.graph import MetaGraph, preprocess
from src.io import savecsv, savefig, saveskels
from src.traverse import (
cascades_from_node,
generate_cascade_tree,
generate_random_walks,
path_to_visits,
to_markov_matrix,
to_path_graph,
)
from src.visualization import (
CLASS_COLOR_DICT,
barplot_text,
draw_networkx_nice,
draw_separators,
matrixplot,
remove_shared_ax,
remove_spines,
screeplot,
sort_meta,
stacked_barplot,
)
FNAME = os.path.basename(__file__)[:-3]
print(FNAME)
def stashfig(name, **kws):
    savefig(name, foldername=FNAME, save_on=True, **kws)
def stashcsv(df, name, **kws):
savecsv(df, name, foldername=FNAME, save_on=True, **kws)
#%% Load and preprocess the data
VERSION = "2020-03-09"
print(f"Using version {VERSION}")
plot_examples = False
plot_embed = False
plot_full_mat = False
graph_type = "Gad"
threshold = 0
weight = "weight"
mg = load_metagraph(graph_type, VERSION)
mg = preprocess(
mg,
threshold=threshold,
sym_threshold=False,
remove_pdiff=True,
binarize=False,
weight=weight,
)
print(f"Preprocessed graph {graph_type} with threshold={threshold}, weight={weight}")
# TODO update this with the mixed groups
# TODO make these functional for selecting proper paths
out_classes = [
"O_dSEZ",
"O_dSEZ;CN",
"O_dSEZ;LHN",
"O_dVNC",
"O_dVNC;O_RG",
"O_dVNC;CN",
"O_RG",
"O_dUnk",
"O_RG-IPC",
"O_RG-ITP",
"O_RG-CA-LP",
]
from_groups = [
("sens-ORN",),
("sens-photoRh5", "sens-photoRh6"),
("sens-MN",),
("sens-thermo",),
("sens-vtd",),
("sens-AN",),
]
from_group_names = ["Odor", "Photo", "MN", "Temp", "VTD", "AN"]
out_groups = [
("motor-mAN", "motormVAN", "motor-mPaN"),
("O_dSEZ", "O_dVNC;O_dSEZ", "O_dSEZ;CN", "LHN;O_dSEZ"),
("O_dVNC", "O_dVNC;CN", "O_RG;O_dVNC", "O_dVNC;O_dSEZ"),
("O_RG", "O_RG-IPC", "O_RG-ITP", "O_RG-CA-LP", "O_RG;O_dVNC"),
("O_dUnk",),
]
out_group_names = ["Motor", "SEZ", "VNC", "RG", "dUnk"]
from_classes = list(chain.from_iterable(from_groups)) # make this a flat list
out_classes = list(chain.from_iterable(out_groups))
class_key = "Merge Class"
adj = nx.to_numpy_array(mg.g, weight=weight, nodelist=mg.meta.index.values)
n_verts = len(adj)
meta = mg.meta.copy()
g = mg.g.copy()
meta["idx"] = range(len(meta))
from_inds = meta[meta[class_key].isin(from_classes)]["idx"].values
out_inds = meta[meta[class_key].isin(out_classes)]["idx"].values
ind_map = dict(zip(meta.index, meta["idx"]))
g = nx.relabel_nodes(g, ind_map, copy=True)
out_ind_map = dict(zip(out_inds, range(len(out_inds))))
# %% [markdown]
# # Use a method to generate visits
path_type = "cascade"
if path_type == "cascade":
p = 0.01
not_probs = (
    1 - p
) ** adj  # probability that none of the synapses trigger the postsynaptic neuron
probs = 1 - not_probs # probability of ANY of the synapses firing onto next
elif path_type == "fancy-cascade":
alpha = 0.5
flat = np.full(adj.shape, alpha)
deg = meta["dendrite_input"].values
deg[deg == 0] = 1
flat = flat / deg[None, :]
not_probs = np.power((1 - flat), adj)
probs = 1 - not_probs
#%%
seed = 8888
max_depth = 10
n_bins = 10
n_sims = 100
method = "tree"
normalize_n_source = False
basename = f"-{graph_type}-t{threshold}-pt{path_type}-b{n_bins}-n{n_sims}-m{method}"
basename += f"-norm{normalize_n_source}"
basename += f"-plus-inverted"
np.random.seed(seed)
if method == "tree":
seeds = np.random.choice(int(1e8), size=len(from_inds), replace=False)
outs = Parallel(n_jobs=1, verbose=10)(
delayed(cascades_from_node)(
fi, probs, out_inds, max_depth, n_sims, seed, n_bins, method
)
for fi, seed in zip(from_inds, seeds)
)
elif method == "path":
outs = []
for start_ind in from_inds:
temp_hist = cascades_from_node(
start_ind, probs, out_inds, max_depth, n_sims, seed, n_bins, method
)
outs.append(temp_hist)
from_hist_mat = np.concatenate(outs, axis=-1)
###
# invert
if method == "tree":
seeds = np.random.choice(int(1e8), size=len(out_inds), replace=False)
outs = Parallel(n_jobs=1, verbose=10)(
delayed(cascades_from_node)(
fi, probs.T, from_inds, max_depth, n_sims, seed, n_bins, method
)
for fi, seed in zip(out_inds, seeds)
)
elif method == "path":
outs = []
    for start_ind in out_inds:
        temp_hist = cascades_from_node(
            start_ind, probs.T, from_inds, max_depth, n_sims, seed, n_bins, method
)
outs.append(temp_hist)
out_hist_mat = np.concatenate(outs, axis=-1)
# generate_cascade_paths(start_ind, probs, 1, stop_inds=out_inds, max_depth=10)
# %% [markdown]
# # Sort metadata
full_hist_mat = np.concatenate((from_hist_mat, out_hist_mat), axis=1)
hist_mat = full_hist_mat
# row metadata
ids = pd.Series(index=meta["idx"], data=meta.index, name="id")
to_class = ids.map(meta["Merge Class"])
to_class.name = "to_class"
row_df = pd.concat([ids, to_class], axis=1)
# col metadata
orders = pd.Series(data=len(from_inds) * list(range(n_bins)), name="order")
from_idx = pd.Series(data=np.repeat(from_inds, n_bins), name="idx")
from_ids = from_idx.map(ids)
from_ids.name = "id"
from_class = from_ids.map(meta["Merge Class"])
from_class.name = "class"
from_col_df = pd.concat([orders, from_idx, from_ids, from_class], axis=1)
orders = pd.Series(data=len(out_inds) * list(range(n_bins)), name="order")
out_idx = pd.Series(data=np.repeat(out_inds, n_bins), name="idx")
out_ids = out_idx.map(ids)
out_ids.name = "id"
out_class = out_ids.map(meta["Merge Class"])
out_class.name = "class"
out_col_df = pd.concat([orders, out_idx, out_ids, out_class], axis=1)
col_df = pd.concat([from_col_df, out_col_df], axis=0, ignore_index=True)
# %% [markdown]
# #
log_mat = np.log10(hist_mat + 1)
if plot_full_mat:
shape = log_mat.shape
figsize = (10, 20)
fig, ax = plt.subplots(1, 1, figsize=figsize)
matrixplot(
log_mat,
ax=ax,
col_meta=col_df,
        col_sort_class=["class"],
row_meta=row_df,
row_sort_class=["to_class"],
plot_type="scattermap",
sizes=(0.5, 0.5),
tick_rot=45,
)
stashfig("log-full-scatter" + basename)
fig, ax = plt.subplots(1, 1, figsize=figsize)
matrixplot(
log_mat,
ax=ax,
col_meta=col_df,
        col_sort_class=["class"],
row_colors=CLASS_COLOR_DICT,
row_meta=row_df,
row_sort_class=["to_class"],
plot_type="heatmap",
sizes=(0.5, 0.5),
tick_rot=45,
)
stashfig("log-full-heat" + basename)
# %% [markdown]
# # Screeplots
if plot_embed:
screeplot(hist_mat.astype(float), title="Raw hist mat (full)")
stashfig("scree-raw-mat" + basename)
screeplot(log_mat, title="Log hist mat (full)")
stashfig("scree-log-mat" + basename)
# %% [markdown]
# # Pairplots
if plot_embed:
pca = PCA(n_components=6)
embed = pca.fit_transform(log_mat)
loadings = pca.components_.T
pg = pairplot(
embed,
labels=to_class.values,
palette=CLASS_COLOR_DICT,
height=5,
title="Node response embedding (log)",
)
pg._legend.remove()
stashfig("node-pca-log" + basename)
pg = pairplot(
loadings,
labels=from_class.values,
height=5,
title="Source class embedding (log)",
)
stashfig("source-pca-log" + basename)
pca = PCA(n_components=6)
embed = pca.fit_transform(hist_mat.astype(float))
loadings = pca.components_.T
pg = pairplot(
embed,
labels=to_class.values,
palette=CLASS_COLOR_DICT,
height=5,
title="Node response embedding (raw)",
)
pg._legend.remove()
stashfig("node-pca-log" + basename)
pg = pairplot(
loadings,
labels=from_class.values,
height=5,
title="Source class embedding (raw)",
)
stashfig("source-pca-log" + basename)
# %% [markdown]
# # Collapse that matrix
hist_mat = full_hist_mat
collapsed_hist = []
collapsed_col_df = []
groups = from_groups + out_groups
names = from_group_names + out_group_names
for fg, fg_name in zip(groups, names):
from_df = col_df[col_df["class"].isin(fg)]
n_in_group = len(from_df)
for order in from_df["order"].unique():
inds = from_df[from_df["order"] == order].index
col = hist_mat[:, inds].sum(axis=1)
if normalize_n_source:
col = col.astype(float)
col /= n_in_group
collapsed_hist.append(col)
row = {"order": order, "class": fg_name}
collapsed_col_df.append(row)
collapsed_col_df = pd.DataFrame(collapsed_col_df)
collapsed_hist = np.array(collapsed_hist).T
log_collapsed_hist = np.log10(collapsed_hist + 1)
# %% [markdown]
# #
if plot_embed:
pca = PCA(n_components=6)
embed = pca.fit_transform(log_collapsed_hist)
loadings = pca.components_.T
pg = pairplot(
embed,
labels=to_class.values,
palette=CLASS_COLOR_DICT,
height=5,
title="Collapsed node response embedding (log)",
)
pg._legend.remove()
stashfig("coll-node-pca-log" + basename)
pg = pairplot(
loadings,
        labels=collapsed_col_df["class"].values,
height=5,
title="Collapsed source class embedding (log)",
)
stashfig("coll-source-pca-log" + basename)
pca = PCA(n_components=6)
embed = pca.fit_transform(collapsed_hist.astype(float))
loadings = pca.components_.T
pg = pairplot(
embed,
labels=to_class.values,
palette=CLASS_COLOR_DICT,
height=5,
title="Collapsed node response embedding (raw)",
)
pg._legend.remove()
stashfig("coll-node-pca-log" + basename)
pg = pairplot(
loadings,
        labels=collapsed_col_df["class"].values,
height=5,
title="Collapsed source class embedding (raw)",
)
stashfig("coll-source-pca-log" + basename)
# %% [markdown]
# # Compute mean visit over all sources, for plotting
def mean_visit(row):
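    # weighted mean hop index: weight each bin position by its visit count,
    # normalized by the total number of visits to this node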
n_groups = len(row) // n_bins
s = 0
for i in range(n_groups):
group = row[i * n_bins : (i + 1) * n_bins]
for j, val in enumerate(group):
s += j * val
s /= row.sum()
return s
visits = []
for r in collapsed_hist:
mv = mean_visit(r)
visits.append(mv)
visits = np.array(visits)
visits[np.isnan(visits)] = n_bins + 1
row_df["visit_order"] = visits
mean_visit_order = row_df.groupby(["to_class"])["visit_order"].mean()
row_df["group_visit_order"] = row_df["to_class"].map(mean_visit_order)
row_df["n_visit"] = collapsed_hist.sum(axis=1)
# %% [markdown]
# #
fig, ax = plt.subplots(1, 1, figsize=(15, 15))
sns.set_context("talk", font_scale=0.8)
gridline_kws = dict(color="grey", linestyle="--", alpha=0.7, linewidth=0.3)
matrixplot(
log_collapsed_hist,
ax=ax,
col_meta=collapsed_col_df,
col_sort_class=["class"],
row_meta=row_df,
row_sort_class=["to_class"],
row_colors=CLASS_COLOR_DICT,
row_class_order="group_visit_order",
row_item_order=["visit_order"],
plot_type="heatmap",
tick_rot=0,
row_ticks=False,
gridline_kws=gridline_kws,
)
stashfig("collapsed-log-heat" + basename)
# %% [markdown]
# #
sns.set_context("talk", font_scale=1)
gridline_kws = dict(color="grey", linestyle="--", alpha=0.7, linewidth=0.3)
fig, ax = plt.subplots(1, 1, figsize=(25, 15))
ax, divider, top_cax, left_cax = matrixplot(
log_collapsed_hist.T,
ax=ax,
row_meta=collapsed_col_df,
row_sort_class=["class"],
col_meta=row_df,
col_sort_class=["to_class"],
col_colors=CLASS_COLOR_DICT,
col_class_order="group_visit_order",
col_item_order=["visit_order"],
plot_type="heatmap",
tick_rot=45,
col_ticks=False,
gridline_kws=gridline_kws,
)
cax = divider.append_axes("right", size="1%", pad=0.02, sharey=ax)
remove_shared_ax(cax)
sns.heatmap(
collapsed_col_df["order"][:, None], ax=cax, cbar=False, cmap="RdBu", center=0
)
cax.set_xticks([])
cax.set_yticks([])
cax.set_ylabel(r"Hops $\to$", rotation=-90, ha="center", va="center", labelpad=20)
cax.yaxis.set_label_position("right")
top_cax.set_yticks([0.5])
top_cax.set_yticklabels(["Class"], va="center")
ax.set_xlabel("Neuron")
ax.set_ylabel("Source class")
stashfig("collapsed-log-heat-transpose" + basename, dpi=200)
fig, ax = plt.subplots(1, 1, figsize=(25, 15))
ax, divider, top_cax, left_cax = matrixplot(
log_collapsed_hist.T,
ax=ax,
row_meta=collapsed_col_df,
row_sort_class=["class"],
col_meta=row_df,
col_sort_class=["to_class"],
col_colors=CLASS_COLOR_DICT,
col_class_order="group_visit_order",
col_item_order=["visit_order"],
plot_type="heatmap",
tick_rot=45,
col_ticks=True,
gridline_kws=gridline_kws,
)
cax = divider.append_axes("right", size="1%", pad=0.02, sharey=ax)
remove_shared_ax(cax)
sns.heatmap(
collapsed_col_df["order"][:, None], ax=cax, cbar=False, cmap="RdBu", center=0
)
cax.set_xticks([])
cax.set_yticks([])
cax.set_ylabel(r"Hops $\to$", rotation=-90, ha="center", va="center", labelpad=20)
cax.yaxis.set_label_position("right")
top_cax.set_yticks([0.5])
top_cax.set_yticklabels(["Class"], va="center")
ax.set_xlabel("Neuron")
ax.set_ylabel("Source class")
stashfig("collapsed-log-heat-transpose-labeled" + basename, dpi=200)
# %% [markdown]
# # clustermap the matrix
sns.set_context("talk", font_scale=1)
linkage = "average"
metric = "euclidean"
colors = np.vectorize(CLASS_COLOR_DICT.get)(row_df["to_class"])
perm_inds, sort_collapsed_col_df = sort_meta(
    collapsed_col_df, sort_class=["class"]
)
sort_log_collapsed_hist = log_collapsed_hist[:, perm_inds]
cg = sns.clustermap(
data=sort_log_collapsed_hist.T,
col_cluster=True,
row_cluster=False,
col_colors=colors,
cmap="RdBu_r",
center=0,
cbar_pos=None,
method=linkage,
metric=metric,
)
ax = cg.ax_heatmap
draw_separators(
ax,
ax_type="y",
sort_meta=sort_collapsed_col_df,
    sort_class=["class"],
tick_rot=0,
)
ax.xaxis.set_ticks([])
# ax.set_ylabel(r"Visits over time $\to$")
ax.set_xlabel("Neuron")
ax.yaxis.tick_left()
# ax.set_yticklabels(ax.get_yticklabels(), ha="left")
stashfig("collapsed-log-clustermap" + basename)
# stashfig("collapsed-log-clustermap" + basename, fmt="pdf")
# %% [markdown]
# # Do some plotting for illustration only
if plot_examples:
sns.set_context("talk")
sns.set_palette("Set1")
examples = [742, 605, 743, 2282, 596, 2367, 1690, 2313]
for target_ind in examples:
row = collapsed_hist[target_ind, :]
        perm_inds, sort_col_df = sort_meta(collapsed_col_df, sort_class=["class"])
sort_row = row[perm_inds]
fig, ax = plt.subplots(1, 1)
xs = np.arange(len(sort_row)) + 0.5
divider = make_axes_locatable(ax)
bot_cax = divider.append_axes("bottom", size="3%", pad=0.02, sharex=ax)
remove_shared_ax(bot_cax)
ax.bar(x=xs, height=sort_row, width=0.8)
draw_separators(
            ax, sort_meta=sort_col_df, sort_class=["class"], tick_rot=0
)
ax.set_xlim(0, len(xs))
ax.set_ylabel("# hits @ time")
sns.heatmap(
collapsed_col_df["order"][None, :],
ax=bot_cax,
cbar=False,
cmap="RdBu",
center=0,
)
bot_cax.set_xticks([])
bot_cax.set_yticks([])
bot_cax.set_xlabel(r"Hops $\to$", x=0.1, ha="left", labelpad=-22)
bot_cax.set_xticks([20.5, 24.5, 28.5])
bot_cax.set_xticklabels([1, 5, 9], rotation=0)
ax.spines["right"].set_visible(False)
ax.spines["top"].set_visible(False)
target_skid = meta.iloc[target_ind, :].name
ax.set_title(
f"Response for cell {target_skid} ({meta[meta['idx'] == target_ind]['Merge Class'].values[0]})"
)
stashfig(f"{target_skid}-response-hist" + basename)
utils.py | from urllib.request import urlopen
import torch
from torch import nn
import numpy as np
from skimage.morphology import label
import os
from HD_BET.paths import folder_with_parameter_files
def get_params_fname(fold):
return os.path.join(folder_with_parameter_files, "%d.model" % fold)
def maybe_download_parameters(fold=0, force_overwrite=False):
"""
Downloads the parameters for some fold if it is not present yet.
:param fold:
:param force_overwrite: if True the old parameter file will be deleted (if present) prior to download
:return:
"""
assert 0 <= fold <= 4, "fold must be between 0 and 4"
if not os.path.isdir(folder_with_parameter_files):
maybe_mkdir_p(folder_with_parameter_files)
out_filename = get_params_fname(fold)
if force_overwrite and os.path.isfile(out_filename):
os.remove(out_filename)
if not os.path.isfile(out_filename):
url = "https://zenodo.org/record/2540695/files/%d.model?download=1" % fold
print("Downloading", url, "...")
data = urlopen(url).read()
with open(out_filename, 'wb') as f:
f.write(data)
def init_weights(module):
if isinstance(module, nn.Conv3d):
        # use the in-place initializers; the non-underscore variants are deprecated
        nn.init.kaiming_normal_(module.weight, a=1e-2)
        if module.bias is not None:
            nn.init.constant_(module.bias, 0)
def softmax_helper(x):
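    # numerically stable softmax over dim 1: subtract the per-sample max before exponentiating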
rpt = [1 for _ in range(len(x.size()))]
rpt[1] = x.size(1)
x_max = x.max(1, keepdim=True)[0].repeat(*rpt)
e_x = torch.exp(x - x_max)
return e_x / e_x.sum(1, keepdim=True).repeat(*rpt)
class SetNetworkToVal(object):
def __init__(self, use_dropout_sampling=False, norm_use_average=True):
self.norm_use_average = norm_use_average
self.use_dropout_sampling = use_dropout_sampling
def __call__(self, module):
if isinstance(module, nn.Dropout3d) or isinstance(module, nn.Dropout2d) or isinstance(module, nn.Dropout):
module.train(self.use_dropout_sampling)
elif isinstance(module, nn.InstanceNorm3d) or isinstance(module, nn.InstanceNorm2d) or \
isinstance(module, nn.InstanceNorm1d) \
or isinstance(module, nn.BatchNorm2d) or isinstance(module, nn.BatchNorm3d) or \
isinstance(module, nn.BatchNorm1d):
module.train(not self.norm_use_average)
def postprocess_prediction(seg):
# basically look for connected components and choose the largest one, delete everything else
print("running postprocessing... ")
mask = seg != 0
lbls = label(mask, connectivity=mask.ndim)
lbls_sizes = [np.sum(lbls == i) for i in np.unique(lbls)]
largest_region = np.argmax(lbls_sizes[1:]) + 1
seg[lbls != largest_region] = 0
return seg
def subdirs(folder, join=True, prefix=None, suffix=None, sort=True):
if join:
l = os.path.join
else:
l = lambda x, y: y
res = [l(folder, i) for i in os.listdir(folder) if os.path.isdir(os.path.join(folder, i))
and (prefix is None or i.startswith(prefix))
and (suffix is None or i.endswith(suffix))]
if sort:
res.sort()
return res
def subfiles(folder, join=True, prefix=None, suffix=None, sort=True):
if join:
l = os.path.join
else:
l = lambda x, y: y
res = [l(folder, i) for i in os.listdir(folder) if os.path.isfile(os.path.join(folder, i))
and (prefix is None or i.startswith(prefix))
and (suffix is None or i.endswith(suffix))]
if sort:
res.sort()
return res
subfolders = subdirs # I am tired of confusing those
def maybe_mkdir_p(directory):
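    # create each missing component of an absolute POSIX-style path in turn
    # (roughly equivalent to os.makedirs(directory, exist_ok=True))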
splits = directory.split("/")[1:]
for i in range(0, len(splits)):
if not os.path.isdir(os.path.join("/", *splits[:i+1])):
os.mkdir(os.path.join("/", *splits[:i+1]))
enumerate.rs | use super::coreaudio::sys::{
kAudioHardwareNoError, kAudioHardwarePropertyDefaultInputDevice,
kAudioHardwarePropertyDefaultOutputDevice, kAudioHardwarePropertyDevices,
kAudioObjectPropertyElementMaster, kAudioObjectPropertyScopeGlobal, kAudioObjectSystemObject,
AudioDeviceID, AudioObjectGetPropertyData, AudioObjectGetPropertyDataSize,
AudioObjectPropertyAddress, OSStatus,
};
use super::Device;
use std::mem;
use std::ptr::null;
use std::vec::IntoIter as VecIntoIter;
use {BackendSpecificError, DevicesError, SupportedStreamConfigRange};
unsafe fn audio_devices() -> Result<Vec<AudioDeviceID>, OSStatus> {
let property_address = AudioObjectPropertyAddress {
mSelector: kAudioHardwarePropertyDevices,
mScope: kAudioObjectPropertyScopeGlobal,
mElement: kAudioObjectPropertyElementMaster,
};
macro_rules! try_status_or_return {
($status:expr) => {
if $status != kAudioHardwareNoError as i32 {
return Err($status);
}
};
}
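    // CoreAudio two-call pattern: query the property's byte size first, then
    // fetch the payload into a buffer of exactly that size.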
let data_size = 0u32;
let status = AudioObjectGetPropertyDataSize(
kAudioObjectSystemObject,
&property_address as *const _,
0,
null(),
&data_size as *const _ as *mut _,
);
try_status_or_return!(status);
let device_count = data_size / mem::size_of::<AudioDeviceID>() as u32;
let mut audio_devices = vec![];
audio_devices.reserve_exact(device_count as usize);
let status = AudioObjectGetPropertyData(
kAudioObjectSystemObject,
&property_address as *const _,
0,
null(),
&data_size as *const _ as *mut _,
audio_devices.as_mut_ptr() as *mut _,
);
try_status_or_return!(status);
audio_devices.set_len(device_count as usize);
Ok(audio_devices)
}
pub struct Devices(VecIntoIter<AudioDeviceID>);
impl Devices {
pub fn new() -> Result<Self, DevicesError> {
let devices = unsafe {
match audio_devices() {
Ok(devices) => devices,
Err(os_status) => {
let description = format!("{}", os_status);
let err = BackendSpecificError { description };
return Err(err.into());
}
}
};
Ok(Devices(devices.into_iter()))
}
}
unsafe impl Send for Devices {}
unsafe impl Sync for Devices {}
impl Iterator for Devices {
type Item = Device;
fn next(&mut self) -> Option<Device> {
self.0.next().map(|id| Device {
audio_device_id: id,
})
}
}
pub fn default_input_device() -> Option<Device> {
let property_address = AudioObjectPropertyAddress {
mSelector: kAudioHardwarePropertyDefaultInputDevice,
mScope: kAudioObjectPropertyScopeGlobal,
mElement: kAudioObjectPropertyElementMaster,
};
let audio_device_id: AudioDeviceID = 0;
let data_size = mem::size_of::<AudioDeviceID>();
let status = unsafe {
AudioObjectGetPropertyData(
kAudioObjectSystemObject,
&property_address as *const _,
0,
null(),
&data_size as *const _ as *mut _,
&audio_device_id as *const _ as *mut _,
)
};
if status != kAudioHardwareNoError as i32 {
return None;
}
let device = Device { audio_device_id };
Some(device)
}
pub fn default_output_device() -> Option<Device> {
let property_address = AudioObjectPropertyAddress {
mSelector: kAudioHardwarePropertyDefaultOutputDevice,
mScope: kAudioObjectPropertyScopeGlobal,
mElement: kAudioObjectPropertyElementMaster,
};
let audio_device_id: AudioDeviceID = 0;
let data_size = mem::size_of::<AudioDeviceID>();
let status = unsafe {
AudioObjectGetPropertyData(
kAudioObjectSystemObject,
&property_address as *const _,
0,
null(),
&data_size as *const _ as *mut _,
&audio_device_id as *const _ as *mut _,
)
};
if status != kAudioHardwareNoError as i32 {
return None;
}
let device = Device { audio_device_id };
Some(device)
}
pub type SupportedInputConfigs = VecIntoIter<SupportedStreamConfigRange>;
pub type SupportedOutputConfigs = VecIntoIter<SupportedStreamConfigRange>;
models.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationList {
#[serde(skip_serializing)]
pub value: Vec<Operation>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
#[serde(skip_serializing)]
pub name: Option<String>,
#[serde(skip_serializing)]
pub display: Option<operation::Display>,
}
pub mod operation {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Display {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub provider: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub resource: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub operation: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedClusterList {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<ConnectedCluster>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedCluster {
#[serde(flatten)]
pub tracked_resource: TrackedResource,
pub identity: ConnectedClusterIdentity,
pub properties: ConnectedClusterProperties,
#[serde(rename = "systemData", skip_serializing)]
pub system_data: Option<SystemData>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedClusterIdentity {
#[serde(rename = "principalId", skip_serializing)]
pub principal_id: Option<String>,
#[serde(rename = "tenantId", skip_serializing)]
pub tenant_id: Option<String>,
#[serde(rename = "type")]
pub type_: connected_cluster_identity::Type,
}
pub mod connected_cluster_identity {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Type {
None,
SystemAssigned,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedClusterProperties {
#[serde(rename = "agentPublicKeyCertificate")]
pub agent_public_key_certificate: String,
#[serde(rename = "kubernetesVersion", skip_serializing)]
pub kubernetes_version: Option<String>,
#[serde(rename = "totalNodeCount", skip_serializing)]
pub total_node_count: Option<i64>,
#[serde(rename = "totalCoreCount", skip_serializing)]
pub total_core_count: Option<i32>,
#[serde(rename = "agentVersion", skip_serializing)]
pub agent_version: Option<String>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<ConnectedClusterProvisioningState>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub distribution: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub infrastructure: Option<String>,
#[serde(skip_serializing)]
pub offering: Option<String>,
#[serde(rename = "managedIdentityCertificateExpirationTime", skip_serializing)]
pub managed_identity_certificate_expiration_time: Option<String>,
#[serde(rename = "lastConnectivityTime", skip_serializing)]
pub last_connectivity_time: Option<String>,
#[serde(rename = "connectivityStatus", skip_serializing)]
pub connectivity_status: Option<connected_cluster_properties::ConnectivityStatus>,
#[serde(rename = "privateLinkState", default, skip_serializing_if = "Option::is_none")]
pub private_link_state: Option<connected_cluster_properties::PrivateLinkState>,
#[serde(rename = "privateLinkScopeResourceId", default, skip_serializing_if = "Option::is_none")]
pub private_link_scope_resource_id: Option<String>,
}
pub mod connected_cluster_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ConnectivityStatus {
Connecting,
Connected,
Offline,
Expired,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateLinkState {
Enabled,
Disabled,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CredentialResults {
#[serde(rename = "hybridConnectionConfig", default, skip_serializing_if = "Option::is_none")]
pub hybrid_connection_config: Option<HybridConnectionConfig>,
#[serde(skip_serializing)]
pub kubeconfigs: Vec<CredentialResult>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CredentialResult {
#[serde(skip_serializing)]
pub name: Option<String>,
#[serde(skip_serializing)]
pub value: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ConnectedClusterProvisioningState {
Succeeded,
Failed,
Canceled,
Provisioning,
Updating,
Deleting,
Accepted,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedClusterPatch {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ConnectedClusterPatchProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedClusterPatchProperties {
#[serde(rename = "privateLinkState", default, skip_serializing_if = "Option::is_none")]
pub private_link_state: Option<connected_cluster_patch_properties::PrivateLinkState>,
#[serde(rename = "privateLinkScopeResourceId", default, skip_serializing_if = "Option::is_none")]
pub private_link_scope_resource_id: Option<String>,
}
pub mod connected_cluster_patch_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateLinkState {
Enabled,
Disabled,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HybridConnectionConfig {
#[serde(rename = "expirationTime", skip_serializing)]
pub expiration_time: Option<i64>,
#[serde(rename = "hybridConnectionName", skip_serializing)]
pub hybrid_connection_name: Option<String>,
#[serde(skip_serializing)]
pub relay: Option<String>,
#[serde(skip_serializing)]
pub token: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SystemData {
#[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
pub created_by: Option<String>,
#[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
pub created_by_type: Option<system_data::CreatedByType>,
#[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")]
pub created_at: Option<String>,
#[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
pub last_modified_by: Option<String>,
#[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
pub last_modified_by_type: Option<system_data::LastModifiedByType>,
#[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")]
pub last_modified_at: Option<String>,
}
pub mod system_data {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum CreatedByType {
User,
Application,
ManagedIdentity,
Key,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum LastModifiedByType {
User,
Application,
ManagedIdentity,
Key,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ListClusterUserCredentialsProperties {
#[serde(rename = "authenticationMethod")]
pub authentication_method: list_cluster_user_credentials_properties::AuthenticationMethod,
#[serde(rename = "clientProxy")]
pub client_proxy: bool,
}
pub mod list_cluster_user_credentials_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AuthenticationMethod {
Token,
#[serde(rename = "AAD")]
Aad,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ErrorDetail>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
#[serde(skip_serializing)]
pub code: Option<String>,
#[serde(skip_serializing)]
pub message: Option<String>,
#[serde(skip_serializing)]
pub target: Option<String>,
#[serde(skip_serializing)]
pub details: Vec<ErrorDetail>,
#[serde(rename = "additionalInfo", skip_serializing)]
pub additional_info: Vec<ErrorAdditionalInfo>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
#[serde(rename = "type", skip_serializing)]
pub type_: Option<String>,
#[serde(skip_serializing)]
pub info: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
pub location: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
#[serde(skip_serializing)]
pub id: Option<String>,
#[serde(skip_serializing)]
pub name: Option<String>,
#[serde(rename = "type", skip_serializing)]
pub type_: Option<String>,
}
main_test.go | package main
import "testing"

type TestDataItem struct {
block []byte
blockSize int
expectedOutputBlock []byte
}
func TestAddPadding( t *testing.T ) {
// input-result data items
dataItems := []TestDataItem{
{ []byte("YELLOW SUBMARINE"),20,[]byte("YELLOW SUBMARINE\x04\x04\x04\x04")},
{ []byte("YELLOW SUBMARINE"),15,[]byte("YELLOW SUBMARINE\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04\x04")},
{ []byte("YELLOW SUBMARINE"),16,[]byte("YELLOW SUBMARINE")},
{ []byte("YELLOW SUBMARINE"),4,[]byte("YELLOW SUBMARINE")},
{ []byte("YELLOW SUBMARINE"),5,[]byte("YELLOW SUBMARINE\x04\x04\x04\x04")},
}
for _, item := range dataItems {
result := AddPadding(item.block, item.blockSize)
if len(result) != len(item.expectedOutputBlock) {
t.Errorf( "addPadding() with args %v %v : FAILED, expected value '%v', but got '%v'", (item.block), item.blockSize, (item.expectedOutputBlock), result)
} else {
			t.Logf("addPadding() with args %v %v : PASSED, got expected value '%v'", item.block, item.blockSize, item.expectedOutputBlock)
}
}
}
func TestXOROnBytes( t *testing.T ) {
bytes1 := []byte("1234")
bytes2 := []byte("1234")
result := XOROnBytes(bytes1,bytes2)
if len(result) != len(bytes1) {
t.Errorf("Result not equal lenght as input input:%v result:%v",bytes1,result)
}
}
func TestEncodeDecode( t *testing.T) {
iv := "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
key:= "0000000000000000"
bytes := []byte ("yellow submarineyellow submarine")
encrypted := EncryptCBC(iv,key,bytes)
decrypted := DecryptCBC(iv, key, encrypted)
for i:=0;i<len(bytes);i++ {
if bytes[i] != decrypted[i] {
t.Error()
}
}
t.Logf("bytes: %v",bytes)
t.Logf("dec: %v",decrypted)
}
strings.py | # file openpyxl/writer/strings.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
"""Write the shared string table."""
# Python stdlib imports
from ....compat import BytesIO as StringIO
# package imports
from ..shared.xmltools import start_tag, end_tag, tag, XMLGenerator
def create_string_table(workbook):
"""Compile the string table for a workbook."""
strings = set()
for sheet in workbook.worksheets:
for cell in sheet.get_cell_collection():
if cell.data_type == cell.TYPE_STRING and cell._value is not None:
strings.add(cell.value)
return dict((key, i) for i, key in enumerate(strings))
def write_string_table(string_table):
"""Write the string table xml."""
temp_buffer = StringIO()
doc = XMLGenerator(temp_buffer, 'utf-8')
start_tag(doc, 'sst', {'xmlns':
'http://schemas.openxmlformats.org/spreadsheetml/2006/main',
'uniqueCount': '%d' % len(string_table)})
strings_to_write = sorted(string_table.items(),
key=lambda pair: pair[1])
for key in [pair[0] for pair in strings_to_write]:
start_tag(doc, 'si')
if key.strip() != key:
attr = {'xml:space': 'preserve'}
else:
attr = {}
tag(doc, 't', attr, key)
end_tag(doc, 'si')
end_tag(doc, 'sst')
string_table_xml = temp_buffer.getvalue()
temp_buffer.close()
return string_table_xml
class StringTableBuilder(object):
def __init__(self):
self.counter = 0
self.dct = {}
def add(self, key):
        key = key.strip()
        try:
            return self.dct[key]
        except KeyError:
            res = self.dct[key] = self.counter
            self.counter += 1
            return res
def get_table(self):
return self.dct
SvgSendAndArchiveSharp.tsx | // /src/action/send_and_archive/materialiconssharp/24px.svg
import { createSvgIcon } from './createSvgIcon';
export const SvgSendAndArchiveSharp = createSvgIcon(
`<svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24">
<g>
<rect fill="none" height="24" width="24"/>
</g>
<g>
<g>
<g>
<path d="M17,10c0.1,0,0.19,0.01,0.28,0.01L3,4v6l8,2l-8,2v6l7-2.95c0-0.02,0-0.03,0-0.05C10,13.13,13.13,10,17,10z"/>
</g>
<g>
<path d="M17,12c-2.76,0-5,2.24-5,5s2.24,5,5,5s5-2.24,5-5S19.76,12,17,12z M17,20l-3-3h2.5v-3h1v3H20L17,20z"/>
</g>
</g>
</g>
</svg>`
);
primitive_types2.rs | // primitive_types2.rs
// Fill in the rest of the line that has code missing!
// No hints, there's no tricks, just get used to typing these :)
fn main() {
// Characters (`char`)
let my_first_initial = 'C';
if my_first_initial.is_alphabetic() {
println!("Alphabetical!");
} else if my_first_initial.is_numeric() {
println!("Numerical!");
} else {
println!("Neither alphabetic nor numeric!");
}
let your_character = 'F'; // Finish this line like the example! What's your favorite character?
// Try a letter, try a number, try a special character, try a character
// from a different language than your own, try an emoji!
if your_character.is_alphabetic() {
println!("Alphabetical!");
    } else if your_character.is_numeric() {
        println!("Numerical!");
    } else {
        println!("Neither alphabetic nor numeric!");
    }
}
expr-fn-panic.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:explicit panic
fn f() -> ! {
panic!()
}
fn main() {
f();
}
history.service.ts | import {Injectable} from '@angular/core';
import {MapObject} from '../models/map-object';
@Injectable({
providedIn: 'root'
})
export class HistoryService {
private historyLimit = 1000;
private history: HistoryEntry[] = [];
public pushEntry(entry: HistoryEntry): void {
this.history.push(entry);
if (this.history.length > this.historyLimit) {
this.history.splice(0, 1); // remove the oldest entry
}
}
public popEntry(): HistoryEntry {
if (this.history.length > 0) {
return this.history.pop();
}
return undefined;
}
}
export abstract class HistoryEntry {
public readonly type: HistoryEntryType;
public affected: MapObject[];
}
export class AddObjectEntry extends HistoryEntry {
constructor() {
super();
this.type = HistoryEntryType.addObject;
}
}
export class RemoveObjectEntry extends HistoryEntry {
constructor() {
super();
this.type = HistoryEntryType.removeObject;
}
}
export class ChangeObjectEntry extends HistoryEntry {
public changes: {
rotate: boolean,
translateX: number;
translateY: number;
bringToTop: boolean;
bringToBottom: boolean;
  }[];

  constructor() {
    super();
    this.type = HistoryEntryType.changeObject;
  }
}
export class PasteClipboardEntry extends HistoryEntry {
constructor() {
super();
this.type = HistoryEntryType.paste;
}
}
export enum HistoryEntryType {
addObject,
removeObject,
changeObject,
paste
}
signed.rs | use std::borrow::Cow;
use std::collections::BTreeMap;
use std::marker::PhantomData;
use std::ops::Deref;
use serde;
use serde_json;
use sodiumoxide::crypto::sign;
use ser::signatures::Base64Signature;
pub trait Signatures {
fn get_signature(&self, entity: &str, key_id: &str) -> Option<&sign::Signature>;
fn get_signatures_for_entity<'a>(
&'a self,
entity: &'a str
) -> Box<Iterator<Item = (&'a str, &sign::Signature)> + 'a>;
fn get_signatures<'a>(&'a self)
-> Box<Iterator<Item = (&'a str, &'a str, &sign::Signature)> + 'a>;
fn get_entities<'a>(&'a self) -> Box<Iterator<Item = &'a str> + 'a>;
fn as_map<'a>(&'a self) -> BTreeMap<&'a str, BTreeMap<&'a str, &sign::Signature>>;
}
pub trait SignaturesMut {
fn add_signature(&mut self, entity: &str, key_id: &str, sig: sign::Signature);
fn clear(&mut self);
}
pub trait Signed {
fn signatures(&self) -> &Signatures;
}
pub trait SignedMut: Signed {
fn signatures_mut(&mut self) -> &mut SignaturesMut;
}
pub trait AsCanonical {
fn as_canonical(&self) -> Cow<[u8]>;
}
pub trait GetUnsigned {
fn get_unsigned(&self) -> Option<serde_json::Value>;
}
impl<S> Signatures for BTreeMap<String, BTreeMap<String, S>>
where S: Deref<Target = sign::Signature>
{
fn get_signature(&self, entity: &str, key_id: &str) -> Option<&sign::Signature> {
self.get(entity).and_then(|sigs| sigs.get(key_id)).map(|s| s.deref())
}
fn get_signatures_for_entity<'a>(
&'a self,
entity: &'a str
) -> Box<Iterator<Item = (&'a str, &sign::Signature)> + 'a> {
Box::new(self.iter()
.filter_map(move |(d, sigs)| if d == entity {
Some(sigs)
} else {
None
})
.flat_map(|s| s.iter())
.map(|(k, v)| (&k[..], v.deref())))
}
fn get_signatures<'a>(&'a self)
-> Box<Iterator<Item = (&'a str, &'a str, &sign::Signature)> + 'a> {
Box::new(self.iter()
.flat_map(|(e, sigs)| sigs.iter().map(move |(n, sig)| (e, n, sig)))
.map(|(e, n, sig)| (&e[..], &n[..], sig.deref())))
}
    fn get_entities<'a>(&'a self) -> Box<Iterator<Item = &'a str> + 'a> {
        Box::new(self.keys().map(|s| &s[..]))
    }
fn as_map<'a>(&'a self) -> BTreeMap<&'a str, BTreeMap<&'a str, &'a sign::Signature>> {
self.iter()
.map(|(e, v)| {
let map = v.iter()
.map(|(n, s)| -> (&'a str, &'a sign::Signature) { (n, s) })
.collect();
(e as &'a str, map)
})
.collect()
}
}
impl<S> SignaturesMut for BTreeMap<String, BTreeMap<String, S>>
where S: From<sign::Signature>
{
fn add_signature(&mut self, entity: &str, key_id: &str, sig: sign::Signature) {
self.entry(entity.to_string())
.or_insert_with(BTreeMap::new)
.insert(key_id.to_string(), S::from(sig));
}
fn clear(&mut self) {
self.clear();
}
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SimpleSigned {
pub signatures: BTreeMap<String, BTreeMap<String, Base64Signature>>,
}
impl Signed for SimpleSigned {
fn signatures(&self) -> &Signatures {
&self.signatures
}
}
impl SignedMut for SimpleSigned {
fn signatures_mut(&mut self) -> &mut SignaturesMut {
&mut self.signatures
}
}
enum SimpleSignedField {
SIGNATURES,
IGNORE,
}
impl serde::de::Deserialize for SimpleSignedField {
#[inline]
fn deserialize<D>(deserializer: &mut D) -> Result<SimpleSignedField, D::Error>
where D: serde::de::Deserializer
{
deserializer.deserialize_struct_field(SimpleSignedFieldVisitor::<D> {
phantom: PhantomData,
})
}
}
struct SimpleSignedFieldVisitor<D> {
phantom: PhantomData<D>,
}
impl<D> serde::de::Visitor for SimpleSignedFieldVisitor<D>
where D: serde::de::Deserializer
{
type Value = SimpleSignedField;
fn visit_usize<E>(&mut self, value: usize) -> Result<SimpleSignedField, E>
where E: serde::de::Error
{
match value {
0usize => Ok(SimpleSignedField::SIGNATURES),
_ => Ok(SimpleSignedField::IGNORE),
}
}
fn visit_str<E>(&mut self, value: &str) -> Result<SimpleSignedField, E>
where E: serde::de::Error
{
match value {
"signatures" => Ok(SimpleSignedField::SIGNATURES),
_ => Ok(SimpleSignedField::IGNORE),
}
}
fn visit_bytes<E>(&mut self, value: &[u8]) -> Result<SimpleSignedField, E>
where E: serde::de::Error
{
match value {
b"signatures" => Ok(SimpleSignedField::SIGNATURES),
_ => Ok(SimpleSignedField::IGNORE),
}
}
}
struct SimpleSignedVisitor<D: serde::de::Deserializer>(PhantomData<D>);
impl<D: serde::de::Deserializer> serde::de::Visitor for SimpleSignedVisitor<D> {
type Value = SimpleSigned;
#[inline]
fn visit_seq<V>(&mut self, mut visitor: V) -> Result<SimpleSigned, V::Error>
where V: serde::de::SeqVisitor
{
{
let sigs = match try!(visitor.visit()) {
Some(value) => value,
None => {
return Err(serde::de::Error::end_of_stream());
}
};
try!(visitor.end());
Ok(SimpleSigned { signatures: sigs })
}
}
#[inline]
fn visit_map<V>(&mut self, mut visitor: V) -> Result<SimpleSigned, V::Error>
where V: serde::de::MapVisitor
{
{
let mut sigs = None;
while let Some(key) = try!(visitor.visit_key()) {
match key {
SimpleSignedField::SIGNATURES => {
sigs = Some(try!(visitor.visit_value()));
}
_ => {
try!(visitor.visit_value::<serde::de::impls::IgnoredAny>());
}
}
}
let sigs = match sigs {
Some(sigs) => sigs,
None => {
match visitor.missing_field("signatures") {
Ok(value) => value,
Err(_) => BTreeMap::new(),
}
}
};
try!(visitor.end());
Ok(SimpleSigned { signatures: sigs })
}
}
}
impl serde::de::Deserialize for SimpleSigned {
fn deserialize<D>(deserializer: &mut D) -> Result<SimpleSigned, D::Error>
where D: serde::de::Deserializer
{
{
const FIELDS: &'static [&'static str] = &["signatures"];
deserializer.deserialize_struct("SimpleSigned",
FIELDS,
SimpleSignedVisitor::<D>(PhantomData))
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::BTreeMap;
use ser::signatures::Base64Signature;
use sodiumoxide::crypto::sign;
use serde_json;
use itertools::Itertools;
#[test]
fn map_sigs() {
let map: BTreeMap<String, BTreeMap<String, Base64Signature>> = serde_json::from_slice(br#"{
"jki.re":{
"ed25519:auto":"X2t7jN0jaJsiZWp57da9GqmQ874QFbukCMSqc5VclaB+2n4i8LPcZDkD6+fzg4tkfpSsiIDogkY4HWv1cnGhAg"
}
}"#).unwrap();
let sig = map.get_signature("jki.re", "ed25519:auto").expect("Missing signature");
let expected_sig_bytes = b"_k{\x8c\xdd#h\x9b\"ejy\xed\xd6\xbd\x1a\xa9\x90\xf3\xbe\x10\x15\xbb\xa4\x08\xc4\xaas\x95\\\x95\xa0~\xda~\"\xf0\xb3\xdcd9\x03\xeb\xe7\xf3\x83\x8bd~\x94\xac\x88\x80\xe8\x82F8\x1dk\xf5rq\xa1\x02";
let expected_sig = sign::Signature::from_slice(expected_sig_bytes).unwrap();
assert_eq!(sig, &expected_sig);
assert!(map.get_signature("jki.re", "ed25519:test").is_none());
assert!(map.get_signature("example.com", "ed25519:auto").is_none());
let entities = &map.get_entities().collect_vec();
assert_eq!(&entities[..], &["jki.re"]);
let sigs_for_entites = &map.get_signatures_for_entity("jki.re").collect_vec();
assert_eq!(&sigs_for_entites[..], &[("ed25519:auto", &expected_sig)]);
let entities = &map.get_signatures().collect_vec();
assert_eq!(&entities[..], &[("jki.re", "ed25519:auto", &expected_sig)]);
}
}
timer_controller.py | import time
from optparse import OptionParser
def build_option_parser():
parser = OptionParser()
parser.add_option("-t", "--time", dest="given_time", type="string", help="Use HH:MM format for timer")
return parser.parse_args()
def countdown_timer(given_time_seconds):
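    # render HH:MM:SS in place once per second; '\r' returns the cursor to the
    # start of the line so each tick overwrites the previous one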
while given_time_seconds:
minutes, seconds = divmod(given_time_seconds, 60)
hours, minutes = divmod(minutes, 60)
time_format = '{:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)
print(time_format, end='\r')
time.sleep(1)
given_time_seconds -= 1
def main():
(options, args) = build_option_parser()
given_time = options.given_time
if given_time:
hours = int(given_time.split(':')[0])
minutes = int(given_time.split(':')[1])
        given_time_seconds = (hours * 3600) + (minutes * 60)
        countdown_timer(given_time_seconds)
    else:
        print("Use -h option to view help\n Developer: Aditya Kamble (adityakamble49.com)")

if __name__ == '__main__':
    main()
exp1x.py | from keras.layers import Input
from keras.layers.merge import Concatenate
from keras.models import Model
from keras.optimizers import Adam
from .keras_base import KerasBaseExp
from .keras_base import exp_bag_of_strokes
from .blocks import fc_branch, final_type1
class mlp_type1(KerasBaseExp):
def initialize_model(self, in_dims, out_dims):
input_layer = [Input(shape=(d, )) for d in in_dims]
if len(input_layer) > 1:
layer = Concatenate()(input_layer)
else:
layer = input_layer[0]
layer = fc_branch(layer, self.decay)
self.model = Model(inputs=input_layer, outputs=final_type1(layer, out_dims))
opt = Adam(lr=self.learning_rate)
self.model.compile(optimizer=opt, metrics=['accuracy'], loss='categorical_crossentropy')
class EXP1(mlp_type1, exp_bag_of_strokes):
    pass
api.go | package api
import (
"github.com/gin-gonic/gin"
"github.com/vx3r/wg-gen-web/api/v1"
)
// ApplyRoutes apply routes to gin engine
func ApplyRoutes(r *gin.Engine, private bool) {
api := r.Group("/api")
{
apiv1.ApplyRoutes(api, private)
}
}
|
load_label_map.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
from object_detection.tf_utils import label_map_util
class LoadLabelMap():
def __init__(self):
return
def load_label_map(self, cfg):
| """
LOAD LABEL MAP
"""
print('Loading label map')
LABEL_PATH = cfg['label_path']
NUM_CLASSES = cfg['num_classes']
try:
label_map = label_map_util.load_labelmap(LABEL_PATH)
categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
category_index = label_map_util.create_category_index(categories)
except:
import traceback
traceback.print_exc()
        return category_index
|
url_helpers.py | import re
URL_REGEX = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def is_url(word):
if URL_REGEX.match(word):
        return True
return False
server.go | package server
import (
"admin"
"api/graphite"
"api/http"
"api/udp"
"cluster"
"configuration"
"coordinator"
"datastore"
"time"
"wal"
log "code.google.com/p/log4go"
)
type Server struct {
RaftServer *coordinator.RaftServer
ProtobufServer *coordinator.ProtobufServer
ClusterConfig *cluster.ClusterConfiguration
HttpApi *http.HttpServer
GraphiteApi *graphite.Server
UdpApi *udp.Server
AdminServer *admin.HttpServer
Coordinator coordinator.Coordinator
Config *configuration.Configuration
RequestHandler *coordinator.ProtobufRequestHandler
stopped bool
writeLog *wal.WAL
shardStore *datastore.LevelDbShardDatastore
}
func NewServer(config *configuration.Configuration) (*Server, error) {
log.Info("Opening database at %s", config.DataDir)
shardDb, err := datastore.NewLevelDbShardDatastore(config)
if err != nil {
return nil, err
}
newClient := func(connectString string) cluster.ServerConnection {
return coordinator.NewProtobufClient(connectString, config.ProtobufTimeout.Duration)
}
writeLog, err := wal.NewWAL(config)
if err != nil {
return nil, err
}
clusterConfig := cluster.NewClusterConfiguration(config, writeLog, shardDb, newClient)
raftServer := coordinator.NewRaftServer(config, clusterConfig)
clusterConfig.LocalRaftName = raftServer.GetRaftName()
clusterConfig.SetShardCreator(raftServer)
clusterConfig.CreateFutureShardsAutomaticallyBeforeTimeComes()
coord := coordinator.NewCoordinatorImpl(config, raftServer, clusterConfig)
requestHandler := coordinator.NewProtobufRequestHandler(coord, clusterConfig)
protobufServer := coordinator.NewProtobufServer(config.ProtobufListenString(), requestHandler)
raftServer.AssignCoordinator(coord)
httpApi := http.NewHttpServer(config.ApiHttpPortString(), config.ApiReadTimeout, config.AdminAssetsDir, coord, coord, clusterConfig, raftServer)
httpApi.EnableSsl(config.ApiHttpSslPortString(), config.ApiHttpCertPath)
graphiteApi := graphite.NewServer(config, coord, clusterConfig)
udpApi := udp.NewServer(config, coord, clusterConfig)
adminServer := admin.NewHttpServer(config.AdminAssetsDir, config.AdminHttpPortString())
return &Server{
RaftServer: raftServer,
ProtobufServer: protobufServer,
ClusterConfig: clusterConfig,
HttpApi: httpApi,
GraphiteApi: graphiteApi,
UdpApi: udpApi,
Coordinator: coord,
AdminServer: adminServer,
Config: config,
RequestHandler: requestHandler,
writeLog: writeLog,
shardStore: shardDb}, nil
}
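// Illustrative startup/shutdown sequence (added; not part of this file):
//
//	srv, err := NewServer(config)
//	if err != nil {
//		log.Error("failed to create server: %s", err)
//	}
//	go srv.ListenAndServe()
//	// ... on shutdown:
//	// srv.Stop()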
func (self *Server) ListenAndServe() error {
err := self.RaftServer.ListenAndServe()
if err != nil {
return err
}
log.Info("Waiting for local server to be added")
self.ClusterConfig.WaitForLocalServerLoaded()
self.writeLog.SetServerId(self.ClusterConfig.ServerId())
time.Sleep(5 * time.Second)
// check to make sure that the raft connection string hasn't changed
raftConnectionString := self.Config.RaftConnectionString()
if self.ClusterConfig.LocalServer.ProtobufConnectionString != self.Config.ProtobufConnectionString() ||
self.ClusterConfig.LocalServer.RaftConnectionString != raftConnectionString {
log.Info("Sending change connection string command (%s,%s) (%s,%s)",
self.ClusterConfig.LocalServer.ProtobufConnectionString,
self.Config.ProtobufConnectionString(),
self.ClusterConfig.LocalServer.RaftConnectionString,
raftConnectionString,
)
err := self.RaftServer.ChangeConnectionString(
self.ClusterConfig.LocalRaftName,
self.Config.ProtobufConnectionString(),
self.Config.RaftConnectionString(),
true, // force the rename
)
if err != nil {
panic(err)
}
log.Info("Connection string changed successfully")
}
go self.ProtobufServer.ListenAndServe()
log.Info("Recovering from log...")
err = self.ClusterConfig.RecoverFromWAL() | if err != nil {
return err
}
log.Info("recovered")
err = self.Coordinator.(*coordinator.CoordinatorImpl).ConnectToProtobufServers(self.RaftServer.GetRaftName())
if err != nil {
return err
}
log.Info("Starting admin interface on port %d", self.Config.AdminHttpPort)
go self.AdminServer.ListenAndServe()
if self.Config.GraphiteEnabled {
if self.Config.GraphitePort <= 0 || self.Config.GraphiteDatabase == "" {
log.Warn("Cannot start graphite server. please check your configuration")
} else {
log.Info("Starting Graphite Listener on port %d", self.Config.GraphitePort)
go self.GraphiteApi.ListenAndServe()
}
}
if self.Config.UdpInputEnabled {
if self.Config.UdpInputPort <= 0 || self.Config.UdpInputDatabase == "" {
log.Warn("Cannot start udp server. please check your configuration")
} else {
log.Info("Starting UDP Listener on port %d", self.Config.UdpInputPort)
go self.UdpApi.ListenAndServe()
}
}
// start processing continuous queries
self.RaftServer.StartProcessingContinuousQueries()
log.Info("Starting Http Api server on port %d", self.Config.ApiHttpPort)
self.HttpApi.ListenAndServe()
return nil
}
func (self *Server) Stop() {
if self.stopped {
return
}
log.Info("Stopping server")
self.stopped = true
log.Info("Stopping api server")
self.HttpApi.Close()
log.Info("Api server stopped")
log.Info("Stopping admin server")
self.AdminServer.Close()
log.Info("admin server stopped")
log.Info("Stopping raft server")
self.RaftServer.Close()
log.Info("Raft server stopped")
log.Info("Stopping protobuf server")
self.ProtobufServer.Close()
log.Info("protobuf server stopped")
log.Info("Stopping wal")
self.writeLog.Close()
log.Info("wal stopped")
log.Info("Stopping shard store")
self.shardStore.Close()
log.Info("shard store stopped")
} | |
asm.module.ts | import { CommonModule } from '@angular/common';
import { APP_INITIALIZER, ModuleWithProviders, NgModule } from '@angular/core';
import { AuthService } from '../auth/user-auth/facade/auth.service';
import { AuthHttpHeaderService } from '../auth/user-auth/services/auth-http-header.service';
import { AuthStorageService } from '../auth/user-auth/services/auth-storage.service';
import { provideDefaultConfig } from '../config/config-providers';
import { defaultAsmConfig } from './config/default-asm-config';
import { AsmAuthHttpHeaderService } from './services/asm-auth-http-header.service';
import { AsmAuthStorageService } from './services/asm-auth-storage.service';
import { AsmAuthService } from './services/asm-auth.service';
import { AsmStatePersistenceService } from './services/asm-state-persistence.service';
import { AsmStoreModule } from './store/asm-store.module';
/**
* @deprecated since 3.2, use asm lib instead
*/
export function asmStatePersistenceFactory(
asmStatePersistenceService: AsmStatePersistenceService
) {
const result = () => asmStatePersistenceService.initSync();
return result;
}
/**
* @deprecated since 3.2, use asm lib instead
*/
@NgModule({
imports: [CommonModule, AsmStoreModule],
})
export class AsmModule {
static forRoot(): ModuleWithProviders<AsmModule> {
return {
ngModule: AsmModule,
providers: [
provideDefaultConfig(defaultAsmConfig),
{
provide: AuthStorageService,
useExisting: AsmAuthStorageService,
},
{
provide: AuthService,
useExisting: AsmAuthService,
},
{
provide: AuthHttpHeaderService,
useExisting: AsmAuthHttpHeaderService, | },
{
provide: APP_INITIALIZER,
useFactory: asmStatePersistenceFactory,
deps: [AsmStatePersistenceService],
multi: true,
},
],
};
}
} | |
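// Illustrative wiring (added; not part of this file): the module is
// typically imported once at the application root, e.g.
//
//   @NgModule({ imports: [AsmModule.forRoot()] })
//   export class AppModule {}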
40.rs | // rustfmt-enum_discrim_align_threshold: 40
enum | {
A = 1,
Bcdef = 2,
}
enum NoDiscrims {
ThisIsAFairlyLongEnumVariantWithoutDiscrimLongerThan40,
A = 1,
ThisIsAnotherFairlyLongEnumVariantWithoutDiscrimLongerThan40,
Bcdef = 2,
}
enum TooLong {
ThisOneHasDiscrimAaaaaaaaaaaaaaaaaaaaaaChar40 = 10,
A = 1,
Bcdef = 2,
}
enum Borderline {
ThisOneHasDiscrimAaaaaaaaaaaaaaaaaaaaaa = 10,
A = 1,
Bcdef = 2,
}
// Live specimen from #1686
enum LongWithSmallDiff {
SceneColorimetryEstimates = 0x73636F65,
SceneAppearanceEstimates = 0x73617065,
FocalPlaneColorimetryEstimates = 0x66706365,
ReflectionHardcopyOriginalColorimetry = 0x72686F63,
ReflectionPrintOutputColorimetry = 0x72706F63,
} | Standard |
solution_test.go | package p1610
import "testing"
func runSample(t *testing.T, points [][]int, angle int, location []int, expect int) {
res := visiblePoints(points, angle, location)
if res != expect {
t.Errorf("Sample expect %d, but got %d", expect, res)
}
}
func TestSample1(t *testing.T) {
points := [][]int{{2, 1}, {2, 2}, {3, 3}}
angle := 90
loc := []int{1, 1}
expect := 3
runSample(t, points, angle, loc, expect)
}
func TestSample2(t *testing.T) {
points := [][]int{{2, 1}, {2, 2}, {3, 4}, {1, 1}}
angle := 90
loc := []int{1, 1}
expect := 4
runSample(t, points, angle, loc, expect)
}
func | (t *testing.T) {
points := [][]int{{0, 1}, {2, 1}}
angle := 13
loc := []int{1, 1}
expect := 1
runSample(t, points, angle, loc, expect)
}
func TestSample4(t *testing.T) {
points := [][]int{{956, 232}, {438, 752}, {595, 297}, {508, 143}, {111, 594}, {645, 824}, {758, 434}, {447, 423}, {825, 356}, {807, 377}}
angle := 38
loc := []int{74, 581}
expect := 7
runSample(t, points, angle, loc, expect)
}
| TestSample3 |
marker.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Primitive traits and types representing basic properties of types.
//!
//! Rust types can be classified in various useful ways according to
//! their intrinsic properties. These classifications are represented
//! as traits.
#![stable(feature = "rust1", since = "1.0.0")]
use cmp;
use hash::Hash;
use hash::Hasher;
/// Types that can be transferred across thread boundaries.
///
/// This trait is automatically implemented when the compiler determines it's
/// appropriate.
///
/// An example of a non-`Send` type is the reference-counting pointer
/// [`rc::Rc`][`Rc`]. If two threads attempt to clone [`Rc`]s that point to the same
/// reference-counted value, they might try to update the reference count at the
/// same time, which is [undefined behavior][ub] because [`Rc`] doesn't use atomic
/// operations. Its cousin [`sync::Arc`][arc] does use atomic operations (incurring
/// some overhead) and thus is `Send`.
///
/// See [the Nomicon](../../nomicon/send-and-sync.html) for more details.
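///
/// A minimal example (added for illustration) of moving a `Send` value to
/// another thread:
///
/// ```
/// use std::thread;
///
/// let v = vec![1, 2, 3]; // `Vec<i32>` is `Send`
/// let handle = thread::spawn(move || v.len());
/// assert_eq!(handle.join().unwrap(), 3);
/// ```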
///
/// [`Rc`]: ../../std/rc/struct.Rc.html
/// [arc]: ../../std/sync/struct.Arc.html
/// [ub]: ../../reference.html#behavior-considered-undefined
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "send"]
#[rustc_on_unimplemented = "`{Self}` cannot be sent between threads safely"]
pub unsafe trait Send {
// empty.
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl Send for .. { }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !Send for *const T { }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !Send for *mut T { }
/// Types with a constant size known at compile time.
///
/// All type parameters have an implicit bound of `Sized`. The special syntax
/// `?Sized` can be used to remove this bound if it's not appropriate.
///
/// ```
/// # #![allow(dead_code)]
/// struct Foo<T>(T);
/// struct Bar<T: ?Sized>(T);
///
/// // struct FooUse(Foo<[i32]>); // error: Sized is not implemented for [i32]
/// struct BarUse(Bar<[i32]>); // OK
/// ```
///
/// The one exception is the implicit `Self` type of a trait, which does not
/// get an implicit `Sized` bound. This is because a `Sized` bound prevents
/// the trait from being used to form a [trait object]:
///
/// ```
/// # #![allow(unused_variables)]
/// trait Foo { }
/// trait Bar: Sized { }
///
/// struct Impl;
/// impl Foo for Impl { }
/// impl Bar for Impl { }
///
/// let x: &Foo = &Impl; // OK
/// // let y: &Bar = &Impl; // error: the trait `Bar` cannot
/// // be made into an object
/// ```
///
/// [trait object]: ../../book/trait-objects.html
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "sized"]
#[rustc_on_unimplemented = "`{Self}` does not have a constant size known at compile-time"]
#[fundamental] // for Default, for example, which requires that `[T]: !Default` be evaluatable
pub trait Sized {
// Empty.
}
/// Types that can be "unsized" to a dynamically-sized type.
///
/// For example, the sized array type `[i8; 2]` implements `Unsize<[i8]>` and
/// `Unsize<fmt::Debug>`.
///
/// All implementations of `Unsize` are provided automatically by the compiler.
///
/// `Unsize` is implemented for:
///
/// - `[T; N]` is `Unsize<[T]>`
/// - `T` is `Unsize<Trait>` when `T: Trait`
/// - `Foo<..., T, ...>` is `Unsize<Foo<..., U, ...>>` if:
/// - `T: Unsize<U>`
/// - Foo is a struct
/// - Only the last field of `Foo` has a type involving `T`
/// - `T` is not part of the type of any other fields
/// - `Bar<T>: Unsize<Bar<U>>`, if the last field of `Foo` has type `Bar<T>`
///
/// `Unsize` is used along with [`ops::CoerceUnsized`][coerceunsized] to allow
/// "user-defined" containers such as [`rc::Rc`][rc] to contain dynamically-sized
/// types. See the [DST coercion RFC][RFC982] and [the nomicon entry on coercion][nomicon-coerce]
/// for more details.
///
/// [coerceunsized]: ../ops/trait.CoerceUnsized.html
/// [rc]: ../../std/rc/struct.Rc.html
/// [RFC982]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md
#[unstable(feature = "unsize", issue = "27732")]
#[lang="unsize"]
pub trait Unsize<T: ?Sized> {
// Empty.
}
/// Types whose values can be duplicated simply by copying bits.
///
/// By default, variable bindings have 'move semantics.' In other
/// words:
///
/// ```
/// #[derive(Debug)]
/// struct Foo;
///
/// let x = Foo;
///
/// let y = x;
///
/// // `x` has moved into `y`, and so cannot be used
///
/// // println!("{:?}", x); // error: use of moved value
/// ```
///
/// However, if a type implements `Copy`, it instead has 'copy semantics':
///
/// ```
/// // We can derive a `Copy` implementation. `Clone` is also required, as it's
/// // a supertrait of `Copy`.
/// #[derive(Debug, Copy, Clone)]
/// struct Foo;
///
/// let x = Foo;
///
/// let y = x;
///
/// // `y` is a copy of `x`
///
/// println!("{:?}", x); // A-OK!
/// ```
///
/// It's important to note that in these two examples, the only difference is whether you
/// are allowed to access `x` after the assignment. Under the hood, both a copy and a move
/// can result in bits being copied in memory, although this is sometimes optimized away.
///
/// ## How can I implement `Copy`?
///
/// There are two ways to implement `Copy` on your type. The simplest is to use `derive`:
///
/// ```
/// #[derive(Copy, Clone)]
/// struct MyStruct;
/// ```
///
/// You can also implement `Copy` and `Clone` manually:
///
/// ```
/// struct MyStruct;
///
/// impl Copy for MyStruct { }
///
/// impl Clone for MyStruct {
/// fn clone(&self) -> MyStruct {
/// *self
/// }
/// }
/// ```
///
/// There is a small difference between the two: the `derive` strategy will also place a `Copy`
/// bound on type parameters, which isn't always desired.
///
/// ## What's the difference between `Copy` and `Clone`?
///
/// Copies happen implicitly, for example as part of an assignment `y = x`. The behavior of
/// `Copy` is not overloadable; it is always a simple bit-wise copy.
///
/// Cloning is an explicit action, `x.clone()`. The implementation of [`Clone`] can
/// provide any type-specific behavior necessary to duplicate values safely. For example,
/// the implementation of [`Clone`] for [`String`] needs to copy the pointed-to string
/// buffer in the heap. A simple bitwise copy of [`String`] values would merely copy the
/// pointer, leading to a double free down the line. For this reason, [`String`] is [`Clone`]
/// but not `Copy`.
///
/// [`Clone`] is a supertrait of `Copy`, so everything which is `Copy` must also implement
/// [`Clone`]. If a type is `Copy` then its [`Clone`] implementation need only return `*self`
/// (see the example above).
///
/// ## When can my type be `Copy`?
///
/// A type can implement `Copy` if all of its components implement `Copy`. For example, this
/// struct can be `Copy`:
///
/// ```
/// # #[allow(dead_code)]
/// struct Point {
/// x: i32,
/// y: i32,
/// }
/// ```
///
/// A struct can be `Copy`, and [`i32`] is `Copy`, therefore `Point` is eligible to be `Copy`.
/// By contrast, consider
///
/// ```
/// # #![allow(dead_code)]
/// # struct Point;
/// struct PointList {
/// points: Vec<Point>,
/// }
/// ```
///
/// The struct `PointList` cannot implement `Copy`, because [`Vec<T>`] is not `Copy`. If we
/// attempt to derive a `Copy` implementation, we'll get an error:
///
/// ```text
/// the trait `Copy` may not be implemented for this type; field `points` does not implement `Copy`
/// ```
///
/// ## When *can't* my type be `Copy`?
///
/// Some types can't be copied safely. For example, copying `&mut T` would create an aliased
/// mutable reference. Copying [`String`] would duplicate responsibility for managing the
/// [`String`]'s buffer, leading to a double free.
///
/// Generalizing the latter case, any type implementing [`Drop`] can't be `Copy`, because it's
/// managing some resource besides its own [`size_of::<T>()`] bytes.
///
/// If you try to implement `Copy` on a struct or enum containing non-`Copy` data, you will get
/// the error [E0204].
///
/// [E0204]: ../../error-index.html#E0204
///
/// ## When *should* my type be `Copy`?
///
/// Generally speaking, if your type _can_ implement `Copy`, it should. Keep in mind, though,
/// that implementing `Copy` is part of the public API of your type. If the type might become
/// non-`Copy` in the future, it could be prudent to omit the `Copy` implementation now, to
/// avoid a breaking API change.
///
/// [`Vec<T>`]: ../../std/vec/struct.Vec.html
/// [`String`]: ../../std/string/struct.String.html
/// [`Drop`]: ../../std/ops/trait.Drop.html
/// [`size_of::<T>()`]: ../../std/mem/fn.size_of.html
/// [`Clone`]: ../clone/trait.Clone.html
/// [`String`]: ../../std/string/struct.String.html
/// [`i32`]: ../../std/primitive.i32.html
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "copy"]
pub trait Copy : Clone {
// Empty.
}
/// Types for which it is safe to share references between threads.
///
/// This trait is automatically implemented when the compiler determines
/// it's appropriate.
///
/// The precise definition is: a type `T` is `Sync` if `&T` is
/// [`Send`][send]. In other words, if there is no possibility of
/// [undefined behavior][ub] (including data races) when passing
/// `&T` references between threads.
///
/// As one would expect, primitive types like [`u8`][u8] and [`f64`][f64]
/// are all `Sync`, and so are simple aggregate types containing them,
/// like tuples, structs and enums. More examples of basic `Sync`
/// types include "immutable" types like `&T`, and those with simple
/// inherited mutability, such as [`Box<T>`][box], [`Vec<T>`][vec] and
/// most other collection types. (Generic parameters need to be `Sync`
/// for their container to be `Sync`.)
///
/// A somewhat surprising consequence of the definition is that `&mut T`
/// is `Sync` (if `T` is `Sync`) even though it seems like that might
/// provide unsynchronized mutation. The trick is that a mutable
/// reference behind a shared reference (that is, `& &mut T`)
/// becomes read-only, as if it were a `& &T`. Hence there is no risk
/// of a data race.
///
/// Types that are not `Sync` are those that have "interior
/// mutability" in a non-thread-safe form, such as [`cell::Cell`][cell]
/// and [`cell::RefCell`][refcell]. These types allow for mutation of
/// their contents even through an immutable, shared reference. For
/// example the `set` method on [`Cell<T>`][cell] takes `&self`, so it requires
/// only a shared reference [`&Cell<T>`][cell]. The method performs no
/// synchronization, thus [`Cell`][cell] cannot be `Sync`.
///
/// Another example of a non-`Sync` type is the reference-counting
/// pointer [`rc::Rc`][rc]. Given any reference [`&Rc<T>`][rc], you can clone
/// a new [`Rc<T>`][rc], modifying the reference counts in a non-atomic way.
///
/// For cases when one does need thread-safe interior mutability,
/// Rust provides [atomic data types], as well as explicit locking via
/// [`sync::Mutex`][mutex] and [`sync::RWLock`][rwlock]. These types
/// ensure that any mutation cannot cause data races, hence the types
/// are `Sync`. Likewise, [`sync::Arc`][arc] provides a thread-safe
/// analogue of [`Rc`][rc].
///
/// Any types with interior mutability must also use the
/// [`cell::UnsafeCell`][unsafecell] wrapper around the value(s) which
/// can be mutated through a shared reference. Failing to do this is
/// [undefined behavior][ub]. For example, [`transmute`][transmute]-ing
/// from `&T` to `&mut T` is invalid.
///
/// See [the Nomicon](../../nomicon/send-and-sync.html) for more
/// details about `Sync`.
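///
/// A minimal example (added for illustration) of sharing a `Sync` value
/// between threads through an [`Arc`][arc]:
///
/// ```
/// use std::sync::Arc;
/// use std::thread;
///
/// let shared = Arc::new(42); // `i32` is `Sync`, so `&i32` may cross threads
/// let handles: Vec<_> = (0..2)
///     .map(|_| {
///         let s = shared.clone();
///         thread::spawn(move || assert_eq!(*s, 42))
///     })
///     .collect();
/// for h in handles {
///     h.join().unwrap();
/// }
/// ```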
///
/// [send]: trait.Send.html
/// [u8]: ../../std/primitive.u8.html
/// [f64]: ../../std/primitive.f64.html
/// [box]: ../../std/boxed/struct.Box.html
/// [vec]: ../../std/vec/struct.Vec.html
/// [cell]: ../cell/struct.Cell.html
/// [refcell]: ../cell/struct.RefCell.html
/// [rc]: ../../std/rc/struct.Rc.html
/// [arc]: ../../std/sync/struct.Arc.html
/// [atomic data types]: ../sync/atomic/index.html
/// [mutex]: ../../std/sync/struct.Mutex.html
/// [rwlock]: ../../std/sync/struct.RwLock.html
/// [unsafecell]: ../cell/struct.UnsafeCell.html
/// [ub]: ../../reference.html#behavior-considered-undefined
/// [transmute]: ../../std/mem/fn.transmute.html
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "sync"]
#[rustc_on_unimplemented = "`{Self}` cannot be shared between threads safely"]
pub unsafe trait Sync {
// Empty
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl Sync for .. { }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !Sync for *const T { }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !Sync for *mut T { }
macro_rules! impls{
($t: ident) => (
#[stable(feature = "rust1", since = "1.0.0")]
impl<T:?Sized> Hash for $t<T> {
#[inline]
fn hash<H: Hasher>(&self, _: &mut H) {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T:?Sized> cmp::PartialEq for $t<T> {
fn eq(&self, _other: &$t<T>) -> bool {
true
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T:?Sized> cmp::Eq for $t<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T:?Sized> cmp::PartialOrd for $t<T> {
fn partial_cmp(&self, _other: &$t<T>) -> Option<cmp::Ordering> {
Option::Some(cmp::Ordering::Equal)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T:?Sized> cmp::Ord for $t<T> {
fn cmp(&self, _other: &$t<T>) -> cmp::Ordering {
cmp::Ordering::Equal
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T:?Sized> Copy for $t<T> { }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T:?Sized> Clone for $t<T> {
fn clone(&self) -> $t<T> {
$t
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T:?Sized> Default for $t<T> {
fn default() -> $t<T> {
$t
}
}
)
}
/// Zero-sized type used to mark things that "act like" they own a `T`.
///
/// Adding a `PhantomData<T>` field to your type tells the compiler that your
/// type acts as though it stores a value of type `T`, even though it doesn't
/// really. This information is used when computing certain safety properties.
///
/// For a more in-depth explanation of how to use `PhantomData<T>`, please see
/// [the Nomicon](../../nomicon/phantom-data.html).
///
/// # A ghastly note 👻👻👻
///
/// Though they both have scary names, `PhantomData` and 'phantom types' are
/// related, but not identical. A phantom type parameter is simply a type
/// parameter which is never used. In Rust, this often causes the compiler to
/// complain, and the solution is to add a "dummy" use by way of `PhantomData`.
///
/// # Examples
///
/// ## Unused lifetime parameters
///
/// Perhaps the most common use case for `PhantomData` is a struct that has an
/// unused lifetime parameter, typically as part of some unsafe code. For
/// example, here is a struct `Slice` that has two pointers of type `*const T`,
/// presumably pointing into an array somewhere:
///
/// ```ignore
/// struct Slice<'a, T> {
/// start: *const T,
/// end: *const T,
/// }
/// ```
///
/// The intention is that the underlying data is only valid for the
/// lifetime `'a`, so `Slice` should not outlive `'a`. However, this
/// intent is not expressed in the code, since there are no uses of
/// the lifetime `'a` and hence it is not clear what data it applies
/// to. We can correct this by telling the compiler to act *as if* the
/// `Slice` struct contained a reference `&'a T`:
///
/// ```
/// use std::marker::PhantomData;
///
/// # #[allow(dead_code)]
/// struct Slice<'a, T: 'a> {
/// start: *const T,
/// end: *const T,
/// phantom: PhantomData<&'a T>,
/// }
/// ```
///
/// This also in turn requires the annotation `T: 'a`, indicating
/// that any references in `T` are valid over the lifetime `'a`.
///
/// When initializing a `Slice` you simply provide the value
/// `PhantomData` for the field `phantom`:
///
/// ```
/// # #![allow(dead_code)]
/// # use std::marker::PhantomData;
/// # struct Slice<'a, T: 'a> {
/// # start: *const T,
/// # end: *const T,
/// # phantom: PhantomData<&'a T>,
/// # }
/// fn borrow_vec<'a, T>(vec: &'a Vec<T>) -> Slice<'a, T> {
/// let ptr = vec.as_ptr();
/// Slice {
/// start: ptr,
/// end: unsafe { ptr.offset(vec.len() as isize) },
/// phantom: PhantomData,
/// }
/// }
/// ```
///
/// ## Unused type parameters
///
/// It sometimes happens that you have unused type parameters which
/// indicate what type of data a struct is "tied" to, even though that
/// data is not actually found in the struct itself. Here is an
/// example where this arises with [FFI]. The foreign interface uses
/// handles of type `*mut ()` to refer to Rust values of different
/// types. We track the Rust type using a phantom type parameter on
/// the struct `ExternalResource` which wraps a handle.
///
/// [FFI]: ../../book/ffi.html
///
/// ```
/// # #![allow(dead_code)]
/// # trait ResType { }
/// # struct ParamType;
/// # mod foreign_lib {
/// # pub fn new(_: usize) -> *mut () { 42 as *mut () }
/// # pub fn do_stuff(_: *mut (), _: usize) {}
/// # }
/// # fn convert_params(_: ParamType) -> usize { 42 }
/// use std::marker::PhantomData;
/// use std::mem;
///
/// struct ExternalResource<R> {
/// resource_handle: *mut (),
/// resource_type: PhantomData<R>,
/// }
///
/// impl<R: ResType> ExternalResource<R> {
/// fn new() -> ExternalResource<R> {
/// let size_of_res = mem::size_of::<R>();
/// ExternalResource {
/// resource_handle: foreign_lib::new(size_of_res),
/// resource_type: PhantomData,
/// }
/// }
///
/// fn do_stuff(&self, param: ParamType) {
/// let foreign_params = convert_params(param);
/// foreign_lib::do_stuff(self.resource_handle, foreign_params);
/// }
/// }
/// ```
///
/// ## Ownership and the drop check
///
/// Adding a field of type `PhantomData<T>` indicates that your
/// type owns data of type `T`. This in turn implies that when your
/// type is dropped, it may drop one or more instances of the type
/// `T`. This has bearing on the Rust compiler's [drop check]
/// analysis.
///
/// If your struct does not in fact *own* the data of type `T`, it is
/// better to use a reference type, like `PhantomData<&'a T>`
/// (ideally) or `PhantomData<*const T>` (if no lifetime applies), so
/// as not to indicate ownership.
///
/// [drop check]: ../../nomicon/dropck.html
#[lang = "phantom_data"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct PhantomDa | >;
impls! { PhantomData }
mod impls {
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<'a, T: Sync + ?Sized> Send for &'a T {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<'a, T: Send + ?Sized> Send for &'a mut T {}
}
/// Types that can be reflected over.
///
/// By "reflection" we mean use of the [`Any`][any] trait, or related
/// machinery such as [`TypeId`][typeid].
///
/// `Reflect` is implemented for all types. Its purpose is to ensure
/// that when you write a generic function that will employ reflection,
/// that must be reflected (no pun intended) in the generic bounds of
/// that function.
///
/// ```
/// #![feature(reflect_marker)]
/// use std::marker::Reflect;
/// use std::any::Any;
///
/// # #[allow(dead_code)]
/// fn foo<T: Reflect + 'static>(x: &T) {
/// let any: &Any = x;
/// if any.is::<u32>() { println!("u32"); }
/// }
/// ```
///
/// Without the bound `T: Reflect`, `foo` would not typecheck. (As
/// a matter of style, it would be preferable to write `T: Any`,
/// because `T: Any` implies `T: Reflect` and `T: 'static`, but we
/// use `Reflect` here for illustrative purposes.)
///
/// The `Reflect` bound serves to alert `foo`'s caller to the
/// fact that `foo` may behave differently depending on whether
/// `T` is `u32` or not. The ability for a caller to reason about what
/// a function may do based solely on what generic bounds are declared
/// is often called the "[parametricity property][param]". Despite the
/// use of `Reflect`, Rust lacks true parametricity because a generic
/// function can, at the very least, call [`mem::size_of`][size_of]
/// without employing any trait bounds whatsoever.
///
/// [any]: ../any/trait.Any.html
/// [typeid]: ../any/struct.TypeId.html
/// [param]: http://en.wikipedia.org/wiki/Parametricity
/// [size_of]: ../mem/fn.size_of.html
#[rustc_reflect_like]
#[unstable(feature = "reflect_marker",
reason = "requires RFC and more experience",
issue = "27749")]
#[rustc_deprecated(since = "1.14.0", reason = "Specialization makes parametricity impossible")]
#[rustc_on_unimplemented = "`{Self}` does not implement `Any`; \
ensure all type parameters are bounded by `Any`"]
pub trait Reflect {}
#[unstable(feature = "reflect_marker",
reason = "requires RFC and more experience",
issue = "27749")]
#[rustc_deprecated(since = "1.14.0", reason = "Specialization makes parametricity impossible")]
#[allow(deprecated)]
impl Reflect for .. { }
| ta<T:?Sized |
saver.py | """
Saving model output.
"""
import json
import pickle
from copy import deepcopy
import os
import numpy as np
import xarray as xr
def save_to_pickle(datafield, filename):
"""
Save datafield to pickle file. Keep in mind that restoring a pickle
requires that the internal structure of the types for the pickled data
    remain unchanged, i.e. it is not recommended for long-term storage.
:param datafield: datafield or dataarray to save
:type datafield: xr.Dataset|xr.DataArray
:param filename: filename
:type filename: str
"""
assert isinstance(datafield, (xr.DataArray, xr.Dataset))
if not filename.endswith(".pkl"):
filename += ".pkl"
with open(filename, "wb") as handle:
pickle.dump(datafield, handle, protocol=pickle.HIGHEST_PROTOCOL)
def save_to_netcdf(datafield, filename):
"""
Save datafield to NetCDF. NetCDF cannot handle structured attributes,
hence they are stripped and if there are some, they are saved as json
with the same filename.
:param datafield: datafield or dataarray to save
:type datafield: xr.Dataset|xr.DataArray
:param filename: filename
:type filename: str
"""
assert isinstance(datafield, (xr.DataArray, xr.Dataset))
datafield = deepcopy(datafield)
if not filename.endswith(".nc"):
filename += ".nc"
if datafield.attrs:
attributes_copy = deepcopy(datafield.attrs)
_save_attrs_json(attributes_copy, filename)
datafield.attrs = {}
datafield.to_netcdf(filename)
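# Illustrative round trip (added; file names are hypothetical):
#
#   da = xr.DataArray(np.arange(3), name="example", attrs={"units": "a.u."})
#   save_to_pickle(da, "example")   # -> example.pkl
#   save_to_netcdf(da, "example")   # -> example.nc (+ example.json for attrs)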
def _save_attrs_json(attrs, filename):
"""
Save attributes to json.
:param attrs: attributes to save
:type attrs: dict
:param filename: filename for the json file
:type filename: str
"""
def sanitise_attrs(attrs):
|
filename = os.path.splitext(filename)[0] + ".json"
with open(filename, "w") as handle:
json.dump(sanitise_attrs(attrs), handle)
| sanitised = {}
for k, v in attrs.items():
if isinstance(v, list):
sanitised[k] = [
sanitise_attrs(vv) if isinstance(vv, dict) else vv.tolist() if isinstance(vv, np.ndarray) else vv
for vv in v
]
elif isinstance(v, dict):
sanitised[k] = sanitise_attrs(v)
elif isinstance(v, np.ndarray):
sanitised[k] = v.tolist()
else:
sanitised[k] = v
return sanitised |
predicate.go | package reads
import (
"bytes"
"fmt"
"regexp"
"strconv"
"github.com/influxdata/flux/ast"
"github.com/influxdata/flux/semantic"
"github.com/influxdata/influxdb/models"
"github.com/influxdata/influxdb/storage/reads/datatypes"
"github.com/influxdata/influxql"
"github.com/pkg/errors"
)
const (
fieldKey = "_field"
measurementKey = "_measurement"
valueKey = "_value"
fieldRef = "$"
)
// NodeVisitor can be called by Walk to traverse the Node hierarchy.
// The Visit() function is called once per node.
type NodeVisitor interface {
Visit(*datatypes.Node) NodeVisitor
}
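// A minimal custom visitor (added for illustration; not part of this
// package) that counts the nodes in a predicate tree:
//
//	type nodeCounter struct{ n int }
//
//	func (c *nodeCounter) Visit(node *datatypes.Node) NodeVisitor {
//		c.n++
//		return c // non-nil: keep walking into the children
//	}
//
// Usage: WalkNode(&nodeCounter{}, pred.Root), where pred is a *datatypes.Predicate.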
func WalkChildren(v NodeVisitor, node *datatypes.Node) {
for _, n := range node.Children {
WalkNode(v, n)
}
}
func WalkNode(v NodeVisitor, node *datatypes.Node) {
if v = v.Visit(node); v == nil {
return
}
WalkChildren(v, node)
}
func PredicateToExprString(p *datatypes.Predicate) string {
if p == nil {
return "[none]"
}
var v predicateExpressionPrinter
WalkNode(&v, p.Root)
return v.Buffer.String()
}
type predicateExpressionPrinter struct {
bytes.Buffer
}
func (v *predicateExpressionPrinter) Visit(n *datatypes.Node) NodeVisitor {
switch n.NodeType {
case datatypes.NodeTypeLogicalExpression:
if len(n.Children) > 0 {
var op string
if n.GetLogical() == datatypes.LogicalAnd {
op = " AND "
} else {
op = " OR "
}
WalkNode(v, n.Children[0])
for _, e := range n.Children[1:] {
v.Buffer.WriteString(op)
WalkNode(v, e)
}
}
return nil
case datatypes.NodeTypeParenExpression:
if len(n.Children) == 1 {
v.Buffer.WriteString("( ")
WalkNode(v, n.Children[0])
v.Buffer.WriteString(" )")
}
return nil
case datatypes.NodeTypeComparisonExpression:
WalkNode(v, n.Children[0])
v.Buffer.WriteByte(' ')
switch n.GetComparison() {
case datatypes.ComparisonEqual:
v.Buffer.WriteByte('=')
case datatypes.ComparisonNotEqual:
v.Buffer.WriteString("!=")
case datatypes.ComparisonStartsWith:
v.Buffer.WriteString("startsWith")
case datatypes.ComparisonRegex:
v.Buffer.WriteString("=~")
case datatypes.ComparisonNotRegex:
v.Buffer.WriteString("!~")
case datatypes.ComparisonLess:
v.Buffer.WriteByte('<')
case datatypes.ComparisonLessEqual:
v.Buffer.WriteString("<=")
case datatypes.ComparisonGreater:
v.Buffer.WriteByte('>')
case datatypes.ComparisonGreaterEqual:
v.Buffer.WriteString(">=")
}
v.Buffer.WriteByte(' ')
WalkNode(v, n.Children[1])
return nil
case datatypes.NodeTypeTagRef:
v.Buffer.WriteByte('\'')
v.Buffer.WriteString(n.GetTagRefValue())
v.Buffer.WriteByte('\'')
return nil
case datatypes.NodeTypeFieldRef:
v.Buffer.WriteByte('$')
return nil
case datatypes.NodeTypeLiteral:
switch val := n.Value.(type) {
case *datatypes.Node_StringValue:
v.Buffer.WriteString(strconv.Quote(val.StringValue))
case *datatypes.Node_RegexValue:
v.Buffer.WriteByte('/')
v.Buffer.WriteString(val.RegexValue)
v.Buffer.WriteByte('/')
case *datatypes.Node_IntegerValue:
v.Buffer.WriteString(strconv.FormatInt(val.IntegerValue, 10))
case *datatypes.Node_UnsignedValue:
v.Buffer.WriteString(strconv.FormatUint(val.UnsignedValue, 10))
case *datatypes.Node_FloatValue:
v.Buffer.WriteString(strconv.FormatFloat(val.FloatValue, 'f', 10, 64))
case *datatypes.Node_BooleanValue:
if val.BooleanValue {
v.Buffer.WriteString("true")
} else {
v.Buffer.WriteString("false")
}
}
return nil
default:
return v
}
}
func toStoragePredicate(f *semantic.FunctionExpression) (*datatypes.Predicate, error) {
if f.Block.Parameters == nil || len(f.Block.Parameters.List) != 1 {
return nil, errors.New("storage predicate functions must have exactly one parameter")
}
root, err := toStoragePredicateHelper(f.Block.Body.(semantic.Expression), f.Block.Parameters.List[0].Key.Name)
if err != nil {
return nil, err
}
return &datatypes.Predicate{
Root: root,
}, nil
}
func toStoragePredicateHelper(n semantic.Expression, objectName string) (*datatypes.Node, error) {
switch n := n.(type) {
case *semantic.LogicalExpression:
left, err := toStoragePredicateHelper(n.Left, objectName)
if err != nil {
return nil, errors.Wrap(err, "left hand side")
}
right, err := toStoragePredicateHelper(n.Right, objectName)
if err != nil {
return nil, errors.Wrap(err, "right hand side")
}
children := []*datatypes.Node{left, right}
switch n.Operator {
case ast.AndOperator:
return &datatypes.Node{
NodeType: datatypes.NodeTypeLogicalExpression,
Value: &datatypes.Node_Logical_{Logical: datatypes.LogicalAnd},
Children: children,
}, nil
case ast.OrOperator:
return &datatypes.Node{
NodeType: datatypes.NodeTypeLogicalExpression,
Value: &datatypes.Node_Logical_{Logical: datatypes.LogicalOr},
Children: children,
}, nil
default:
return nil, fmt.Errorf("unknown logical operator %v", n.Operator)
}
case *semantic.BinaryExpression:
left, err := toStoragePredicateHelper(n.Left, objectName)
if err != nil {
return nil, errors.Wrap(err, "left hand side")
}
right, err := toStoragePredicateHelper(n.Right, objectName)
if err != nil {
return nil, errors.Wrap(err, "right hand side")
}
children := []*datatypes.Node{left, right}
op, err := toComparisonOperator(n.Operator)
if err != nil {
return nil, err
}
return &datatypes.Node{
NodeType: datatypes.NodeTypeComparisonExpression,
Value: &datatypes.Node_Comparison_{Comparison: op},
Children: children,
}, nil
case *semantic.StringLiteral:
return &datatypes.Node{
NodeType: datatypes.NodeTypeLiteral,
Value: &datatypes.Node_StringValue{
StringValue: n.Value,
},
}, nil
case *semantic.IntegerLiteral:
return &datatypes.Node{
NodeType: datatypes.NodeTypeLiteral,
Value: &datatypes.Node_IntegerValue{
IntegerValue: n.Value,
},
}, nil
case *semantic.BooleanLiteral:
return &datatypes.Node{
NodeType: datatypes.NodeTypeLiteral,
Value: &datatypes.Node_BooleanValue{
BooleanValue: n.Value,
},
}, nil
case *semantic.FloatLiteral:
return &datatypes.Node{
NodeType: datatypes.NodeTypeLiteral,
Value: &datatypes.Node_FloatValue{
FloatValue: n.Value,
},
}, nil
case *semantic.RegexpLiteral:
return &datatypes.Node{
NodeType: datatypes.NodeTypeLiteral,
Value: &datatypes.Node_RegexValue{
RegexValue: n.Value.String(),
},
}, nil
case *semantic.MemberExpression:
// Sanity check that the object is the objectName identifier
if ident, ok := n.Object.(*semantic.IdentifierExpression); !ok || ident.Name != objectName {
return nil, fmt.Errorf("unknown object %q", n.Object)
}
switch n.Property {
case fieldKey:
return &datatypes.Node{
NodeType: datatypes.NodeTypeTagRef,
Value: &datatypes.Node_TagRefValue{
TagRefValue: models.FieldKeyTagKey,
},
}, nil
case measurementKey:
return &datatypes.Node{
NodeType: datatypes.NodeTypeTagRef,
Value: &datatypes.Node_TagRefValue{
TagRefValue: models.MeasurementTagKey,
},
}, nil
case valueKey:
return &datatypes.Node{
NodeType: datatypes.NodeTypeFieldRef,
Value: &datatypes.Node_FieldRefValue{
FieldRefValue: valueKey,
},
}, nil
}
return &datatypes.Node{
NodeType: datatypes.NodeTypeTagRef,
Value: &datatypes.Node_TagRefValue{
TagRefValue: n.Property,
},
}, nil
case *semantic.DurationLiteral:
return nil, errors.New("duration literals not supported in storage predicates")
case *semantic.DateTimeLiteral:
return nil, errors.New("time literals not supported in storage predicates")
default:
return nil, fmt.Errorf("unsupported semantic expression type %T", n)
}
}
func | (o ast.OperatorKind) (datatypes.Node_Comparison, error) {
switch o {
case ast.EqualOperator:
return datatypes.ComparisonEqual, nil
case ast.NotEqualOperator:
return datatypes.ComparisonNotEqual, nil
case ast.RegexpMatchOperator:
return datatypes.ComparisonRegex, nil
case ast.NotRegexpMatchOperator:
return datatypes.ComparisonNotRegex, nil
case ast.StartsWithOperator:
return datatypes.ComparisonStartsWith, nil
case ast.LessThanOperator:
return datatypes.ComparisonLess, nil
case ast.LessThanEqualOperator:
return datatypes.ComparisonLessEqual, nil
case ast.GreaterThanOperator:
return datatypes.ComparisonGreater, nil
case ast.GreaterThanEqualOperator:
return datatypes.ComparisonGreaterEqual, nil
default:
return 0, fmt.Errorf("unknown operator %v", o)
}
}
// NodeToExpr transforms a predicate node to an influxql.Expr.
func NodeToExpr(node *datatypes.Node, remap map[string]string) (influxql.Expr, error) {
v := &nodeToExprVisitor{remap: remap}
WalkNode(v, node)
if err := v.Err(); err != nil {
return nil, err
}
if len(v.exprs) > 1 {
return nil, errors.New("invalid expression")
}
if len(v.exprs) == 0 {
return nil, nil
}
// TODO(edd): It would be preferable if RewriteRegexConditions was a
// package level function in influxql.
stmt := &influxql.SelectStatement{
Condition: v.exprs[0],
}
stmt.RewriteRegexConditions()
return stmt.Condition, nil
}
type nodeToExprVisitor struct {
remap map[string]string
exprs []influxql.Expr
err error
}
func (v *nodeToExprVisitor) Visit(n *datatypes.Node) NodeVisitor {
if v.err != nil {
return nil
}
switch n.NodeType {
case datatypes.NodeTypeLogicalExpression:
if len(n.Children) > 1 {
op := influxql.AND
if n.GetLogical() == datatypes.LogicalOr {
op = influxql.OR
}
WalkNode(v, n.Children[0])
if v.err != nil {
return nil
}
for i := 1; i < len(n.Children); i++ {
WalkNode(v, n.Children[i])
if v.err != nil {
return nil
}
if len(v.exprs) >= 2 {
lhs, rhs := v.pop2()
v.exprs = append(v.exprs, &influxql.BinaryExpr{LHS: lhs, Op: op, RHS: rhs})
}
}
return nil
}
case datatypes.NodeTypeParenExpression:
if len(n.Children) != 1 {
v.err = errors.New("parenExpression expects one child")
return nil
}
WalkNode(v, n.Children[0])
if v.err != nil {
return nil
}
if len(v.exprs) > 0 {
v.exprs = append(v.exprs, &influxql.ParenExpr{Expr: v.pop()})
}
return nil
case datatypes.NodeTypeComparisonExpression:
WalkChildren(v, n)
if len(v.exprs) < 2 {
v.err = errors.New("comparisonExpression expects two children")
return nil
}
lhs, rhs := v.pop2()
be := &influxql.BinaryExpr{LHS: lhs, RHS: rhs}
switch n.GetComparison() {
case datatypes.ComparisonEqual:
be.Op = influxql.EQ
case datatypes.ComparisonNotEqual:
be.Op = influxql.NEQ
case datatypes.ComparisonStartsWith:
// TODO(sgc): rewrite to anchored RE, as index does not support startsWith yet
v.err = errors.New("startsWith not implemented")
return nil
case datatypes.ComparisonRegex:
be.Op = influxql.EQREGEX
case datatypes.ComparisonNotRegex:
be.Op = influxql.NEQREGEX
case datatypes.ComparisonLess:
be.Op = influxql.LT
case datatypes.ComparisonLessEqual:
be.Op = influxql.LTE
case datatypes.ComparisonGreater:
be.Op = influxql.GT
case datatypes.ComparisonGreaterEqual:
be.Op = influxql.GTE
default:
v.err = errors.New("invalid comparison operator")
return nil
}
v.exprs = append(v.exprs, be)
return nil
case datatypes.NodeTypeTagRef:
ref := n.GetTagRefValue()
if v.remap != nil {
if nk, ok := v.remap[ref]; ok {
ref = nk
}
}
v.exprs = append(v.exprs, &influxql.VarRef{Val: ref, Type: influxql.Tag})
return nil
case datatypes.NodeTypeFieldRef:
v.exprs = append(v.exprs, &influxql.VarRef{Val: fieldRef})
return nil
case datatypes.NodeTypeLiteral:
switch val := n.Value.(type) {
case *datatypes.Node_StringValue:
v.exprs = append(v.exprs, &influxql.StringLiteral{Val: val.StringValue})
case *datatypes.Node_RegexValue:
// TODO(sgc): consider hashing the RegexValue and cache compiled version
re, err := regexp.Compile(val.RegexValue)
if err != nil {
v.err = err
}
v.exprs = append(v.exprs, &influxql.RegexLiteral{Val: re})
return nil
case *datatypes.Node_IntegerValue:
v.exprs = append(v.exprs, &influxql.IntegerLiteral{Val: val.IntegerValue})
case *datatypes.Node_UnsignedValue:
v.exprs = append(v.exprs, &influxql.UnsignedLiteral{Val: val.UnsignedValue})
case *datatypes.Node_FloatValue:
v.exprs = append(v.exprs, &influxql.NumberLiteral{Val: val.FloatValue})
case *datatypes.Node_BooleanValue:
v.exprs = append(v.exprs, &influxql.BooleanLiteral{Val: val.BooleanValue})
default:
v.err = errors.New("unexpected literal type")
return nil
}
return nil
default:
return v
}
return nil
}
func (v *nodeToExprVisitor) Err() error {
return v.err
}
func (v *nodeToExprVisitor) pop() influxql.Expr {
if len(v.exprs) == 0 {
panic("stack empty")
}
var top influxql.Expr
top, v.exprs = v.exprs[len(v.exprs)-1], v.exprs[:len(v.exprs)-1]
return top
}
func (v *nodeToExprVisitor) pop2() (influxql.Expr, influxql.Expr) {
if len(v.exprs) < 2 {
panic("stack empty")
}
rhs := v.exprs[len(v.exprs)-1]
lhs := v.exprs[len(v.exprs)-2]
v.exprs = v.exprs[:len(v.exprs)-2]
return lhs, rhs
}
func IsTrueBooleanLiteral(expr influxql.Expr) bool {
b, ok := expr.(*influxql.BooleanLiteral)
if ok {
return b.Val
}
return false
}
func RewriteExprRemoveFieldValue(expr influxql.Expr) influxql.Expr {
return influxql.RewriteExpr(expr, func(expr influxql.Expr) influxql.Expr {
if be, ok := expr.(*influxql.BinaryExpr); ok {
if ref, ok := be.LHS.(*influxql.VarRef); ok {
if ref.Val == fieldRef {
return &influxql.BooleanLiteral{Val: true}
}
}
}
return expr
})
}
type hasRefs struct {
refs []string
found []bool
}
func (v *hasRefs) allFound() bool {
for _, val := range v.found {
if !val {
return false
}
}
return true
}
func (v *hasRefs) Visit(node influxql.Node) influxql.Visitor {
if v.allFound() {
return nil
}
if n, ok := node.(*influxql.VarRef); ok {
for i, r := range v.refs {
if !v.found[i] && r == n.Val {
v.found[i] = true
if v.allFound() {
return nil
}
}
}
}
return v
}
func HasFieldValueKey(expr influxql.Expr) bool {
refs := hasRefs{refs: []string{fieldRef}, found: make([]bool, 1)}
influxql.Walk(&refs, expr)
return refs.found[0]
}
| toComparisonOperator |
applications.go | package hdinsight
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"net/http"
)
// ApplicationsClient is the hDInsight Management Client
type ApplicationsClient struct {
BaseClient
}
// NewApplicationsClient creates an instance of the ApplicationsClient client.
func NewApplicationsClient(subscriptionID string) ApplicationsClient {
return NewApplicationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
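// Illustrative usage (added; resource names are placeholders):
//
//	client := NewApplicationsClient("<subscription-id>")
//	app, err := client.Get(context.Background(), "myResourceGroup", "myCluster", "hue")
//	if err != nil {
//		// handle error
//	}
//	_ = app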
// NewApplicationsClientWithBaseURI creates an instance of the ApplicationsClient client.
func NewApplicationsClientWithBaseURI(baseURI string, subscriptionID string) ApplicationsClient {
return ApplicationsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// Create creates applications for the HDInsight cluster.
// Parameters:
// resourceGroupName - the name of the resource group.
// clusterName - the name of the cluster.
// parameters - the application create request.
func (client ApplicationsClient) Create(ctx context.Context, resourceGroupName string, clusterName string, parameters ApplicationGetProperties) (result Application, err error) {
req, err := client.CreatePreparer(ctx, resourceGroupName, clusterName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "Create", nil, "Failure preparing request")
return
}
resp, err := client.CreateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "Create", resp, "Failure sending request")
return
}
result, err = client.CreateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "Create", resp, "Failure responding to request")
}
return
}
// CreatePreparer prepares the Create request.
func (client ApplicationsClient) CreatePreparer(ctx context.Context, resourceGroupName string, clusterName string, parameters ApplicationGetProperties) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationName": autorest.Encode("path", "hue"),
"clusterName": autorest.Encode("path", clusterName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-03-01-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HDInsight/clusters/{clusterName}/applications/{applicationName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateSender sends the Create request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationsClient) CreateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// CreateResponder handles the response to the Create request. The method always
// closes the http.Response Body.
func (client ApplicationsClient) CreateResponder(resp *http.Response) (result Application, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified application on the HDInsight cluster.
// Parameters:
// resourceGroupName - the name of the resource group.
// clusterName - the name of the cluster.
func (client ApplicationsClient) Delete(ctx context.Context, resourceGroupName string, clusterName string) (result ApplicationsDeleteFuture, err error) {
req, err := client.DeletePreparer(ctx, resourceGroupName, clusterName)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "Delete", result.Response(), "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client ApplicationsClient) DeletePreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationName": autorest.Encode("path", "hue"),
"clusterName": autorest.Encode("path", clusterName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-03-01-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HDInsight/clusters/{clusterName}/applications/{applicationName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationsClient) DeleteSender(req *http.Request) (future ApplicationsDeleteFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
err = autorest.Respond(resp, azure.WithErrorUnlessStatusCode(http.StatusOK))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client ApplicationsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByClosing())
result.Response = resp
return
}
// Get lists properties of the specified application.
// Parameters:
// resourceGroupName - the name of the resource group.
// clusterName - the name of the cluster.
// applicationName - the constant value for the application name.
func (client ApplicationsClient) Get(ctx context.Context, resourceGroupName string, clusterName string, applicationName string) (result Application, err error) {
req, err := client.GetPreparer(ctx, resourceGroupName, clusterName, applicationName)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client ApplicationsClient) GetPreparer(ctx context.Context, resourceGroupName string, clusterName string, applicationName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationName": autorest.Encode("path", applicationName),
"clusterName": autorest.Encode("path", clusterName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-03-01-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HDInsight/clusters/{clusterName}/applications/{applicationName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationsClient) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client ApplicationsClient) GetResponder(resp *http.Response) (result Application, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// List lists all of the applications for the HDInsight cluster.
// Parameters:
// resourceGroupName - the name of the resource group.
// clusterName - the name of the cluster.
func (client ApplicationsClient) List(ctx context.Context, resourceGroupName string, clusterName string) (result ApplicationListResultPage, err error) {
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx, resourceGroupName, clusterName)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.alr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "List", resp, "Failure sending request")
return
}
result.alr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "List", resp, "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
func (client ApplicationsClient) ListPreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"clusterName": autorest.Encode("path", clusterName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-03-01-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HDInsight/clusters/{clusterName}/applications", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationsClient) ListSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client ApplicationsClient) ListResponder(resp *http.Response) (result ApplicationListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client ApplicationsClient) listNextResults(lastResults ApplicationListResult) (result ApplicationListResult, err error) {
req, err := lastResults.applicationListResultPreparer()
if err != nil { | }
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client ApplicationsClient) ListComplete(ctx context.Context, resourceGroupName string, clusterName string) (result ApplicationListResultIterator, err error) {
result.page, err = client.List(ctx, resourceGroupName, clusterName)
return
} | return result, autorest.NewErrorWithError(err, "hdinsight.ApplicationsClient", "listNextResults", nil, "Failure preparing next results request") |
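// Example usage (editor's sketch, not part of the generated client; the ids
// and names below are placeholders, and an Authorizer must be configured on
// the client before calling -- iterator method names follow the usual
// autorest-generated pattern):
//
//	client := NewApplicationsClient("<subscription-id>")
//	it, err := client.ListComplete(ctx, "my-resource-group", "my-cluster")
//	for err == nil && it.NotDone() {
//		fmt.Println(*it.Value().Name)
//		err = it.Next()
//	}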
plugin.js | ( function() {
function addCombo( editor, comboName, styleType, lang, entries, defaultLabel, styleDefinition, order ) {
var config = editor.config,style = new CKEDITOR.style( styleDefinition );
var names = entries.split( ';' ),values = [];
var styles = {};
for ( var i = 0; i < names.length; i++ ) {
var parts = names[ i ];
if ( parts ) {
parts = parts.split( '/' );
var vars = {},name = names[ i ] = parts[ 0 ];
vars[ styleType ] = values[ i ] = parts[ 1 ] || name;
styles[ name ] = new CKEDITOR.style( styleDefinition, vars );
styles[ name ]._.definition.name = name;
} else
names.splice( i--, 1 );
}
editor.ui.addRichCombo( comboName, {
		label: 'Line', | panel: {
css: [ CKEDITOR.skin.getPath( 'editor' ) ].concat( config.contentsCss ),
multiSelect: false,
			attributes: { 'aria-label': 'Line height' }
},
init: function() {
this.startGroup('Line-Height');
for ( var i = 0; i < names.length; i++ ) {
var name = names[ i ];
this.add( name, styles[ name ].buildPreview(), name );
}
},
onClick: function( value ) {
editor.focus();
editor.fire( 'saveSnapshot' );
var style = styles[ value ];
editor[ this.getValue() == value ? 'removeStyle' : 'applyStyle' ]( style );
editor.fire( 'saveSnapshot' );
},
onRender: function() {
editor.on( 'selectionChange', function( ev ) {
var currentValue = this.getValue();
var elementPath = ev.data.path,elements = elementPath.elements;
for ( var i = 0, element; i < elements.length; i++ ) {
element = elements[ i ];
for ( var value in styles ) {
if ( styles[ value ].checkElementMatch( element, true, editor ) ) {
if ( value != currentValue )
this.setValue( value );
return;
}
}
}
this.setValue( '', defaultLabel );
}, this );
},
refresh: function() {
if ( !editor.activeFilter.check( style ) )
this.setState( CKEDITOR.TRISTATE_DISABLED );
}
} );
}
CKEDITOR.plugins.add( 'lineheight', {
requires: 'richcombo',
lang: 'en,vi',
init: function( editor ) {
var config = editor.config;
		addCombo( editor, 'lineheight', 'size', 'Line', config.line_height, 'Line', config.lineHeight_style, 40 );
}
} );
} )();
CKEDITOR.config.line_height = '1;2;3;4;5;6;7;8;9;10;11;12;13;14;15;16;17;18;19;20;21;22;23;24;25;26;27;28;29;30;31;32;33;34;35;36;37;38;39;40;41;42;43;44;45;46;47;48;49;50;51;52;53;54;55;56;57;58;59;60;61;62;63;64;65;66;67;68;69;70;71;72';
CKEDITOR.config.lineHeight_style = {
element: 'span',
styles: { 'line-height': '#(size)' },
overrides: [ {
element: 'line-height', attributes: { 'size': null }
} ]
}; | title: 'Line',
toolbar: 'styles,' + order,
allowedContent: style,
requiredContent: style, |
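// Example usage (editor's sketch; 'editor1' is a placeholder element id --
// once this file is installed under plugins/lineheight, the combo only needs
// to be listed in extraPlugins):
// CKEDITOR.replace( 'editor1', { extraPlugins: 'lineheight' } );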
processutils.py | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import errno
import logging
import multiprocessing
import os
import random
import shlex
import signal
from eventlet.green import subprocess
from eventlet import greenthread
from oslo.utils import strutils
import six
from neutron_vpnaas.openstack.common._i18n import _
LOG = logging.getLogger(__name__)
class InvalidArgumentError(Exception):
def __init__(self, message=None):
super(InvalidArgumentError, self).__init__(message)
class UnknownArgumentError(Exception):
def __init__(self, message=None):
super(UnknownArgumentError, self).__init__(message)
class ProcessExecutionError(Exception):
def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None,
description=None):
self.exit_code = exit_code
self.stderr = stderr
self.stdout = stdout
self.cmd = cmd
self.description = description
if description is None:
description = _("Unexpected error while running command.")
if exit_code is None:
exit_code = '-'
message = _('%(description)s\n'
'Command: %(cmd)s\n'
'Exit code: %(exit_code)s\n'
'Stdout: %(stdout)r\n'
'Stderr: %(stderr)r') % {'description': description,
'cmd': cmd,
'exit_code': exit_code,
'stdout': stdout,
'stderr': stderr}
super(ProcessExecutionError, self).__init__(message)
class NoRootWrapSpecified(Exception):
def __init__(self, message=None):
super(NoRootWrapSpecified, self).__init__(message)
def _subprocess_setup():
# Python installs a SIGPIPE handler by default. This is usually not what
# non-Python subprocesses expect.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def execute(*cmd, **kwargs):
"""Helper method to shell out and execute a command through subprocess.
Allows optional retry.
:param cmd: Passed to subprocess.Popen.
:type cmd: string
:param process_input: Send to opened process.
:type process_input: string
:param env_variables: Environment variables and their values that
will be set for the process.
:type env_variables: dict
:param check_exit_code: Single bool, int, or list of allowed exit
codes. Defaults to [0]. Raise
:class:`ProcessExecutionError` unless
program exits with one of these code.
:type check_exit_code: boolean, int, or [int]
:param delay_on_retry: True | False. Defaults to True. If set to True,
wait a short amount of time before retrying.
:type delay_on_retry: boolean
:param attempts: How many times to retry cmd.
:type attempts: int
:param run_as_root: True | False. Defaults to False. If set to True,
the command is prefixed by the command specified
in the root_helper kwarg.
:type run_as_root: boolean
:param root_helper: command to prefix to commands called with
run_as_root=True
:type root_helper: string
:param shell: whether or not there should be a shell used to
execute this command. Defaults to false.
:type shell: boolean
:param loglevel: log level for execute commands.
:type loglevel: int. (Should be logging.DEBUG or logging.INFO)
:returns: (stdout, stderr) from process execution
:raises: :class:`UnknownArgumentError` on
receiving unknown arguments
:raises: :class:`ProcessExecutionError`
"""
process_input = kwargs.pop('process_input', None)
env_variables = kwargs.pop('env_variables', None)
check_exit_code = kwargs.pop('check_exit_code', [0])
ignore_exit_code = False
delay_on_retry = kwargs.pop('delay_on_retry', True)
attempts = kwargs.pop('attempts', 1)
run_as_root = kwargs.pop('run_as_root', False)
root_helper = kwargs.pop('root_helper', '')
shell = kwargs.pop('shell', False)
loglevel = kwargs.pop('loglevel', logging.DEBUG)
if isinstance(check_exit_code, bool):
ignore_exit_code = not check_exit_code
check_exit_code = [0]
elif isinstance(check_exit_code, int):
check_exit_code = [check_exit_code]
if kwargs:
raise UnknownArgumentError(_('Got unknown keyword args: %r') % kwargs)
if run_as_root and hasattr(os, 'geteuid') and os.geteuid() != 0:
if not root_helper:
raise NoRootWrapSpecified(
message=_('Command requested root, but did not '
'specify a root helper.'))
cmd = shlex.split(root_helper) + list(cmd)
    cmd = [str(c) for c in cmd]  # a list, not an iterator: cmd is logged and reused on retries
sanitized_cmd = strutils.mask_password(' '.join(cmd))
while attempts > 0:
attempts -= 1
try:
LOG.log(loglevel, _('Running cmd (subprocess): %s'), sanitized_cmd)
_PIPE = subprocess.PIPE # pylint: disable=E1101
if os.name == 'nt':
preexec_fn = None
close_fds = False
else:
preexec_fn = _subprocess_setup
close_fds = True
obj = subprocess.Popen(cmd,
stdin=_PIPE,
stdout=_PIPE,
stderr=_PIPE,
close_fds=close_fds,
preexec_fn=preexec_fn,
shell=shell,
env=env_variables)
result = None
for _i in six.moves.range(20):
# NOTE(russellb) 20 is an arbitrary number of retries to
# prevent any chance of looping forever here.
try:
if process_input is not None:
result = obj.communicate(process_input)
else:
result = obj.communicate()
except OSError as e:
if e.errno in (errno.EAGAIN, errno.EINTR):
continue
raise
break
obj.stdin.close() # pylint: disable=E1101
_returncode = obj.returncode # pylint: disable=E1101
LOG.log(loglevel, 'Result was %s' % _returncode) | sanitized_stderr = strutils.mask_password(stderr)
raise ProcessExecutionError(exit_code=_returncode,
stdout=sanitized_stdout,
stderr=sanitized_stderr,
cmd=sanitized_cmd)
return result
except ProcessExecutionError:
if not attempts:
raise
else:
LOG.log(loglevel, _('%r failed. Retrying.'), sanitized_cmd)
if delay_on_retry:
greenthread.sleep(random.randint(20, 200) / 100.0)
finally:
# NOTE(termie): this appears to be necessary to let the subprocess
# call clean something up in between calls, without
# it two execute calls in a row hangs the second one
greenthread.sleep(0)
def trycmd(*args, **kwargs):
"""A wrapper around execute() to more easily handle warnings and errors.
Returns an (out, err) tuple of strings containing the output of
the command's stdout and stderr. If 'err' is not empty then the
command can be considered to have failed.
:discard_warnings True | False. Defaults to False. If set to True,
then for succeeding commands, stderr is cleared
"""
discard_warnings = kwargs.pop('discard_warnings', False)
try:
out, err = execute(*args, **kwargs)
failed = False
except ProcessExecutionError as exn:
out, err = '', six.text_type(exn)
failed = True
if not failed and discard_warnings and err:
# Handle commands that output to stderr but otherwise succeed
err = ''
return out, err
def ssh_execute(ssh, cmd, process_input=None,
addl_env=None, check_exit_code=True):
sanitized_cmd = strutils.mask_password(cmd)
LOG.debug('Running cmd (SSH): %s', sanitized_cmd)
if addl_env:
raise InvalidArgumentError(_('Environment not supported over SSH'))
if process_input:
# This is (probably) fixable if we need it...
raise InvalidArgumentError(_('process_input not supported over SSH'))
stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd)
channel = stdout_stream.channel
# NOTE(justinsb): This seems suspicious...
# ...other SSH clients have buffering issues with this approach
stdout = stdout_stream.read()
sanitized_stdout = strutils.mask_password(stdout)
stderr = stderr_stream.read()
sanitized_stderr = strutils.mask_password(stderr)
stdin_stream.close()
exit_status = channel.recv_exit_status()
# exit_status == -1 if no exit code was returned
if exit_status != -1:
LOG.debug('Result was %s' % exit_status)
if check_exit_code and exit_status != 0:
raise ProcessExecutionError(exit_code=exit_status,
stdout=sanitized_stdout,
stderr=sanitized_stderr,
cmd=sanitized_cmd)
return (sanitized_stdout, sanitized_stderr)
def get_worker_count():
"""Utility to get the default worker count.
@return: The number of CPUs if that can be determined, else a default
worker count of 1 is returned.
"""
try:
return multiprocessing.cpu_count()
except NotImplementedError:
return 1 | if not ignore_exit_code and _returncode not in check_exit_code:
(stdout, stderr) = result
sanitized_stdout = strutils.mask_password(stdout) |
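# Example usage (editor's sketch; the commands and the root helper below are
# placeholders):
#
#     out, err = execute('cat', '/etc/hostname', attempts=3, delay_on_retry=True)
#     out, err = execute('ip', 'link', 'set', 'eth0', 'up',
#                        run_as_root=True, root_helper='sudo')
#     out, err = trycmd('ls', '/nonexistent', discard_warnings=True)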
save_to_google_team_drive.py | # save_to_google_team_drive.py
""" Saves a file to a Google Team Drive, in a given parent folder """
import os
import sys
where_i_am = os.path.dirname(os.path.realpath(__file__))
sys.path.append(where_i_am)
sys.path.append(where_i_am + "/dependencies")
from google.oauth2 import service_account # noqa: E402
from googleapiclient.discovery import build # noqa: E402
from googleapiclient.http import MediaFileUpload # noqa: E402
def _get_credentials_from_service_account_info(google_credentials):
""" Return credentials given service account file and assumptions of scopes needed """
service_account_info = google_credentials
credentials = ""
# Scopes are defined here: https://developers.google.com/identity/protocols/googlescopes
SCOPES = ['https://www.googleapis.com/auth/drive']
credentials = service_account.Credentials.from_service_account_info(service_account_info, scopes=SCOPES)
return(credentials)
def save_file_to_google_team_drive(google_credentials, drive_id, parent_folder_id, local_folder_name, file_name):
""" If file exists, update it, else do initial upload """
# credentials = _get_credentials_from_service_account_file()
credentials = _get_credentials_from_service_account_info(google_credentials)
file_id = _get_file_id_given_filename(credentials, drive_id, parent_folder_id, file_name)
if file_id > "":
_update_existing_file(credentials, parent_folder_id, file_id, local_folder_name, file_name)
else:
file_id = _upload_new_file(credentials, drive_id, parent_folder_id, local_folder_name, file_name)
return(file_id)
def _get_file_id_given_filename(credentials, drive_id, parent_folder_id, file_name):
""" Find a File_Id given drive, parent folder, and file_name """
file_id = ""
service = build('drive', 'v3', credentials=credentials)
nextPageToken = ""
query_string = "name='" + file_name + "'" + " and '" + parent_folder_id + "' in parents"
query_string += " and trashed = False"
results = service.files().list(
pageSize=1000, # 1000 is the maximum pageSize allowed
pageToken=nextPageToken,
fields="kind, nextPageToken, incompleteSearch, files(id, name, mimeType, modifiedTime, parents)",
supportsAllDrives="true", # required if writing to a team drive
driveId=drive_id,
includeItemsFromAllDrives="true", # required if querying from a team drive
corpora="drive",
q=query_string).execute()
items = results.get('files', [])
if items:
for item in items:
if 'id' in item:
file_id = item['id']
break # if more than one file exists, we'll just return the first one
return file_id
def _update_existing_file(credentials, parent_folder_id, file_id, local_folder_name, file_name, mime_type='text/xml'):
""" upload new content for existing file_id """
full_path_file_name = _get_full_path_file_name(local_folder_name, file_name)
media = MediaFileUpload(full_path_file_name,
mimetype=mime_type,
resumable=True) # 'image/jpeg'
drive_service = build('drive', 'v3', credentials=credentials)
file = drive_service.files().update(fileId=file_id,
media_body=media,
supportsAllDrives=True,
fields='id').execute()
return(file.get('id'))
def _upload_new_file(credentials, drive_id, parent_folder_id, local_folder_name, file_name, mime_type='text/xml'):
""" Upload an all new file (note, this will produce duplicates,
so check for existance before calling this) """
full_path_file_name = _get_full_path_file_name(local_folder_name, file_name)
file_metadata = {'name': file_name, 'mimeType': mime_type,
'teamDriveId': drive_id,
'parents': [parent_folder_id]}
media = MediaFileUpload(full_path_file_name,
mimetype=mime_type,
resumable=True) # 'image/jpeg'
drive_service = build('drive', 'v3', credentials=credentials)
file = drive_service.files().create(body=file_metadata,
media_body=media,
supportsAllDrives=True,
fields='id').execute()
return(file.get('id'))
def _get_full_path_file_name(local_folder_name, file_name):
full_path_file_name = ''
if local_folder_name > '':
full_path_file_name = local_folder_name + '/'
full_path_file_name += file_name | return full_path_file_name
def _delete_existing_file(credentials, file_id):
""" Delete an existing file given file_id """
# note: user needs "organizer" privilege on the parent folder in order to delete
drive_service = build('drive', 'v3', credentials=credentials)
drive_service.files().delete(fileId=file_id,
supportsAllDrives=True).execute() | |
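# Example usage (editor's sketch; every id below is a placeholder -- pass a
# real service-account info dict plus the Team Drive and parent folder ids):
#
#     file_id = save_file_to_google_team_drive(
#         google_credentials=service_account_info,
#         drive_id='<team-drive-id>',
#         parent_folder_id='<parent-folder-id>',
#         local_folder_name='/tmp/exports',
#         file_name='records.xml')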
display_tickets.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GUI import
import tkinter as tk
# Styling the GUI
from tkinter import ttk
# Database connection
from modules.create_db_components import create_connection
# Deletes the ticket from the database
from modules.removing_tickets import delete_ticket
"""This module is used to display all the tickets present in the
Database."""
# Owned
__author__ = "Datta Adithya"
__credits__ = ["Datta Adithya"]
__license__ = "MIT"
__maintainer__ = "Datta Adithya"
__email__ = "[email protected]"
# fonts for the project
text_font = ("Helvetica", 12)
# functions to retrieve all of the records from the database
def do_cat(conn):
cur = conn.cursor()
cur.execute('SELECT * FROM tickets WHERE category = "DO"')
conn.commit()
rows = cur.fetchall()
return rows
def dec_cat(conn):
cur = conn.cursor()
cur.execute('SELECT * FROM tickets WHERE category = "DEC"')
conn.commit()
rows = cur.fetchall()
return rows
def dlg_cat(conn):
cur = conn.cursor()
cur.execute('SELECT * FROM tickets WHERE category = "DLG"')
conn.commit()
rows = cur.fetchall()
return rows
def del_cat(conn):
cur = conn.cursor()
cur.execute('SELECT * FROM tickets WHERE category = "DEL"')
conn.commit()
rows = cur.fetchall()
return rows
# GUI for the project
class windows(tk.Tk):
def __init__(self, conn, *args, **kwargs):
tk.Tk.__init__(self, *args, **kwargs)
self.wm_title("Eisen's Tickets")
        self.iconbitmap(default="../../assets/logo.ico")  # passing self as a positional arg would be read as the bitmap
self.conn = conn
container = tk.Frame(self, height=400, width=600)
container.pack(side="top", fill="both", expand=True)
container.grid_rowconfigure(0, weight=1)
container.grid_columnconfigure(0, weight=1)
self.frames = {}
for F in (MainPage, EisenDisplay, DoPage, DecPage, DlgPage, DelPage):
frame = F(container, self, self.conn)
self.frames[F] = frame
frame.grid(row=0, column=0, sticky="nsew")
self.show_frame(MainPage)
def show_frame(self, cont):
frame = self.frames[cont]
frame.tkraise()
def ticket_display(self, ticket):
new_window = tk.Toplevel(self)
ticket_id = ticket[0]
timestamp = ticket[1]
category = ticket[2]
task = ticket[3]
more_info = ticket[4]
fields = ["Ticket ID", "Timestamp", "Category", "Task", "More Info"]
details = [ticket_id, timestamp, category, task, more_info]
r = 0
for field in fields:
tk.Label(new_window, text=field, relief=tk.RIDGE, width=15).grid(
row=r, column=0
)
tk.Label(new_window, text=details[r], relief=tk.SUNKEN, width=100).grid(
row=r, column=1
)
r += 1
tk.Button(
new_window,
relief=tk.RIDGE,
text="Delete Ticket",
background="#FF3333",
command=lambda: delete_ticket(self.conn, ticket_id),
).grid(row=r, column=0, columnspan=2, sticky="ew")
# Pages made for navigation through the different categories
class MainPage(tk.Frame):
def __init__(self, parent, controller, conn):
tk.Frame.__init__(self, parent)
self.conn = conn
label = tk.Label(self, text="Start Page", font=text_font)
label.pack(padx=10, pady=10)
eisen_display_button = ttk.Button(
self,
text="Display Selection",
command=lambda: controller.show_frame(EisenDisplay),
)
eisen_display_button.pack(side="bottom", fill=tk.X)
class EisenDisplay(tk.Frame):
def __init__(self, parent, controller, conn):
|
class DoPage(tk.Frame):
def __init__(self, parent, controller, conn):
tk.Frame.__init__(self, parent)
label = tk.Label(self, text="Eisen's Do Page", font=text_font)
label.pack(padx=10, pady=10)
do_rows = do_cat(conn)
for element in do_rows:
tk.Button(
self,
text=element[3],
fg="black",
command=lambda ele=element: controller.ticket_display(ele),
).pack(fill=tk.X)
eisen_display_button = ttk.Button(
self,
text="Display Selection",
command=lambda: controller.show_frame(EisenDisplay),
)
eisen_display_button.pack(side="bottom", fill=tk.X)
dec_button = ttk.Button(
self, text="Eisen Decide", command=lambda: controller.show_frame(DecPage)
)
dec_button.pack(side="bottom", fill=tk.X)
class DecPage(tk.Frame):
def __init__(self, parent, controller, conn):
tk.Frame.__init__(self, parent)
label = tk.Label(self, text="Eisen's Decide Page", font=text_font)
label.pack(padx=10, pady=10)
dec_rows = dec_cat(conn)
for element in dec_rows:
tk.Button(
self,
text=element[3],
fg="black",
command=lambda ele=element: controller.ticket_display(ele),
).pack(fill=tk.X)
eisen_display_button = ttk.Button(
self,
text="Display Selection",
command=lambda: controller.show_frame(EisenDisplay),
)
eisen_display_button.pack(side="bottom", fill=tk.X)
dlg_button = ttk.Button(
self, text="Eisen Delegate", command=lambda: controller.show_frame(DlgPage)
)
dlg_button.pack(side="bottom", fill=tk.X)
class DlgPage(tk.Frame):
def __init__(self, parent, controller, conn):
tk.Frame.__init__(self, parent)
label = tk.Label(self, text="Eisen's Delegate Page", font=text_font)
label.pack(padx=10, pady=10)
dlg_rows = dlg_cat(conn)
for element in dlg_rows:
tk.Button(
self,
text=element[3],
fg="black",
command=lambda ele=element: controller.ticket_display(ele),
).pack(fill=tk.X)
eisen_display_button = ttk.Button(
self,
text="Display Selection",
command=lambda: controller.show_frame(EisenDisplay),
)
eisen_display_button.pack(side="bottom", fill=tk.X)
del_button = ttk.Button(
self, text="Eisen Delete", command=lambda: controller.show_frame(DelPage)
)
del_button.pack(side="bottom", fill=tk.X)
class DelPage(tk.Frame):
def __init__(self, parent, controller, conn):
tk.Frame.__init__(self, parent)
label = tk.Label(self, text="Eisen's Delete Page", font=text_font)
label.pack(padx=10, pady=10)
del_rows = del_cat(conn)
for element in del_rows:
tk.Button(
self,
text=element[3],
fg="black",
command=lambda ele=element: controller.ticket_display(ele),
).pack(fill=tk.X)
eisen_display_button = ttk.Button(
self,
text="Display Selection",
command=lambda: controller.show_frame(EisenDisplay),
)
eisen_display_button.pack(side="bottom", fill=tk.X)
do_button = ttk.Button(
self, text="Eisen Do", command=lambda: controller.show_frame(DoPage)
)
do_button.pack(side="bottom", fill=tk.X)
if __name__ == "__main__":
connection = create_connection(r"D:\eisen-tickets\assets\tickets.db")
four_windows = windows(connection)
four_windows.mainloop()
| tk.Frame.__init__(self, parent)
self.conn = conn
label = tk.Label(self, text="Eisen Display", font=text_font)
label.pack(padx=10, pady=10)
main_button = ttk.Button(
self,
text="Return to main page",
command=lambda: controller.show_frame(MainPage),
)
main_button.pack(side="bottom", fill=tk.X)
del_button = ttk.Button(
self, text="Eisen Delete", command=lambda: controller.show_frame(DelPage)
)
del_button.pack(side="bottom", fill=tk.X)
dlg_button = ttk.Button(
self, text="Eisen Delegate", command=lambda: controller.show_frame(DlgPage)
)
dlg_button.pack(side="bottom", fill=tk.X)
dec_button = ttk.Button(
self, text="Eisen Decide", command=lambda: controller.show_frame(DecPage)
)
dec_button.pack(side="bottom", fill=tk.X)
do_button = ttk.Button(
self, text="Eisen Do", command=lambda: controller.show_frame(DoPage)
)
do_button.pack(side="bottom", fill=tk.X) |
new_label_dialog.js | /**
 * Create a label
 * @param funWhenCreateLabelSuccess callback on success; receives the new label id
 * @param funWhenCreateLabelFail callback on failure; receives the error code and message
*/
function createLabel(funWhenCreateLabelSuccess, funWhenCreateLabelFail) {
var name | Name').val();
    // requires common.js
if (isStrEmpty(name)) {
        error('Label name cannot be empty', 'labelErrorMsg', true, 2000);
return;
} else {
$('#labelErrorMsg').html(' ');
}
    disableButton(false, 'newLabelBtn', 'Creating...', "button-disable");
$.post(
'/blogger/' + pageOwnerBloggerId + '/label',
{title: name},
function (result) {
if (result.code === 0) {
                disableButton(false, 'newLabelBtn', 'Created', "button-disable");
funWhenCreateLabelSuccess(result.data);
setTimeout(function () {
                    disableButton(true, 'newLabelBtn', 'Create', "button-disable");
$('#labelName').val('');
$('#newLabelDialog').modal('toggle');
}, 1000);
} else {
                disableButton(true, 'newLabelBtn', 'Create', "button-disable");
error(result.msg, 'labelErrorMsg', true, 3000);
funWhenCreateLabelFail(result);
}
}, 'json'
);
}
| = $('#label |
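// Example usage (editor's sketch; both callbacks are placeholders):
// createLabel(
//     function (newLabelId) { console.log('created label ' + newLabelId); },
//     function (result) { console.warn('create failed: ' + result.msg); }
// );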
logger.go | package logger
import (
"fmt"
"io"
"os"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/internal/bytebufferpool"
"github.com/gofiber/fiber/v2/internal/colorable"
"github.com/gofiber/fiber/v2/internal/fasttemplate"
"github.com/gofiber/fiber/v2/internal/isatty"
"github.com/valyala/fasthttp"
)
// Config defines the config for middleware.
type Config struct {
// Next defines a function to skip this middleware when returned true.
//
// Optional. Default: nil
Next func(c *fiber.Ctx) bool
// Format defines the logging tags
//
// Optional. Default: [${time}] ${status} - ${latency} ${method} ${path}\n
Format string
// TimeFormat https://programming.guide/go/format-parse-string-time-date-example.html
//
// Optional. Default: 15:04:05
TimeFormat string
// TimeZone can be specified, such as "UTC" and "America/New_York" and "Asia/Chongqing", etc
//
// Optional. Default: "Local"
TimeZone string
	// Output is a writer where logs are written
//
// Default: os.Stderr
Output io.Writer
enableColors bool
enableLatency bool
timeZoneLocation *time.Location
}
// ConfigDefault is the default config
var ConfigDefault = Config{
Next: nil,
Format: "[${time}] ${status} - ${latency} ${method} ${path}\n",
TimeFormat: "15:04:05",
TimeZone: "Local",
Output: os.Stderr,
}
// Logger variables
const (
TagPid = "pid"
TagTime = "time"
TagReferer = "referer"
TagProtocol = "protocol"
TagIP = "ip"
TagIPs = "ips"
TagHost = "host"
TagMethod = "method"
TagPath = "path"
TagURL = "url"
TagUA = "ua"
TagLatency = "latency"
TagStatus = "status"
TagBody = "body"
TagBytesSent = "bytesSent"
TagBytesReceived = "bytesReceived"
TagRoute = "route"
TagError = "error"
TagHeader = "header:"
TagLocals = "locals:"
TagQuery = "query:"
TagForm = "form:"
TagCookie = "cookie:"
TagBlack = "black"
TagRed = "red"
TagGreen = "green"
TagYellow = "yellow"
TagBlue = "blue"
TagMagenta = "magenta"
TagCyan = "cyan"
TagWhite = "white"
TagReset = "reset"
)
// Color values
const (
cBlack = "\u001b[90m"
cRed = "\u001b[91m"
cGreen = "\u001b[92m"
cYellow = "\u001b[93m"
cBlue = "\u001b[94m"
cMagenta = "\u001b[95m"
cCyan = "\u001b[96m"
cWhite = "\u001b[97m"
cReset = "\u001b[0m"
)
// New creates a new middleware handler
func New(config ...Config) fiber.Handler {
// Set default config
cfg := ConfigDefault
// Override config if provided
if len(config) > 0 {
cfg = config[0]
// Enable colors if no custom format or output is given
if cfg.Format == "" && cfg.Output == nil {
cfg.enableColors = true
}
// Set default values
if cfg.Next == nil {
cfg.Next = ConfigDefault.Next
}
if cfg.Format == "" {
cfg.Format = ConfigDefault.Format
}
if cfg.TimeZone == "" {
cfg.TimeZone = ConfigDefault.TimeZone
}
if cfg.TimeFormat == "" {
cfg.TimeFormat = ConfigDefault.TimeFormat
}
if cfg.Output == nil {
cfg.Output = ConfigDefault.Output
}
} else {
cfg.enableColors = true
}
// Get timezone location
tz, err := time.LoadLocation(cfg.TimeZone)
if err != nil || tz == nil {
cfg.timeZoneLocation = time.Local
} else {
cfg.timeZoneLocation = tz
}
// Check if format contains latency
cfg.enableLatency = strings.Contains(cfg.Format, "${latency}")
// Create template parser
tmpl := fasttemplate.New(cfg.Format, "${", "}")
// Create correct timeformat
var timestamp atomic.Value
timestamp.Store(time.Now().In(cfg.timeZoneLocation).Format(cfg.TimeFormat))
	// Update date/time every 750 milliseconds in a separate goroutine
if strings.Contains(cfg.Format, "${time}") {
go func() {
for {
time.Sleep(750 * time.Millisecond)
timestamp.Store(time.Now().In(cfg.timeZoneLocation).Format(cfg.TimeFormat))
}
}()
}
// Set PID once
pid := strconv.Itoa(os.Getpid())
// Set variables
var (
start, stop time.Time
once sync.Once
errHandler fiber.ErrorHandler
)
// If colors are enabled, check terminal compatibility
if cfg.enableColors {
cfg.Output = colorable.NewColorableStderr()
if os.Getenv("TERM") == "dumb" || (!isatty.IsTerminal(os.Stderr.Fd()) && !isatty.IsCygwinTerminal(os.Stderr.Fd())) {
cfg.Output = colorable.NewNonColorable(os.Stderr)
}
}
var errPadding = 15
var errPaddingStr = strconv.Itoa(errPadding)
// Return new handler
return func(c *fiber.Ctx) (err error) {
// Don't execute middleware if Next returns true
if cfg.Next != nil && cfg.Next(c) {
return c.Next()
}
// Set error handler once
once.Do(func() {
errHandler = c.App().Config().ErrorHandler
stack := c.App().Stack()
for m := range stack {
for r := range stack[m] {
if len(stack[m][r].Path) > errPadding {
errPadding = len(stack[m][r].Path)
errPaddingStr = strconv.Itoa(errPadding)
}
}
}
})
// Set latency start time
if cfg.enableLatency {
start = time.Now()
}
// Handle request, store err for logging
chainErr := c.Next()
// Manually call error handler
if chainErr != nil {
if err := errHandler(c, chainErr); err != nil {
_ = c.SendStatus(fiber.StatusInternalServerError)
}
}
// Set latency stop time
if cfg.enableLatency {
stop = time.Now()
}
// Get new buffer
buf := bytebufferpool.Get()
// Default output when no custom Format or io.Writer is given
if cfg.enableColors {
// Format error if exist
formatErr := ""
if chainErr != nil {
formatErr = cRed + " | " + chainErr.Error() + cReset
}
// Format log to buffer
_, _ = buf.WriteString(fmt.Sprintf("%s |%s %3d %s| %7v | %15s |%s %-7s %s| %-"+errPaddingStr+"s %s\n",
timestamp.Load().(string),
statusColor(c.Response().StatusCode()), c.Response().StatusCode(), cReset,
stop.Sub(start).Round(time.Millisecond),
c.IP(),
methodColor(c.Method()), c.Method(), cReset,
c.Path(),
formatErr,
))
// Write buffer to output
_, _ = cfg.Output.Write(buf.Bytes())
// Put buffer back to pool
bytebufferpool.Put(buf)
// End chain
return nil
}
// Loop over template tags to replace it with the correct value
_, err = tmpl.ExecuteFunc(buf, func(w io.Writer, tag string) (int, error) {
switch tag {
case TagTime:
return buf.WriteString(timestamp.Load().(string))
case TagReferer:
return buf.WriteString(c.Get(fiber.HeaderReferer))
case TagProtocol:
return buf.WriteString(c.Protocol())
case TagPid:
return buf.WriteString(pid)
case TagIP:
return buf.WriteString(c.IP())
case TagIPs:
return buf.WriteString(c.Get(fiber.HeaderXForwardedFor))
case TagHost:
return buf.WriteString(c.Hostname())
case TagPath:
return buf.WriteString(c.Path())
case TagURL:
return buf.WriteString(c.OriginalURL())
case TagUA:
return buf.WriteString(c.Get(fiber.HeaderUserAgent))
case TagLatency:
return buf.WriteString(stop.Sub(start).String())
case TagBody:
return buf.Write(c.Body())
case TagBytesReceived:
return appendInt(buf, len(c.Request().Body()))
case TagBytesSent:
return appendInt(buf, len(c.Response().Body()))
case TagRoute:
return buf.WriteString(c.Route().Path)
case TagStatus:
return appendInt(buf, c.Response().StatusCode())
case TagMethod:
return buf.WriteString(c.Method())
case TagBlack:
return buf.WriteString(cBlack)
case TagRed:
return buf.WriteString(cRed)
case TagGreen:
return buf.WriteString(cGreen)
case TagYellow:
return buf.WriteString(cYellow)
case TagBlue:
return buf.WriteString(cBlue)
case TagMagenta:
return buf.WriteString(cMagenta)
case TagCyan:
return buf.WriteString(cCyan)
case TagWhite:
return buf.WriteString(cWhite)
case TagReset:
return buf.WriteString(cReset)
case TagError:
if chainErr != nil {
return buf.WriteString(chainErr.Error())
}
return buf.WriteString("-")
default:
// Check if we have a value tag i.e.: "header:x-key"
switch {
case strings.HasPrefix(tag, TagHeader):
return buf.WriteString(c.Get(tag[7:]))
case strings.HasPrefix(tag, TagQuery):
return buf.WriteString(c.Query(tag[6:]))
case strings.HasPrefix(tag, TagForm):
return buf.WriteString(c.FormValue(tag[5:]))
case strings.HasPrefix(tag, TagCookie):
return buf.WriteString(c.Cookies(tag[7:]))
case strings.HasPrefix(tag, TagLocals):
switch v := c.Locals(tag[7:]).(type) {
case []byte:
return buf.Write(v)
case string:
return buf.WriteString(v)
case nil:
return 0, nil
default:
return buf.WriteString(fmt.Sprintf("%v", v))
}
}
}
return 0, nil
})
// Also write errors to the buffer
if err != nil {
_, _ = buf.WriteString(err.Error())
}
// Write buffer to output
if _, err := cfg.Output.Write(buf.Bytes()); err != nil {
// Write error to output
if _, err := cfg.Output.Write([]byte(err.Error())); err != nil {
// There is something wrong with the given io.Writer
// TODO: What should we do here?
}
}
// Put buffer back to pool
bytebufferpool.Put(buf)
return nil
}
}
func appendInt(buf *bytebufferpool.ByteBuffer, v int) (int, error) | {
old := len(buf.B)
buf.B = fasthttp.AppendUint(buf.B, v)
return len(buf.B) - old, nil
} |
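// Example usage (editor's sketch; the format string is illustrative -- any of
// the Tag* constants above can be referenced as ${tag}):
//
//	app := fiber.New()
//	app.Use(New(Config{
//		Format:     "${pid} ${status} - ${method} ${path}\n",
//		TimeFormat: "02-Jan-2006",
//		TimeZone:   "UTC",
//	}))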
|
animation.rs | use bevy::prelude::*;
pub fn animate_sprite_system(
time: Res<Time>,
texture_atlases: Res<Assets<TextureAtlas>>, | if timer.finished() {
let texture_atlas = texture_atlases.get(texture_atlas_handle).unwrap();
sprite.index = ((sprite.index as usize + 1) % texture_atlas.textures.len()) as u32;
}
}
} | mut query: Query<(&mut Timer, &mut TextureAtlasSprite, &Handle<TextureAtlas>)>,
) {
for (mut timer, mut sprite, texture_atlas_handle) in query.iter_mut() {
timer.tick(time.delta()); |
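// Example usage (editor's sketch; the scheduling API below matches the
// Bevy 0.5-era style this system appears to be written against):
//
//     App::build()
//         .add_plugins(DefaultPlugins)
//         .add_system(animate_sprite_system.system())
//         .run();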
settings.js | import styled from "styled-components";
const primaryColor = "#464649";
const secondaryColor = "#a87062";
const normalTextColor = "#bcbcbd";
const whiteColor = "#ffffff";
const DarkerColor = "#0c0c10";
const backgroundColor = "#151519";
export const OpenSettingsModalIcon = styled.div`
position: absolute;
top: 0px;
left: 0px;
margin-top: 1.25rem;
margin-left: 1.25rem;
color: ${(props) => (!props.color ? normalTextColor : props.color)};
font-size: 1.35rem;
transition: 500ms;
&:hover {
color: ${secondaryColor};
}
`;
export const ModalDisplay = styled.div`
display: ${(props) => (props.show ? "flex" : "none")};
flex-direction: row;
background-color: #1010106b;
backdrop-filter: blur(15px);
justify-content: center;
align-items: center;
padding: 10rem;
position: fixed;
width: 100%;
height: 100vh;
z-index: 3;
@media screen and (max-width: 1144px) {
flex-direction: column-reverse;
padding: 2rem;
box-sizing: border-box;
}
`;
export const PreviewContainer = styled.div`
position: relative;
width: 1200px;
height: 700px;
margin: 4rem;
padding: 0.2rem;
display: flex;
justify-content: center;
align-items: center;
background-image: url(${(props) => props.backgroundImage});
background-position: center center;
background-size: 1200px 700px;
background-attachment: local;
background-repeat: no-repeat;
zoom: 0.8;
@media screen and (max-width: 1750px) {
zoom: 0.6;
}
@media screen and (max-width: 1403px) {
zoom: 0.443;
}
`;
export const BackgroundImagePreview = styled.div`
position: absolute;
width: 100%;
height: 100%;
&:after,
&:before {
background: url(${(props) => props.backgroundImage});
background-size: cover;
background-repeat: no-repeat;
content: "";
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
}
&:after {
filter: blur(${(props) => props.blurLevel + "px"});
}
&:before {
box-sizing: border-box;
margin: -10px;
border: 2px solid ${primaryColor};
}
`;
export const ModalContainer = styled.div`
display: grid;
grid-template-rows: 1fr 55px;
overflow: scroll;
overflow-x: hidden;
overflow-y: auto;
font-family: "sora";
background-color: ${backgroundColor};
color: white;
width: 544px;
max-height: 92%;
border-radius: 2px;
box-sizing: border-box;
`;
export const OptionsContainer = styled.div`
display: flex;
flex-direction: column;
width: 100%;
padding: 1rem;
box-sizing: border-box;
overflow: auto;
overflow-x: hidden; | overflow-y: auto;
&::-webkit-scrollbar {
width: 9px;
}
&::-webkit-scrollbar-track {
background: ${DarkerColor};
}
&::-webkit-scrollbar-thumb {
background-color: ${secondaryColor};
border-radius: 0;
}
`;
export const SaveDiscardContainer = styled.div`
display: flex;
flex-direction: row;
justify-content: space-between;
width: 100%;
height: 100%;
box-sizing: border-box;
padding: 1.25rem;
border-top: 2px solid ${DarkerColor};
`;
export const ModalTitles = styled.h1`
font-size: ${(props) => (props.small ? "0.75rem" : "0.83rem")};
font-weight: 700;
color: ${whiteColor};
`;
export const ModalItem = styled.div`
display: flex;
flex-direction: ${(props) => (props.row ? "row" : "column")};
justify-content: ${(props) => (props.row ? "space-between" : "left")};
align-items: ${(props) => (props.row ? "center" : "left")};
margin-bottom: ${(props) => (props.noBottomMargin ? "0" : "0.5rem")};
margin-top: ${(props) => (props.noTopMargin ? "0" : " 0.5rem")};
`;
export const DefaultButton = styled.button`
font-family: "sora";
font-weight: 700;
font-size: 0.75rem;
background-color: ${primaryColor};
color: ${normalTextColor};
padding: 0.21rem 1rem 0.21rem 1rem;
margin: 0 !important;
border-radius: 2px;
border: 0;
width: 80px;
transition: 500ms;
&:hover {
background-color: ${secondaryColor};
}
`;
export const DefaultRange = styled.input`
width: 360px;
appearance: none;
background-color: transparent;
&:focus {
outline: none;
}
&::-webkit-slider-thumb {
height: 18px;
width: 18px;
border-radius: 50%;
background: ${secondaryColor};
-webkit-appearance: none;
-moz-appearance: none;
margin-top: -0.4rem;
cursor: pointer;
}
&::-webkit-slider-runnable-track {
width: 100%;
height: 5px;
background: ${primaryColor};
border-radius: 2px;
-webkit-appearance: none;
}
&::-moz-range-thumb {
height: 18px;
width: 18px;
border-radius: 50%;
background: ${secondaryColor};
margin-top: -0.4rem;
border: none;
cursor: pointer;
}
&::-moz-range-track {
width: 100%;
height: 5px;
background: ${primaryColor};
border-radius: 2px;
border: none;
}
&::-moz-focus-inner {
border: none;
visibility: hidden;
}
`;
export const DefaultSwitchLabel = styled.label`
position: relative;
display: inline-block;
width: 60px;
height: 24px;
`;
export const DefaultSwitchInput = styled.input`
opacity: 0;
width: 0;
height: 0;
&:checked + span::before {
-webkit-transform: translateX(26px);
-ms-transform: translateX(26px);
transform: translateX(26px);
background-color: ${secondaryColor};
}
`;
export const DefaultSwitchSpan = styled.span`
position: absolute;
cursor: pointer;
top: 0;
left: 0;
right: 0;
bottom: 0;
border-radius: 2px;
background-color: ${DarkerColor};
-webkit-transition: 0.4s;
transition: 0.4s;
&:before {
position: absolute;
content: "";
height: 16px;
width: 26px;
left: 4px;
bottom: 4px;
border-radius: 2px;
background-color: ${primaryColor};
-webkit-transition: 0.4s;
transition: 0.4s;
}
`;
export const Upload = styled.input`
font-family: "sora";
font-size: 0.7rem;
font-weight: 500;
&::-webkit-file-upload-button {
font-family: "sora";
font-weight: 700;
font-size: 0.75rem;
background-color: ${primaryColor};
color: ${normalTextColor};
padding: 0.21rem 1rem 0.21rem 1rem;
margin-right: 0.5rem;
border-radius: 2px;
border: 0;
transition: 500ms;
&:hover {
background-color: ${secondaryColor};
}
}
&::file-selector-button {
font-family: "sora";
font-weight: 700;
font-size: 0.75rem;
background-color: ${primaryColor};
color: ${normalTextColor};
padding: 0.21rem 1rem 0.21rem 1rem;
margin-right: 0.5rem;
border-radius: 2px;
border: 0;
transition: 500ms;
&:hover {
background-color: ${secondaryColor};
}
}
`;
export const TimezoneSelect = styled.select`
font-family: "sora";
font-weight: 700;
font-size: 0.75rem;
background-color: ${primaryColor};
color: ${normalTextColor};
padding: 0.21rem 0.5rem 0.21rem 0.5rem;
margin-right: 0.5rem;
border-radius: 2px;
border: 0;
transition: 500ms;
&:hover {
background-color: ${DarkerColor};
}
&::-webkit-scrollbar {
width: 9px;
}
&::-webkit-scrollbar-track {
background: ${primaryColor};
}
&::-webkit-scrollbar-thumb {
background-color: ${secondaryColor};
border-radius: 10px;
}
`;
export const CloseIcon = styled.div`
font-size: 1.5rem;
color: ${primaryColor};
display: flex;
align-items: center;
transition: 500ms;
&:hover {
color: ${secondaryColor};
}
`;
export const SupportMeLink = styled.a`
display: flex;
justify-content: center;
align-items: center;
margin: 0.62rem;
font-size: 0.77rem;
color: ${secondaryColor} !important;
text-align: center;
font-weight: 700;
`;
export const SupportMeIcon = styled.div`
display: flex;
align-items: center;
margin-left: 0.18rem;
font-size: 1rem;
`;
export const BookmarkInputsContainer = styled.div`
display: flex;
flex-direction: row;
align-items: center;
width: 100%;
`;
export const BookmarkInputs = styled.input`
width: 100%;
height: 20px;
border: 0;
border-radius: 2px;
margin-bottom: 0.31rem;
font-family: "roboto";
font-weight: 400;
background-color: ${primaryColor};
color: ${normalTextColor};
`;
export const BookmarkInputRemove = styled.div`
font-size: 1.3rem;
margin-left: 0.35rem;
color: ${primaryColor};
transition: 500ms;
&:hover {
color: ${secondaryColor};
}
`;
export const CropImageContainer = styled.div`
display: flex;
flex-direction: column;
margin-top: 1rem;
margin-bottom: 2rem;
`;
export const CropImageRangeContainer = styled.div`
width: 400px;
display: flex;
flex-direction: row;
font-size: 1.7rem;
color: ${whiteColor};
margin-top: 0.4rem;
margin-bottom: 0.25rem;
justify-content: right;
align-items: center;
`;
export const TwoConfigContainer = styled.div`
display: grid;
grid-template-columns: 1fr 1fr;
width: 100%;
height: min-content;
margin-top: 0.8rem;
justify-items: center;
`;
export const TwoConfigItem = styled.div`
display: flex;
flex-direction: column;
`;
export const TwoConfigItemTitle = styled.div`
width: 100%;
text-align: center;
font-family: "sora";
font-size: 0.8rem;
font-weight: 700;
margin-bottom: 0.5rem;
`;
export const HexInputContainer = styled.div`
margin-top: 0.5rem;
input {
width: 100%;
height: 20px;
border: 0;
border-radius: 2px;
margin-bottom: 0.31rem;
font-family: "roboto";
font-weight: 400;
text-align: center;
background-color: ${primaryColor};
color: ${normalTextColor};
}
`; | |
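// Example usage (editor's sketch; `isOpen`, `wallpaper` and `blur` are
// placeholder props supplied by the consuming component):
//
// <ModalDisplay show={isOpen}>
//   <PreviewContainer backgroundImage={wallpaper}>
//     <BackgroundImagePreview backgroundImage={wallpaper} blurLevel={blur} />
//   </PreviewContainer>
//   <ModalContainer>
//     <OptionsContainer>{/* settings controls */}</OptionsContainer>
//     <SaveDiscardContainer>
//       <DefaultButton>Save</DefaultButton>
//       <DefaultButton>Discard</DefaultButton>
//     </SaveDiscardContainer>
//   </ModalContainer>
// </ModalDisplay>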
plt0.rs | use fancy_slice::FancySlice;
use crate::wii_texture_formats::WiiPaletteFormat;
use crate::user_data::{UserData, UserDataValue};
pub(crate) fn plt0(data: FancySlice) -> Plt0 {
let size = data.i32_be(0x4);
let version = data.i32_be(0x8);
//let bres_offset = data.i32_be(0xc);
//let resources_offset = data.i32_be(0x10);
let string_offset = data.u32_be(0x14);
let pixel_format = data.u32_be(0x18);
//let num_entries = data.u16_be(0x1c);
let orig_path_offset = data.i32_be(0x20);
let pixel_format = WiiPaletteFormat::new(pixel_format);
let user_data = if version == 3 {
let _user_data_offset = data.i32_be(0x24);
let mut user_data = vec!();
// TODO
user_data.push(UserData {
name: "TODO".into(),
value: UserDataValue::Int(42),
});
user_data
} else if version == 1 {
vec!()
} else {
panic!("Unknown PLT0 verison: {}", version)
};
let name = data.str(string_offset as usize).unwrap().to_string();
    // TODO: This doesn't necessarily start at PLT0_HEADER_SIZE; maybe the offset is stored in the
    // resources, which aren't parsed yet??
    // Brawlcrate seems to just read from PLT0_HEADER_SIZE ???
let color_data: Vec<u16> = data.relative_slice(PLT0_HEADER_SIZE..size as usize)
.chunks_exact(2)
.map(|x| u16::from_be_bytes([x[0], x[1]])).collect();
Plt0 { name, pixel_format, orig_path_offset, user_data, color_data }
}
const PLT0_HEADER_SIZE: usize = 0x40;
#[derive(Clone, Debug)]
pub struct Plt0 {
pub name: String,
pub pixel_format: WiiPaletteFormat,
pub user_data: Vec<UserData>,
pub color_data: Vec<u16>,
// TODO: Calculate this, what is it even pointing to?
orig_path_offset: i32,
}
impl Plt0 {
pub fn | (&self, bres_offset: i32) -> Vec<u8> {
let mut output = vec!();
let size = PLT0_HEADER_SIZE + self.color_data.len() * 2;
let version = if self.user_data.len() > 0 { 3 } else { 1 };
let num_entries = self.color_data.len();
// create PLT0 header
output.extend("PLT0".chars().map(|x| x as u8));
output.extend(&i32::to_be_bytes(size as i32));
output.extend(&i32::to_be_bytes(version));
output.extend(&i32::to_be_bytes(bres_offset));
output.extend(&i32::to_be_bytes(0)); // TODO: resources_offset
output.extend(&u32::to_be_bytes(0)); // TODO: string_offset
output.extend(&u32::to_be_bytes(self.pixel_format.value()));
output.extend(&u16::to_be_bytes(num_entries as u16));
output.extend(&u16::to_be_bytes(0)); // padding
output.extend(&i32::to_be_bytes(self.orig_path_offset));
if self.user_data.len() > 0 {
output.extend(&i32::to_be_bytes(0x44)); // TODO: I just guessed this is a constant?
}
output.extend(&[0; 0x1c]); // padding
// create user data
for _user_data in &self.user_data {
output.push(0x42); // TODO
}
if self.user_data.len() > 0 {
while output.len() % 0x20 != 0 {
output.push(0x00);
}
}
// create color data
for color in &self.color_data {
output.extend(&u16::to_be_bytes(*color));
}
output
}
}
| compile |
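// Example usage (editor's sketch; `data` is assumed to be a FancySlice over
// the original PLT0 bytes and `bres_offset` whatever the containing BRES
// expects):
//
//     let palette = plt0(data);
//     let bytes = palette.compile(bres_offset);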
preprocessing.py | from typing import List, Tuple, Dict, Any
from collections import Counter
import pretty_midi
import matplotlib.pyplot as plt
import librosa.display
import os
from os import listdir, walk
from os.path import isfile, isdir, join
from sys import argv
import traceback
import logging
import numpy as np
from shutil import copyfile
import shutil
# Ideas behind the preprocessing class
#
# 1. Only use MIDI files that have a single tempo and a single key, since some
#    MIDI music contains key and tempo changes, which can make the results
#    unpredictable.
#
# 2. Compute the key distribution over the whole corpus and keep only the most
#    frequently appearing keys (mixing keys may increase training difficulty).
#
# 3. Only select music with similar tempos, based on the mean and std of all
#    tempos; a simple rule is left boundary = mean - std, right boundary = mean + std.
#
# 4. Find the mean of the highest and lowest pitches in the corpus and filter
#    out files outside that range. MIDI pitches span 0-127, so there is no
#    point covering the two extremes.
class FileReport(object):
"""
This class is mainly for generating meta information for our report
"""
def __init__(self,
tempos: List[float],
freq_key: Dict[int, int],
min_pitch: List[int],
max_pitch: List[int]):
self.tempos = tempos
self.freq_key = freq_key
self.min_pitch = min_pitch
self.max_pitch = max_pitch
def aggregation_report(self):
"""
        two important variables are min_pitch and max_pitch,
since they will be used to decode from pitch to audio
"""
temp_mean = np.array(self.tempos).mean()
temp_std = np.array(self.tempos).std()
most_freq_key = self.getMostFreqValue(self.freq_key)
min_pitch = int(np.array(self.min_pitch).mean())
max_pitch = int(np.array(self.max_pitch).mean())
return temp_mean, temp_std, most_freq_key, min_pitch, max_pitch
def plots(self):
# implement later on
pass
def getMostFreqValue(self, keys: Dict[int, int], reversed=True) -> int:
return sorted(keys.items(), key=lambda kv: kv[1], reverse=reversed)[0][0]
class Preprocess(object):
def __init__(self, path: str):
self.path = path
self.fileFilter()
def generateMidiFileReport(self) -> FileReport:
"""
meta information like tempos, keys, pitches will be generated for
filtering the midi files
"""
tempos = []
keys = []
max_pitchs = []
min_pitchs = []
for pm in self.pms:
try:
tempos.append(pm.estimate_tempo())
key = pm.key_signature_changes[0].key_number
keys.append(key)
min_pitch, max_pitch = self.getMinMaxPitch(pm)
max_pitchs.append(max_pitch)
min_pitchs.append(min_pitch)
except:
pass
self.report = FileReport(tempos, dict(
Counter(keys)), min_pitchs, max_pitchs)
return self.report
def getMinMaxPitch(self, pm: pretty_midi.PrettyMIDI):
"""
find the min and max pitch inside a midi file
"""
notes = [
note.pitch for instrument in pm.instruments for note in instrument.notes
]
return min(notes), max(notes)
def SaveFilterMIDIfiles(self):
"""
        use the generated metadata to filter out files that are not in range
"""
report = self.generateMidiFileReport()
temp_mean, temp_std, key, left_boundary, right_boundary = report.aggregation_report()
piano_roll_paths = []
for pm, path in zip(self.pms, self.paths):
try:
tempo = pm.estimate_tempo()
min_pitch, max_pitch = self.getMinMaxPitch(pm)
if self.isTempoInRange(tempo, temp_mean, temp_std) \
and self.isPitchInRange(min_pitch, max_pitch, left_boundary, right_boundary) \
and self.isKeyMatch(pm.key_signature_changes[0].key_number, key):
savedPath = os.path.join(os.getcwd(), 'filterData')
if not os.path.exists(savedPath):
os.makedirs(savedPath, exist_ok=True)
shutil.move(
path, os.path.join(os.getcwd(), 'filterData', os.path.basename(path)))
except:
pass
def | (self, tempo: float, mean: float, std: float) -> bool:
"""
        a helper function to check whether a midi file's tempo is in range
"""
if tempo > (mean - std) and tempo < (mean + std):
return True
return False
def isKeyMatch(self, key: int, grand_truth_key: int) -> bool:
if key == grand_truth_key:
return True
return False
def isPitchInRange(self, low_pitch: int,
high_pitch: int,
left_boundary: int,
right_boundary: int) -> bool:
if low_pitch >= left_boundary and high_pitch <= right_boundary:
return True
return False
def fileFilter(self):
"""
        first filtering pass: only keep midi files with one tempo and one key
"""
self.pms: List[pretty_midi.PrettyMIDI] = []
self.paths: List[str] = []
for (dirPath, _, files) in walk(self.path): # type: ignore
for file in files:
# get the absoluted path of file
path = join(dirPath, file)
try:
pm = pretty_midi.PrettyMIDI(path)
# only handle files contain one key and one tempo
if len(pm.key_signature_changes) == 1 \
and len(pm.time_signature_changes) == 1:
self.pms.append(pm)
self.paths.append(path)
except: # skip all parsing exceptions
pass
def cliArgParser(argv) -> Any:
if len(argv) != 2:
raise ValueError(f"path of folder must be provided")
if isdir(argv[1]):
path = os.path.abspath(argv[1])
return path
else:
raise ValueError(f"provided path is not a folder")
if __name__ == "__main__":
try:
path = cliArgParser(argv)
p = Preprocess(path)
p.SaveFilterMIDIfiles()
except Exception as err:
print(traceback.format_exc())
exit(1)
| isTempoInRange |
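# Example usage (editor's sketch; the corpus path is a placeholder):
#
#     p = Preprocess('/data/midi_corpus')
#     report = p.generateMidiFileReport()
#     mean, std, key, lo, hi = report.aggregation_report()
#     # a file passes the tempo filter when mean - std < tempo < mean + std
#     p.SaveFilterMIDIfiles()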
resource_citrixadc_appfwurlencodedformcontenttype_test.go | /*
Copyright 2016 Citrix Systems, Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package citrixadc
import (
"fmt"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
"testing"
)
const testAccAppfwurlencodedformcontenttype_basic = `
resource "citrixadc_appfwurlencodedformcontenttype" "tf_urlencodedformcontenttype" {
urlencodedformcontenttypevalue = "tf_urlencodedformcontenttype"
isregex = "NOTREGEX"
}
`
func TestAccAppfwurlencodedformcontenttype_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAppfwurlencodedformcontenttypeDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccAppfwurlencodedformcontenttype_basic,
Check: resource.ComposeTestCheckFunc(
testAccCheckAppfwurlencodedformcontenttypeExist("citrixadc_appfwurlencodedformcontenttype.tf_urlencodedformcontenttype", nil),
resource.TestCheckResourceAttr("citrixadc_appfwurlencodedformcontenttype.tf_urlencodedformcontenttype", "urlencodedformcontenttypevalue", "tf_urlencodedformcontenttype"),
resource.TestCheckResourceAttr("citrixadc_appfwurlencodedformcontenttype.tf_urlencodedformcontenttype", "isregex", "NOTREGEX"),
),
},
},
})
}
func testAccCheckAppfwurlencodedformcontenttypeExist(n string, id *string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("Not found: %s", n)
}
if rs.Primary.ID == "" |
if id != nil {
if *id != "" && *id != rs.Primary.ID {
return fmt.Errorf("Resource ID has changed!")
}
*id = rs.Primary.ID
}
nsClient := testAccProvider.Meta().(*NetScalerNitroClient).client
data, err := nsClient.FindResource("appfwurlencodedformcontenttype", rs.Primary.ID)
if err != nil {
return err
}
if data == nil {
return fmt.Errorf("appfwurlencodedformcontenttype %s not found", n)
}
return nil
}
}
func testAccCheckAppfwurlencodedformcontenttypeDestroy(s *terraform.State) error {
nsClient := testAccProvider.Meta().(*NetScalerNitroClient).client
for _, rs := range s.RootModule().Resources {
if rs.Type != "citrixadc_appfwurlencodedformcontenttype" {
continue
}
if rs.Primary.ID == "" {
return fmt.Errorf("No name is set")
}
_, err := nsClient.FindResource("appfwurlencodedformcontenttype", rs.Primary.ID)
if err == nil {
return fmt.Errorf("appfwurlencodedformcontenttype %s still exists", rs.Primary.ID)
}
}
return nil
}
| {
return fmt.Errorf("No appfwurlencodedformcontenttype name is set")
} |
sync.ts | import chalk from "chalk";
import fs from "fs-extra";
import { glob } from "glob";
import path from "path";
import { Logger } from "tsrpc-proto";
import { i18n } from "../i18n/i18n";
import { CliUtil } from "../models/CliUtil";
import { TsrpcConfig } from "../models/TsrpcConfig";
import { error } from "../models/util";
import { ensureSymlinks } from "./link";
export type CmdSyncOptions = {
from: string | undefined,
to: string | undefined,
verbose: boolean | undefined,
config: undefined
} | { config: TsrpcConfig }
export async function cmdSync(options: CmdSyncOptions) {
if (options.config) {
if (!options.config.sync?.length) {
console.log(chalk.yellow(i18n.nothingSyncConf));
return;
}
const logger = options.config.verbose ? console : undefined;
// Copy
for (let item of options.config.sync) {
if (item.type === 'copy') {
CliUtil.doing(`${i18n.copy} '${item.from}' -> '${item.to}'`);
await copyDirReadonly(item.from, item.to, !!item.clean, logger);
CliUtil.done(true);
}
}
// Symlinks
await ensureSymlinks(options.config.sync.filter(v => v.type === 'symlink').map(v => ({
src: v.from,
dst: v.to
})), console);
console.log(chalk.green(i18n.allSyncedSucc))
}
else {
// Validate options
if (!options.from) {
throw error(i18n.missingParam, { param: 'from' });
}
if (!options.to) {
throw error(i18n.missingParam, { param: 'to' });
}
if (await fs.access(options.from).catch(() => true)) {
throw error(i18n.dirNotExists, { dir: path.resolve(options.from) })
}
CliUtil.doing(`${i18n.copy} '${path.resolve(options.from)}' -> '${path.resolve(options.to)}'`);
await copyDirReadonly(options.from, options.to, true, options.verbose ? console : undefined);
CliUtil.done(true);
console.log(chalk.green(i18n.syncedSucc))
}
}
export async function copyDirReadonly(src: string, dst: string, clean: boolean, logger?: Logger) {
// Clean
if (clean) {
logger?.debug(`Start to clean '${dst}'`)
await fs.remove(dst);
logger?.debug(`Cleaned succ`)
}
// Copy
logger?.debug(`Start to copy '${src}' to '${dst}'`)
await fs.ensureDir(dst);
await fs.copy(src, dst);
logger?.debug('Copyed succ');
// Readonly (chmod 0o444)
setReadonlyRecursive(dst, logger);
}
export async function | (dst: string, logger?: Logger) {
logger?.debug(`Start to setReadonlyRecursive to '${dst}'`)
let items = await new Promise<string[]>((rs, rj) => {
glob(path.resolve(dst, '**'), (err, matches) => {
            err ? rj(err) : rs(matches);
})
})
for (let item of items) {
let stat = fs.statSync(item);
if (stat.isFile()) {
await fs.chmod(item, 0o444);
logger?.log(chalk.green('chmod 444: ' + item));
}
}
logger?.debug('setReadonlyRecursive succ');
} | setReadonlyRecursive |
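// Example usage (editor's sketch; paths are placeholders):
//
//     await copyDirReadonly('src/shared/protocols', 'backend/src/shared/protocols',
//         /* clean */ true, console);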
test_replace.py | # coding=utf-8
# pylint: disable-msg=E1101,W0612
import numpy as np
import pytest
import pandas as pd
import pandas.util.testing as tm
from .common import TestData
class TestSeriesReplace(TestData):
def test_replace(self):
N = 100
ser = pd.Series(np.random.randn(N))
ser[0:4] = np.nan
ser[6:10] = 0
# replace list with a single value
ser.replace([np.nan], -1, inplace=True)
exp = ser.fillna(-1)
tm.assert_series_equal(ser, exp)
rs = ser.replace(0., np.nan)
ser[ser == 0.] = np.nan
tm.assert_series_equal(rs, ser)
ser = pd.Series(np.fabs(np.random.randn(N)), tm.makeDateIndex(N),
dtype=object)
ser[:5] = np.nan
ser[6:10] = 'foo'
ser[20:30] = 'bar'
# replace list with a single value
rs = ser.replace([np.nan, 'foo', 'bar'], -1)
assert (rs[:5] == -1).all()
assert (rs[6:10] == -1).all()
assert (rs[20:30] == -1).all()
assert (pd.isna(ser[:5])).all()
# replace with different values
rs = ser.replace({np.nan: -1, 'foo': -2, 'bar': -3})
assert (rs[:5] == -1).all()
assert (rs[6:10] == -2).all()
assert (rs[20:30] == -3).all()
assert (pd.isna(ser[:5])).all()
# replace with different values with 2 lists
rs2 = ser.replace([np.nan, 'foo', 'bar'], [-1, -2, -3])
tm.assert_series_equal(rs, rs2)
# replace inplace
ser.replace([np.nan, 'foo', 'bar'], -1, inplace=True)
assert (ser[:5] == -1).all()
assert (ser[6:10] == -1).all()
assert (ser[20:30] == -1).all()
ser = pd.Series([np.nan, 0, np.inf])
tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
ser = pd.Series([np.nan, 0, 'foo', 'bar', np.inf, None, pd.NaT])
tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
filled = ser.copy()
filled[4] = 0
tm.assert_series_equal(ser.replace(np.inf, 0), filled)
ser = pd.Series(self.ts.index)
tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
# malformed
msg = r"Replacement lists must match in length\. Expecting 3 got 2"
with pytest.raises(ValueError, match=msg):
ser.replace([1, 2, 3], [np.nan, 0])
# make sure that we aren't just masking a TypeError because bools don't
# implement indexing
with pytest.raises(TypeError, match='Cannot compare types .+'):
ser.replace([1, 2], [np.nan, 0])
ser = pd.Series([0, 1, 2, 3, 4])
result = ser.replace([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])
tm.assert_series_equal(result, pd.Series([4, 3, 2, 1, 0]))
def test_replace_gh5319(self):
# API change from 0.12?
# GH 5319
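# replace() without a replacement value defaults to method='pad', so the
# result should match a plain forward-fill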
ser = pd.Series([0, np.nan, 2, 3, 4])
expected = ser.ffill()
result = ser.replace([np.nan])
tm.assert_series_equal(result, expected)
ser = pd.Series([0, np.nan, 2, 3, 4])
expected = ser.ffill()
result = ser.replace(np.nan)
tm.assert_series_equal(result, expected)
# GH 5797
ser = pd.Series(pd.date_range('20130101', periods=5))
expected = ser.copy()
expected.loc[2] = pd.Timestamp('20120101')
result = ser.replace({pd.Timestamp('20130103'):
pd.Timestamp('20120101')})
tm.assert_series_equal(result, expected)
result = ser.replace(pd.Timestamp('20130103'),
pd.Timestamp('20120101'))
tm.assert_series_equal(result, expected)
# GH 11792: Test with replacing NaT in a list with tz data
ts = pd.Timestamp('2015/01/01', tz='UTC')
s = pd.Series([pd.NaT, pd.Timestamp('2015/01/01', tz='UTC')])
result = s.replace([np.nan, pd.NaT], pd.Timestamp.min)
expected = pd.Series([pd.Timestamp.min, ts], dtype=object)
tm.assert_series_equal(expected, result)
def test_replace_with_single_list(self):
ser = pd.Series([0, 1, 2, 3, 4])
result = ser.replace([1, 2, 3])
tm.assert_series_equal(result, pd.Series([0, 0, 0, 0, 4]))
s = ser.copy()
s.replace([1, 2, 3], inplace=True)
tm.assert_series_equal(s, pd.Series([0, 0, 0, 0, 4]))
# make sure things don't get corrupted when fillna call fails | with pytest.raises(ValueError, match=msg):
s.replace([1, 2, 3], inplace=True, method='crash_cymbal')
tm.assert_series_equal(s, ser)
def test_replace_with_empty_list(self):
# GH 21977
s = pd.Series([[1], [2, 3], [], np.nan, [4]])
expected = s
result = s.replace([], np.nan)
tm.assert_series_equal(result, expected)
# GH 19266
with pytest.raises(ValueError, match="cannot assign mismatch"):
s.replace({np.nan: []})
with pytest.raises(ValueError, match="cannot assign mismatch"):
s.replace({np.nan: ['dummy', 'alt']})
def test_replace_mixed_types(self):
s = pd.Series(np.arange(5), dtype='int64')
def check_replace(to_rep, val, expected):
sc = s.copy()
r = s.replace(to_rep, val)
sc.replace(to_rep, val, inplace=True)
tm.assert_series_equal(expected, r)
tm.assert_series_equal(expected, sc)
# MUST upcast to float
e = pd.Series([0., 1., 2., 3., 4.])
tr, v = [3], [3.0]
check_replace(tr, v, e)
# MUST upcast to float
e = pd.Series([0, 1, 2, 3.5, 4])
tr, v = [3], [3.5]
check_replace(tr, v, e)
# casts to object
e = pd.Series([0, 1, 2, 3.5, 'a'])
tr, v = [3, 4], [3.5, 'a']
check_replace(tr, v, e)
# again casts to object
e = pd.Series([0, 1, 2, 3.5, pd.Timestamp('20130101')])
tr, v = [3, 4], [3.5, pd.Timestamp('20130101')]
check_replace(tr, v, e)
# casts to object
e = pd.Series([0, 1, 2, 3.5, True], dtype='object')
tr, v = [3, 4], [3.5, True]
check_replace(tr, v, e)
# test an object with dates + floats + integers + strings
dr = pd.date_range('1/1/2001', '1/10/2001',
freq='D').to_series().reset_index(drop=True)
result = dr.astype(object).replace(
[dr[0], dr[1], dr[2]], [1.0, 2, 'a'])
expected = pd.Series([1.0, 2, 'a'] + dr[3:].tolist(), dtype=object)
tm.assert_series_equal(result, expected)
def test_replace_bool_with_string_no_op(self):
s = pd.Series([True, False, True])
result = s.replace('fun', 'in-the-sun')
tm.assert_series_equal(s, result)
def test_replace_bool_with_string(self):
# nonexistent elements
s = pd.Series([True, False, True])
result = s.replace(True, '2u')
expected = pd.Series(['2u', False, '2u'])
tm.assert_series_equal(expected, result)
def test_replace_bool_with_bool(self):
s = pd.Series([True, False, True])
result = s.replace(True, False)
expected = pd.Series([False] * len(s))
tm.assert_series_equal(expected, result)
def test_replace_with_dict_with_bool_keys(self):
s = pd.Series([True, False, True])
with pytest.raises(TypeError, match='Cannot compare types .+'):
s.replace({'asdf': 'asdb', True: 'yes'})
def test_replace2(self):
N = 100
ser = pd.Series(np.fabs(np.random.randn(N)), tm.makeDateIndex(N),
dtype=object)
ser[:5] = np.nan
ser[6:10] = 'foo'
ser[20:30] = 'bar'
# replace list with a single value
rs = ser.replace([np.nan, 'foo', 'bar'], -1)
assert (rs[:5] == -1).all()
assert (rs[6:10] == -1).all()
assert (rs[20:30] == -1).all()
assert (pd.isna(ser[:5])).all()
# replace with different values
rs = ser.replace({np.nan: -1, 'foo': -2, 'bar': -3})
assert (rs[:5] == -1).all()
assert (rs[6:10] == -2).all()
assert (rs[20:30] == -3).all()
assert (pd.isna(ser[:5])).all()
# replace with different values with 2 lists
rs2 = ser.replace([np.nan, 'foo', 'bar'], [-1, -2, -3])
tm.assert_series_equal(rs, rs2)
# replace inplace
ser.replace([np.nan, 'foo', 'bar'], -1, inplace=True)
assert (ser[:5] == -1).all()
assert (ser[6:10] == -1).all()
assert (ser[20:30] == -1).all()
def test_replace_with_empty_dictlike(self):
# GH 15289
s = pd.Series(list('abcd'))
tm.assert_series_equal(s, s.replace(dict()))
tm.assert_series_equal(s, s.replace(pd.Series([])))
def test_replace_string_with_number(self):
# GH 15743
s = pd.Series([1, 2, 3])
result = s.replace('2', np.nan)
expected = pd.Series([1, 2, 3])
tm.assert_series_equal(expected, result)
def test_replace_replacer_equals_replacement(self):
# GH 20656
# make sure all replacers are matching against original values
s = pd.Series(['a', 'b'])
expected = pd.Series(['b', 'a'])
result = s.replace({'a': 'b', 'b': 'a'})
tm.assert_series_equal(expected, result)
def test_replace_unicode_with_number(self):
# GH 15743
s = pd.Series([1, 2, 3])
result = s.replace('2', np.nan)
expected = pd.Series([1, 2, 3])
tm.assert_series_equal(expected, result)
def test_replace_mixed_types_with_string(self):
# Testing mixed
s = pd.Series([1, 2, 3, '4', 4, 5])
result = s.replace([2, '4'], np.nan)
expected = pd.Series([1, np.nan, 3, np.nan, 4, 5])
tm.assert_series_equal(expected, result)
@pytest.mark.parametrize("categorical, numeric", [
(pd.Categorical('A', categories=['A', 'B']), [1]),
(pd.Categorical(('A', ), categories=['A', 'B']), [1]),
(pd.Categorical(('A', 'B'), categories=['A', 'B']), [1, 2]),
])
def test_replace_categorical(self, categorical, numeric):
# GH 24971
# Do not check if dtypes are equal due to a known issue that
# Categorical.replace sometimes coerces to object (GH 23305)
s = pd.Series(categorical)
result = s.replace({'A': 1, 'B': 2})
expected = pd.Series(numeric)
tm.assert_series_equal(expected, result, check_dtype=False)
def test_replace_with_no_overflowerror(self):
# GH 25616
# casts to object without Exception from OverflowError
s = pd.Series([0, 1, 2, 3, 4])
result = s.replace([3], ['100000000000000000000'])
expected = pd.Series([0, 1, 2, '100000000000000000000', 4])
tm.assert_series_equal(result, expected)
s = pd.Series([0, '100000000000000000000',
'100000000000000000001'])
result = s.replace(['100000000000000000000'], [1])
expected = pd.Series([0, 1, '100000000000000000001'])
tm.assert_series_equal(result, expected) | s = ser.copy()
msg = (r"Invalid fill method\. Expecting pad \(ffill\) or backfill"
r" \(bfill\)\. Got crash_cymbal") |
help.rs | // EndBASIC
// Copyright 2020 Julio Merino
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
//! Interactive help support.
use crate::console::Console;
use crate::exec::CATEGORY;
use async_trait::async_trait;
use endbasic_core::ast::{ArgSep, Expr, VarType};
use endbasic_core::exec::Machine;
use endbasic_core::syms::{
CallError, CallableMetadata, CallableMetadataBuilder, Command, CommandResult, Symbols,
};
use radix_trie::{Trie, TrieCommon};
use std::cell::RefCell;
use std::collections::{BTreeMap, HashMap};
use std::io;
use std::rc::Rc;
/// Cheat-sheet for the language syntax.
const LANG_REFERENCE: &str = r"
Symbols (variable, array and function references):
name? Boolean (TRUE and FALSE).
name# Floating point (double).
name% Integer (32 bits).
name$ String.
name Type determined by value or definition.
Assignments and declarations:
varref[(dim1[, ..., dimN])] = expr
DIM varname[(dim1[, ..., dimN])] [AS BOOLEAN|DOUBLE|INTEGER|STRING]
Expressions:
a + b a - b a * b a / b a MOD b -a
a AND b NOT a a OR b a XOR b
a = b a <> b a < b a <= b a > b a >= b
(a) varref
arrayref(s1[, ..., sN]) funcref(a1[, ..., aN])
Flow control:
IF expr THEN: ...: ELSE IF expr THEN: ...: ELSE: ...: END IF
FOR varref = expr TO expr [STEP int]: ...: NEXT
WHILE expr: ...: WEND
Misc:
st1: st2 Separates statements (same as a newline).
REM text Comment until end of line.
' text Comment until end of line.
, Long separator for arguments to builtin call.
; Short separator for arguments to builtin call.
";
/// Returns the header for the help summary.
fn header() -> Vec<String> {
vec![
"".to_owned(),
format!(" EndBASIC {}", env!("CARGO_PKG_VERSION")),
" Copyright 2020-2021 Julio Merino".to_owned(),
"".to_owned(),
format!(" Project page at <{}>", env!("CARGO_PKG_HOMEPAGE")),
" License Apache Version 2.0 <http://www.apache.org/licenses/LICENSE-2.0>".to_owned(),
]
}
/// Refills a paragraph to fit within a maximum width, returning the formatted lines.
///
/// This does not cut words half-way, which means that it may be impossible to fit certain words in
/// the specified width. If that happens, lines will overflow.
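/// For example (see the unit tests below), `refill("this is some text", 1)`
/// yields one word per line because no word fits within the width.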
fn refill(paragraph: &str, width: usize) -> Vec<String> {
if paragraph.is_empty() {
return vec!["".to_owned()];
}
let mut lines = vec![];
let mut line = String::new();
for word in paragraph.split_whitespace() {
if !line.is_empty() {
// Determine how many spaces to inject after a period. We want 2 spaces to separate
// different sentences and 1 otherwise. The heuristic here isn't great and it'd be
// better to respect the original spacing of the paragraph.
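// For example, "foo." followed by "Bar" is treated as a sentence
// boundary and gets two spaces, while "foo." followed by "bar" keeps a
// single space.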
let spaces = if line.ends_with('.') {
let first = word.chars().next().expect("Words cannot be empty");
if first == first.to_ascii_uppercase() {
2
} else {
1
}
} else {
1
};
if (line.len() + word.len() + spaces) >= width {
lines.push(line);
line = String::new();
} else {
for _ in 0..spaces {
line.push(' ');
}
}
}
line.push_str(word);
}
if !line.is_empty() {
lines.push(line);
}
lines
}
/// Same as `refill` but prints the lines to the console instead of returning them.
///
/// The width is automatically determined from the console's size.
fn refill_and_print(console: &mut dyn Console, paragraph: &str) -> io::Result<()> {
// TODO(jmmv): This queries the size on every print, which is not very efficient. Should reuse
// this across calls, maybe by having a wrapper over Console and using it throughout.
let size = console.size()?;
let lines = refill(paragraph, size.column - 8);
for line in lines {
if line.is_empty() {
console.print("")?;
} else {
console.print(&format!(" {}", line))?;
}
}
Ok(())
}
/// Handler for a specific help topic.
trait Topic {
/// Returns the name of the topic.
fn name(&self) -> &str;
/// Returns the human-readable, one-line description of this topic.
fn title(&self) -> &str;
/// Indicates whether this topic shows up in the topics summary or not.
fn show_in_summary(&self) -> bool;
/// Dumps the contents of this topic to the `_console`.
fn describe(&self, _console: &mut dyn Console) -> io::Result<()>;
}
/// A help topic to describe a callable.
struct CallableTopic {
name: String,
metadata: CallableMetadata,
}
impl Topic for CallableTopic {
fn name(&self) -> &str {
&self.name
}
fn title(&self) -> &str {
self.metadata.description().next().unwrap()
}
fn show_in_summary(&self) -> bool {
false
}
fn describe(&self, console: &mut dyn Console) -> io::Result<()> {
console.print("")?;
if self.metadata.return_type() == VarType::Void {
if self.metadata.syntax().is_empty() {
refill_and_print(console, self.metadata.name())?
} else {
refill_and_print(
console,
&format!("{} {}", self.metadata.name(), self.metadata.syntax()),
)?
}
} else {
refill_and_print(
console,
&format!(
"{}{}({})",
self.metadata.name(),
self.metadata.return_type().annotation(),
self.metadata.syntax(),
),
)?;
}
for paragraph in self.metadata.description() {
console.print("")?;
refill_and_print(console, paragraph)?;
}
console.print("")?;
Ok(())
}
}
/// A help topic to describe a category of callables.
struct CategoryTopic {
name: &'static str,
metadatas: Vec<CallableMetadata>,
}
impl Topic for CategoryTopic {
fn name(&self) -> &str {
self.name
}
fn title(&self) -> &str {
self.name
}
fn show_in_summary(&self) -> bool |
fn describe(&self, console: &mut dyn Console) -> io::Result<()> {
let description = self.metadatas.get(0).expect("Must have at least one symbol").category();
let mut index = BTreeMap::default();
let mut max_length = 0;
for metadata in &self.metadatas {
debug_assert_eq!(
description,
metadata.category(),
"All commands registered in this category must be equivalent"
);
let name = format!("{}{}", metadata.name(), metadata.return_type().annotation());
if name.len() > max_length {
max_length = name.len();
}
let blurb = metadata.description().next().unwrap();
let previous = index.insert(name, blurb);
assert!(previous.is_none(), "Names should have been unique");
}
console.print("")?;
for line in description.lines() {
refill_and_print(console, line)?;
console.print("")?;
}
for (name, blurb) in index.iter() {
let filler = " ".repeat(max_length - name.len());
// TODO(jmmv): Should use refill_and_print but continuation lines need special handling
// to be indented properly.
console.print(&format!(" >> {}{} {}", name, filler, blurb))?;
}
console.print("")?;
refill_and_print(console, " Type HELP followed by the name of a symbol for details.")?;
console.print("")?;
Ok(())
}
}
/// A help topic to describe the language's grammar.
struct LanguageTopic {}
impl Topic for LanguageTopic {
fn name(&self) -> &str {
"Language reference"
}
fn title(&self) -> &str {
"Language reference"
}
fn show_in_summary(&self) -> bool {
true
}
fn describe(&self, console: &mut dyn Console) -> io::Result<()> {
for line in LANG_REFERENCE.lines() {
// Print line by line to honor any possible differences in line feeds.
console.print(line)?;
}
console.print("")?;
Ok(())
}
}
/// Maintains the collection of topics as a trie indexed by their name.
struct Topics(Trie<String, Box<dyn Topic>>);
impl Topics {
/// Builds an index of the given `symbols` and returns a new collection of help topics.
fn new(symbols: &Symbols) -> Self {
fn insert(topics: &mut Trie<String, Box<dyn Topic>>, topic: Box<dyn Topic>) {
let key = topic.name().to_ascii_uppercase();
topics.insert(key, topic);
}
let mut topics = Trie::default();
insert(&mut topics, Box::from(LanguageTopic {}));
let mut categories = HashMap::new();
for (name, symbol) in symbols.as_hashmap().iter() {
if let Some(metadata) = symbol.metadata() {
let category_title = metadata.category().lines().next().unwrap();
categories
.entry(category_title)
.or_insert_with(Vec::default)
.push(metadata.clone());
insert(
&mut topics,
Box::from(CallableTopic {
name: format!("{}{}", name, metadata.return_type().annotation()),
metadata: metadata.clone(),
}),
);
}
}
for (name, metadatas) in categories.into_iter() {
insert(&mut topics, Box::from(CategoryTopic { name, metadatas }));
}
Self(topics)
}
/// Returns the given topic named `name`, where `name` can be a prefix.
///
/// If `name` is not long enough to uniquely identify a topic or if the topic does not exist,
/// returns an error.
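/// For example, per the HELP description above, `find("CON")` can resolve to
/// a "Console manipulation" category topic when no other topic name starts
/// with that prefix.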
fn find(&self, name: &str) -> Result<&dyn Topic, CallError> {
let key = name.to_ascii_uppercase();
match self.0.get_raw_descendant(&key) {
Some(subtrie) => {
let children: Vec<(&String, &Box<dyn Topic>)> = subtrie.iter().collect();
match children[..] {
[(_name, topic)] => Ok(topic.as_ref()),
_ => {
let completions: Vec<String> =
children.iter().map(|(name, _topic)| (*name).to_owned()).collect();
Err(CallError::ArgumentError(format!(
"Ambiguous help topic {}; candidates are: {}",
name,
completions.join(", ")
)))
}
}
}
None => Err(CallError::ArgumentError(format!("Unknown help topic {}", name))),
}
}
/// Returns an iterator over all the topics.
fn values(&self) -> radix_trie::iter::Values<String, Box<dyn Topic>> {
self.0.values()
}
}
/// The `HELP` command.
pub struct HelpCommand {
metadata: CallableMetadata,
console: Rc<RefCell<dyn Console>>,
}
impl HelpCommand {
/// Creates a new command that writes help messages to `output`.
pub fn new(console: Rc<RefCell<dyn Console>>) -> Rc<Self> {
Rc::from(Self {
metadata: CallableMetadataBuilder::new("HELP", VarType::Void)
.with_syntax("[topic]")
.with_category(CATEGORY)
.with_description(
"Prints interactive help.
Without arguments, shows a summary of all available top-level help topics.
With a single argument, which may be a bare name or a string, shows detailed information about the \
given help topic, command, or function. Topic names with spaces in them must be double-quoted.
Topic names are case-insensitive and can be specified as prefixes, in which case the topic whose \
name starts with the prefix will be shown. For example, the following invocations are all \
equivalent: HELP CON, HELP console, HELP \"Console manipulation\".",
)
.build(),
console,
})
}
/// Prints a summary of all available help topics.
fn summary(&self, topics: &Topics) -> io::Result<()> {
let mut console = self.console.borrow_mut();
for line in header() {
refill_and_print(&mut *console, &line)?;
}
// TODO(jmmv): Should use refill_and_print but continuation lines need special handling to
// be indented properly.
console.print("")?;
refill_and_print(&mut *console, "Top-level help topics:")?;
console.print("")?;
for topic in topics.values() {
if topic.show_in_summary() {
console.print(&format!(" >> {}", topic.title()))?;
}
}
console.print("")?;
refill_and_print(&mut *console, "Type HELP followed by the name of a topic for details.")?;
refill_and_print(
&mut *console,
"Type HELP HELP for details on how to specify topic names.",
)?;
console.print("")?;
Ok(())
}
}
#[async_trait(?Send)]
impl Command for HelpCommand {
fn metadata(&self) -> &CallableMetadata {
&self.metadata
}
async fn exec(&self, args: &[(Option<Expr>, ArgSep)], machine: &mut Machine) -> CommandResult {
let topics = Topics::new(machine.get_symbols());
match args {
[] => {
self.summary(&topics)?;
}
[(Some(Expr::Symbol(vref)), ArgSep::End)] => {
let topic = topics.find(&format!("{}", vref))?;
let mut console = self.console.borrow_mut();
topic.describe(&mut *console)?;
}
[(Some(Expr::Text(name)), ArgSep::End)] => {
let topic = topics.find(name)?;
let mut console = self.console.borrow_mut();
topic.describe(&mut *console)?;
}
_ => {
return Err(CallError::ArgumentError(
"HELP takes zero or only one argument".to_owned(),
))
}
}
Ok(())
}
}
/// Adds all help-related commands to the `machine` and makes them write to `console`.
pub fn add_all(machine: &mut Machine, console: Rc<RefCell<dyn Console>>) {
machine.add_command(HelpCommand::new(console));
}
#[cfg(test)]
pub(crate) mod testutils {
use super::*;
use endbasic_core::ast::Value;
use endbasic_core::syms::{
CallableMetadata, CallableMetadataBuilder, Function, FunctionResult,
};
/// A command that does nothing.
pub(crate) struct DoNothingCommand {
metadata: CallableMetadata,
}
impl DoNothingCommand {
/// Creates a new instance of the command.
pub fn new() -> Rc<Self> {
Rc::from(Self {
metadata: CallableMetadataBuilder::new("DO_NOTHING", VarType::Void)
.with_syntax("this [would] <be|the> syntax \"specification\"")
.with_category(
"Testing
This is a sample category for testing.",
)
.with_description(
"This is the blurb.
First paragraph of the extended description.
Second paragraph of the extended description.",
)
.build(),
})
}
}
#[async_trait(?Send)]
impl Command for DoNothingCommand {
fn metadata(&self) -> &CallableMetadata {
&self.metadata
}
async fn exec(
&self,
_args: &[(Option<Expr>, ArgSep)],
_machine: &mut Machine,
) -> CommandResult {
Ok(())
}
}
/// A function that does nothing that can take any name.
pub(crate) struct EmptyFunction {
metadata: CallableMetadata,
}
impl EmptyFunction {
pub(crate) fn new() -> Rc<Self> {
EmptyFunction::new_with_name("EMPTY")
}
pub(crate) fn new_with_name(name: &'static str) -> Rc<Self> {
Rc::from(Self {
metadata: CallableMetadataBuilder::new(name, VarType::Text)
.with_syntax("this [would] <be|the> syntax \"specification\"")
.with_category(
"Testing
This is a sample category for testing.",
)
.with_description(
"This is the blurb.
First paragraph of the extended description.
Second paragraph of the extended description.",
)
.build(),
})
}
}
impl Function for EmptyFunction {
fn metadata(&self) -> &CallableMetadata {
&self.metadata
}
fn exec(&self, _args: &[Expr], _symbols: &mut Symbols) -> FunctionResult {
Ok(Value::Text("irrelevant".to_owned()))
}
}
}
#[cfg(test)]
mod tests {
use super::testutils::*;
use super::*;
use crate::testutils::*;
#[test]
fn test_refill_empty() {
assert_eq!(&[""], refill("", 0).as_slice());
assert_eq!(&[""], refill("", 10).as_slice());
}
#[test]
fn test_refill_nothing_fits() {
assert_eq!(&["this", "is", "some", "text"], refill("this is some text", 0).as_slice());
assert_eq!(&["this", "is", "some", "text"], refill("this is some text", 1).as_slice());
}
#[test]
fn test_refill_some_lines() {
assert_eq!(
&["this is a piece", "of text with", "a-fictitious-very-long-word", "within it"],
refill("this is a piece of text with a-fictitious-very-long-word within it", 16)
.as_slice()
);
}
#[test]
fn test_refill_reformats_periods() {
assert_eq!(&["foo. bar. baz."], refill("foo. bar. baz.", 100).as_slice());
assert_eq!(&["foo. Bar. baz."], refill("foo. Bar. baz.", 100).as_slice());
assert_eq!(&["[some .. range]"], refill("[some .. range]", 100).as_slice());
}
fn tester() -> Tester {
let tester = Tester::empty();
let console = tester.get_console();
tester.add_command(HelpCommand::new(console))
}
#[test]
fn test_help_summarize_symbols() {
tester()
.add_command(DoNothingCommand::new())
.add_function(EmptyFunction::new())
.run("HELP")
.expect_prints(header())
.expect_prints([
"",
" Top-level help topics:",
"",
" >> Interpreter",
" >> Language reference",
" >> Testing",
"",
" Type HELP followed by the name of a topic for details.",
" Type HELP HELP for details on how to specify topic names.",
"",
])
.check();
}
#[test]
fn test_help_describe_callables_topic() {
tester()
.add_command(DoNothingCommand::new())
.add_function(EmptyFunction::new())
.run("help testing")
.expect_prints([
"",
" Testing",
"",
" This is a sample category for testing.",
"",
" >> DO_NOTHING This is the blurb.",
" >> EMPTY$ This is the blurb.",
"",
" Type HELP followed by the name of a symbol for details.",
"",
])
.check();
}
#[test]
fn test_help_describe_command() {
tester()
.add_command(DoNothingCommand::new())
.run("help Do_Nothing")
.expect_prints([
"",
" DO_NOTHING this [would] <be|the> syntax \"specification\"",
"",
" This is the blurb.",
"",
" First paragraph of the extended description.",
"",
" Second paragraph of the extended description.",
"",
])
.check();
}
fn do_help_describe_function_test(name: &str) {
tester()
.add_function(EmptyFunction::new())
.run(format!("help {}", name))
.expect_prints([
"",
" EMPTY$(this [would] <be|the> syntax \"specification\")",
"",
" This is the blurb.",
"",
" First paragraph of the extended description.",
"",
" Second paragraph of the extended description.",
"",
])
.check();
}
#[test]
fn test_help_describe_function_without_annotation() {
do_help_describe_function_test("Empty")
}
#[test]
fn test_help_describe_function_with_annotation() {
do_help_describe_function_test("EMPTY$")
}
#[test]
fn test_help_lang() {
for cmd in &["help lang", "help language", r#"help "Language Reference""#] {
tester()
.run(*cmd)
.expect_prints(LANG_REFERENCE.lines().collect::<Vec<&str>>())
.expect_prints([""])
.check();
}
}
#[test]
fn test_help_prefix_search() {
fn exp_output(name: &str) -> Vec<String> {
vec![
"".to_owned(),
format!(" {}$(this [would] <be|the> syntax \"specification\")", name),
"".to_owned(),
" This is the blurb.".to_owned(),
"".to_owned(),
" First paragraph of the extended description.".to_owned(),
"".to_owned(),
" Second paragraph of the extended description.".to_owned(),
"".to_owned(),
]
}
for cmd in &["help aa", "help aab", "help aabc"] {
tester()
.add_function(EmptyFunction::new_with_name("AABC"))
.add_function(EmptyFunction::new_with_name("ABC"))
.add_function(EmptyFunction::new_with_name("BC"))
.run(*cmd)
.expect_prints(exp_output("AABC"))
.check();
}
for cmd in &["help b", "help bc"] {
tester()
.add_function(EmptyFunction::new_with_name("AABC"))
.add_function(EmptyFunction::new_with_name("ABC"))
.add_function(EmptyFunction::new_with_name("BC"))
.run(*cmd)
.expect_prints(exp_output("BC"))
.check();
}
tester()
.add_function(EmptyFunction::new_with_name("ABC"))
.add_function(EmptyFunction::new_with_name("AABC"))
.run("help a")
.expect_err("Ambiguous help topic a; candidates are: AABC$, ABC$")
.check();
}
#[test]
fn test_help_errors() {
let mut t =
tester().add_command(DoNothingCommand::new()).add_function(EmptyFunction::new());
t.run("HELP foo bar").expect_err("Unexpected value in expression").check();
t.run("HELP foo, bar").expect_err("HELP takes zero or only one argument").check();
t.run("HELP lang%").expect_err("Unknown help topic lang%").check();
t.run("HELP foo$").expect_err("Unknown help topic foo$").check();
t.run("HELP foo").expect_err("Unknown help topic foo").check();
t.run("HELP do_nothing$").expect_err("Unknown help topic do_nothing$").check();
t.run("HELP empty?").expect_err("Unknown help topic empty?").check();
let mut t = tester();
t.run("HELP undoc").expect_err("Unknown help topic undoc").check();
t.run("undoc = 3: HELP undoc")
.expect_err("Unknown help topic undoc")
.expect_var("undoc", 3)
.check();
let mut t = tester();
t.run("HELP undoc").expect_err("Unknown help topic undoc").check();
t.run("DIM undoc(3): HELP undoc")
.expect_err("Unknown help topic undoc")
.expect_array("undoc", VarType::Integer, &[3], vec![])
.check();
}
}
| {
true
} |
lru.go | package state
import (
"errors"
"sync"
"github.com/spf13/viper"
"github.com/ethereum/go-ethereum/common"
lru "github.com/hashicorp/golang-lru"
)
//the default lru cache size is 10,000,000 entries (1kw = 10 million), which means the max memory we need is (32 + 32 + 4) * 10000000 bytes, about 700MB
var (
defaultLruSize int = 10000000
gStateLru *lru.Cache
once sync.Once
)
//redefine fast-query here to avoid a package import cycle
const FlagFastQuery = "fast-query"
func isWatcherEnabled() bool {
return viper.GetBool(FlagFastQuery) | if isWatcherEnabled() {
var e error = nil
gStateLru, e = lru.New(defaultLruSize)
if e != nil {
panic(errors.New("Failed to call InstanceOfStateLru cause :" + e.Error()))
}
}
})
return gStateLru
}
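// GetStateFromLru returns the cached value for key, or nil when the cache is
// disabled (fast-query off) or the key is missing.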
func GetStateFromLru(key common.Hash) []byte {
cache := InstanceOfStateLru()
if cache == nil {
return nil
}
value, ok := cache.Get(key)
if ok {
ret, ok := value.([]byte)
if ok {
return ret
}
}
return nil
}
func SetStateToLru(key common.Hash, value []byte) {
cache := InstanceOfStateLru()
if cache == nil {
return
}
cache.Add(key, value)
} | }
func InstanceOfStateLru() *lru.Cache {
once.Do(func() { |
reno.rs | // Copyright (C) 2019, Cloudflare, Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//! Reno Congestion Control
//!
//! Note that Slow Start can use HyStart++ when enabled.
use std::cmp;
use std::time::Instant;
use crate::packet;
use crate::recovery;
use crate::recovery::Acked;
use crate::recovery::CongestionControlOps;
use crate::recovery::Recovery;
pub static RENO: CongestionControlOps = CongestionControlOps {
on_packet_sent,
on_packet_acked,
congestion_event,
collapse_cwnd,
};
pub fn on_packet_sent(r: &mut Recovery, sent_bytes: usize, _now: Instant) {
r.bytes_in_flight += sent_bytes;
}
fn on_packet_acked(
r: &mut Recovery, packet: &Acked, epoch: packet::Epoch, now: Instant,
) {
r.bytes_in_flight = r.bytes_in_flight.saturating_sub(packet.size);
if r.in_congestion_recovery(packet.time_sent) {
return;
}
| if r.congestion_window < r.ssthresh {
// Slow start.
let cwnd_inc = cmp::min(
packet.size,
r.max_datagram_size * recovery::ABC_L -
cmp::min(
r.bytes_acked_sl,
r.max_datagram_size * recovery::ABC_L,
),
);
// In slow start, bytes_acked_sl is used for counting
// acknowledged bytes.
r.bytes_acked_sl += packet.size;
r.congestion_window += cwnd_inc;
if r.hystart.enabled() &&
epoch == packet::EPOCH_APPLICATION &&
r.hystart.try_enter_lss(
packet,
r.latest_rtt,
r.congestion_window,
now,
r.max_datagram_size,
)
{
r.ssthresh = r.congestion_window;
}
} else {
// Congestion avoidance.
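// Standard Reno additive increase: once a full congestion window's
// worth of bytes has been acked, grow cwnd by one maximum datagram
// size (roughly one MSS per RTT).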
let mut reno_cwnd = r.congestion_window;
r.bytes_acked_ca += packet.size;
if r.bytes_acked_ca >= r.congestion_window {
r.bytes_acked_ca -= r.congestion_window;
reno_cwnd += r.max_datagram_size;
}
// When in Limited Slow Start, take the max of CA cwnd and
// LSS cwnd.
if r.hystart.in_lss(epoch) {
let lss_cwnd = r.hystart.lss_cwnd(
packet.size,
r.bytes_acked_sl,
r.congestion_window,
r.ssthresh,
r.max_datagram_size,
);
r.bytes_acked_sl += packet.size;
r.congestion_window = cmp::max(reno_cwnd, lss_cwnd);
} else {
r.congestion_window = reno_cwnd;
}
}
}
fn congestion_event(
r: &mut Recovery, time_sent: Instant, epoch: packet::Epoch, now: Instant,
) {
// Start a new congestion event if packet was sent after the
// start of the previous congestion recovery period.
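// Multiplicative decrease: scale cwnd by LOSS_REDUCTION_FACTOR (but never
// below the minimum window) and set ssthresh to the reduced cwnd.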
if !r.in_congestion_recovery(time_sent) {
r.congestion_recovery_start_time = Some(now);
r.congestion_window = (r.congestion_window as f64 *
recovery::LOSS_REDUCTION_FACTOR)
as usize;
r.congestion_window = cmp::max(
r.congestion_window,
r.max_datagram_size * recovery::MINIMUM_WINDOW_PACKETS,
);
r.bytes_acked_ca = (r.congestion_window as f64 *
recovery::LOSS_REDUCTION_FACTOR) as usize;
r.ssthresh = r.congestion_window;
if r.hystart.in_lss(epoch) {
r.hystart.congestion_event();
}
}
}
pub fn collapse_cwnd(r: &mut Recovery) {
r.congestion_window = r.max_datagram_size * recovery::MINIMUM_WINDOW_PACKETS;
r.bytes_acked_sl = 0;
r.bytes_acked_ca = 0;
}
#[cfg(test)]
mod tests {
use super::*;
use std::time::Duration;
#[test]
fn reno_init() {
let mut cfg = crate::Config::new(crate::PROTOCOL_VERSION).unwrap();
cfg.set_cc_algorithm(recovery::CongestionControlAlgorithm::Reno);
let r = Recovery::new(&cfg);
assert!(r.cwnd() > 0);
assert_eq!(r.bytes_in_flight, 0);
}
#[test]
fn reno_send() {
let mut cfg = crate::Config::new(crate::PROTOCOL_VERSION).unwrap();
cfg.set_cc_algorithm(recovery::CongestionControlAlgorithm::Reno);
let mut r = Recovery::new(&cfg);
let now = Instant::now();
r.on_packet_sent_cc(1000, now);
assert_eq!(r.bytes_in_flight, 1000);
}
#[test]
fn reno_slow_start() {
let mut cfg = crate::Config::new(crate::PROTOCOL_VERSION).unwrap();
cfg.set_cc_algorithm(recovery::CongestionControlAlgorithm::Reno);
let mut r = Recovery::new(&cfg);
let now = Instant::now();
let p = recovery::Sent {
pkt_num: 0,
frames: vec![],
time_sent: now,
time_acked: None,
time_lost: None,
size: r.max_datagram_size,
ack_eliciting: true,
in_flight: true,
delivered: 0,
delivered_time: std::time::Instant::now(),
recent_delivered_packet_sent_time: std::time::Instant::now(),
is_app_limited: false,
has_data: false,
};
// Send initcwnd full MSS packets to become no longer app limited
for _ in 0..recovery::INITIAL_WINDOW_PACKETS {
r.on_packet_sent_cc(p.size, now);
}
let cwnd_prev = r.cwnd();
let acked = vec![Acked {
pkt_num: p.pkt_num,
time_sent: p.time_sent,
size: p.size,
}];
r.on_packets_acked(acked, packet::EPOCH_APPLICATION, now);
// Check if cwnd increased by packet size (slow start).
assert_eq!(r.cwnd(), cwnd_prev + p.size);
}
#[test]
fn reno_slow_start_abc_l() {
let mut cfg = crate::Config::new(crate::PROTOCOL_VERSION).unwrap();
cfg.set_cc_algorithm(recovery::CongestionControlAlgorithm::Reno);
let mut r = Recovery::new(&cfg);
let now = Instant::now();
let p = recovery::Sent {
pkt_num: 0,
frames: vec![],
time_sent: now,
time_acked: None,
time_lost: None,
size: r.max_datagram_size,
ack_eliciting: true,
in_flight: true,
delivered: 0,
delivered_time: std::time::Instant::now(),
recent_delivered_packet_sent_time: std::time::Instant::now(),
is_app_limited: false,
has_data: false,
};
// Send initcwnd full MSS packets to become no longer app limited
for _ in 0..recovery::INITIAL_WINDOW_PACKETS {
r.on_packet_sent_cc(p.size, now);
}
let cwnd_prev = r.cwnd();
let acked = vec![
Acked {
pkt_num: p.pkt_num,
time_sent: p.time_sent,
size: p.size,
},
Acked {
pkt_num: p.pkt_num,
time_sent: p.time_sent,
size: p.size,
},
Acked {
pkt_num: p.pkt_num,
time_sent: p.time_sent,
size: p.size,
},
];
r.on_packets_acked(acked, packet::EPOCH_APPLICATION, now);
// Acked 3 packets, but cwnd will increase 2 x mss.
assert_eq!(r.cwnd(), cwnd_prev + p.size * recovery::ABC_L);
}
#[test]
fn reno_congestion_event() {
let mut cfg = crate::Config::new(crate::PROTOCOL_VERSION).unwrap();
cfg.set_cc_algorithm(recovery::CongestionControlAlgorithm::Reno);
let mut r = Recovery::new(&cfg);
let prev_cwnd = r.cwnd();
let now = Instant::now();
r.congestion_event(now, packet::EPOCH_APPLICATION, now);
// In Reno, after congestion event, cwnd will be cut in half.
assert_eq!(prev_cwnd / 2, r.cwnd());
}
#[test]
fn reno_congestion_avoidance() {
let mut cfg = crate::Config::new(crate::PROTOCOL_VERSION).unwrap();
cfg.set_cc_algorithm(recovery::CongestionControlAlgorithm::Reno);
let mut r = Recovery::new(&cfg);
let now = Instant::now();
let prev_cwnd = r.cwnd();
// Fill up bytes_in_flight to avoid app_limited=true
r.on_packet_sent_cc(20000, now);
// Trigger congestion event to update ssthresh
r.congestion_event(now, packet::EPOCH_APPLICATION, now);
// After congestion event, cwnd will be reduced.
let cur_cwnd =
(prev_cwnd as f64 * recovery::LOSS_REDUCTION_FACTOR) as usize;
assert_eq!(r.cwnd(), cur_cwnd);
let rtt = Duration::from_millis(100);
let acked = vec![Acked {
pkt_num: 0,
// To exit from recovery
time_sent: now + rtt,
// More than cur_cwnd to increase cwnd
size: 8000,
}];
// Ack more than cwnd bytes with rtt=100ms
r.update_rtt(rtt, Duration::from_millis(0), now);
r.on_packets_acked(acked, packet::EPOCH_APPLICATION, now + rtt * 2);
// After acking more than cwnd, expect cwnd increased by MSS
assert_eq!(r.cwnd(), cur_cwnd + r.max_datagram_size);
}
} | if r.app_limited {
return;
}
|
stiff_physics.rs | use hyperdual::{Float, Hyperdual};
use nalgebra::{self, DMatrix, DVector, Point, SVector};
trait OneHot {
type H;
fn from_one_hot(index: usize) -> Self::H;
}
impl<T: hyperdual::Zero + hyperdual::One + Copy + nalgebra::Scalar, const N: usize> OneHot
for Hyperdual<T, N>
{
type H = Hyperdual<T, N>;
/// Create a new dual number from a real number and set the real value or a derivative to one.
///
/// All other parts are set to zero.
#[inline]
fn from_one_hot(index: usize) -> Hyperdual<T, N>
where
T: hyperdual::Zero,
{
let mut dual = Hyperdual::<T, N>::from_real(T::zero());
dual[index] = T::one();
dual
}
}
#[cfg_attr(feature = "persistence", derive(serde::Deserialize, serde::Serialize))]
#[derive(Clone)]
pub struct | {
pub p1: usize,
pub p2: usize,
pub length: f64,
pub k: f64,
pub d: f64,
}
pub const D: usize = 2;
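// Hooke's-law spring with damping: the force magnitude is
// k * (spring_length - relaxed_length) + d * (relative velocity projected
// onto the spring direction), applied along the spring axis.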
fn spring_force<const S: usize>(
p1_pos: SVector<Hyperdual<f64, S>, D>,
p1_vel: SVector<Hyperdual<f64, S>, D>,
p2_pos: SVector<Hyperdual<f64, S>, D>,
p2_vel: SVector<Hyperdual<f64, S>, D>,
relaxed_length: f64,
k: f64, // Spring constant
d: f64,
) -> SVector<Hyperdual<f64, S>, D> {
let dpos = p2_pos - p1_pos;
let dvel = p2_vel - p1_vel;
let spring_length = dpos.dot(&dpos).sqrt();
let spring_dir = dpos / spring_length;
let force_magnitude: Hyperdual<f64, S> = Hyperdual::from_real(k)
* (spring_length - Hyperdual::from_real(relaxed_length))
+ Hyperdual::from_real(d) * spring_dir.dot(&dvel);
spring_dir * -force_magnitude
}
pub fn new_state_vector_from_points(points: &[Point<f64, D>]) -> DVector<f64> {
puffin::profile_function!();
let num_points = points.len();
let block_size = num_points * D;
let system_size = block_size * 2 + 1;
let mut y0 = DVector::zeros(system_size);
for i in 0..num_points {
for j in 0..D {
y0[i * D + j] = points[i][j] as f64;
}
}
y0[system_size - 1] = 1.0; // For transformation to a homogeneous system.
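// The trailing constant-1 coordinate folds the affine offset of the
// linearized system y' = Ay + b into the last column of A, giving a purely
// linear homogeneous system.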
y0
}
pub fn create_diff_eq_system_around_y0(
y0: &DVector<f64>,
point_masses: &[f64],
springs: &[Spring],
) -> DMatrix<f64> {
puffin::profile_function!();
assert_eq!(y0.len(), point_masses.len() * D * 2 + 1);
let num_points = point_masses.len();
let block_size = num_points * D;
let system_size = block_size * 2 + 1;
// Dual numbers for automatic differentiation of springs. (Spatial derivatives, not time derivatives).
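// Hyperdual layout: index 0 is the real value; indices 1..=8 hold the partial
// derivatives with respect to p1_pos, p2_pos, p1_vel and p2_vel (two
// components each), which is why force[j][1 + k], [3 + k], [5 + k] and
// [7 + k] are read below.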
let mut p1_pos = SVector::<Hyperdual<f64, 9>, D>::new(
Hyperdual::from_one_hot(1),
Hyperdual::from_one_hot(2),
);
let mut p2_pos = SVector::<Hyperdual<f64, 9>, D>::new(
Hyperdual::from_one_hot(3),
Hyperdual::from_one_hot(4),
);
let mut p1_vel = SVector::<Hyperdual<f64, 9>, D>::new(
Hyperdual::from_one_hot(5),
Hyperdual::from_one_hot(6),
);
let mut p2_vel = SVector::<Hyperdual<f64, 9>, D>::new(
Hyperdual::from_one_hot(7),
Hyperdual::from_one_hot(8),
);
// Construct A matrix for y' = Ay. (Time derivative of state vector).
let mut mat_a = DMatrix::zeros(system_size, system_size);
// Equations for variable substitutions
for i in 0..num_points * D {
// "velocity is velocity"
mat_a[(i, block_size + i)] = 1.0;
}
// Equations for spring forces
for spring in springs {
let p1_loc = spring.p1 * D;
let p2_loc = spring.p2 * D;
// Set parameters to spring function.
p1_pos[0][0] = y0[p1_loc];
p1_pos[1][0] = y0[p1_loc + 1];
p2_pos[0][0] = y0[p2_loc];
p2_pos[1][0] = y0[p2_loc + 1];
p1_vel[0][0] = y0[block_size + p1_loc];
p1_vel[1][0] = y0[block_size + p1_loc + 1];
p2_vel[0][0] = y0[block_size + p2_loc];
p2_vel[1][0] = y0[block_size + p2_loc + 1];
let p1_mass = point_masses[spring.p1];
let p2_mass = point_masses[spring.p2];
let force = spring_force(
p1_pos,
p1_vel,
p2_pos,
p2_vel,
spring.length,
spring.k,
spring.d,
);
for j in 0..D {
for k in 0..D {
// Acceleration based on position
mat_a[(block_size + p1_loc + j, p1_loc + k)] -= force[j][1 + k] / p1_mass; // p1 acc from pos of p1.
mat_a[(block_size + p1_loc + j, p2_loc + k)] -= force[j][3 + k] / p1_mass; // p1 acc from pos of p2.
mat_a[(block_size + p2_loc + j, p1_loc + k)] += force[j][1 + k] / p2_mass; // p2 acc from pos of p1.
mat_a[(block_size + p2_loc + j, p2_loc + k)] += force[j][3 + k] / p2_mass; // p2 acc from pos of p2.
// Damping
mat_a[(block_size + p1_loc + j, block_size + p1_loc + k)] -=
force[j][5 + k] / p1_mass; // p1 acc from vel of p1.
mat_a[(block_size + p1_loc + j, block_size + p2_loc + k)] -=
force[j][7 + k] / p1_mass; // p1 acc from vel of p2.
mat_a[(block_size + p2_loc + j, block_size + p1_loc + k)] +=
force[j][5 + k] / p2_mass; // p2 acc from vel of p1.
mat_a[(block_size + p2_loc + j, block_size + p2_loc + k)] +=
force[j][7 + k] / p2_mass; // p2 acc from vel of p2.
}
// Offset for linearization around y0.
let mut constant_term = force[j][0];
for k in 0..D {
constant_term -= force[j][1 + k] * y0[p1_loc + k]
+ force[j][3 + k] * y0[p2_loc + k]
+ force[j][5 + k] * y0[block_size + p1_loc + k]
+ force[j][7 + k] * y0[block_size + p2_loc + k];
}
// Constant acceleration term.
mat_a[(block_size + p1_loc + j, system_size - 1)] -= constant_term / p1_mass;
mat_a[(block_size + p2_loc + j, system_size - 1)] += constant_term / p2_mass;
}
}
mat_a
}
| Spring |
demo-angular-material.module.ts | import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { UIModule } from '../ui/ui.module';
import {
DEMOANGULARMATERIAL_COMPONENTS,
DemoAngularMaterialComponent,
DemoMaterialAddressComponent,
DemoMaterialDashboardComponent,
DemoMaterialDragAndDropComponent,
DemoMaterialTableComponent,
DemoMaterialTreeComponent
} from './components';
import {
DemoAngularMaterialModule as SharedDemoAngularMaterialModule,
routeDemoAngularMaterial
} from '@application/features';
| SharedDemoAngularMaterialModule,
UIModule,
RouterModule.forChild(
routeDemoAngularMaterial(
DemoAngularMaterialComponent,
DemoMaterialAddressComponent,
DemoMaterialDashboardComponent,
DemoMaterialDragAndDropComponent,
DemoMaterialTableComponent,
DemoMaterialTreeComponent
)
)
],
declarations: [...DEMOANGULARMATERIAL_COMPONENTS],
exports: [...DEMOANGULARMATERIAL_COMPONENTS]
})
export class DemoAngularMaterialModule {} | @NgModule({
imports: [ |
CISCOSB-PHY-MIB.py | #
# PySNMP MIB module CISCOSB-PHY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCOSB-PHY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:23:02 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion")
switch001, = mibBuilder.importSymbols("CISCOSB-MIB", "switch001")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Unsigned32, Counter64, Counter32, TimeTicks, IpAddress, Bits, Integer32, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, ObjectIdentity, ModuleIdentity, NotificationType, iso = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "Counter64", "Counter32", "TimeTicks", "IpAddress", "Bits", "Integer32", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "ObjectIdentity", "ModuleIdentity", "NotificationType", "iso")
DisplayString, TextualConvention, TimeStamp = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "TimeStamp")
rlPhy = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90))
rlPhy.setRevisions(('2002-09-30 00:24', '2003-09-21 00:24',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: rlPhy.setRevisionsDescriptions(('Initial revision', 'Added MODULE-IDENTITY and TEXTUAL-CONVENTION IMPORTS.',))
if mibBuilder.loadTexts: rlPhy.setLastUpdated('200209300024Z')
if mibBuilder.loadTexts: rlPhy.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: rlPhy.setContactInfo('Postal: 170 West Tasman Drive San Jose , CA 95134-1706 USA Website: Cisco Small Business Support Community <http://www.cisco.com/go/smallbizsupport>')
if mibBuilder.loadTexts: rlPhy.setDescription("The MIB module describes the private MIB for testing Layer1 interfaces supported by CISCOSB's software and products.")
class RlPhyTestType(TextualConvention, Integer32):
description = 'A value indicating the test to perform.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24))
namedValues = NamedValues(("rlPhyTestTableNoTest", 1), ("rlPhyTestTableCableStatus", 2), ("rlPhyTestTableCableFault", 3), ("rlPhyTestTableCableLength", 4), ("rlPhyTestTableTransceiverTemp", 5), ("rlPhyTestTableTransceiverSupply", 6), ("rlPhyTestTableTxBias", 7), ("rlPhyTestTableTxOutput", 8), ("rlPhyTestTableRxOpticalPower", 9), ("rlPhyTestTableDataReady", 10), ("rlPhyTestTableLOS", 11), ("rlPhyTestTableTxFault", 12), ("rlPhyTestTableCableChannel1", 13), ("rlPhyTestTableCableChannel2", 14), ("rlPhyTestTableCableChannel3", 15), ("rlPhyTestTableCableChannel4", 16), ("rlPhyTestTableCablePolarity1", 17), ("rlPhyTestTableCablePolarity2", 18), ("rlPhyTestTableCablePolarity3", 19), ("rlPhyTestTableCablePolarity4", 20), ("rlPhyTestTableCablePairSkew1", 21), ("rlPhyTestTableCablePairSkew2", 22), ("rlPhyTestTableCablePairSkew3", 23), ("rlPhyTestTableCablePairSkew4", 24))
rlPhyTest = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1))
rlPhyTestSetTable = MibTable((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 1), )
if mibBuilder.loadTexts: rlPhyTestSetTable.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestSetTable.setDescription('')
rlPhyTestSetEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: rlPhyTestSetEntry.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestSetEntry.setDescription('An entry containing objects for invoking tests on an interface.')
rlPhyTestSetType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 1, 1, 1), RlPhyTestType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPhyTestSetType.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestSetType.setDescription('A control variable used to start operator initiated interface tests. 1 indicates that no test has been initiated. Only operator initiated interface tests can be set to this variable.')
rlPhyTestGetTable = MibTable((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 2), )
if mibBuilder.loadTexts: rlPhyTestGetTable.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestGetTable.setDescription('')
rlPhyTestGetEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCOSB-PHY-MIB", "rlPhyTestGetType"))
if mibBuilder.loadTexts: rlPhyTestGetEntry.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestGetEntry.setDescription('An entry containing results of tests on an interface.')
rlPhyTestGetType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 2, 1, 1), RlPhyTestType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPhyTestGetType.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestGetType.setDescription('A control variable: 1 indicates that this test can be done on the specified port, 2 initiates the test whenever the user wishes')
rlPhyTestGetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("none", 1), ("success", 2), ("inProgress", 3), ("notSupported", 4), ("unAbleToRun", 5), ("aborted", 6), ("failed", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPhyTestGetStatus.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestGetStatus.setDescription('This object contains the status of the most recently requested test for operator initiated tests or the value none(1) if no tests have been requested since the last reset. For non operator initiated tests the value is always none(1). Note that this facility provides no provision for saving the results of one test when starting another, as could be required if used by multiple managers concurrently.')
rlPhyTestGetResult = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPhyTestGetResult.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestGetResult.setDescription('This object holds the test result')
rlPhyTestGetUnits = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19))).clone(namedValues=NamedValues(("integer", 1), ("boolean", 2), ("downUP", 3), ("reverseNormal", 4), ("mdiMdix", 5), ("meter", 6), ("degree", 7), ("microVolt", 8), ("microOham", 9), ("microAmper", 10), ("microWatt", 11), ("millisecond", 12), ("alaskaPhyLength", 13), ("alaskaPhyStatus", 14), ("dbm", 15), ("decidbm", 16), ("milidbm", 17), ("abcd", 18), ("nanosecond", 19)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPhyTestGetUnits.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestGetUnits.setDescription('The test result unit of measure. The units can be standard units or special units that are designed for special tests. The alaskaPhyLength unit is designed for the VCT diagnostic and its values are: less_than_50M(1), 50-80M(2), 80-110M(3), 110-140M(4), more_than_140M(5). The alaskaPhyStatus unit is designed for the VCT diagnostic and its values are: 4_pair_cable(1), 2_pair_cable(2), no_cable(3), open_cable(4), short_cable(5), bad_cable(6), impedance_mismatch(7).')
rlPhyTestGetAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("notRelevant", 1), ("noAlarmSet", 2), ("lowWarning", 3), ("highWarning", 4), ("lowAlarm", 5), ("highAlarm", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPhyTestGetAlarm.setStatus('current')
if mibBuilder.loadTexts: rlPhyTestGetAlarm.setDescription('This object holds the Alarm for this Entry. Only tests that can have alarms use this field; others hold the Value notRelevant(1) ')
rlPhyTestGetTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 90, 1, 2, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPhyTestGetTimeStamp.setStatus('current')
if mibBuilder.loadTexts: |
mibBuilder.exportSymbols("CISCOSB-PHY-MIB", rlPhyTestGetTimeStamp=rlPhyTestGetTimeStamp, rlPhyTestSetTable=rlPhyTestSetTable, rlPhyTestGetAlarm=rlPhyTestGetAlarm, rlPhyTestGetEntry=rlPhyTestGetEntry, rlPhyTestGetTable=rlPhyTestGetTable, rlPhy=rlPhy, rlPhyTest=rlPhyTest, rlPhyTestSetEntry=rlPhyTestSetEntry, rlPhyTestSetType=rlPhyTestSetType, RlPhyTestType=RlPhyTestType, rlPhyTestGetType=rlPhyTestGetType, rlPhyTestGetStatus=rlPhyTestGetStatus, rlPhyTestGetResult=rlPhyTestGetResult, rlPhyTestGetUnits=rlPhyTestGetUnits, PYSNMP_MODULE_ID=rlPhy)
| rlPhyTestGetTimeStamp.setDescription('The time in string (formated DD-MMM-YYYY HH:MM:SS e.g 14-Apr-2002 10:33:31)') |
controller.go | package machineset
import (
"context"
"fmt"
"strconv"
"github.com/go-logr/logr"
machinev1 "github.com/openshift/machine-api-operator/pkg/apis/machine/v1beta1"
mapierrors "github.com/openshift/machine-api-operator/pkg/controller/machine"
corev1 "k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/client-go/tools/record"
awsproviderv1 "sigs.k8s.io/cluster-api-provider-aws/pkg/apis/awsprovider/v1beta1"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/controller"
)
const (
// This exposes compute information based on the providerSpec input.
// This is needed by the autoscaler to foresee upcoming capacity when scaling from zero.
// https://github.com/openshift/enhancements/pull/186
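// For example (illustrative values for an m5.xlarge-sized instance), the
// resulting MachineSet annotations would look like:
//   machine.openshift.io/vCPU: "4"
//   machine.openshift.io/memoryMb: "16384"
//   machine.openshift.io/GPU: "0"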
cpuKey = "machine.openshift.io/vCPU"
memoryKey = "machine.openshift.io/memoryMb"
gpuKey = "machine.openshift.io/GPU"
)
// Reconciler reconciles machineSets.
type Reconciler struct {
Client client.Client
Log logr.Logger
recorder record.EventRecorder
scheme *runtime.Scheme
}
// SetupWithManager creates a new controller for a manager.
func (r *Reconciler) SetupWithManager(mgr ctrl.Manager, options controller.Options) error {
_, err := ctrl.NewControllerManagedBy(mgr).
For(&machinev1.MachineSet{}).
WithOptions(options).
Build(r)
if err != nil {
return fmt.Errorf("failed setting up with a controller manager: %w", err)
}
r.recorder = mgr.GetEventRecorderFor("machineset-controller")
r.scheme = mgr.GetScheme()
return nil
}
// Reconcile implements controller runtime Reconciler interface.
func (r *Reconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {
logger := r.Log.WithValues("machineset", req.Name, "namespace", req.Namespace)
logger.V(3).Info("Reconciling")
ctx := context.Background()
machineSet := &machinev1.MachineSet{}
if err := r.Client.Get(ctx, req.NamespacedName, machineSet); err != nil {
if apierrors.IsNotFound(err) {
// Object not found, return. Created objects are automatically garbage collected.
// For additional cleanup logic use finalizers.
return ctrl.Result{}, nil
}
// Error reading the object - requeue the request.
return ctrl.Result{}, err
}
// Ignore deleted MachineSets, this can happen when foregroundDeletion
// is enabled
if !machineSet.DeletionTimestamp.IsZero() {
return ctrl.Result{}, nil
}
originalMachineSetToPatch := client.MergeFrom(machineSet.DeepCopy())
result, err := reconcile(machineSet)
if err != nil {
logger.Error(err, "Failed to reconcile MachineSet")
r.recorder.Eventf(machineSet, corev1.EventTypeWarning, "ReconcileError", "%v", err)
// We don't return here because we want to attempt to patch the machineSet regardless of an error.
}
if err := r.Client.Patch(ctx, machineSet, originalMachineSetToPatch); err != nil {
return ctrl.Result{}, fmt.Errorf("failed to patch machineSet: %v", err)
}
if isInvalidConfigurationError(err) {
// For situations where requeuing won't help, we don't return an error.
// https://github.com/kubernetes-sigs/controller-runtime/issues/617
return result, nil
}
return result, err
}
func isInvalidConfigurationError(err error) bool {
switch t := err.(type) {
case *mapierrors.MachineError:
if t.Reason == machinev1.InvalidConfigurationMachineError {
return true
}
}
return false
}
func reconcile(machineSet *machinev1.MachineSet) (ctrl.Result, error) {
providerConfig, err := awsproviderv1.ProviderSpecFromRawExtension(machineSet.Spec.Template.Spec.ProviderSpec.Value)
if err != nil {
return ctrl.Result{}, mapierrors.InvalidMachineConfiguration("failed to get providerConfig: %v", err)
}
instanceType, ok := InstanceTypes[providerConfig.InstanceType]
if !ok {
return ctrl.Result{}, mapierrors.InvalidMachineConfiguration("unknown instance type: %s", providerConfig.InstanceType)
}
if machineSet.Annotations == nil {
machineSet.Annotations = make(map[string]string)
}
// TODO: get annotations keys from machine API
machineSet.Annotations[cpuKey] = strconv.FormatInt(instanceType.VCPU, 10)
machineSet.Annotations[memoryKey] = strconv.FormatInt(instanceType.MemoryMb, 10)
machineSet.Annotations[gpuKey] = strconv.FormatInt(instanceType.GPU, 10) | return ctrl.Result{}, nil
} | |
deriving-show.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(Debug)]
struct Unit;
#[derive(Debug)]
struct Tuple(int, uint);
#[derive(Debug)]
struct Struct { x: int, y: uint }
#[derive(Debug)]
enum Enum {
Nullary,
Variant(int, uint),
StructVariant { x: int, y : uint }
}
macro_rules! t {
($x:expr, $expected:expr) => {
assert_eq!(format!("{:?}", $x), $expected.to_string())
}
}
pub fn main() {
    t!(Unit, "Unit");
    t!(Tuple(1, 2), "Tuple(1, 2)");
    t!(Struct { x: 1, y: 2 }, "Struct { x: 1, y: 2 }");
    t!(Enum::Nullary, "Nullary");
    t!(Enum::Variant(1, 2), "Variant(1, 2)");
    t!(Enum::StructVariant { x: 1, y: 2 }, "StructVariant { x: 1, y: 2 }");
}
index.tsx | import { FaGithub } from "react-icons/fa";
import { FiX } from "react-icons/fi";
import { signIn, signOut, useSession } from "next-auth/client";
import styles from "./styles.module.scss";
export function SignInButton() {
const [session] = useSession();
return session ? (
<button
className={styles.signInButton}
type="button"
onClick={() => signOut()}
>
<FaGithub color="#04d361" />
{session.user.name}
<FiX color="#737380" className={styles.closeIcon} />
</button>
) : (
<button
className={styles.signInButton}
type="button"
onClick={() => signIn("github")}
>
<FaGithub color="#eba417" />
      <span>Sign in with GitHub</span>
</button>
  );
}
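
// Usage sketch (assumes next-auth's <Provider> is mounted higher in the
// tree, e.g. in pages/_app.tsx, so that useSession() works here):
//
//   <header>
//     <SignInButton />
//   </header>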
neterrors.go | package neterrors
import (
"net"
"github.com/go-playground/errors"
)
const (
permanent = "Permanent"
transient = "Transient"
)
func init() {
errors.RegisterHelper(NETErrors)
}
// NETErrors helps classify net-related errors
func NETErrors(c errors.Chain, err error) (cont bool) {
switch e := err.(type) {
case *net.AddrError:
tp := permanent
if e.Temporary() {
tp = transient
}
_ = c.AddTypes(tp, "net").AddTags(
errors.T("addr", e.Addr),
errors.T("is_timeout", e.Timeout()),
errors.T("is_temporary", e.Temporary()),
)
return false
case *net.DNSError:
tp := permanent
if e.Temporary() {
tp = transient
}
_ = c.AddTypes(tp, "net").AddTags(
errors.T("name", e.Name),
errors.T("server", e.Server),
errors.T("is_timeout", e.Timeout()),
errors.T("is_temporary", e.Temporary()),
)
return false
case *net.ParseError:
_ = c.AddTypes(permanent, "net").AddTags(
errors.T("type", e.Type),
errors.T("text", e.Text),
)
return false
case *net.OpError:
tp := permanent
		if e.Temporary() {
			tp = transient
		}
_ = c.AddTypes(tp, "net").AddTags(
errors.T("op", e.Op),
errors.T("net", e.Net),
errors.T("addr", e.Addr),
errors.T("local_addr", e.Source),
errors.T("is_timeout", e.Timeout()),
errors.T("is_temporary", e.Temporary()),
)
return false
case net.UnknownNetworkError:
tp := permanent
if e.Temporary() {
tp = transient
}
_ = c.AddTypes(tp, "net").AddTags(
errors.T("is_timeout", e.Timeout()),
errors.T("is_temporary", e.Temporary()),
)
}
switch err {
case net.ErrWriteToConnected:
_ = c.AddTypes(transient, "net")
return false
}
	return true
}
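
// Usage sketch (hypothetical import path; assumes the go-playground/errors
// API used above): importing this package for its init side effect makes
// errors.Wrap classify net errors automatically.
//
//	import (
//		"github.com/go-playground/errors"
//		_ "example.com/neterrors" // registers NETErrors via init()
//	)
//
//	if _, err := net.Dial("tcp", "127.0.0.1:1"); err != nil {
//		chain := errors.Wrap(err, "dial failed")
//		// chain now carries "Transient" or "Permanent" plus "net" type tags.
//	}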
misc.py | from urllib.parse import urlparse
import classyjson as cj
import asyncio
import discord
import math
import time
from util.code import format_exception
from util.ipc import PacketType
def strip_command(ctx): # returns message.clean_content excluding the command used
length = len(ctx.prefix) + len(ctx.invoked_with) + 1
return ctx.message.clean_content[length:]
def dm_check(ctx):
def _dm_check(m):
return ctx.author == m.author and ctx.author.dm_channel == m.channel
return _dm_check
def recursive_update(obj, new):
if isinstance(obj, dict) and isinstance(new, dict):
for k, v in new.items():
obj[k] = recursive_update(obj.get(k, cj.classify({})), v)
elif isinstance(obj, list) and isinstance(new, list):
obj = [] # obj here needs to be reset to zero to avoid weird list issues (see /update command in cogs/cmds/owner.py)
for i, v in enumerate(new):
obj.append(recursive_update(obj[i], v) if i < len(obj) else v)
else:
return new
return obj
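# A quick illustration (plain dicts for brevity; real callers pass
# classyjson objects):
#   recursive_update({"a": {"x": 1, "y": 2}, "b": 1}, {"a": {"y": 3}})
#   -> {"a": {"x": 1, "y": 3}, "b": 1}
# Lists from `new` replace the old list contents wholesale (see the reset
# to [] above).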
def make_health_bar(health: int, max_health: int, full: str, half: str, empty: str):
assert max_health % 2 == 0
return (
(full * (health // 2))
+ (half * (health % 2))
+ (empty * ((max_health // 2) - math.ceil(health / 2)))
+ f" ({health}/{max_health})"
)
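# Example with hypothetical placeholder emojis: make_health_bar(7, 20, "F", "H", "E")
# returns "FFFHEEEEEE (7/20)": three full, one half, and six empty slots.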
async def lb_logic(bot, lb_list: list, u_entry: object, rank_fstr: str):
# add user entry to leaderboard if it's not there already
if u_entry is not None and u_entry[0] not in [e[0] for e in lb_list]:
lb_list.append(u_entry)
# sort
lb_list = sorted(lb_list, key=(lambda e: e[1]), reverse=True)
# shorten list
lb_list = lb_list[:9] if (u_entry is not None and u_entry[2] > 9) else lb_list[:10]
body = ""
# create base leaderboard
for entry in lb_list:
user = getattr(bot.get_user(entry[0]), "name", None)
if user is None:
res = await bot.ipc.broadcast(
{"type": PacketType.EVAL, "code": f"getattr(bot.get_user({entry[0]}), 'name', None)"}
)
for r in res.responses:
if not r.success:
raise ValueError(r.result)
if r.result:
user = r.result
break
if user is None:
user = "Unknown User"
else:
user = discord.utils.escape_markdown(user)
body += rank_fstr.format(entry[2], entry[1], user)
# add user if user is missing from the leaderboard
if u_entry is not None and u_entry[2] > 9:
body += "\n⋮" + rank_fstr.format(u_entry[2], u_entry[1], discord.utils.escape_markdown(bot.get_user(u_entry[0]).name))
return body + "\uFEFF"
def calc_total_wealth(db_user, u_items):
    return (
        db_user["emeralds"]
        + db_user["vault_balance"] * 9
        + sum([u_it["sell_price"] * u_it.get("amount", 0) for u_it in u_items if u_it["sell_price"] > 0])
    )
def emojify_item(d, item: str):
try:
emoji_key = d.emoji_items[item]
if emoji_key.startswith("fish."):
return d.emojis.fish[emoji_key[5:]]
return d.emojis[emoji_key]
except KeyError:
return d.emojis.air
def format_required(d: object, shop_item: object, amount: int = 1):
base = f" {shop_item.buy_price * amount}{d.emojis.emerald}"
for req_item, req_amount in shop_item.requires.get("items", {}).items():
base += f" + {req_amount * amount}{d.emojis[d.emoji_items[req_item]]}"
return base
async def update_support_member_role(bot, member):
try:
db = bot.get_cog("Database")
support_guild = bot.get_guild(bot.d.support_server_id)
if support_guild is None:
support_guild = await bot.fetch_guild(bot.d.support_server_id)
role_map_values = set(bot.d.role_mappings.values())
roles = []
for role in member.roles: # add non rank roles to roles list
if role.id not in role_map_values and role.id != bot.d.support_server_id:
roles.append(role)
pickaxe_role = bot.d.role_mappings.get(await db.fetch_pickaxe(member.id))
if pickaxe_role is not None:
roles.append(support_guild.get_role(pickaxe_role))
if await db.fetch_item(member.id, "Bane Of Pillagers Amulet") is not None:
roles.append(support_guild.get_role(bot.d.role_mappings.get("BOP")))
if roles != member.roles:
try:
await member.edit(roles=roles)
except (discord.errors.Forbidden, discord.errors.HTTPException):
pass
except Exception as e:
print(format_exception(e))
class TTLPreventDuplicate:
def __init__(self, expire_after: float, max_size: int):
self.expire_after = expire_after
self.max_size = max_size
self.store = {}
def put(self, obj):
self.store[obj] = time.time()
def check(self, obj):
return obj in self.store
async def run(self):
try:
while True:
for k, v in list(self.store.items()):
if (time.time() - v) > self.expire_after:
del self.store[k]
await asyncio.sleep(1)
except asyncio.CancelledError:
pass
def fix_giphy_url(url: str) -> str:
    return f"https://i.giphy.com/media/{url.split('-')[-1]}/giphy.gif"
location-disabled.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _pure = require('recompose/pure');
var _pure2 = _interopRequireDefault(_pure);
var _SvgIcon = require('../../SvgIcon');
var _SvgIcon2 = _interopRequireDefault(_SvgIcon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var DeviceLocationDisabled = function DeviceLocationDisabled(props) {
return _react2.default.createElement(
_SvgIcon2.default,
props,
_react2.default.createElement('path', { d: 'M20.94 11c-.46-4.17-3.77-7.48-7.94-7.94V1h-2v2.06c-1.13.12-2.19.46-3.16.97l1.5 1.5C10.16 5.19 11.06 5 12 5c3.87 0 7 3.13 7 7 0 .94-.19 1.84-.52 2.65l1.5 1.5c.5-.96.84-2.02.97-3.15H23v-2h-2.06zM3 4.27l2.04 2.04C3.97 7.62 3.25 9.23 3.06 11H1v2h2.06c.46 4.17 3.77 7.48 7.94 7.94V23h2v-2.06c1.77-.2 3.38-.91 4.69-1.98L19.73 21 21 19.73 4.27 3 3 4.27zm13.27 13.27C15.09 18.45 13.61 19 12 19c-3.87 0-7-3.13-7-7 0-1.61.55-3.09 1.46-4.27l9.81 9.81z' })
);
}; /**
* # LocationDisabled SVG Icon.
* @property {node} children - Elements passed into the SVG Icon.
* @property {string} color - This is the fill color of the svg icon.
* If not specified, this component will default to muiTheme.palette.textColor.
* @property {string} hoverColor - This is the icon color when the mouse hovers over the icon.
* @property {object} style - Override the inline-styles of the root element.
* @property {string} viewBox - Allows you to redefine what the coordinates
* without units mean inside an svg element. For example,
* if the SVG element is 500 (width) by 200 (height),
* and you pass viewBox="0 0 50 20", this means that the coordinates inside
* the svg will go from the top left corner (0,0) to bottom right (50,20)
* and each unit will be worth 10px.
* @example
* <svg width="24" height="24" ><path d="M20.94 11c-.46-4.17-3.77-7.48-7.94-7.94V1h-2v2.06c-1.13.12-2.19.46-3.16.97l1.5 1.5C10.16 5.19 11.06 5 12 5c3.87 0 7 3.13 7 7 0 .94-.19 1.84-.52 2.65l1.5 1.5c.5-.96.84-2.02.97-3.15H23v-2h-2.06zM3 4.27l2.04 2.04C3.97 7.62 3.25 9.23 3.06 11H1v2h2.06c.46 4.17 3.77 7.48 7.94 7.94V23h2v-2.06c1.77-.2 3.38-.91 4.69-1.98L19.73 21 21 19.73 4.27 3 3 4.27zm13.27 13.27C15.09 18.45 13.61 19 12 19c-3.87 0-7-3.13-7-7 0-1.61.55-3.09 1.46-4.27l9.81 9.81z"/></svg>
*/
DeviceLocationDisabled = (0, _pure2.default)(DeviceLocationDisabled);
DeviceLocationDisabled.displayName = 'DeviceLocationDisabled';
DeviceLocationDisabled.muiName = 'SvgIcon';
exports.default = DeviceLocationDisabled;
//# sourceMappingURL=location-disabled.js.map
jquery.wijmo.wijexpander.min.js | var __extends=this.__extends||function(e,t){function r(){this.constructor=e}for(var n in t)t.hasOwnProperty(n)&&(e[n]=t[n]);r.prototype=t.prototype,e.prototype=new r},wijmo;(function(e){(function(t){"use strict";var n=jQuery,r="wijexpander",i="ui-expander-content",s=function(t){function r(){t.apply(this,arguments)}return __extends(r,t),r.prototype._setOption=function(e,t){var r=this.element.find("> ."+i);switch(e){case"contentUrl":t?r.wijContent(t):r.html(this.storeContentHTML);break;case"expandDirection":this._onDirectionChange(t,!0,this.options.expandDirection);break;case"expanded":t?this.expand():this.collapse();return;default:}n.wijmo.widget.prototype._setOption.apply(this,arguments)},r.prototype._innerDisable=function(){t.prototype._innerDisable.call(this),this._toggleDisableExpander(!0)},r.prototype._innerEnable=function(){t.prototype._innerEnable.call(this),this._toggleDisableExpander(!1)},r.prototype._toggleDisableExpander=function(e){var t=this.element.find("> ."+i);this.element.toggleClass(this.options.wijCSS.stateDisabled,e).find("> .ui-expander-header").toggleClass(this.options.wijCSS.stateDisabled,e),t.toggleClass(this.options.wijCSS.stateDisabled,e)},r.prototype._create=function(){var r=this.options,s=this.element.children(),o,u,a=r.wijCSS;window.wijmoApplyWijTouchUtilEvents&&(n=window.wijmoApplyWijTouchUtilEvents(n)),this.element.addClass("wijmo-wijexpander ui-expander "+r.wijCSS.widget+" ui-expander-icons"),this.header=o=n(s[0]),this.content=u=n(s[1]);if(r.expandDirection==="left"||r.expandDirection==="top")o.remove(),o.insertAfter(u);o.addClass("ui-expander-header"),o.attr("role","tab"),u.attr("role","tabpanel"),o.find("> a").length===0&&(o.wrapInner('<a href="javascript:void(null)"></a>'),this.headerLink=o.children()),o.find("> ."+e.getCSSSelector(a.icon)).length===0&&(this.headerIcon=n("<span></span>").addClass(a.icon).insertBefore(n("> a",o)[0])),u.addClass(i+" "+a.content),this.storeContentHTML=u.html(),t.prototype._create.call(this)},r.prototype._init=function(){var t=this.options,n=this.element.find("> ."+i);this._onDirectionChange(t.expandDirection,!1),t.contentUrl&&n.wijContent(this.options.contentUrl),t.expanded?(this.element.find("> .ui-expander-header").addClass(t.wijCSS.stateDefault+" "+t.wijCSS.stateActive).attr({"aria-expanded":"true",tabIndex:0}).addClass(this._headerCornerOpened).find("> ."+e.getCSSSelector(t.wijCSS.icon)).addClass(this._triangleIconOpened),n.addClass("ui-expander-content-active").addClass(this._contentCornerOpened).wijTriggerVisibility()):(n.hide(),this.element.find("> .ui-expander-header").addClass(t.wijCSS.stateDefault+" "+t.wijCSS.cornerAll).attr({"aria-expanded":"false",tabIndex:-1}).find("> ."+e.getCSSSelector(t.wijCSS.icon)).addClass(this._triangleIconClosed)),this._isDisabled()&&(this.element.addClass(t.wijCSS.stateDisabled).find("> .ui-expander-header").addClass(t.wijCSS.stateDisabled),n.addClass(t.wijCSS.stateDisabled)),this._bindLiveEvents()},r.prototype.destroy=function(){var e=this.options,t="wijmo-wijexpander ui-expander "+e.wijCSS.widget+" "+"ui-helper-reset ui-expander-icons "+"ui-expander-"+e.expandDirection+" "+e.wijCSS.stateFocus+" "+e.wijCSS.stateHover+" "+e.wijCSS.stateDisabled,r="ui-expander-header "+e.wijCSS.stateDefault+" "+e.wijCSS.cornerAll+" "+e.wijCSS.stateActive+" "+this._headerCornerOpened,s=i+" "+e.wijCSS.content+" "+"ui-expander-content-active "+e.wijCSS.stateDisabled+" 
"+this._contentCornerOpened;this._unbindLiveEvents(),this.element.removeClass(t),this.headerIcon&&(this.headerIcon.remove(),this.headerIcon=null),this.headerLink&&(this.header.html(this.headerLink.html()),this.headerLink.remove(),this.headerLink=null),this.header.removeClass(r),this.header.removeAttr("role").removeAttr("aria-expanded").removeAttr("tabIndex"),this.content.removeClass(s),this.content.removeAttr("role"),e.contentUrl&&this.content.html(this.storeContentHTML),this.header.insertBefore(this.content),n.wijmo.widget.prototype.destroy.apply(this,arguments)},r.prototype._bindLiveEvents=function(){var e=this.options,t="";this.element.off(".wijexpander"),n.support.isTouchEnabled&&n.support.isTouchEnabled()&&(t="wij"),this.element.on(t+"click.wijexpander",">.ui-expander-header",jQuery.proxy(this._onHeaderClick,this)),this.element.on(t+"mouseenter.wijexpander",".ui-expander-header",function(){n(this).addClass(e.wijCSS.stateHover)}),this.element.on(t+"mouseleave.wijexpander",".ui-expander-header",function(){n(this).removeClass(e.wijCSS.stateHover)}),this.element.on(t+"focus.wijexpander",".ui-expander-header",function(){n(this).addClass(e.wijCSS.stateFocus)}),this.element.on(t+"blur.wijexpander",".ui-expander-header",function(){n(this).removeClass(e.wijCSS.stateFocus)})},r.prototype._unbindLiveEvents=function(){this.element.off(".wijexpander",".ui-expander-header")},r.prototype._onDirectionChange=function(e,t,r){typeof r=="undefined"&&(r=null);var s,o,u,a,f,l,c,h,p=this.options;r&&r!==e&&this.element.removeClass("ui-expander-"+r),t&&(o=this.element.find(".ui-expander-header."+this._headerCornerOpened),o.removeClass(this._headerCornerOpened),u=this.element.find("."+i+"."+this._contentCornerOpened),u.removeClass(this._contentCornerOpened),a=this.element.find("."+this._triangleIconOpened),f=this.element.find("."+this._triangleIconClosed),a.removeClass(this._triangleIconOpened),f.removeClass(this._triangleIconClosed));switch(e){case"top":this._headerCornerOpened="ui-corner-bottom",this._contentCornerOpened="ui-corner-top",this._triangleIconOpened=p.wijCSS.iconArrowUp,this._triangleIconClosed=p.wijCSS.iconArrowRight,s=!0,this.element.removeClass("ui-helper-horizontal"),this.element.addClass("ui-expander-top");break;case"right":this._headerCornerOpened="ui-corner-left",this._contentCornerOpened="ui-corner-right",this._triangleIconOpened=p.wijCSS.iconArrowRight,this._triangleIconClosed=p.wijCSS.iconArrowDown,s=!1,this.element.addClass("ui-helper-horizontal"),this.element.addClass("ui-expander-right");break;case"left":this._headerCornerOpened="ui-corner-right",this._contentCornerOpened="ui-corner-left",this._triangleIconOpened=p.wijCSS.iconArrowLeft,this._triangleIconClosed=p.wijCSS.iconArrowDown,s=!0,this.element.addClass("ui-helper-horizontal"),this.element.addClass("ui-expander-left");break;default:this._headerCornerOpened="ui-corner-top",this._contentCornerOpened="ui-corner-bottom",this._triangleIconOpened=p.wijCSS.iconArrowDown,this._triangleIconClosed=p.wijCSS.iconArrowRight,s=!1,this.element.removeClass("ui-helper-horizontal"),this.element.addClass("ui-expander-bottom")}l=this.element.data("rightToLeft"),this.element.data("rightToLeft",s),t&&(a.addClass(this._triangleIconOpened),f.addClass(this._triangleIconClosed),o.addClass(this._headerCornerOpened),u.addClass(this._contentCornerOpened)),t&&s!==l&&this.element.children(".ui-expander-header").each(function(e,t){h=n(this),s?(c=h.next("."+i),h.remove(),h.insertAfter(c)):(c=h.prev("."+i),h.remove(),h.insertBefore(c))})},r.prototype.collapse=fu
nction(){var t=this.options,r,s,o,u,a=this.element.find("> ."+i);if(!t.allowExpand)return;return this.element.hasAllClasses(t.wijCSS.stateDisabled)?!1:this._trigger("beforeCollapse")?(t.animated?(r={expand:!1,content:a,complete:jQuery.proxy(function(){a.removeClass("ui-expander-content-active"),this._trigger("afterCollapse"),a.css("display","")},this),horizontal:this.element.hasClass("ui-helper-horizontal")},s=n.wijmo.wijexpander.animations,o=t.duration,u=t.animated,u&&!s[u]&&!n.easing[u]&&(u="slide"),s[u]||(s[u]=function(e){this.slide(e,{easing:u,duration:o||700})}),s[u](r)):(a.hide(),this._trigger("afterCollapse")),this.element.find("> .ui-expander-header").removeClass(t.wijCSS.stateActive).removeClass(this._headerCornerOpened).attr({"aria-expanded":"false",tabIndex:-1}).addClass(t.wijCSS.stateDefault+" "+t.wijCSS.cornerAll).find("> ."+e.getCSSSelector(t.wijCSS.icon)).removeClass(this._triangleIconOpened).addClass(this._triangleIconClosed),this.options.expanded=!1,!0):!1},r.prototype.expand=function(){var t=this.options,r,s,o,u,a=this.element.find("> ."+i);if(!t.allowExpand)return;return this.element.hasAllClasses(t.wijCSS.stateDisabled)?!1:this._trigger("beforeExpand")?(t.animated?(r={expand:!0,content:a,complete:jQuery.proxy(function(){a.addClass("ui-expander-content-active").addClass(this._contentCornerOpened).wijTriggerVisibility(),this._trigger("afterExpand"),a.css("display","")},this),horizontal:this.element.hasClass("ui-helper-horizontal")},s=n.wijmo.wijexpander.animations,o=t.duration,u=t.animated,u&&!s[u]&&!n.easing[u]&&(u="slide"),s[u]||(s[u]=function(e){this.slide(e,{easing:u,duration:o||700})}),s[u](r)):(a.show(),this._trigger("afterExpand")),this.element.find("> .ui-expander-header").removeClass(t.wijCSS.cornerAll).addClass(t.wijCSS.stateActive).addClass(this._headerCornerOpened).attr({"aria-expanded":"true",tabIndex:0}).find("> ."+e.getCSSSelector(t.wijCSS.icon)).removeClass(this._triangleIconClosed).addClass(this._triangleIconOpened),this.options.expanded=!0,!0):!1},r.prototype._onHeaderClick=function(e){this.option("expanded",!this.options.expanded)},r}(e.wijmoWidget);t.wijexpander=s;var o=function(){function e(){this.wijMobileCSS={header:"ui-header ui-bar-a",content:"ui-content ui-body ui-body-b"},this.initSelector=":jqmData(role='wijexpander')",this.allowExpand=!0,this.animated="slide",this.contentUrl="",this.expanded=!0,this.expandDirection="bottom",this.beforeCollapse=null,this.beforeExpand=null,this.afterCollapse=null,this.afterExpand=null}return e}();s.prototype.options=n.extend(!0,{},e.wijmoWidget.prototype.options,new o),n.wijmo.registerWidget(r,s.prototype),n.extend(n.wijmo.wijexpander,{animations:{slide:function(e,t){var r;e=n.extend({easing:"swing",duration:300},e,t),e.expand?(e.horizontal?r={width:"show",opacity:"show"}:r={height:"show",opacity:"show"},e.content.stop(!0,!0).animate(r,e)):(e.horizontal?r={width:"hide",opacity:"hide"}:r={height:"hide",opacity:"hide"},e.content.stop(!0,!0).animate(r,e))}}})})(e.expander||(e.expander={}));var t=e.expander})(wijmo||(wijmo={})); |
key.go | // Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build windows
package win32
import (
"fmt"
"syscall"
"unicode/utf16"
"github.com/as/shiny/event/key"
"github.com/as/shiny/screen"
)
type Key = key.Event
var KeyEvent func(hwnd syscall.Handle, e key.Event)
var keyboardLayout = GetKeyboardLayout(0)
func changeLanguage(h syscall.Handle, m uint32, charset, localeID uintptr) {
}
func readRune(vKey uint32, scanCode uint8) rune {
var (
keystate [256]byte
buf [4]uint16
)
if err := GetKeyboardState(&keystate[0]); err != nil {
panic(fmt.Sprintf("win32: %v", err))
}
ret := ToUnicodeEx(vKey, uint32(scanCode), &keystate[0], &buf[0], int32(len(buf)), 0, keyboardLayout)
if ret < 1 {
return -1
}
return utf16.Decode(buf[:ret])[0]
}
func keyModifiers() (m key.Modifiers) {
down := func(x int32) bool {
// GetKeyState gets the key state at the time of the message, so this is what we want.
return GetKeyState(x)&0x80 != 0
}
if down(VkControl) {
m |= key.ModControl
}
if down(VkMenu) {
m |= key.ModAlt
}
if down(VkShift) {
m |= key.ModShift
}
if down(VkLwin) || down(VkRwin) {
m |= key.ModMeta
}
return m
}
type ktab [256]key.Code
func (k *ktab) sendDown(h syscall.Handle, m uint32, w, l uintptr) uintptr {
const prev = 1 << 30
dir := key.DirNone
	if l&prev != prev {
		dir = key.DirPress
	}
screen.Dev.Key <- key.Event{
Rune: readRune(uint32(w), byte(l>>16)),
Code: keytab[byte(w)],
Modifiers: keyModifiers(),
Direction: dir,
}
return 0
}
func (k *ktab) sendUp(h syscall.Handle, m uint32, w, l uintptr) uintptr {
screen.Dev.Key <- key.Event{
Rune: readRune(uint32(w), byte(l>>16)),
Code: keytab[byte(w)],
Modifiers: keyModifiers(),
Direction: key.DirRelease,
}
return 0
}
var keytab = ktab{
0x08: key.CodeDeleteBackspace,
0x09: key.CodeTab,
0x0D: key.CodeReturnEnter,
0x10: key.CodeLeftShift,
0x11: key.CodeLeftControl,
0x12: key.CodeLeftAlt,
0x14: key.CodeCapsLock,
0x1B: key.CodeEscape,
0x20: key.CodeSpacebar,
0x21: key.CodePageUp,
0x22: key.CodePageDown,
0x23: key.CodeEnd,
0x24: key.CodeHome,
0x25: key.CodeLeftArrow,
0x26: key.CodeUpArrow,
0x27: key.CodeRightArrow,
0x28: key.CodeDownArrow,
0x2E: key.CodeDeleteForward,
0x2F: key.CodeHelp,
0x30: key.Code0,
0x31: key.Code1,
0x32: key.Code2,
0x33: key.Code3,
0x34: key.Code4,
0x35: key.Code5,
0x36: key.Code6,
0x37: key.Code7,
0x38: key.Code8,
0x39: key.Code9,
0x41: key.CodeA,
0x42: key.CodeB,
0x43: key.CodeC,
0x44: key.CodeD,
0x45: key.CodeE,
0x46: key.CodeF,
0x47: key.CodeG,
0x48: key.CodeH,
0x49: key.CodeI,
0x4A: key.CodeJ,
0x4B: key.CodeK,
0x4C: key.CodeL,
0x4D: key.CodeM,
0x4E: key.CodeN,
0x4F: key.CodeO,
0x50: key.CodeP,
0x51: key.CodeQ,
0x52: key.CodeR,
0x53: key.CodeS,
0x54: key.CodeT,
0x55: key.CodeU,
0x56: key.CodeV,
0x57: key.CodeW,
0x58: key.CodeX,
0x59: key.CodeY,
0x5A: key.CodeZ,
0x5B: key.CodeLeftGUI,
0x5C: key.CodeRightGUI,
0x60: key.CodeKeypad0,
0x61: key.CodeKeypad1,
0x62: key.CodeKeypad2,
0x63: key.CodeKeypad3,
0x64: key.CodeKeypad4,
0x65: key.CodeKeypad5,
0x66: key.CodeKeypad6,
0x67: key.CodeKeypad7,
0x68: key.CodeKeypad8,
0x69: key.CodeKeypad9,
0x6A: key.CodeKeypadAsterisk,
0x6B: key.CodeKeypadPlusSign,
0x6D: key.CodeKeypadHyphenMinus,
0x6E: key.CodeFullStop,
0x6F: key.CodeKeypadSlash,
0x70: key.CodeF1,
0x71: key.CodeF2,
0x72: key.CodeF3,
0x73: key.CodeF4,
0x74: key.CodeF5,
0x75: key.CodeF6,
0x76: key.CodeF7,
0x77: key.CodeF8,
0x78: key.CodeF9,
0x79: key.CodeF10,
0x7A: key.CodeF11,
0x7B: key.CodeF12,
0x7C: key.CodeF13,
0x7D: key.CodeF14,
0x7E: key.CodeF15,
0x7F: key.CodeF16,
0x80: key.CodeF17,
0x81: key.CodeF18,
0x82: key.CodeF19,
0x83: key.CodeF20,
0x84: key.CodeF21,
0x85: key.CodeF22,
0x86: key.CodeF23,
0x87: key.CodeF24,
0x90: key.CodeKeypadNumLock,
0xA0: key.CodeLeftShift,
0xA1: key.CodeRightShift,
0xA2: key.CodeLeftControl,
0xA3: key.CodeRightControl,
0xAD: key.CodeMute,
0xAE: key.CodeVolumeDown,
0xAF: key.CodeVolumeUp,
0xBA: key.CodeSemicolon,
0xBB: key.CodeEqualSign,
0xBC: key.CodeComma,
0xBD: key.CodeHyphenMinus,
0xBE: key.CodeFullStop,
0xBF: key.CodeSlash,
0xC0: key.CodeGraveAccent,
0xDB: key.CodeLeftSquareBracket,
0xDC: key.CodeBackslash,
0xDD: key.CodeRightSquareBracket,
0xDE: key.CodeApostrophe,
	0xDF: key.CodeUnknown,
}
memdb_iterator.go | package db
import (
"bytes"
"context"
"github.com/google/btree"
)
const (
// Size of the channel buffer between traversal goroutine and iterator. Using an unbuffered
// channel causes two context switches per item sent, while buffering allows more work per
// context switch. Tuned with benchmarks.
chBufferSize = 64
)
// memDBIterator is a memDB iterator.
type memDBIterator struct {
ch <-chan *item
cancel context.CancelFunc
item *item
start []byte
end []byte
}
var _ Iterator = (*memDBIterator)(nil)
// newMemDBIterator creates a new memDBIterator.
func newMemDBIterator(db *MemDB, start []byte, end []byte, reverse bool) *memDBIterator {
ctx, cancel := context.WithCancel(context.Background())
ch := make(chan *item, chBufferSize)
iter := &memDBIterator{
ch: ch,
cancel: cancel,
start: start,
end: end,
}
db.mtx.RLock()
go func() {
defer db.mtx.RUnlock()
// Because we use [start, end) for reverse ranges, while btree uses (start, end], we need
// the following variables to handle some reverse iteration conditions ourselves.
var (
skipEqual []byte
abortLessThan []byte
)
visitor := func(i btree.Item) bool {
item := i.(*item)
if skipEqual != nil && bytes.Equal(item.key, skipEqual) {
skipEqual = nil
return true
}
if abortLessThan != nil && bytes.Compare(item.key, abortLessThan) == -1 {
return false
}
select {
case <-ctx.Done():
return false
case ch <- item:
return true
}
}
switch {
case start == nil && end == nil && !reverse:
db.btree.Ascend(visitor)
case start == nil && end == nil && reverse:
db.btree.Descend(visitor)
case end == nil && !reverse:
// must handle this specially, since nil is considered less than anything else
db.btree.AscendGreaterOrEqual(newKey(start), visitor)
case !reverse:
db.btree.AscendRange(newKey(start), newKey(end), visitor)
case end == nil:
// abort after start, since we use [start, end) while btree uses (start, end]
abortLessThan = start
db.btree.Descend(visitor)
default:
// skip end and abort after start, since we use [start, end) while btree uses (start, end]
skipEqual = end
abortLessThan = start
db.btree.DescendLessOrEqual(newKey(end), visitor)
}
close(ch)
}()
// prime the iterator with the first value, if any
if item, ok := <-ch; ok {
iter.item = item
}
return iter
}
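
// Usage sketch (NewMemDB and Set come from this package's MemDB
// implementation, not shown here): iterate the half-open range [a, c).
//
//	db := NewMemDB()
//	_ = db.Set([]byte("a"), []byte("1"))
//	_ = db.Set([]byte("b"), []byte("2"))
//	itr := newMemDBIterator(db, []byte("a"), []byte("c"), false)
//	defer itr.Close()
//	for ; itr.Valid(); itr.Next() {
//		key, value := itr.Key(), itr.Value()
//		_, _ = key, value
//	}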
// Close implements Iterator.
func (i *memDBIterator) Close() {
i.cancel()
for range i.ch { // drain channel
}
i.item = nil
}
// Domain implements Iterator.
func (i *memDBIterator) Domain() ([]byte, []byte) {
return i.start, i.end
}
// Valid implements Iterator.
func (i *memDBIterator) Valid() bool {
return i.item != nil
}
// Next implements Iterator.
func (i *memDBIterator) Next() {
item, ok := <-i.ch
switch {
case ok:
i.item = item
	case i.item == nil:
		panic("called Next() on invalid iterator")
default:
i.item = nil
}
}
// Error implements Iterator.
func (i *memDBIterator) Error() error {
return nil // famous last words
}
// Key implements Iterator.
func (i *memDBIterator) Key() []byte {
if i.item == nil {
panic("called Key() on invalid iterator")
}
return i.item.key
}
// Value implements Iterator.
func (i *memDBIterator) Value() []byte {
if i.item == nil {
panic("called Value() on invalid iterator")
}
return i.item.value
}
tsd.d.ts | /// <reference path="commander/commander.d.ts" />
/// <reference path="node/node.d.ts" />
/// <reference path="es6-promise/es6-promise.d.ts" />
system_info.py | #!/usr/bin/env python3
"""
This file defines a set of system_info classes for getting
information about various resources (libraries, library directories,
include directories, etc.) in the system. Usage:
info_dict = get_info(<name>)
where <name> is a string 'atlas','x11','fftw','lapack','blas',
'lapack_src', 'blas_src', etc. For a complete list of allowed names,
see the definition of get_info() function below.
Returned info_dict is a dictionary which is compatible with
distutils.setup keyword arguments. If info_dict == {}, then the
asked resource is not available (system_info could not find it).
Several *_info classes specify an environment variable to specify
the locations of software. When setting the corresponding environment
variable to 'None' then the software will be ignored, even when it
is available in system.
Global parameters:
system_info.search_static_first - search static libraries (.a)
in precedence to shared ones (.so, .sl) if enabled.
system_info.verbosity - output the results to stdout if enabled.
The file 'site.cfg' is looked for in
1) Directory of main setup.py file being run.
2) Home directory of user running the setup.py file as ~/.numpy-site.cfg
3) System wide directory (location of this file...)
The first one found is used to get system configuration options The
format is that used by ConfigParser (i.e., Windows .INI style). The
section ALL is not intended for general use.
Appropriate defaults are used if nothing is specified.
The order of finding the locations of resources is the following:
1. environment variable
2. section in site.cfg
3. DEFAULT section in site.cfg
4. System default search paths (see ``default_*`` variables below).
Only the first complete match is returned.
Currently, the following classes are available, along with their section names:
Numeric_info:Numeric
_numpy_info:Numeric
_pkg_config_info:None
accelerate_info:accelerate
agg2_info:agg2
amd_info:amd
atlas_3_10_blas_info:atlas
atlas_3_10_blas_threads_info:atlas
atlas_3_10_info:atlas
atlas_3_10_threads_info:atlas
atlas_blas_info:atlas
atlas_blas_threads_info:atlas
atlas_info:atlas
atlas_threads_info:atlas
blas64__opt_info:ALL # usage recommended (general ILP64 BLAS, 64_ symbol suffix)
blas_ilp64_opt_info:ALL # usage recommended (general ILP64 BLAS)
blas_ilp64_plain_opt_info:ALL # usage recommended (general ILP64 BLAS, no symbol suffix)
blas_info:blas
blas_mkl_info:mkl
blas_opt_info:ALL # usage recommended
blas_src_info:blas_src
blis_info:blis
boost_python_info:boost_python
dfftw_info:fftw
dfftw_threads_info:fftw
djbfft_info:djbfft
f2py_info:ALL
fft_opt_info:ALL
fftw2_info:fftw
fftw3_info:fftw3
fftw_info:fftw
fftw_threads_info:fftw
flame_info:flame
freetype2_info:freetype2
gdk_2_info:gdk_2
gdk_info:gdk
gdk_pixbuf_2_info:gdk_pixbuf_2
gdk_pixbuf_xlib_2_info:gdk_pixbuf_xlib_2
gdk_x11_2_info:gdk_x11_2
gtkp_2_info:gtkp_2
gtkp_x11_2_info:gtkp_x11_2
lapack64__opt_info:ALL # usage recommended (general ILP64 LAPACK, 64_ symbol suffix)
lapack_atlas_3_10_info:atlas
lapack_atlas_3_10_threads_info:atlas
lapack_atlas_info:atlas
lapack_atlas_threads_info:atlas
lapack_ilp64_opt_info:ALL # usage recommended (general ILP64 LAPACK)
lapack_ilp64_plain_opt_info:ALL # usage recommended (general ILP64 LAPACK, no symbol suffix)
lapack_info:lapack
lapack_mkl_info:mkl
lapack_opt_info:ALL # usage recommended
lapack_src_info:lapack_src
mkl_info:mkl
numarray_info:numarray
numerix_info:numerix
numpy_info:numpy
openblas64__info:openblas64_
openblas64__lapack_info:openblas64_
openblas_clapack_info:openblas
openblas_ilp64_info:openblas_ilp64
openblas_ilp64_lapack_info:openblas_ilp64
openblas_info:openblas
openblas_lapack_info:openblas
sfftw_info:fftw
sfftw_threads_info:fftw
system_info:ALL
umfpack_info:umfpack
wx_info:wx
x11_info:x11
xft_info:xft
Note that blas_opt_info and lapack_opt_info honor the NPY_BLAS_ORDER
and NPY_LAPACK_ORDER environment variables to determine the order in which
specific BLAS and LAPACK libraries are searched for.
This search (or autodetection) can be bypassed by defining the environment
variables NPY_BLAS_LIBS and NPY_LAPACK_LIBS, which should then contain the
exact linker flags to use (language will be set to F77). This is useful,
for example, for building against Netlib BLAS/LAPACK or stub files, in
order to be able to switch BLAS and LAPACK implementations at runtime. If
using this to build NumPy itself, it is
recommended to also define NPY_CBLAS_LIBS (assuming your BLAS library has a
CBLAS interface) to enable CBLAS usage for matrix multiplication (unoptimized
otherwise).
Example:
----------
[DEFAULT]
# default section
library_dirs = /usr/lib:/usr/local/lib:/opt/lib
include_dirs = /usr/include:/usr/local/include:/opt/include
src_dirs = /usr/local/src:/opt/src
# search static libraries (.a) in preference to shared ones (.so)
search_static_first = 0
[fftw]
libraries = rfftw, fftw
[atlas]
library_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas
# for overriding the names of the atlas libraries
libraries = lapack, f77blas, cblas, atlas
[x11]
library_dirs = /usr/X11R6/lib
include_dirs = /usr/X11R6/include
----------
Note that the ``libraries`` key is the default setting for libraries.
Authors:
Pearu Peterson <[email protected]>, February 2002
David M. Cooke <[email protected]>, April 2002
Copyright 2002 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
"""
import sys
import os
import re
import copy
import warnings
import subprocess
import textwrap
from glob import glob
from functools import reduce
from configparser import NoOptionError
from configparser import RawConfigParser as ConfigParser
# It seems that some people are importing ConfigParser from here, so it is
# good to keep its class name. Use of RawConfigParser is needed in
# order to be able to load path names with percent in them, like
# `feature%2Fcool` which is common on git flow branch names.
from distutils.errors import DistutilsError
from distutils.dist import Distribution
import sysconfig
from numpy.distutils import log
from distutils.util import get_platform
from numpy.distutils.exec_command import (
find_executable, filepath_from_subprocess_output,
)
from numpy.distutils.misc_util import (is_sequence, is_string,
get_shared_lib_extension)
from numpy.distutils.command.config import config as cmd_config
from numpy.distutils import customized_ccompiler as _customized_ccompiler
from numpy.distutils import _shell_utils
import distutils.ccompiler
import tempfile
import shutil
__all__ = ['system_info']
# Determine number of bits
import platform
_bits = {'32bit': 32, '64bit': 64}
platform_bits = _bits[platform.architecture()[0]]
global_compiler = None
def customized_ccompiler():
global global_compiler
if not global_compiler:
global_compiler = _customized_ccompiler()
return global_compiler
def _c_string_literal(s):
"""
Convert a python string into a literal suitable for inclusion into C code
"""
# only these three characters are forbidden in C strings
s = s.replace('\\', r'\\')
s = s.replace('"', r'\"')
s = s.replace('\n', r'\n')
return '"{}"'.format(s)
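# For instance, _c_string_literal('say "hi"') == '"say \\"hi\\""', ready to
# be pasted into generated C source.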
def libpaths(paths, bits):
"""Return a list of library paths valid on 32 or 64 bit systems.
Inputs:
paths : sequence
A sequence of strings (typically paths)
bits : int
An integer, the only valid values are 32 or 64. A ValueError exception
is raised otherwise.
Examples:
Consider a list of directories
>>> paths = ['/usr/X11R6/lib','/usr/X11/lib','/usr/lib']
For a 32-bit platform, this is already valid:
>>> np.distutils.system_info.libpaths(paths,32)
['/usr/X11R6/lib', '/usr/X11/lib', '/usr/lib']
On 64 bits, we prepend the '64' postfix
>>> np.distutils.system_info.libpaths(paths,64)
['/usr/X11R6/lib64', '/usr/X11R6/lib', '/usr/X11/lib64', '/usr/X11/lib',
'/usr/lib64', '/usr/lib']
"""
if bits not in (32, 64):
raise ValueError("Invalid bit size in libpaths: 32 or 64 only")
# Handle 32bit case
if bits == 32:
return paths
# Handle 64bit case
out = []
for p in paths:
out.extend([p + '64', p])
return out
if sys.platform == 'win32':
default_lib_dirs = ['C:\\',
os.path.join(sysconfig.get_config_var('exec_prefix'),
'libs')]
default_runtime_dirs = []
default_include_dirs = []
default_src_dirs = ['.']
default_x11_lib_dirs = []
default_x11_include_dirs = []
_include_dirs = [
'include',
'include/suitesparse',
]
_lib_dirs = [
'lib',
]
_include_dirs = [d.replace('/', os.sep) for d in _include_dirs]
_lib_dirs = [d.replace('/', os.sep) for d in _lib_dirs]
def add_system_root(library_root):
"""Add a package manager root to the include directories"""
global default_lib_dirs
global default_include_dirs
library_root = os.path.normpath(library_root)
default_lib_dirs.extend(
os.path.join(library_root, d) for d in _lib_dirs)
default_include_dirs.extend(
os.path.join(library_root, d) for d in _include_dirs)
# VCpkg is the de-facto package manager on windows for C/C++
# libraries. If it is on the PATH, then we append its paths here.
vcpkg = shutil.which('vcpkg')
if vcpkg:
vcpkg_dir = os.path.dirname(vcpkg)
if platform.architecture()[0] == '32bit':
specifier = 'x86'
else:
specifier = 'x64'
vcpkg_installed = os.path.join(vcpkg_dir, 'installed')
for vcpkg_root in [
os.path.join(vcpkg_installed, specifier + '-windows'),
os.path.join(vcpkg_installed, specifier + '-windows-static'),
]:
add_system_root(vcpkg_root)
# Conda is another popular package manager that provides libraries
conda = shutil.which('conda')
if conda:
conda_dir = os.path.dirname(conda)
add_system_root(os.path.join(conda_dir, '..', 'Library'))
add_system_root(os.path.join(conda_dir, 'Library'))
else:
default_lib_dirs = libpaths(['/usr/local/lib', '/opt/lib', '/usr/lib',
'/opt/local/lib', '/sw/lib'], platform_bits)
default_runtime_dirs = []
default_include_dirs = ['/usr/local/include',
'/opt/include',
# path of umfpack under macports
'/opt/local/include/ufsparse',
'/opt/local/include', '/sw/include',
'/usr/include/suitesparse']
default_src_dirs = ['.', '/usr/local/src', '/opt/src', '/sw/src']
default_x11_lib_dirs = libpaths(['/usr/X11R6/lib', '/usr/X11/lib',
'/usr/lib'], platform_bits)
default_x11_include_dirs = ['/usr/X11R6/include', '/usr/X11/include']
if os.path.exists('/usr/lib/X11'):
globbed_x11_dir = glob('/usr/lib/*/libX11.so')
if globbed_x11_dir:
x11_so_dir = os.path.split(globbed_x11_dir[0])[0]
default_x11_lib_dirs.extend([x11_so_dir, '/usr/lib/X11'])
default_x11_include_dirs.extend(['/usr/lib/X11/include',
'/usr/include/X11'])
    # iOS: we need to skip this call when building for iPhone targets
    # (guard PLATFORM being unset so startswith does not fail on None)
    if not os.getenv('PLATFORM', '').startswith('iphone'):
with open(os.devnull, 'w') as tmp:
try:
p = subprocess.Popen(["gcc", "-print-multiarch"], stdout=subprocess.PIPE,
stderr=tmp)
except (OSError, DistutilsError):
# OSError if gcc is not installed, or SandboxViolation (DistutilsError
# subclass) if an old setuptools bug is triggered (see gh-3160).
pass
else:
triplet = str(p.communicate()[0].decode().strip())
if p.returncode == 0:
# gcc supports the "-print-multiarch" option
default_x11_lib_dirs += [os.path.join("/usr/lib/", triplet)]
default_lib_dirs += [os.path.join("/usr/lib/", triplet)]
if os.path.join(sys.prefix, 'lib') not in default_lib_dirs:
default_lib_dirs.insert(0, os.path.join(sys.prefix, 'lib'))
default_include_dirs.append(os.path.join(sys.prefix, 'include'))
default_src_dirs.append(os.path.join(sys.prefix, 'src'))
default_lib_dirs = [_m for _m in default_lib_dirs if os.path.isdir(_m)]
default_runtime_dirs = [_m for _m in default_runtime_dirs if os.path.isdir(_m)]
default_include_dirs = [_m for _m in default_include_dirs if os.path.isdir(_m)]
default_src_dirs = [_m for _m in default_src_dirs if os.path.isdir(_m)]
so_ext = get_shared_lib_extension()
def get_standard_file(fname):
"""Returns a list of files named 'fname' from
1) System-wide directory (directory-location of this module)
2) Users HOME directory (os.environ['HOME'])
3) Local directory
"""
# System-wide file
filenames = []
try:
f = __file__
except NameError:
f = sys.argv[0]
sysfile = os.path.join(os.path.split(os.path.abspath(f))[0],
fname)
if os.path.isfile(sysfile):
filenames.append(sysfile)
# Home directory
# And look for the user config file
try:
f = os.path.expanduser('~')
except KeyError:
pass
else:
user_file = os.path.join(f, fname)
if os.path.isfile(user_file):
filenames.append(user_file)
# Local file
if os.path.isfile(fname):
filenames.append(os.path.abspath(fname))
return filenames
def _parse_env_order(base_order, env):
""" Parse an environment variable `env` by splitting with "," and only returning elements from `base_order`
    This method splits the environment variable and checks each of its
    elements against `base_order`.
The items in the environment variable may be negated via '^item' or '!itema,itemb'.
It must start with ^/! to negate all options.
Raises
------
ValueError: for mixed negated and non-negated orders or multiple negated orders
Parameters
----------
base_order : list of str
the base list of orders
env : str
the environment variable to be parsed, if none is found, `base_order` is returned
Returns
-------
allow_order : list of str
allowed orders in lower-case
unknown_order : list of str
for values not overlapping with `base_order`
"""
order_str = os.environ.get(env, None)
# ensure all base-orders are lower-case (for easier comparison)
base_order = [order.lower() for order in base_order]
if order_str is None:
return base_order, []
neg = order_str.startswith('^') or order_str.startswith('!')
# Check format
order_str_l = list(order_str)
sum_neg = order_str_l.count('^') + order_str_l.count('!')
if neg:
if sum_neg > 1:
raise ValueError(f"Environment variable '{env}' may only contain a single (prefixed) negation: {order_str}")
# remove prefix
order_str = order_str[1:]
elif sum_neg > 0:
        raise ValueError(f"Environment variable '{env}' may not mix negated and non-negated items: {order_str}")
# Split and lower case
orders = order_str.lower().split(',')
# to inform callee about non-overlapping elements
unknown_order = []
# if negated, we have to remove from the order
if neg:
allow_order = base_order.copy()
for order in orders:
if not order:
continue
if order not in base_order:
unknown_order.append(order)
continue
if order in allow_order:
allow_order.remove(order)
else:
allow_order = []
for order in orders:
if not order:
continue
if order not in base_order:
unknown_order.append(order)
continue
if order not in allow_order:
allow_order.append(order)
return allow_order, unknown_order
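# Illustration with base_order=['mkl', 'openblas', 'atlas'] (hypothetical
# values for the env variable):
#   'openblas,mkl' -> (['openblas', 'mkl'], [])
#   '^atlas'       -> (['mkl', 'openblas'], [])
#   '!atlas,foo'   -> (['mkl', 'openblas'], ['foo'])  # 'foo' is unknown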
def get_info(name, notfound_action=0):
"""
notfound_action:
0 - do nothing
1 - display warning message
2 - raise error
"""
cl = {'atlas': atlas_info, # use lapack_opt or blas_opt instead
'atlas_threads': atlas_threads_info, # ditto
'atlas_blas': atlas_blas_info,
'atlas_blas_threads': atlas_blas_threads_info,
'lapack_atlas': lapack_atlas_info, # use lapack_opt instead
'lapack_atlas_threads': lapack_atlas_threads_info, # ditto
'atlas_3_10': atlas_3_10_info, # use lapack_opt or blas_opt instead
'atlas_3_10_threads': atlas_3_10_threads_info, # ditto
'atlas_3_10_blas': atlas_3_10_blas_info,
'atlas_3_10_blas_threads': atlas_3_10_blas_threads_info,
'lapack_atlas_3_10': lapack_atlas_3_10_info, # use lapack_opt instead
'lapack_atlas_3_10_threads': lapack_atlas_3_10_threads_info, # ditto
'flame': flame_info, # use lapack_opt instead
'mkl': mkl_info,
# openblas which may or may not have embedded lapack
'openblas': openblas_info, # use blas_opt instead
# openblas with embedded lapack
'openblas_lapack': openblas_lapack_info, # use blas_opt instead
'openblas_clapack': openblas_clapack_info, # use blas_opt instead
'blis': blis_info, # use blas_opt instead
'lapack_mkl': lapack_mkl_info, # use lapack_opt instead
'blas_mkl': blas_mkl_info, # use blas_opt instead
'accelerate': accelerate_info, # use blas_opt instead
'openblas64_': openblas64__info,
'openblas64__lapack': openblas64__lapack_info,
'openblas_ilp64': openblas_ilp64_info,
'openblas_ilp64_lapack': openblas_ilp64_lapack_info,
'x11': x11_info,
'fft_opt': fft_opt_info,
'fftw': fftw_info,
'fftw2': fftw2_info,
'fftw3': fftw3_info,
'dfftw': dfftw_info,
'sfftw': sfftw_info,
'fftw_threads': fftw_threads_info,
'dfftw_threads': dfftw_threads_info,
'sfftw_threads': sfftw_threads_info,
'djbfft': djbfft_info,
'blas': blas_info, # use blas_opt instead
'lapack': lapack_info, # use lapack_opt instead
'lapack_src': lapack_src_info,
'blas_src': blas_src_info,
'numpy': numpy_info,
'f2py': f2py_info,
'Numeric': Numeric_info,
'numeric': Numeric_info,
'numarray': numarray_info,
'numerix': numerix_info,
'lapack_opt': lapack_opt_info,
'lapack_ilp64_opt': lapack_ilp64_opt_info,
'lapack_ilp64_plain_opt': lapack_ilp64_plain_opt_info,
'lapack64__opt': lapack64__opt_info,
'blas_opt': blas_opt_info,
'blas_ilp64_opt': blas_ilp64_opt_info,
'blas_ilp64_plain_opt': blas_ilp64_plain_opt_info,
'blas64__opt': blas64__opt_info,
'boost_python': boost_python_info,
'agg2': agg2_info,
'wx': wx_info,
'gdk_pixbuf_xlib_2': gdk_pixbuf_xlib_2_info,
'gdk-pixbuf-xlib-2.0': gdk_pixbuf_xlib_2_info,
'gdk_pixbuf_2': gdk_pixbuf_2_info,
'gdk-pixbuf-2.0': gdk_pixbuf_2_info,
'gdk': gdk_info,
'gdk_2': gdk_2_info,
'gdk-2.0': gdk_2_info,
'gdk_x11_2': gdk_x11_2_info,
'gdk-x11-2.0': gdk_x11_2_info,
'gtkp_x11_2': gtkp_x11_2_info,
'gtk+-x11-2.0': gtkp_x11_2_info,
'gtkp_2': gtkp_2_info,
'gtk+-2.0': gtkp_2_info,
'xft': xft_info,
'freetype2': freetype2_info,
'umfpack': umfpack_info,
'amd': amd_info,
}.get(name.lower(), system_info)
return cl().get_info(notfound_action)
class NotFoundError(DistutilsError):
"""Some third-party program or library is not found."""
class AliasedOptionError(DistutilsError):
"""
    Aliased options should not both be present in a config file.
In section '{section}' we found multiple appearances of options {options}."""
class AtlasNotFoundError(NotFoundError):
"""
Atlas (http://github.com/math-atlas/math-atlas) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [atlas]) or by setting
the ATLAS environment variable."""
class FlameNotFoundError(NotFoundError):
"""
FLAME (http://www.cs.utexas.edu/~flame/web/) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [flame])."""
class LapackNotFoundError(NotFoundError):
"""
Lapack (http://www.netlib.org/lapack/) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [lapack]) or by setting
the LAPACK environment variable."""
class LapackSrcNotFoundError(LapackNotFoundError):
"""
Lapack (http://www.netlib.org/lapack/) sources not found.
Directories to search for the sources can be specified in the
numpy/distutils/site.cfg file (section [lapack_src]) or by setting
the LAPACK_SRC environment variable."""
class LapackILP64NotFoundError(NotFoundError):
"""
64-bit Lapack libraries not found.
Known libraries in numpy/distutils/site.cfg file are:
openblas64_, openblas_ilp64
"""
class BlasOptNotFoundError(NotFoundError):
"""
Optimized (vendor) Blas libraries are not found.
    Falls back to the netlib BLAS library, which has worse performance.
    Better performance should be easily gained by switching to an
    optimized BLAS library."""
class BlasNotFoundError(NotFoundError):
"""
Blas (http://www.netlib.org/blas/) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [blas]) or by setting
the BLAS environment variable."""
class BlasILP64NotFoundError(NotFoundError):
"""
64-bit Blas libraries not found.
Known libraries in numpy/distutils/site.cfg file are:
openblas64_, openblas_ilp64
"""
class BlasSrcNotFoundError(BlasNotFoundError):
"""
Blas (http://www.netlib.org/blas/) sources not found.
Directories to search for the sources can be specified in the
numpy/distutils/site.cfg file (section [blas_src]) or by setting
the BLAS_SRC environment variable."""
class FFTWNotFoundError(NotFoundError):
"""
FFTW (http://www.fftw.org/) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [fftw]) or by setting
the FFTW environment variable."""
class DJBFFTNotFoundError(NotFoundError):
"""
DJBFFT (https://cr.yp.to/djbfft.html) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [djbfft]) or by setting
the DJBFFT environment variable."""
class NumericNotFoundError(NotFoundError):
"""
Numeric (https://www.numpy.org/) module not found.
Get it from above location, install it, and retry setup.py."""
class X11NotFoundError(NotFoundError):
"""X11 libraries not found."""
class UmfpackNotFoundError(NotFoundError):
"""
UMFPACK sparse solver (https://www.cise.ufl.edu/research/sparse/umfpack/)
not found. Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [umfpack]) or by setting
the UMFPACK environment variable."""
class system_info:
""" get_info() is the only public method. Don't use others.
"""
dir_env_var = None
# XXX: search_static_first is disabled by default, may disappear in
# future unless it is proved to be useful.
search_static_first = 0
# The base-class section name is a random word "ALL" and is not really
# intended for general use. It cannot be None nor can it be DEFAULT as
# these break the ConfigParser. See gh-15338
section = 'ALL'
saved_results = {}
notfounderror = NotFoundError
def __init__(self,
default_lib_dirs=default_lib_dirs,
default_include_dirs=default_include_dirs,
):
self.__class__.info = {}
self.local_prefixes = []
defaults = {'library_dirs': os.pathsep.join(default_lib_dirs),
'include_dirs': os.pathsep.join(default_include_dirs),
'runtime_library_dirs': os.pathsep.join(default_runtime_dirs),
'rpath': '',
'src_dirs': os.pathsep.join(default_src_dirs),
'search_static_first': str(self.search_static_first),
'extra_compile_args': '', 'extra_link_args': ''}
self.cp = ConfigParser(defaults)
self.files = []
self.files.extend(get_standard_file('.numpy-site.cfg'))
self.files.extend(get_standard_file('site.cfg'))
self.parse_config_files()
if self.section is not None:
self.search_static_first = self.cp.getboolean(
self.section, 'search_static_first')
assert isinstance(self.search_static_first, int)
def parse_config_files(self):
self.cp.read(self.files)
if not self.cp.has_section(self.section):
if self.section is not None:
self.cp.add_section(self.section)
def calc_libraries_info(self):
libs = self.get_libraries()
dirs = self.get_lib_dirs()
# The extensions use runtime_library_dirs
r_dirs = self.get_runtime_lib_dirs()
# Intrinsic distutils use rpath, we simply append both entries
# as though they were one entry
r_dirs.extend(self.get_runtime_lib_dirs(key='rpath'))
info = {}
for lib in libs:
i = self.check_libs(dirs, [lib])
if i is not None:
dict_append(info, **i)
else:
log.info('Library %s was not found. Ignoring' % (lib))
if r_dirs:
i = self.check_libs(r_dirs, [lib])
if i is not None:
# Swap library keywords found to runtime_library_dirs
# the libraries are insisting on the user having defined
# them using the library_dirs, and not necessarily by
# runtime_library_dirs
del i['libraries']
i['runtime_library_dirs'] = i.pop('library_dirs')
dict_append(info, **i)
else:
log.info('Runtime library %s was not found. Ignoring' % (lib))
return info
def set_info(self, **info):
if info:
lib_info = self.calc_libraries_info()
dict_append(info, **lib_info)
# Update extra information
extra_info = self.calc_extra_info()
dict_append(info, **extra_info)
self.saved_results[self.__class__.__name__] = info
def get_option_single(self, *options):
""" Ensure that only one of `options` are found in the section
Parameters
----------
*options : list of str
a list of options to be found in the section (``self.section``)
Returns
-------
str :
the option that is uniquely found in the section
Raises
------
AliasedOptionError :
in case more than one of the options are found
"""
found = [self.cp.has_option(self.section, opt) for opt in options]
if sum(found) == 1:
return options[found.index(True)]
elif sum(found) == 0:
# nothing is found anyways
return options[0]
# Else we have more than 1 key found
if AliasedOptionError.__doc__ is None:
raise AliasedOptionError()
raise AliasedOptionError(AliasedOptionError.__doc__.format(
section=self.section, options='[{}]'.format(', '.join(options))))
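    # For example, if section [openblas] defined both `openblas_libs` and
    # `libraries`, get_option_single('openblas_libs', 'libraries') would
    # raise AliasedOptionError; with at most one present it returns the
    # option name to read.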
def has_info(self):
return self.__class__.__name__ in self.saved_results
def calc_extra_info(self):
""" Updates the information in the current information with
respect to these flags:
extra_compile_args
extra_link_args
"""
info = {}
for key in ['extra_compile_args', 'extra_link_args']:
# Get values
opt = self.cp.get(self.section, key)
opt = _shell_utils.NativeParser.split(opt)
if opt:
tmp = {key: opt}
dict_append(info, **tmp)
return info
def get_info(self, notfound_action=0):
""" Return a dictionary with items that are compatible
with numpy.distutils.setup keyword arguments.
"""
flag = 0
if not self.has_info():
flag = 1
log.info(self.__class__.__name__ + ':')
if hasattr(self, 'calc_info'):
self.calc_info()
if notfound_action:
if not self.has_info():
if notfound_action == 1:
warnings.warn(self.notfounderror.__doc__, stacklevel=2)
elif notfound_action == 2:
raise self.notfounderror(self.notfounderror.__doc__)
else:
raise ValueError(repr(notfound_action))
if not self.has_info():
log.info(' NOT AVAILABLE')
self.set_info()
else:
log.info(' FOUND:')
res = self.saved_results.get(self.__class__.__name__)
if log.get_threshold() <= log.INFO and flag:
for k, v in res.items():
v = str(v)
if k in ['sources', 'libraries'] and len(v) > 270:
v = v[:120] + '...\n...\n...' + v[-120:]
log.info(' %s = %s', k, v)
log.info('')
return copy.deepcopy(res)
def get_paths(self, section, key):
dirs = self.cp.get(section, key).split(os.pathsep)
env_var = self.dir_env_var
if env_var:
if is_sequence(env_var):
e0 = env_var[-1]
for e in env_var:
if e in os.environ:
e0 = e
break
if not env_var[0] == e0:
log.info('Setting %s=%s' % (env_var[0], e0))
env_var = e0
if env_var and env_var in os.environ:
d = os.environ[env_var]
if d == 'None':
log.info('Disabled %s: %s',
self.__class__.__name__, '(%s is None)'
% (env_var,))
return []
if os.path.isfile(d):
dirs = [os.path.dirname(d)] + dirs
l = getattr(self, '_lib_names', [])
if len(l) == 1:
b = os.path.basename(d)
b = os.path.splitext(b)[0]
if b[:3] == 'lib':
log.info('Replacing _lib_names[0]==%r with %r' \
% (self._lib_names[0], b[3:]))
self._lib_names[0] = b[3:]
else:
ds = d.split(os.pathsep)
ds2 = []
for d in ds:
if os.path.isdir(d):
ds2.append(d)
for dd in ['include', 'lib']:
d1 = os.path.join(d, dd)
if os.path.isdir(d1):
ds2.append(d1)
dirs = ds2 + dirs
default_dirs = self.cp.get(self.section, key).split(os.pathsep)
dirs.extend(default_dirs)
ret = []
for d in dirs:
if len(d) > 0 and not os.path.isdir(d):
warnings.warn('Specified path %s is invalid.' % d, stacklevel=2)
continue
if d not in ret:
ret.append(d)
log.debug('( %s = %s )', key, ':'.join(ret))
return ret
def get_lib_dirs(self, key='library_dirs'):
return self.get_paths(self.section, key)
def get_runtime_lib_dirs(self, key='runtime_library_dirs'):
path = self.get_paths(self.section, key)
if path == ['']:
path = []
return path
def get_include_dirs(self, key='include_dirs'):
return self.get_paths(self.section, key)
def get_src_dirs(self, key='src_dirs'):
return self.get_paths(self.section, key)
def get_libs(self, key, default):
try:
libs = self.cp.get(self.section, key)
except NoOptionError:
if not default:
return []
if is_string(default):
return [default]
return default
return [b for b in [a.strip() for a in libs.split(',')] if b]
def get_libraries(self, key='libraries'):
if hasattr(self, '_lib_names'):
return self.get_libs(key, default=self._lib_names)
else:
return self.get_libs(key, '')
def library_extensions(self):
c = customized_ccompiler()
static_exts = []
if c.compiler_type != 'msvc':
# MSVC doesn't understand binutils
static_exts.append('.a')
if sys.platform == 'win32':
static_exts.append('.lib') # .lib is used by MSVC and others
if self.search_static_first:
exts = static_exts + [so_ext]
else:
exts = [so_ext] + static_exts
if sys.platform == 'cygwin':
exts.append('.dll.a')
if sys.platform == 'darwin':
exts.append('.dylib')
return exts
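    # e.g. on a default Linux build this yields ['.so', '.a'], or
    # ['.a', '.so'] when search_static_first is enabled.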
def check_libs(self, lib_dirs, libs, opt_libs=[]):
"""If static or shared libraries are available then return
their info dictionary.
Checks for all libraries as shared libraries first, then
static (or vice versa if self.search_static_first is True).
"""
exts = self.library_extensions()
info = None
for ext in exts:
info = self._check_libs(lib_dirs, libs, opt_libs, [ext])
if info is not None:
break
if not info:
log.info(' libraries %s not found in %s', ','.join(libs),
lib_dirs)
return info
def check_libs2(self, lib_dirs, libs, opt_libs=[]):
"""If static or shared libraries are available then return
their info dictionary.
Checks each library for shared or static.
"""
exts = self.library_extensions()
info = self._check_libs(lib_dirs, libs, opt_libs, exts)
if not info:
log.info(' libraries %s not found in %s', ','.join(libs),
lib_dirs)
return info
def _find_lib(self, lib_dir, lib, exts):
assert is_string(lib_dir)
# under windows first try without 'lib' prefix
if sys.platform == 'win32':
lib_prefixes = ['', 'lib']
else:
lib_prefixes = ['lib']
# for each library name, see if we can find a file for it.
for ext in exts:
for prefix in lib_prefixes:
p = self.combine_paths(lib_dir, prefix + lib + ext)
if p:
break
if p:
assert len(p) == 1
# ??? splitext on p[0] would do this for cygwin
# doesn't seem correct
if ext == '.dll.a':
lib += '.dll'
if ext == '.lib':
lib = prefix + lib
return lib
return False
def _find_libs(self, lib_dirs, libs, exts):
# make sure we preserve the order of libs, as it can be important
found_dirs, found_libs = [], []
for lib in libs:
for lib_dir in lib_dirs:
found_lib = self._find_lib(lib_dir, lib, exts)
if found_lib:
found_libs.append(found_lib)
if lib_dir not in found_dirs:
found_dirs.append(lib_dir)
break
return found_dirs, found_libs
def _check_libs(self, lib_dirs, libs, opt_libs, exts):
"""Find mandatory and optional libs in expected paths.
Missing optional libraries are silently forgotten.
"""
if not is_sequence(lib_dirs):
lib_dirs = [lib_dirs]
# First, try to find the mandatory libraries
found_dirs, found_libs = self._find_libs(lib_dirs, libs, exts)
if len(found_libs) > 0 and len(found_libs) == len(libs):
# Now, check for optional libraries
opt_found_dirs, opt_found_libs = self._find_libs(lib_dirs, opt_libs, exts)
found_libs.extend(opt_found_libs)
for lib_dir in opt_found_dirs:
if lib_dir not in found_dirs:
found_dirs.append(lib_dir)
info = {'libraries': found_libs, 'library_dirs': found_dirs}
return info
else:
return None
def combine_paths(self, *args):
"""Return a list of existing paths composed by all combinations
of items from the arguments.
"""
return combine_paths(*args)
class fft_opt_info(system_info):
def calc_info(self):
info = {}
fftw_info = get_info('fftw3') or get_info('fftw2') or get_info('dfftw')
djbfft_info = get_info('djbfft')
if fftw_info:
dict_append(info, **fftw_info)
if djbfft_info:
dict_append(info, **djbfft_info)
self.set_info(**info)
return
class fftw_info(system_info):
#variables to override
section = 'fftw'
dir_env_var = 'FFTW'
notfounderror = FFTWNotFoundError
ver_info = [{'name':'fftw3',
'libs':['fftw3'],
'includes':['fftw3.h'],
'macros':[('SCIPY_FFTW3_H', None)]},
{'name':'fftw2',
'libs':['rfftw', 'fftw'],
'includes':['fftw.h', 'rfftw.h'],
'macros':[('SCIPY_FFTW_H', None)]}]
def calc_ver_info(self, ver_param):
"""Returns True on successful version detection, else False"""
lib_dirs = self.get_lib_dirs()
incl_dirs = self.get_include_dirs()
opt = self.get_option_single(self.section + '_libs', 'libraries')
libs = self.get_libs(opt, ver_param['libs'])
info = self.check_libs(lib_dirs, libs)
if info is not None:
flag = 0
for d in incl_dirs:
if len(self.combine_paths(d, ver_param['includes'])) \
== len(ver_param['includes']):
dict_append(info, include_dirs=[d])
flag = 1
break
if flag:
dict_append(info, define_macros=ver_param['macros'])
else:
info = None
if info is not None:
self.set_info(**info)
return True
else:
log.info(' %s not found' % (ver_param['name']))
return False
def calc_info(self):
for i in self.ver_info:
if self.calc_ver_info(i):
break
class fftw2_info(fftw_info):
#variables to override
section = 'fftw'
dir_env_var = 'FFTW'
notfounderror = FFTWNotFoundError
ver_info = [{'name':'fftw2',
'libs':['rfftw', 'fftw'],
'includes':['fftw.h', 'rfftw.h'],
'macros':[('SCIPY_FFTW_H', None)]}
]
class fftw3_info(fftw_info):
#variables to override
section = 'fftw3'
dir_env_var = 'FFTW3'
notfounderror = FFTWNotFoundError
ver_info = [{'name':'fftw3',
'libs':['fftw3'],
'includes':['fftw3.h'],
'macros':[('SCIPY_FFTW3_H', None)]},
]
class dfftw_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'dfftw',
'libs':['drfftw', 'dfftw'],
'includes':['dfftw.h', 'drfftw.h'],
'macros':[('SCIPY_DFFTW_H', None)]}]
class sfftw_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'sfftw',
'libs':['srfftw', 'sfftw'],
'includes':['sfftw.h', 'srfftw.h'],
'macros':[('SCIPY_SFFTW_H', None)]}]
class fftw_threads_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'fftw threads',
'libs':['rfftw_threads', 'fftw_threads'],
'includes':['fftw_threads.h', 'rfftw_threads.h'],
'macros':[('SCIPY_FFTW_THREADS_H', None)]}]
class dfftw_threads_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'dfftw threads',
'libs':['drfftw_threads', 'dfftw_threads'],
'includes':['dfftw_threads.h', 'drfftw_threads.h'],
'macros':[('SCIPY_DFFTW_THREADS_H', None)]}]
class sfftw_threads_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'sfftw threads',
'libs':['srfftw_threads', 'sfftw_threads'],
'includes':['sfftw_threads.h', 'srfftw_threads.h'],
'macros':[('SCIPY_SFFTW_THREADS_H', None)]}]
class djbfft_info(system_info):
section = 'djbfft'
dir_env_var = 'DJBFFT'
notfounderror = DJBFFTNotFoundError
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend(self.combine_paths(d, ['djbfft']) + [d])
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
lib_dirs = self.get_lib_dirs()
incl_dirs = self.get_include_dirs()
info = None
for d in lib_dirs:
p = self.combine_paths(d, ['djbfft.a'])
if p:
info = {'extra_objects': p}
break
p = self.combine_paths(d, ['libdjbfft.a', 'libdjbfft' + so_ext])
if p:
info = {'libraries': ['djbfft'], 'library_dirs': [d]}
break
if info is None:
return
for d in incl_dirs:
if len(self.combine_paths(d, ['fftc8.h', 'fftfreq.h'])) == 2:
dict_append(info, include_dirs=[d],
define_macros=[('SCIPY_DJBFFT_H', None)])
self.set_info(**info)
return
return
class mkl_info(system_info):
section = 'mkl'
dir_env_var = 'MKLROOT'
_lib_mkl = ['mkl_rt']
def get_mkl_rootdir(self):
mklroot = os.environ.get('MKLROOT', None)
if mklroot is not None:
return mklroot
paths = os.environ.get('LD_LIBRARY_PATH', '').split(os.pathsep)
ld_so_conf = '/etc/ld.so.conf'
if os.path.isfile(ld_so_conf):
with open(ld_so_conf, 'r') as f:
for d in f:
d = d.strip()
if d:
paths.append(d)
intel_mkl_dirs = []
for path in paths:
path_atoms = path.split(os.sep)
for m in path_atoms:
if m.startswith('mkl'):
d = os.sep.join(path_atoms[:path_atoms.index(m) + 2])
intel_mkl_dirs.append(d)
break
for d in paths:
dirs = glob(os.path.join(d, 'mkl', '*'))
dirs += glob(os.path.join(d, 'mkl*'))
for sub_dir in dirs:
if os.path.isdir(os.path.join(sub_dir, 'lib')):
return sub_dir
return None
def __init__(self):
mklroot = self.get_mkl_rootdir()
if mklroot is None:
system_info.__init__(self)
else:
from .cpuinfo import cpu
if cpu.is_Itanium():
plt = '64'
elif cpu.is_Intel() and cpu.is_64bit():
plt = 'intel64'
else:
plt = '32'
system_info.__init__(
self,
default_lib_dirs=[os.path.join(mklroot, 'lib', plt)],
default_include_dirs=[os.path.join(mklroot, 'include')])
def calc_info(self):
lib_dirs = self.get_lib_dirs()
incl_dirs = self.get_include_dirs()
opt = self.get_option_single('mkl_libs', 'libraries')
mkl_libs = self.get_libs(opt, self._lib_mkl)
info = self.check_libs2(lib_dirs, mkl_libs)
if info is None:
return
dict_append(info,
define_macros=[('SCIPY_MKL_H', None),
('HAVE_CBLAS', None)],
include_dirs=incl_dirs)
if sys.platform == 'win32':
pass # win32 has no pthread library
else:
dict_append(info, libraries=['pthread'])
self.set_info(**info)
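# Typical discovery flow (illustrative; the path is a placeholder): with
# MKLROOT=/opt/intel/mkl in the environment, __init__ above seeds
# default_lib_dirs with /opt/intel/mkl/lib/intel64 on 64-bit Intel hosts, and
# calc_info then links mkl_rt (plus pthread on non-Windows platforms) and
# defines SCIPY_MKL_H and HAVE_CBLAS.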
class lapack_mkl_info(mkl_info):
pass
class blas_mkl_info(mkl_info):
pass
class atlas_info(system_info):
section = 'atlas'
dir_env_var = 'ATLAS'
_lib_names = ['f77blas', 'cblas']
if sys.platform[:7] == 'freebsd':
_lib_atlas = ['atlas_r']
_lib_lapack = ['alapack_r']
else:
_lib_atlas = ['atlas']
_lib_lapack = ['lapack']
notfounderror = AtlasNotFoundError
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend(self.combine_paths(d, ['atlas*', 'ATLAS*',
'sse', '3dnow', 'sse2']) + [d])
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
lib_dirs = self.get_lib_dirs()
info = {}
opt = self.get_option_single('atlas_libs', 'libraries')
atlas_libs = self.get_libs(opt, self._lib_names + self._lib_atlas)
lapack_libs = self.get_libs('lapack_libs', self._lib_lapack)
atlas = None
lapack = None
atlas_1 = None
for d in lib_dirs:
atlas = self.check_libs2(d, atlas_libs, [])
if atlas is not None:
lib_dirs2 = [d] + self.combine_paths(d, ['atlas*', 'ATLAS*'])
lapack = self.check_libs2(lib_dirs2, lapack_libs, [])
if lapack is not None:
break
if atlas:
atlas_1 = atlas
log.info(self.__class__)
if atlas is None:
atlas = atlas_1
if atlas is None:
return
include_dirs = self.get_include_dirs()
h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
h = h[0]
if h:
h = os.path.dirname(h)
dict_append(info, include_dirs=[h])
info['language'] = 'c'
if lapack is not None:
dict_append(info, **lapack)
dict_append(info, **atlas)
elif 'lapack_atlas' in atlas['libraries']:
dict_append(info, **atlas)
dict_append(info,
define_macros=[('ATLAS_WITH_LAPACK_ATLAS', None)])
self.set_info(**info)
return
else:
dict_append(info, **atlas)
dict_append(info, define_macros=[('ATLAS_WITHOUT_LAPACK', None)])
message = textwrap.dedent("""
*********************************************************************
Could not find lapack library within the ATLAS installation.
*********************************************************************
""")
warnings.warn(message, stacklevel=2)
self.set_info(**info)
return
# Check if lapack library is complete, only warn if it is not.
lapack_dir = lapack['library_dirs'][0]
lapack_name = lapack['libraries'][0]
lapack_lib = None
lib_prefixes = ['lib']
if sys.platform == 'win32':
lib_prefixes.append('')
for e in self.library_extensions():
for prefix in lib_prefixes:
fn = os.path.join(lapack_dir, prefix + lapack_name + e)
if os.path.exists(fn):
lapack_lib = fn
break
if lapack_lib:
break
if lapack_lib is not None:
sz = os.stat(lapack_lib)[6]
if sz <= 4000 * 1024:
message = textwrap.dedent("""
*********************************************************************
Lapack library (from ATLAS) is probably incomplete:
size of %s is %sk (expected >4000k)
Follow the instructions in the KNOWN PROBLEMS section of the file
numpy/INSTALL.txt.
*********************************************************************
""") % (lapack_lib, sz / 1024)
warnings.warn(message, stacklevel=2)
else:
info['language'] = 'f77'
atlas_version, atlas_extra_info = get_atlas_version(**atlas)
dict_append(info, **atlas_extra_info)
self.set_info(**info)
class atlas_blas_info(atlas_info):
_lib_names = ['f77blas', 'cblas']
def calc_info(self):
lib_dirs = self.get_lib_dirs()
info = {}
opt = self.get_option_single('atlas_libs', 'libraries')
atlas_libs = self.get_libs(opt, self._lib_names + self._lib_atlas)
atlas = self.check_libs2(lib_dirs, atlas_libs, [])
if atlas is None:
return
include_dirs = self.get_include_dirs()
h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
h = h[0]
if h:
h = os.path.dirname(h)
dict_append(info, include_dirs=[h])
info['language'] = 'c'
info['define_macros'] = [('HAVE_CBLAS', None)]
atlas_version, atlas_extra_info = get_atlas_version(**atlas)
dict_append(atlas, **atlas_extra_info)
dict_append(info, **atlas)
self.set_info(**info)
return
class atlas_threads_info(atlas_info):
dir_env_var = ['PTATLAS', 'ATLAS']
_lib_names = ['ptf77blas', 'ptcblas']
class atlas_blas_threads_info(atlas_blas_info):
dir_env_var = ['PTATLAS', 'ATLAS']
_lib_names = ['ptf77blas', 'ptcblas']
class lapack_atlas_info(atlas_info):
_lib_names = ['lapack_atlas'] + atlas_info._lib_names
class lapack_atlas_threads_info(atlas_threads_info):
_lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names
class atlas_3_10_info(atlas_info):
_lib_names = ['satlas']
_lib_atlas = _lib_names
_lib_lapack = _lib_names
class atlas_3_10_blas_info(atlas_3_10_info):
_lib_names = ['satlas']
def calc_info(self):
lib_dirs = self.get_lib_dirs()
info = {}
opt = self.get_option_single('atlas_lib', 'libraries')
atlas_libs = self.get_libs(opt, self._lib_names)
atlas = self.check_libs2(lib_dirs, atlas_libs, [])
if atlas is None:
return
include_dirs = self.get_include_dirs()
h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
h = h[0]
if h:
h = os.path.dirname(h)
dict_append(info, include_dirs=[h])
info['language'] = 'c'
info['define_macros'] = [('HAVE_CBLAS', None)]
atlas_version, atlas_extra_info = get_atlas_version(**atlas)
dict_append(atlas, **atlas_extra_info)
dict_append(info, **atlas)
self.set_info(**info)
return
class atlas_3_10_threads_info(atlas_3_10_info):
dir_env_var = ['PTATLAS', 'ATLAS']
_lib_names = ['tatlas']
_lib_atlas = _lib_names
_lib_lapack = _lib_names
class atlas_3_10_blas_threads_info(atlas_3_10_blas_info):
dir_env_var = ['PTATLAS', 'ATLAS']
_lib_names = ['tatlas']
class lapack_atlas_3_10_info(atlas_3_10_info):
pass
class lapack_atlas_3_10_threads_info(atlas_3_10_threads_info):
pass
class lapack_info(system_info):
section = 'lapack'
dir_env_var = 'LAPACK'
_lib_names = ['lapack']
notfounderror = LapackNotFoundError
def calc_info(self):
lib_dirs = self.get_lib_dirs()
opt = self.get_option_single('lapack_libs', 'libraries')
lapack_libs = self.get_libs(opt, self._lib_names)
info = self.check_libs(lib_dirs, lapack_libs, [])
if info is None:
return
info['language'] = 'f77'
self.set_info(**info)
class lapack_src_info(system_info):
# LAPACK_SRC is deprecated, please do not use this!
# Build or install a BLAS library via your package manager or from
# source separately.
section = 'lapack_src'
dir_env_var = 'LAPACK_SRC'
notfounderror = LapackSrcNotFoundError
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend([d] + self.combine_paths(d, ['LAPACK*/SRC', 'SRC']))
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
src_dirs = self.get_src_dirs()
src_dir = ''
for d in src_dirs:
if os.path.isfile(os.path.join(d, 'dgesv.f')):
src_dir = d
break
if not src_dir:
#XXX: Get sources from netlib. May be ask first.
return
# The following is extracted from LAPACK-3.0/SRC/Makefile.
# Added missing names from lapack-lite-3.1.1/SRC/Makefile
# while keeping removed names for Lapack-3.0 compatibility.
allaux = '''
ilaenv ieeeck lsame lsamen xerbla
iparmq
''' # *.f
laux = '''
bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1
laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2
lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre
larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4
lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1
lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf
stebz stedc steqr sterf
larra larrc larrd larr larrk larrj larrr laneg laisnan isnan
lazq3 lazq4
''' # [s|d]*.f
lasrc = '''
gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak
gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv
gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2
geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd
gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal
gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd
ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein
hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0
lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb
lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp
laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv
lartv larz larzb larzt laswp lasyf latbs latdf latps latrd
latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv
pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2
potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri
pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs
spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv
sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2
tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs
trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs
tzrqf tzrzf
lacn2 lahr2 stemr laqr0 laqr1 laqr2 laqr3 laqr4 laqr5
''' # [s|c|d|z]*.f
sd_lasrc = '''
laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l
org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr
orm2l orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3
ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx
sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd
stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd
sygvx sytd2 sytrd
''' # [s|d]*.f
cz_lasrc = '''
bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev
heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv
hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd
hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf
hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7
laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe
laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv
spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq
ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2
unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr
''' # [c|z]*.f
#######
sclaux = laux + ' econd ' # s*.f
dzlaux = laux + ' secnd ' # d*.f
slasrc = lasrc + sd_lasrc # s*.f
dlasrc = lasrc + sd_lasrc # d*.f
clasrc = lasrc + cz_lasrc + ' srot srscl ' # c*.f
zlasrc = lasrc + cz_lasrc + ' drot drscl ' # z*.f
oclasrc = ' icmax1 scsum1 ' # *.f
ozlasrc = ' izmax1 dzsum1 ' # *.f
sources = ['s%s.f' % f for f in (sclaux + slasrc).split()] \
+ ['d%s.f' % f for f in (dzlaux + dlasrc).split()] \
+ ['c%s.f' % f for f in (clasrc).split()] \
+ ['z%s.f' % f for f in (zlasrc).split()] \
+ ['%s.f' % f for f in (allaux + oclasrc + ozlasrc).split()]
sources = [os.path.join(src_dir, f) for f in sources]
# Lapack 3.1:
src_dir2 = os.path.join(src_dir, '..', 'INSTALL')
sources += [os.path.join(src_dir2, p + 'lamch.f') for p in 'sdcz']
# Lapack 3.2.1:
sources += [os.path.join(src_dir, p + 'larfp.f') for p in 'sdcz']
sources += [os.path.join(src_dir, 'ila' + p + 'lr.f') for p in 'sdcz']
sources += [os.path.join(src_dir, 'ila' + p + 'lc.f') for p in 'sdcz']
# Should we check here actual existence of source files?
# Yes, the file listing is different between 3.0 and 3.1
# versions.
sources = [f for f in sources if os.path.isfile(f)]
info = {'sources': sources, 'language': 'f77'}
self.set_info(**info)
atlas_version_c_text = r'''
/* This file is generated from numpy/distutils/system_info.py */
void ATL_buildinfo(void);
int main(void) {
ATL_buildinfo();
return 0;
}
'''
_cached_atlas_version = {}
def get_atlas_version(**config):
libraries = config.get('libraries', [])
library_dirs = config.get('library_dirs', [])
key = (tuple(libraries), tuple(library_dirs))
if key in _cached_atlas_version:
return _cached_atlas_version[key]
c = cmd_config(Distribution())
atlas_version = None
info = {}
try:
s, o = c.get_output(atlas_version_c_text,
libraries=libraries, library_dirs=library_dirs,
)
if s and re.search(r'undefined reference to `_gfortran', o, re.M):
s, o = c.get_output(atlas_version_c_text,
libraries=libraries + ['gfortran'],
library_dirs=library_dirs,
)
if not s:
warnings.warn(textwrap.dedent("""
*****************************************************
Linkage with ATLAS requires gfortran. Use
python setup.py config_fc --fcompiler=gnu95 ...
when building extension libraries that use ATLAS.
Make sure that -lgfortran is used for C++ extensions.
*****************************************************
"""), stacklevel=2)
dict_append(info, language='f90',
define_macros=[('ATLAS_REQUIRES_GFORTRAN', None)])
except Exception: # failed to get version from file -- maybe on Windows
# look at directory name
for o in library_dirs:
m = re.search(r'ATLAS_(?P<version>\d+[.]\d+[.]\d+)_', o)
if m:
atlas_version = m.group('version')
if atlas_version is not None:
break
# final choice --- look at ATLAS_VERSION environment
# variable
if atlas_version is None:
atlas_version = os.environ.get('ATLAS_VERSION', None)
if atlas_version:
dict_append(info, define_macros=[(
'ATLAS_INFO', _c_string_literal(atlas_version))
])
else:
dict_append(info, define_macros=[('NO_ATLAS_INFO', -1)])
return atlas_version or '?.?.?', info
if not s:
m = re.search(r'ATLAS version (?P<version>\d+[.]\d+[.]\d+)', o)
if m:
atlas_version = m.group('version')
if atlas_version is None:
if re.search(r'undefined symbol: ATL_buildinfo', o, re.M):
atlas_version = '3.2.1_pre3.3.6'
else:
log.info('Status: %d', s)
log.info('Output: %s', o)
elif atlas_version == '3.2.1_pre3.3.6':
dict_append(info, define_macros=[('NO_ATLAS_INFO', -2)])
else:
dict_append(info, define_macros=[(
'ATLAS_INFO', _c_string_literal(atlas_version))
])
result = _cached_atlas_version[key] = atlas_version, info
return result
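# Typical use, as seen in atlas_info.calc_info above (a sketch; the inputs are
# the dicts produced by check_libs2 and the paths are hypothetical). The
# version string falls back to '?.?.?' when probing fails:
#
#     atlas_version, extra = get_atlas_version(libraries=['atlas'],
#                                              library_dirs=['/usr/lib/atlas'])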
class lapack_opt_info(system_info):
notfounderror = LapackNotFoundError
# List of all known LAPACK libraries, in the default order
lapack_order = ['mkl', 'openblas', 'flame',
'accelerate', 'atlas', 'lapack']
order_env_var_name = 'NPY_LAPACK_ORDER'
def _calc_info_mkl(self):
info = get_info('lapack_mkl')
if info:
self.set_info(**info)
return True
return False
def _calc_info_openblas(self):
info = get_info('openblas_lapack')
if info:
self.set_info(**info)
return True
info = get_info('openblas_clapack')
if info:
self.set_info(**info)
return True
return False
def _calc_info_flame(self):
info = get_info('flame')
if info:
self.set_info(**info)
return True
return False
def _calc_info_atlas(self):
info = get_info('atlas_3_10_threads')
if not info:
info = get_info('atlas_3_10')
if not info:
info = get_info('atlas_threads')
if not info:
info = get_info('atlas')
if info:
# Figure out if ATLAS has lapack...
# If not we need the lapack library, but not BLAS!
l = info.get('define_macros', [])
if ('ATLAS_WITH_LAPACK_ATLAS', None) in l \
or ('ATLAS_WITHOUT_LAPACK', None) in l:
# Get LAPACK (with possible warnings)
# If not found we don't accept anything
# since we can't use ATLAS with LAPACK!
lapack_info = self._get_info_lapack()
if not lapack_info:
return False
dict_append(info, **lapack_info)
self.set_info(**info)
return True
return False
def _calc_info_accelerate(self):
info = get_info('accelerate')
if info:
self.set_info(**info)
return True
return False
def _get_info_blas(self):
# Default to get the optimized BLAS implementation
info = get_info('blas_opt')
if not info:
warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=3)
info_src = get_info('blas_src')
if not info_src:
warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=3)
return {}
dict_append(info, libraries=[('fblas_src', info_src)])
return info
def _get_info_lapack(self):
info = get_info('lapack')
if not info:
warnings.warn(LapackNotFoundError.__doc__ or '', stacklevel=3)
info_src = get_info('lapack_src')
if not info_src:
warnings.warn(LapackSrcNotFoundError.__doc__ or '', stacklevel=3)
return {}
dict_append(info, libraries=[('flapack_src', info_src)])
return info
def _calc_info_lapack(self):
info = self._get_info_lapack()
if info:
info_blas = self._get_info_blas()
dict_append(info, **info_blas)
dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
self.set_info(**info)
return True
return False
def _calc_info_from_envvar(self):
info = {}
info['language'] = 'f77'
info['libraries'] = []
info['include_dirs'] = []
info['define_macros'] = []
info['extra_link_args'] = os.environ['NPY_LAPACK_LIBS'].split()
self.set_info(**info)
return True
def _calc_info(self, name):
return getattr(self, '_calc_info_{}'.format(name))()
def calc_info(self):
lapack_order, unknown_order = _parse_env_order(self.lapack_order, self.order_env_var_name)
if len(unknown_order) > 0:
raise ValueError("lapack_opt_info user defined "
"LAPACK order has unacceptable "
"values: {}".format(unknown_order))
if 'NPY_LAPACK_LIBS' in os.environ:
# Bypass autodetection, set language to F77 and use env var linker
# flags directly
self._calc_info_from_envvar()
return
for lapack in lapack_order:
if self._calc_info(lapack):
return
if 'lapack' not in lapack_order:
# Since the user may request *not* to use any library, we still need
# to raise warnings to signal missing packages!
warnings.warn(LapackNotFoundError.__doc__ or '', stacklevel=2)
warnings.warn(LapackSrcNotFoundError.__doc__ or '', stacklevel=2)
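# Illustrative (a POSIX shell is assumed): the search order above can be
# overridden via NPY_LAPACK_ORDER; entries must come from lapack_order or
# calc_info raises ValueError.
#
#     NPY_LAPACK_ORDER=mkl,openblas python setup.py build
#
# NPY_LAPACK_LIBS short-circuits detection entirely and passes its value
# through as raw linker flags:
#
#     NPY_LAPACK_LIBS="-L/opt/lapack/lib -llapack" python setup.py build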
class _ilp64_opt_info_mixin:
symbol_suffix = None
symbol_prefix = None
def _check_info(self, info):
macros = dict(info.get('define_macros', []))
prefix = macros.get('BLAS_SYMBOL_PREFIX', '')
suffix = macros.get('BLAS_SYMBOL_SUFFIX', '')
if self.symbol_prefix not in (None, prefix):
return False
if self.symbol_suffix not in (None, suffix):
return False
return bool(info)
class lapack_ilp64_opt_info(lapack_opt_info, _ilp64_opt_info_mixin):
notfounderror = LapackILP64NotFoundError
lapack_order = ['openblas64_', 'openblas_ilp64']
order_env_var_name = 'NPY_LAPACK_ILP64_ORDER'
def _calc_info(self, name):
info = get_info(name + '_lapack')
if self._check_info(info):
self.set_info(**info)
return True
return False
class lapack_ilp64_plain_opt_info(lapack_ilp64_opt_info):
# Same as lapack_ilp64_opt_info, but fix symbol names
symbol_prefix = ''
symbol_suffix = ''
class lapack64__opt_info(lapack_ilp64_opt_info):
symbol_prefix = ''
symbol_suffix = '64_'
class blas_opt_info(system_info):
notfounderror = BlasNotFoundError
# List of all known BLAS libraries, in the default order
blas_order = ['mkl', 'blis', 'openblas',
'accelerate', 'atlas', 'blas']
order_env_var_name = 'NPY_BLAS_ORDER'
def _calc_info_mkl(self):
info = get_info('blas_mkl')
if info:
self.set_info(**info)
return True
return False
def _calc_info_blis(self):
info = get_info('blis')
if info:
self.set_info(**info)
return True
return False
def _calc_info_openblas(self):
info = get_info('openblas')
if info:
self.set_info(**info)
return True
return False
def _calc_info_atlas(self):
info = get_info('atlas_3_10_blas_threads')
if not info:
info = get_info('atlas_3_10_blas')
if not info:
info = get_info('atlas_blas_threads')
if not info:
info = get_info('atlas_blas')
if info:
self.set_info(**info)
return True
return False
def _calc_info_accelerate(self):
info = get_info('accelerate')
if info:
self.set_info(**info)
return True
return False
def _calc_info_blas(self):
# Warn about a non-optimized BLAS library
warnings.warn(BlasOptNotFoundError.__doc__ or '', stacklevel=3)
info = {}
dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
blas = get_info('blas')
if blas:
dict_append(info, **blas)
else:
            # Not even BLAS was found!
            warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=3)

            blas_src = get_info('blas_src')
if not blas_src:
warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=3)
return False
dict_append(info, libraries=[('fblas_src', blas_src)])
self.set_info(**info)
return True
def _calc_info_from_envvar(self):
info = {}
info['language'] = 'f77'
info['libraries'] = []
info['include_dirs'] = []
info['define_macros'] = []
info['extra_link_args'] = os.environ['NPY_BLAS_LIBS'].split()
if 'NPY_CBLAS_LIBS' in os.environ:
info['define_macros'].append(('HAVE_CBLAS', None))
info['extra_link_args'].extend(
os.environ['NPY_CBLAS_LIBS'].split())
self.set_info(**info)
return True
def _calc_info(self, name):
return getattr(self, '_calc_info_{}'.format(name))()
def calc_info(self):
blas_order, unknown_order = _parse_env_order(self.blas_order, self.order_env_var_name)
if len(unknown_order) > 0:
raise ValueError("blas_opt_info user defined BLAS order has unacceptable values: {}".format(unknown_order))
if 'NPY_BLAS_LIBS' in os.environ:
# Bypass autodetection, set language to F77 and use env var linker
# flags directly
self._calc_info_from_envvar()
return
for blas in blas_order:
if self._calc_info(blas):
return
if 'blas' not in blas_order:
# Since the user may request *not* to use any library, we still need
# to raise warnings to signal missing packages!
warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=2)
warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=2)
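# Companion example to the LAPACK one above (shell assumed, paths are
# placeholders): NPY_BLAS_ORDER picks the probe order from blas_order, while
# NPY_BLAS_LIBS/NPY_CBLAS_LIBS bypass probing; HAVE_CBLAS is only defined
# when NPY_CBLAS_LIBS is also set.
#
#     NPY_BLAS_ORDER=openblas,atlas python setup.py build
#     NPY_BLAS_LIBS="-L/opt/blas/lib -lblas" NPY_CBLAS_LIBS="-lcblas" \
#         python setup.py build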
class blas_ilp64_opt_info(blas_opt_info, _ilp64_opt_info_mixin):
notfounderror = BlasILP64NotFoundError
blas_order = ['openblas64_', 'openblas_ilp64']
order_env_var_name = 'NPY_BLAS_ILP64_ORDER'
def _calc_info(self, name):
info = get_info(name)
if self._check_info(info):
self.set_info(**info)
return True
return False
class blas_ilp64_plain_opt_info(blas_ilp64_opt_info):
symbol_prefix = ''
symbol_suffix = ''
class blas64__opt_info(blas_ilp64_opt_info):
symbol_prefix = ''
symbol_suffix = '64_'
class cblas_info(system_info):
section = 'cblas'
dir_env_var = 'CBLAS'
# No default as it's used only in blas_info
_lib_names = []
notfounderror = BlasNotFoundError
class blas_info(system_info):
section = 'blas'
dir_env_var = 'BLAS'
_lib_names = ['blas']
notfounderror = BlasNotFoundError
def calc_info(self):
lib_dirs = self.get_lib_dirs()
opt = self.get_option_single('blas_libs', 'libraries')
blas_libs = self.get_libs(opt, self._lib_names)
info = self.check_libs(lib_dirs, blas_libs, [])
if info is None:
return
else:
info['include_dirs'] = self.get_include_dirs()
if platform.system() == 'Windows':
# The check for windows is needed because get_cblas_libs uses the
# same compiler that was used to compile Python and msvc is
# often not installed when mingw is being used. This rough
# treatment is not desirable, but windows is tricky.
info['language'] = 'f77' # XXX: is it generally true?
# If cblas is given as an option, use those
cblas_info_obj = cblas_info()
cblas_opt = cblas_info_obj.get_option_single('cblas_libs', 'libraries')
cblas_libs = cblas_info_obj.get_libs(cblas_opt, None)
if cblas_libs:
info['libraries'] = cblas_libs + blas_libs
info['define_macros'] = [('HAVE_CBLAS', None)]
else:
lib = self.get_cblas_libs(info)
if lib is not None:
info['language'] = 'c'
info['libraries'] = lib
info['define_macros'] = [('HAVE_CBLAS', None)]
self.set_info(**info)
def get_cblas_libs(self, info):
""" Check whether we can link with CBLAS interface
This method will search through several combinations of libraries
to check whether CBLAS is present:
1. Libraries in ``info['libraries']``, as is
2. As 1. but also explicitly adding ``'cblas'`` as a library
3. As 1. but also explicitly adding ``'blas'`` as a library
4. Check only library ``'cblas'``
5. Check only library ``'blas'``
Parameters
----------
info : dict
system information dictionary for compilation and linking
Returns
-------
libraries : list of str or None
a list of libraries that enables the use of CBLAS interface.
Returns None if not found or a compilation error occurs.
Since 1.17 returns a list.
"""
# primitive cblas check by looking for the header and trying to link
# cblas or blas
c = customized_ccompiler()
tmpdir = tempfile.mkdtemp()
s = textwrap.dedent("""\
#include <cblas.h>
int main(int argc, const char *argv[])
{
double a[4] = {1,2,3,4};
double b[4] = {5,6,7,8};
return cblas_ddot(4, a, 1, b, 1) > 10;
}""")
src = os.path.join(tmpdir, 'source.c')
try:
with open(src, 'wt') as f:
f.write(s)
try:
# check we can compile (find headers)
obj = c.compile([src], output_dir=tmpdir,
include_dirs=self.get_include_dirs())
except (distutils.ccompiler.CompileError, distutils.ccompiler.LinkError):
return None
# check we can link (find library)
# some systems have separate cblas and blas libs.
for libs in [info['libraries'], ['cblas'] + info['libraries'],
['blas'] + info['libraries'], ['cblas'], ['blas']]:
try:
c.link_executable(obj, os.path.join(tmpdir, "a.out"),
libraries=libs,
library_dirs=info['library_dirs'],
extra_postargs=info.get('extra_link_args', []))
return libs
except distutils.ccompiler.LinkError:
pass
finally:
shutil.rmtree(tmpdir)
return None
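# Sketch of the site.cfg input this class reads (the section and key names
# mirror blas_info and cblas_info above; the paths are placeholders):
#
#     [blas]
#     library_dirs = /usr/local/lib
#     include_dirs = /usr/local/include
#     blas_libs = blas
#
#     [cblas]
#     cblas_libs = cblas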
class openblas_info(blas_info):
section = 'openblas'
dir_env_var = 'OPENBLAS'
_lib_names = ['openblas']
_require_symbols = []
notfounderror = BlasNotFoundError
@property
def symbol_prefix(self):
try:
return self.cp.get(self.section, 'symbol_prefix')
except NoOptionError:
return ''
@property
def symbol_suffix(self):
try:
return self.cp.get(self.section, 'symbol_suffix')
except NoOptionError:
return ''
def _calc_info(self):
c = customized_ccompiler()
lib_dirs = self.get_lib_dirs()
# Prefer to use libraries over openblas_libs
opt = self.get_option_single('openblas_libs', 'libraries')
openblas_libs = self.get_libs(opt, self._lib_names)
info = self.check_libs(lib_dirs, openblas_libs, [])
if c.compiler_type == "msvc" and info is None:
from numpy.distutils.fcompiler import new_fcompiler
f = new_fcompiler(c_compiler=c)
if f and f.compiler_type == 'gnu95':
# Try gfortran-compatible library files
info = self.check_msvc_gfortran_libs(lib_dirs, openblas_libs)
# Skip lapack check, we'd need build_ext to do it
skip_symbol_check = True
elif info:
skip_symbol_check = False
info['language'] = 'c'
if info is None:
return None
# Add extra info for OpenBLAS
extra_info = self.calc_extra_info()
dict_append(info, **extra_info)
if not (skip_symbol_check or self.check_symbols(info)):
return None
info['define_macros'] = [('HAVE_CBLAS', None)]
if self.symbol_prefix:
info['define_macros'] += [('BLAS_SYMBOL_PREFIX', self.symbol_prefix)]
if self.symbol_suffix:
info['define_macros'] += [('BLAS_SYMBOL_SUFFIX', self.symbol_suffix)]
return info
def calc_info(self):
info = self._calc_info()
if info is not None:
self.set_info(**info)
def check_msvc_gfortran_libs(self, library_dirs, libraries):
# First, find the full path to each library directory
library_paths = []
for library in libraries:
for library_dir in library_dirs:
# MinGW static ext will be .a
fullpath = os.path.join(library_dir, library + '.a')
if os.path.isfile(fullpath):
library_paths.append(fullpath)
break
else:
return None
# Generate numpy.distutils virtual static library file
basename = self.__class__.__name__
tmpdir = os.path.join(os.getcwd(), 'build', basename)
if not os.path.isdir(tmpdir):
os.makedirs(tmpdir)
info = {'library_dirs': [tmpdir],
'libraries': [basename],
'language': 'f77'}
fake_lib_file = os.path.join(tmpdir, basename + '.fobjects')
fake_clib_file = os.path.join(tmpdir, basename + '.cobjects')
with open(fake_lib_file, 'w') as f:
f.write("\n".join(library_paths))
with open(fake_clib_file, 'w') as f:
pass
return info
def check_symbols(self, info):
res = False
c = customized_ccompiler()
tmpdir = tempfile.mkdtemp()
prototypes = "\n".join("void %s%s%s();" % (self.symbol_prefix,
symbol_name,
self.symbol_suffix)
for symbol_name in self._require_symbols)
calls = "\n".join("%s%s%s();" % (self.symbol_prefix,
symbol_name,
self.symbol_suffix)
for symbol_name in self._require_symbols)
s = textwrap.dedent("""\
%(prototypes)s
int main(int argc, const char *argv[])
{
%(calls)s
return 0;
}""") % dict(prototypes=prototypes, calls=calls)
src = os.path.join(tmpdir, 'source.c')
out = os.path.join(tmpdir, 'a.out')
# Add the additional "extra" arguments
try:
extra_args = info['extra_link_args']
except Exception:
extra_args = []
try:
with open(src, 'wt') as f:
f.write(s)
obj = c.compile([src], output_dir=tmpdir)
try:
c.link_executable(obj, out, libraries=info['libraries'],
library_dirs=info['library_dirs'],
extra_postargs=extra_args)
res = True
except distutils.ccompiler.LinkError:
res = False
finally:
shutil.rmtree(tmpdir)
return res
class openblas_lapack_info(openblas_info):
section = 'openblas'
dir_env_var = 'OPENBLAS'
_lib_names = ['openblas']
_require_symbols = ['zungqr_']
notfounderror = BlasNotFoundError
class openblas_clapack_info(openblas_lapack_info):
_lib_names = ['openblas', 'lapack']
class openblas_ilp64_info(openblas_info):
section = 'openblas_ilp64'
dir_env_var = 'OPENBLAS_ILP64'
_lib_names = ['openblas64']
_require_symbols = ['dgemm_', 'cblas_dgemm']
notfounderror = BlasILP64NotFoundError
def _calc_info(self):
info = super()._calc_info()
if info is not None:
info['define_macros'] += [('HAVE_BLAS_ILP64', None)]
return info
class openblas_ilp64_lapack_info(openblas_ilp64_info):
_require_symbols = ['dgemm_', 'cblas_dgemm', 'zungqr_', 'LAPACKE_zungqr']
def _calc_info(self):
info = super()._calc_info()
if info:
info['define_macros'] += [('HAVE_LAPACKE', None)]
return info
class openblas64__info(openblas_ilp64_info):
# ILP64 Openblas, with default symbol suffix
section = 'openblas64_'
dir_env_var = 'OPENBLAS64_'
_lib_names = ['openblas64_']
symbol_suffix = '64_'
symbol_prefix = ''
class openblas64__lapack_info(openblas_ilp64_lapack_info, openblas64__info):
pass
class blis_info(blas_info):
section = 'blis'
dir_env_var = 'BLIS'
_lib_names = ['blis']
notfounderror = BlasNotFoundError
def calc_info(self):
lib_dirs = self.get_lib_dirs()
opt = self.get_option_single('blis_libs', 'libraries')
blis_libs = self.get_libs(opt, self._lib_names)
info = self.check_libs2(lib_dirs, blis_libs, [])
if info is None:
return
# Add include dirs
incl_dirs = self.get_include_dirs()
dict_append(info,
language='c',
define_macros=[('HAVE_CBLAS', None)],
include_dirs=incl_dirs)
self.set_info(**info)
class flame_info(system_info):
""" Usage of libflame for LAPACK operations
This requires libflame to be compiled with lapack wrappers:
./configure --enable-lapack2flame ...
Be aware that libflame 5.1.0 has some missing names in the shared library, so
if you have problems, try the static flame library.
"""
section = 'flame'
_lib_names = ['flame']
notfounderror = FlameNotFoundError
def check_embedded_lapack(self, info):
""" libflame does not necessarily have a wrapper for fortran LAPACK, we need to check """
c = customized_ccompiler()
tmpdir = tempfile.mkdtemp()
s = textwrap.dedent("""\
void zungqr_();
int main(int argc, const char *argv[])
{
zungqr_();
return 0;
}""")
src = os.path.join(tmpdir, 'source.c')
out = os.path.join(tmpdir, 'a.out')
# Add the additional "extra" arguments
extra_args = info.get('extra_link_args', [])
try:
with open(src, 'wt') as f:
f.write(s)
obj = c.compile([src], output_dir=tmpdir)
try:
c.link_executable(obj, out, libraries=info['libraries'],
library_dirs=info['library_dirs'],
extra_postargs=extra_args)
return True
except distutils.ccompiler.LinkError:
return False
finally:
shutil.rmtree(tmpdir)
def calc_info(self):
lib_dirs = self.get_lib_dirs()
flame_libs = self.get_libs('libraries', self._lib_names)
info = self.check_libs2(lib_dirs, flame_libs, [])
if info is None:
return
# Add the extra flag args to info
extra_info = self.calc_extra_info()
dict_append(info, **extra_info)
if self.check_embedded_lapack(info):
# check if the user has supplied all information required
self.set_info(**info)
else:
# Try and get the BLAS lib to see if we can get it to work
blas_info = get_info('blas_opt')
if not blas_info:
# since we already failed once, this ain't going to work either
return
# Now we need to merge the two dictionaries
for key in blas_info:
if isinstance(blas_info[key], list):
info[key] = info.get(key, []) + blas_info[key]
elif isinstance(blas_info[key], tuple):
info[key] = info.get(key, ()) + blas_info[key]
else:
info[key] = info.get(key, '') + blas_info[key]
# Now check again
if self.check_embedded_lapack(info):
self.set_info(**info)
class accelerate_info(system_info):
section = 'accelerate'
_lib_names = ['accelerate', 'veclib']
notfounderror = BlasNotFoundError
def calc_info(self):
# Make possible to enable/disable from config file/env var
libraries = os.environ.get('ACCELERATE')
if libraries:
libraries = [libraries]
else:
libraries = self.get_libs('libraries', self._lib_names)
libraries = [lib.strip().lower() for lib in libraries]
if (sys.platform == 'darwin' and
not os.getenv('_PYTHON_HOST_PLATFORM', None)):
# Use the system BLAS from Accelerate or vecLib under OSX
args = []
link_args = []
if get_platform()[-4:] == 'i386' or 'intel' in get_platform() or \
'x86_64' in get_platform() or \
'i386' in platform.platform():
intel = 1
else:
intel = 0
if (os.path.exists('/System/Library/Frameworks'
'/Accelerate.framework/') and
'accelerate' in libraries):
if intel:
args.extend(['-msse3'])
args.extend([
'-I/System/Library/Frameworks/vecLib.framework/Headers'])
link_args.extend(['-Wl,-framework', '-Wl,Accelerate'])
elif (os.path.exists('/System/Library/Frameworks'
'/vecLib.framework/') and
'veclib' in libraries):
if intel:
args.extend(['-msse3'])
args.extend([
'-I/System/Library/Frameworks/vecLib.framework/Headers'])
link_args.extend(['-Wl,-framework', '-Wl,vecLib'])
if args:
self.set_info(extra_compile_args=args,
extra_link_args=link_args,
define_macros=[('NO_ATLAS_INFO', 3),
('HAVE_CBLAS', None)])
return
class blas_src_info(system_info):
# BLAS_SRC is deprecated, please do not use this!
# Build or install a BLAS library via your package manager or from
# source separately.
section = 'blas_src'
dir_env_var = 'BLAS_SRC'
notfounderror = BlasSrcNotFoundError
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend([d] + self.combine_paths(d, ['blas']))
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
src_dirs = self.get_src_dirs()
src_dir = ''
for d in src_dirs:
if os.path.isfile(os.path.join(d, 'daxpy.f')):
src_dir = d
break
if not src_dir:
#XXX: Get sources from netlib. May be ask first.
return
blas1 = '''
caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot
dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2
srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg
dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax
snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap
scabs1
'''
blas2 = '''
cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv
chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv
dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv
sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger
stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc
zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2
ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv
'''
blas3 = '''
cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k
dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm
ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm
'''
sources = [os.path.join(src_dir, f + '.f') \
for f in (blas1 + blas2 + blas3).split()]
#XXX: should we check here actual existence of source files?
sources = [f for f in sources if os.path.isfile(f)]
info = {'sources': sources, 'language': 'f77'}
self.set_info(**info)
class x11_info(system_info):
section = 'x11'
notfounderror = X11NotFoundError
_lib_names = ['X11']
def __init__(self):
system_info.__init__(self,
default_lib_dirs=default_x11_lib_dirs,
default_include_dirs=default_x11_include_dirs)
def calc_info(self):
if sys.platform in ['win32']:
return
lib_dirs = self.get_lib_dirs()
include_dirs = self.get_include_dirs()
opt = self.get_option_single('x11_libs', 'libraries')
x11_libs = self.get_libs(opt, self._lib_names)
info = self.check_libs(lib_dirs, x11_libs, [])
if info is None:
return
inc_dir = None
for d in include_dirs:
if self.combine_paths(d, 'X11/X.h'):
inc_dir = d
break
if inc_dir is not None:
dict_append(info, include_dirs=[inc_dir])
self.set_info(**info)
class _numpy_info(system_info):
section = 'Numeric'
modulename = 'Numeric'
notfounderror = NumericNotFoundError
def __init__(self):
include_dirs = []
try:
module = __import__(self.modulename)
prefix = []
for name in module.__file__.split(os.sep):
if name == 'lib':
break
prefix.append(name)
# Ask numpy for its own include path before attempting
# anything else
try:
include_dirs.append(getattr(module, 'get_include')())
except AttributeError:
pass
include_dirs.append(sysconfig.get_path('include'))
except ImportError:
pass
py_incl_dir = sysconfig.get_path('include')
include_dirs.append(py_incl_dir)
py_pincl_dir = sysconfig.get_path('platinclude')
if py_pincl_dir not in include_dirs:
include_dirs.append(py_pincl_dir)
for d in default_include_dirs:
d = os.path.join(d, os.path.basename(py_incl_dir))
if d not in include_dirs:
include_dirs.append(d)
system_info.__init__(self,
default_lib_dirs=[],
default_include_dirs=include_dirs)
def calc_info(self):
try:
module = __import__(self.modulename)
except ImportError:
return
info = {}
macros = []
for v in ['__version__', 'version']:
vrs = getattr(module, v, None)
if vrs is None:
continue
macros = [(self.modulename.upper() + '_VERSION',
_c_string_literal(vrs)),
(self.modulename.upper(), None)]
break
dict_append(info, define_macros=macros)
include_dirs = self.get_include_dirs()
inc_dir = None
for d in include_dirs:
if self.combine_paths(d,
os.path.join(self.modulename,
'arrayobject.h')):
inc_dir = d
break
if inc_dir is not None:
dict_append(info, include_dirs=[inc_dir])
if info:
self.set_info(**info)
return
class numarray_info(_numpy_info):
section = 'numarray'
modulename = 'numarray'
class Numeric_info(_numpy_info):
section = 'Numeric'
modulename = 'Numeric'
class numpy_info(_numpy_info):
section = 'numpy'
modulename = 'numpy'
class numerix_info(system_info):
section = 'numerix'
def calc_info(self):
which = None, None
if os.getenv("NUMERIX"):
which = os.getenv("NUMERIX"), "environment var"
# If all the above fail, default to numpy.
if which[0] is None:
which = "numpy", "defaulted"
try:
import numpy # noqa: F401
which = "numpy", "defaulted"
except ImportError as e:
msg1 = str(e)
try:
import Numeric # noqa: F401
which = "numeric", "defaulted"
except ImportError as e:
msg2 = str(e)
try:
import numarray # noqa: F401
which = "numarray", "defaulted"
except ImportError as e:
msg3 = str(e)
log.info(msg1)
log.info(msg2)
log.info(msg3)
which = which[0].strip().lower(), which[1]
if which[0] not in ["numeric", "numarray", "numpy"]:
raise ValueError("numerix selector must be either 'Numeric' "
"or 'numarray' or 'numpy' but the value obtained"
" from the %s was '%s'." % (which[1], which[0]))
os.environ['NUMERIX'] = which[0]
self.set_info(**get_info(which[0]))
class f2py_info(system_info):
def calc_info(self):
try:
import numpy.f2py as f2py
except ImportError:
return
f2py_dir = os.path.join(os.path.dirname(f2py.__file__), 'src')
self.set_info(sources=[os.path.join(f2py_dir, 'fortranobject.c')],
include_dirs=[f2py_dir])
return
class boost_python_info(system_info):
section = 'boost_python'
dir_env_var = 'BOOST'
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend([d] + self.combine_paths(d, ['boost*']))
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
src_dirs = self.get_src_dirs()
src_dir = ''
for d in src_dirs:
if os.path.isfile(os.path.join(d, 'libs', 'python', 'src',
'module.cpp')):
src_dir = d
break
if not src_dir:
return
py_incl_dirs = [sysconfig.get_path('include')]
py_pincl_dir = sysconfig.get_path('platinclude')
if py_pincl_dir not in py_incl_dirs:
py_incl_dirs.append(py_pincl_dir)
srcs_dir = os.path.join(src_dir, 'libs', 'python', 'src')
bpl_srcs = glob(os.path.join(srcs_dir, '*.cpp'))
bpl_srcs += glob(os.path.join(srcs_dir, '*', '*.cpp'))
info = {'libraries': [('boost_python_src',
{'include_dirs': [src_dir] + py_incl_dirs,
'sources':bpl_srcs}
)],
'include_dirs': [src_dir],
}
if info:
self.set_info(**info)
return
class agg2_info(system_info):
section = 'agg2'
dir_env_var = 'AGG2'
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend([d] + self.combine_paths(d, ['agg2*']))
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
src_dirs = self.get_src_dirs()
src_dir = ''
for d in src_dirs:
if os.path.isfile(os.path.join(d, 'src', 'agg_affine_matrix.cpp')):
src_dir = d
break
if not src_dir:
return
if sys.platform == 'win32':
agg2_srcs = glob(os.path.join(src_dir, 'src', 'platform',
'win32', 'agg_win32_bmp.cpp'))
else:
agg2_srcs = glob(os.path.join(src_dir, 'src', '*.cpp'))
agg2_srcs += [os.path.join(src_dir, 'src', 'platform',
'X11',
'agg_platform_support.cpp')]
info = {'libraries':
[('agg2_src',
{'sources': agg2_srcs,
'include_dirs': [os.path.join(src_dir, 'include')],
}
)],
'include_dirs': [os.path.join(src_dir, 'include')],
}
if info:
self.set_info(**info)
return
class _pkg_config_info(system_info):
section = None
config_env_var = 'PKG_CONFIG'
default_config_exe = 'pkg-config'
append_config_exe = ''
version_macro_name = None
release_macro_name = None
version_flag = '--modversion'
cflags_flag = '--cflags'
def get_config_exe(self):
if self.config_env_var in os.environ:
return os.environ[self.config_env_var]
return self.default_config_exe
def get_config_output(self, config_exe, option):
cmd = config_exe + ' ' + self.append_config_exe + ' ' + option
try:
o = subprocess.check_output(cmd)
except (OSError, subprocess.CalledProcessError):
pass
else:
o = filepath_from_subprocess_output(o)
return o
def calc_info(self):
config_exe = find_executable(self.get_config_exe())
if not config_exe:
log.warn('File not found: %s. Cannot determine %s info.' \
% (config_exe, self.section))
return
info = {}
macros = []
libraries = []
library_dirs = []
include_dirs = []
extra_link_args = []
extra_compile_args = []
version = self.get_config_output(config_exe, self.version_flag)
if version:
macros.append((self.__class__.__name__.split('.')[-1].upper(),
_c_string_literal(version)))
if self.version_macro_name:
macros.append((self.version_macro_name + '_%s'
% (version.replace('.', '_')), None))
if self.release_macro_name:
release = self.get_config_output(config_exe, '--release')
if release:
macros.append((self.release_macro_name + '_%s'
% (release.replace('.', '_')), None))
opts = self.get_config_output(config_exe, '--libs')
if opts:
for opt in opts.split():
if opt[:2] == '-l':
libraries.append(opt[2:])
elif opt[:2] == '-L':
library_dirs.append(opt[2:])
else:
extra_link_args.append(opt)
opts = self.get_config_output(config_exe, self.cflags_flag)
if opts:
for opt in opts.split():
if opt[:2] == '-I':
include_dirs.append(opt[2:])
elif opt[:2] == '-D':
if '=' in opt:
n, v = opt[2:].split('=')
macros.append((n, v))
else:
macros.append((opt[2:], None))
else:
extra_compile_args.append(opt)
if macros:
dict_append(info, define_macros=macros)
if libraries:
dict_append(info, libraries=libraries)
if library_dirs:
dict_append(info, library_dirs=library_dirs)
if include_dirs:
dict_append(info, include_dirs=include_dirs)
if extra_link_args:
dict_append(info, extra_link_args=extra_link_args)
if extra_compile_args:
dict_append(info, extra_compile_args=extra_compile_args)
if info:
self.set_info(**info)
return
class wx_info(_pkg_config_info):
section = 'wx'
config_env_var = 'WX_CONFIG'
default_config_exe = 'wx-config'
append_config_exe = ''
version_macro_name = 'WX_VERSION'
release_macro_name = 'WX_RELEASE'
version_flag = '--version'
cflags_flag = '--cxxflags'
class gdk_pixbuf_xlib_2_info(_pkg_config_info):
section = 'gdk_pixbuf_xlib_2'
append_config_exe = 'gdk-pixbuf-xlib-2.0'
version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'
class gdk_pixbuf_2_info(_pkg_config_info):
section = 'gdk_pixbuf_2'
append_config_exe = 'gdk-pixbuf-2.0'
version_macro_name = 'GDK_PIXBUF_VERSION'
class gdk_x11_2_info(_pkg_config_info):
section = 'gdk_x11_2'
append_config_exe = 'gdk-x11-2.0'
version_macro_name = 'GDK_X11_VERSION'
class gdk_2_info(_pkg_config_info):
section = 'gdk_2'
append_config_exe = 'gdk-2.0'
version_macro_name = 'GDK_VERSION'
class gdk_info(_pkg_config_info):
section = 'gdk'
append_config_exe = 'gdk'
version_macro_name = 'GDK_VERSION'
class gtkp_x11_2_info(_pkg_config_info):
section = 'gtkp_x11_2'
append_config_exe = 'gtk+-x11-2.0'
version_macro_name = 'GTK_X11_VERSION'
class gtkp_2_info(_pkg_config_info):
section = 'gtkp_2'
append_config_exe = 'gtk+-2.0'
version_macro_name = 'GTK_VERSION'
class xft_info(_pkg_config_info):
section = 'xft'
append_config_exe = 'xft'
version_macro_name = 'XFT_VERSION'
class freetype2_info(_pkg_config_info):
section = 'freetype2'
append_config_exe = 'freetype2'
version_macro_name = 'FREETYPE2_VERSION'
class amd_info(system_info):
section = 'amd'
dir_env_var = 'AMD'
_lib_names = ['amd']
def calc_info(self):
lib_dirs = self.get_lib_dirs()
opt = self.get_option_single('amd_libs', 'libraries')
amd_libs = self.get_libs(opt, self._lib_names)
info = self.check_libs(lib_dirs, amd_libs, [])
if info is None:
return
include_dirs = self.get_include_dirs()
inc_dir = None
for d in include_dirs:
p = self.combine_paths(d, 'amd.h')
if p:
inc_dir = os.path.dirname(p[0])
break
if inc_dir is not None:
dict_append(info, include_dirs=[inc_dir],
define_macros=[('SCIPY_AMD_H', None)],
swig_opts=['-I' + inc_dir])
self.set_info(**info)
return
class umfpack_info(system_info):
section = 'umfpack'
dir_env_var = 'UMFPACK'
notfounderror = UmfpackNotFoundError
_lib_names = ['umfpack']
def calc_info(self):
lib_dirs = self.get_lib_dirs()
opt = self.get_option_single('umfpack_libs', 'libraries')
umfpack_libs = self.get_libs(opt, self._lib_names)
info = self.check_libs(lib_dirs, umfpack_libs, [])
if info is None:
return
include_dirs = self.get_include_dirs()
inc_dir = None
for d in include_dirs:
p = self.combine_paths(d, ['', 'umfpack'], 'umfpack.h')
if p:
inc_dir = os.path.dirname(p[0])
break
if inc_dir is not None:
dict_append(info, include_dirs=[inc_dir],
define_macros=[('SCIPY_UMFPACK_H', None)],
swig_opts=['-I' + inc_dir])
dict_append(info, **get_info('amd'))
self.set_info(**info)
return
def combine_paths(*args, **kws):
""" Return a list of existing paths composed by all combinations of
items from arguments.
"""
r = []
for a in args:
if not a:
continue
if is_string(a):
a = [a]
r.append(a)
args = r
if not args:
return []
if len(args) == 1:
result = reduce(lambda a, b: a + b, map(glob, args[0]), [])
elif len(args) == 2:
result = []
for a0 in args[0]:
for a1 in args[1]:
result.extend(glob(os.path.join(a0, a1)))
else:
result = combine_paths(*(combine_paths(args[0], args[1]) + args[2:]))
log.debug('(paths: %s)', ','.join(result))
return result
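# Worked example (hypothetical filesystem; only combinations that actually
# exist on disk are returned, because every combination is passed to glob):
#
#     combine_paths('/usr/lib', ['atlas*', 'ATLAS*'])
#     # -> e.g. ['/usr/lib/atlas']
#     combine_paths(['/usr', '/usr/local'], 'include', 'cblas.h')
#     # -> e.g. ['/usr/local/include/cblas.h']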
language_map = {'c': 0, 'c++': 1, 'f77': 2, 'f90': 3}
inv_language_map = {0: 'c', 1: 'c++', 2: 'f77', 3: 'f90'}
def dict_append(d, **kws):
languages = []
for k, v in kws.items():
if k == 'language':
languages.append(v)
continue
if k in d:
if k in ['library_dirs', 'include_dirs',
'extra_compile_args', 'extra_link_args',
'runtime_library_dirs', 'define_macros']:
[d[k].append(vv) for vv in v if vv not in d[k]]
else:
d[k].extend(v)
else:
d[k] = v
if languages:
l = inv_language_map[max([language_map.get(l, 0) for l in languages])]
d['language'] = l
return
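# Small demonstration of the merge semantics above (values are made up):
#
#     d = {'libraries': ['blas'], 'include_dirs': ['/usr/include']}
#     dict_append(d, libraries=['cblas'], include_dirs=['/usr/include'],
#                 language='f77')
#     # d['libraries']    == ['blas', 'cblas']   (plain extend)
#     # d['include_dirs'] == ['/usr/include']    (deduplicated append)
#     # d['language']     == 'f77'               (highest-ranked language wins)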
def parseCmdLine(argv=(None,)):
import optparse
parser = optparse.OptionParser("usage: %prog [-v] [info objs]")
parser.add_option('-v', '--verbose', action='store_true', dest='verbose',
default=False,
help='be verbose and print more messages')
opts, args = parser.parse_args(args=argv[1:])
return opts, args
def show_all(argv=None):
import inspect
if argv is None:
argv = sys.argv
opts, args = parseCmdLine(argv)
if opts.verbose:
log.set_threshold(log.DEBUG)
else:
log.set_threshold(log.INFO)
show_only = []
for n in args:
if n[-5:] != '_info':
n = n + '_info'
show_only.append(n)
show_all = not show_only
_gdict_ = globals().copy()
for name, c in _gdict_.items():
if not inspect.isclass(c):
continue
if not issubclass(c, system_info) or c is system_info:
continue
if not show_all:
if name not in show_only:
continue
del show_only[show_only.index(name)]
conf = c()
conf.verbosity = 2
# we don't need the result, but we want
# the side effect of printing diagnostics
conf.get_info()
if show_only:
log.info('Info classes not defined: %s', ','.join(show_only))
if __name__ == "__main__":
    show_all()

roonlychecker.py

from .checker import CSPChecker
from .checkerro import CSPReportOnlyChecker
from securityheaders.checkers import Finding,FindingType,FindingSeverity
class CSPReportOnlyNoCSPChecker(CSPReportOnlyChecker, CSPChecker):
def check(self, headers, opt_options=dict()):
rocsp = CSPReportOnlyChecker.getcsp(self,headers)
csp = CSPChecker.getcsp(self,headers)
        if not csp and rocsp:
            description = "The CSP is not enforced as only the content-security-policy-report-only header is present. Can you set the content-security-policy?"
            return [Finding(rocsp.headerkey, FindingType.REPORT_ONLY, description, FindingSeverity.INFO, None, None)]
        return []
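# Illustrative outcome (hypothetical headers dict): when only
# content-security-policy-report-only is present, check() returns a single
# INFO-severity Finding of type REPORT_ONLY; when an enforced CSP exists, or
# neither header does, it returns [].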

cursor_icon.rs

use vizia::*;
macro_rules! cursor_label {
($cx:ident, $name:ident) => {
Label::new($cx, stringify!($name))
.width(Pixels(110.0))
.height(Pixels(30.0))
.border_width(Pixels(1.0))
.border_color(Color::black())
.child_top(Stretch(1.0))
.child_bottom(Stretch(1.0))
.child_left(Pixels(5.0))
.text_wrap(false)
.on_hover(|cx| {
cx.emit(WindowEvent::SetCursor(CursorIcon::$name));
});
};
}
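// Rough expansion sketch (for orientation only; the actual expansion is done
// by the compiler): `cursor_label!(cx, Hand)` builds a bordered Label reading
// "Hand" whose hover handler emits
// `WindowEvent::SetCursor(CursorIcon::Hand)`, switching the cursor while the
// pointer is over that label.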
fn main() {
Application::new(WindowDescription::new().with_title("Cursor Icon"), |cx| {
HStack::new(cx, |cx| {
VStack::new(cx, |cx| {
cursor_label!(cx, Default);
cursor_label!(cx, Crosshair);
cursor_label!(cx, Hand);
cursor_label!(cx, Arrow);
cursor_label!(cx, Move);
cursor_label!(cx, Text);
cursor_label!(cx, Wait);
cursor_label!(cx, Help);
cursor_label!(cx, Progress);
cursor_label!(cx, NotAllowed);
cursor_label!(cx, ContextMenu);
cursor_label!(cx, Cell);
})
.child_space(Stretch(1.0))
.row_between(Pixels(10.0));
VStack::new(cx, |cx| {
cursor_label!(cx, VerticalText);
cursor_label!(cx, Alias);
cursor_label!(cx, Copy);
cursor_label!(cx, NoDrop);
cursor_label!(cx, Grab);
cursor_label!(cx, Grabbing);
cursor_label!(cx, AllScroll);
cursor_label!(cx, ZoomIn);
cursor_label!(cx, ZoomOut);
cursor_label!(cx, EResize);
cursor_label!(cx, NResize);
cursor_label!(cx, NeResize);
})
.child_space(Stretch(1.0))
.row_between(Pixels(10.0));
VStack::new(cx, |cx| {
cursor_label!(cx, NwResize);
cursor_label!(cx, SResize);
cursor_label!(cx, SeResize);
cursor_label!(cx, SwResize);
cursor_label!(cx, WResize);
cursor_label!(cx, EwResize);
cursor_label!(cx, NsResize);
cursor_label!(cx, NeswResize);
cursor_label!(cx, NwseResize);
cursor_label!(cx, ColResize);
cursor_label!(cx, RowResize);
cursor_label!(cx, None);
})
.child_space(Stretch(1.0))
.row_between(Pixels(10.0));
})
.child_space(Stretch(1.0));
})
.run();
}
urlmap.py | # (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
Map URL prefixes to WSGI applications. See ``URLMap``
"""
import re
import os
import cgi
try:
    # Python 3: ``MutableMapping`` lives in ``collections.abc``
    from collections.abc import MutableMapping as DictMixin
except ImportError:
    # Python 2
    from UserDict import DictMixin
try:
    string_types = (str, unicode)  # Python 2
except NameError:
    string_types = (str,)  # Python 3
from paste import httpexceptions
__all__ = ['URLMap', 'PathProxyURLMap']
def urlmap_factory(loader, global_conf, **local_conf):
if 'not_found_app' in local_conf:
not_found_app = local_conf.pop('not_found_app')
else:
not_found_app = global_conf.get('not_found_app')
if not_found_app:
not_found_app = loader.get_app(not_found_app, global_conf=global_conf)
urlmap = URLMap(not_found_app=not_found_app)
for path, app_name in local_conf.items():
path = parse_path_expression(path)
app = loader.get_app(app_name, global_conf=global_conf)
urlmap[path] = app
return urlmap
def parse_path_expression(path):
"""
Parses a path expression like 'domain foobar.com port 20 /' or
just '/foobar' for a path alone. Returns as an address that
URLMap likes.
"""
parts = path.split()
domain = port = path = None
while parts:
if parts[0] == 'domain':
parts.pop(0)
if not parts:
raise ValueError("'domain' must be followed with a domain name")
if domain:
raise ValueError("'domain' given twice")
domain = parts.pop(0)
elif parts[0] == 'port':
parts.pop(0)
if not parts:
raise ValueError("'port' must be followed with a port number")
if port:
raise ValueError("'port' given twice")
port = parts.pop(0)
else:
if path:
raise ValueError("more than one path given (have %r, got %r)"
% (path, parts[0]))
path = parts.pop(0)
s = ''
if domain:
s = 'http://%s' % domain
if port:
if not domain:
raise ValueError("If you give a port, you must also give a domain")
s += ':' + port
if path:
if s:
s += '/'
s += path
return s
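# Illustrative inputs and raw results (a sketch; note that the raw value can
# contain a double slash, which ``URLMap.normalize_url`` collapses later):
#
#     parse_path_expression('/foobar')                     -> '/foobar'
#     parse_path_expression('domain foobar.com port 20 /') -> 'http://foobar.com:20//'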
class URLMap(DictMixin):
"""
URLMap instances are dictionary-like object that dispatch to one
of several applications based on the URL.
The dictionary keys are URLs to match (like
``PATH_INFO.startswith(url)``), and the values are applications to
dispatch to. URLs are matched most-specific-first, i.e., longest
URL first. The ``SCRIPT_NAME`` and ``PATH_INFO`` environmental
variables are adjusted to indicate the new context.
URLs can also include domains, like ``http://blah.com/foo``, or as
tuples ``('blah.com', '/foo')``. This will match domain names; without
the ``http://domain`` or with a domain of ``None`` any domain will be
matched (so long as no other explicit domain matches). """
def __init__(self, not_found_app=None):
self.applications = []
if not not_found_app:
not_found_app = self.not_found_app
self.not_found_application = not_found_app
def __len__(self):
return len(self.applications)
def __iter__(self):
for app_url, app in self.applications:
yield app_url
norm_url_re = re.compile('//+')
domain_url_re = re.compile('^(http|https)://')
def not_found_app(self, environ, start_response):
mapper = environ.get('paste.urlmap_object')
if mapper:
matches = [p for p, a in mapper.applications]
extra = 'defined apps: %s' % (
',\n '.join(map(repr, matches)))
else:
extra = ''
extra += '\nSCRIPT_NAME: %r' % cgi.escape(environ.get('SCRIPT_NAME'))
extra += '\nPATH_INFO: %r' % cgi.escape(environ.get('PATH_INFO'))
extra += '\nHTTP_HOST: %r' % cgi.escape(environ.get('HTTP_HOST'))
app = httpexceptions.HTTPNotFound(
environ['PATH_INFO'],
comment=cgi.escape(extra)).wsgi_application
return app(environ, start_response)
def normalize_url(self, url, trim=True):
if isinstance(url, (list, tuple)):
domain = url[0]
url = self.normalize_url(url[1])[1]
return domain, url
assert (not url or url.startswith('/')
or self.domain_url_re.search(url)), (
"URL fragments must start with / or http:// (you gave %r)" % url)
match = self.domain_url_re.search(url)
if match:
url = url[match.end():]
if '/' in url:
domain, url = url.split('/', 1)
url = '/' + url
else:
domain, url = url, ''
else:
domain = None
url = self.norm_url_re.sub('/', url)
if trim:
url = url.rstrip('/')
return domain, url
def sort_apps(self):
"""
Make sure applications are sorted with longest URLs first
"""
def key(app_desc):
(domain, url), app = app_desc
if not domain:
# Make sure empty domains sort last:
return '\xff', -len(url)
else:
return domain, -len(url)
apps = [(key(desc), desc) for desc in self.applications]
apps.sort()
self.applications = [desc for (sortable, desc) in apps]
def __setitem__(self, url, app):
if app is None:
try:
del self[url]
except KeyError:
pass
return
dom_url = self.normalize_url(url)
if dom_url in self:
del self[dom_url]
self.applications.append((dom_url, app))
self.sort_apps()
def __getitem__(self, url):
dom_url = self.normalize_url(url)
for app_url, app in self.applications:
if app_url == dom_url:
return app
raise KeyError(
"No application with the url %r (domain: %r; existing: %s)"
% (url[1], url[0] or '*', self.applications))
def __delitem__(self, url):
        url = self.normalize_url(url)
        for app_url, app in self.applications:
            if app_url == url:
                self.applications.remove((app_url, app))
                break
        else:
            raise KeyError(
                "No application with the url %r" % (url,))
def keys(self):
return [app_url for app_url, app in self.applications]
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
if ':' in host:
host, port = host.split(':', 1)
else:
if environ['wsgi.url_scheme'] == 'http':
port = '80'
else:
port = '443'
path_info = environ.get('PATH_INFO')
path_info = self.normalize_url(path_info, False)[1]
for (domain, app_url), app in self.applications:
if domain and domain != host and domain != host+':'+port:
continue
if (path_info == app_url
or path_info.startswith(app_url + '/')):
environ['SCRIPT_NAME'] += app_url
environ['PATH_INFO'] = path_info[len(app_url):]
return app(environ, start_response)
environ['paste.urlmap_object'] = self
return self.not_found_application(environ, start_response)
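# A minimal dispatch sketch (``root_app`` and ``api_app`` are hypothetical
# WSGI applications):
#
#     urlmap = URLMap()
#     urlmap['/'] = root_app
#     urlmap['/api'] = api_app
#     # A request for /api/users dispatches to api_app with SCRIPT_NAME
#     # ending in '/api' and PATH_INFO '/users'; any unmatched path falls
#     # through to root_app, since longer URLs are matched first.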
class PathProxyURLMap(object):
"""
This is a wrapper for URLMap that catches any strings that
are passed in as applications; these strings are treated as
filenames (relative to `base_path`) and are passed to the
callable `builder`, which will return an application.
This is intended for cases when configuration files can be
treated as applications.
`base_paste_url` is the URL under which all applications added through
this wrapper must go. Use ``""`` if you want this to not
change incoming URLs.
"""
def __init__(self, map, base_paste_url, base_path, builder):
self.map = map
self.base_paste_url = self.map.normalize_url(base_paste_url)
self.base_path = base_path
self.builder = builder
def __setitem__(self, url, app):
        if isinstance(app, string_types):
app_fn = os.path.join(self.base_path, app)
app = self.builder(app_fn)
url = self.map.normalize_url(url)
# @@: This means http://foo.com/bar will potentially
# match foo.com, but /base_paste_url/bar, which is unintuitive
url = (url[0] or self.base_paste_url[0],
self.base_paste_url[1] + url[1])
self.map[url] = app
def __getattr__(self, attr):
return getattr(self.map, attr)
# This is really the only settable attribute
def not_found_application__get(self):
return self.map.not_found_application
def not_found_application__set(self, value):
self.map.not_found_application = value
not_found_application = property(not_found_application__get,
not_found_application__set)
main.go | package main
import (
"sync"
)
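// Hypothetical sketch of the tourist type this file assumes; the real
// definition presumably lives elsewhere in the package. The positional
// literal tourist{uint(i), &wg} in main implies these two fields.
//
//	type tourist struct {
//		id uint
//		wg *sync.WaitGroup
//	}
//
//	// Hello does the tourist's "work" and releases the WaitGroup.
//	func (t tourist) Hello() {
//		t.wg.Done()
//	}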
func main() {
tourists := make(chan tourist)
wg := sync.WaitGroup{}
go pcPool(tourists)
for i := 0; i < 20; i++ {
wg.Add(1)
tourists <- tourist{uint(i), &wg}
}
wg.Wait()
}
func pcPool(personPool chan tourist) {
personBuffer := make(chan tourist, 8)
for i := 0; i < 8; i++ {
go computer(personBuffer)
}
for {
person := <-personPool
personBuffer <- person
}
}
func computer(person chan tourist) {
for {
p := <-person
p.Hello()
}
}
plot.py | """Plotting module for SymPy.
A plot is represented by the ``Plot`` class that contains a reference to the
backend and a list of the data series to be plotted. The data series are
instances of classes meant to simplify getting points and meshes from SymPy
expressions. ``plot_backends`` is a dictionary with all the backends.
This module gives only the essential. For all the fancy stuff use directly
the backend. You can get the backend wrapper for every plot from the
``_backend`` attribute. Moreover the data series classes have various useful
methods like ``get_points``, ``get_meshes``, etc., that may
be useful if you wish to use another plotting library.
Especially if you need publication ready graphs and this module is not enough
for you - just get the ``_backend`` attribute and add whatever you want
directly to it. In the case of matplotlib (the common way to graph data in
python) just copy ``_backend.fig`` which is the figure and ``_backend.ax``
which is the axis and work on them as you would on any other matplotlib object.
Simplicity of code takes much greater importance than performance. Do not use it
if you care at all about performance. A new backend instance is initialized
every time you call ``show()`` and the old one is left to the garbage collector.
"""
from collections.abc import Callable
from sympy.core.basic import Basic
from sympy.core.containers import Tuple
from sympy.core.expr import Expr
from sympy.core.function import arity, Function
from sympy.core.symbol import (Dummy, Symbol)
from sympy.core.sympify import sympify
from sympy.external import import_module
from sympy.printing.latex import latex
from sympy.utilities.exceptions import sympy_deprecation_warning
from sympy.utilities.iterables import is_sequence
from .experimental_lambdify import (vectorized_lambdify, lambdify)
# N.B.
# When changing the minimum module version for matplotlib, please change
# the same in the `SymPyDocTestFinder`` in `sympy/testing/runtests.py`
# Backend specific imports - textplot
from sympy.plotting.textplot import textplot
# Global variable
# Set to False when running tests / doctests so that the plots don't show.
_show = True
def unset_show():
"""
Disable show(). For use in the tests.
"""
global _show
_show = False
def _str_or_latex(label):
if isinstance(label, Basic):
return latex(label, mode='inline')
return str(label)
##############################################################################
# The public interface
##############################################################################
class Plot:
"""The central class of the plotting module.
Explanation
===========
For interactive work the function ``plot`` is better suited.
This class permits the plotting of SymPy expressions using numerous
backends (matplotlib, textplot, the old pyglet module for sympy, Google
charts api, etc).
The figure can contain an arbitrary number of plots of SymPy expressions,
lists of coordinates of points, etc. Plot has a private attribute _series that
contains all data series to be plotted (expressions for lines or surfaces,
lists of points, etc (all subclasses of BaseSeries)). Those data series are
instances of classes not imported by ``from sympy import *``.
The customization of the figure is on two levels. Global options that
concern the figure as a whole (eg title, xlabel, scale, etc) and
per-data series options (eg name) and aesthetics (eg. color, point shape,
line type, etc.).
The difference between options and aesthetics is that an aesthetic can be
a function of the coordinates (or parameters in a parametric plot). The
supported values for an aesthetic are:
- None (the backend uses default values)
- a constant
- a function of one variable (the first coordinate or parameter)
- a function of two variables (the first and second coordinate or
parameters)
- a function of three variables (only in nonparametric 3D plots)
Their implementation depends on the backend so they may not work in some
backends.
If the plot is parametric and the arity of the aesthetic function permits
it the aesthetic is calculated over parameters and not over coordinates.
If the arity does not permit calculation over parameters the calculation is
done over coordinates.
Only cartesian coordinates are supported for the moment, but you can use
the parametric plots to plot in polar, spherical and cylindrical
coordinates.
The arguments for the constructor Plot must be subclasses of BaseSeries.
Any global option can be specified as a keyword argument.
The global options for a figure are:
- title : str
- xlabel : str or Symbol
- ylabel : str or Symbol
- zlabel : str or Symbol
- legend : bool
- xscale : {'linear', 'log'}
- yscale : {'linear', 'log'}
- axis : bool
- axis_center : tuple of two floats or {'center', 'auto'}
- xlim : tuple of two floats
- ylim : tuple of two floats
- aspect_ratio : tuple of two floats or {'auto'}
- autoscale : bool
- margin : float in [0, 1]
- backend : {'default', 'matplotlib', 'text'} or a subclass of BaseBackend
- size : optional tuple of two floats, (width, height); default: None
The per data series options and aesthetics are:
There are none in the base series. See below for options for subclasses.
Some data series support additional aesthetics or options:
ListSeries, LineOver1DRangeSeries, Parametric2DLineSeries,
Parametric3DLineSeries support the following:
Aesthetics:
- line_color : string, or float, or function, optional
Specifies the color for the plot, which depends on the backend being
used.
For example, if ``MatplotlibBackend`` is being used, then
Matplotlib string colors are acceptable ("red", "r", "cyan", "c", ...).
Alternatively, we can use a float number `0 < color < 1` wrapped in a
string (for example, `line_color="0.5"`) to specify grayscale colors.
Alternatively, We can specify a function returning a single
float value: this will be used to apply a color-loop (for example,
`line_color=lambda x: math.cos(x)`).
Note that by setting line_color, it would be applied simultaneously
to all the series.
options:
- label : str
- steps : bool
- integers_only : bool
SurfaceOver2DRangeSeries, ParametricSurfaceSeries support the following:
aesthetics:
- surface_color : function which returns a float.
"""
def __init__(self, *args,
title=None, xlabel=None, ylabel=None, zlabel=None, aspect_ratio='auto',
xlim=None, ylim=None, axis_center='auto', axis=True,
xscale='linear', yscale='linear', legend=False, autoscale=True,
margin=0, annotations=None, markers=None, rectangles=None,
fill=None, backend='default', size=None, **kwargs):
super().__init__()
# Options for the graph as a whole.
# The possible values for each option are described in the docstring of
# Plot. They are based purely on convention, no checking is done.
self.title = title
self.xlabel = xlabel
self.ylabel = ylabel
self.zlabel = zlabel
self.aspect_ratio = aspect_ratio
self.axis_center = axis_center
self.axis = axis
self.xscale = xscale
self.yscale = yscale
self.legend = legend
self.autoscale = autoscale
self.margin = margin
self.annotations = annotations
self.markers = markers
self.rectangles = rectangles
self.fill = fill
# Contains the data objects to be plotted. The backend should be smart
# enough to iterate over this list.
self._series = []
self._series.extend(args)
# The backend type. On every show() a new backend instance is created
# in self._backend which is tightly coupled to the Plot instance
# (thanks to the parent attribute of the backend).
if isinstance(backend, str):
self.backend = plot_backends[backend]
elif (type(backend) == type) and issubclass(backend, BaseBackend):
self.backend = backend
else:
raise TypeError(
"backend must be either a string or a subclass of BaseBackend")
is_real = \
lambda lim: all(getattr(i, 'is_real', True) for i in lim)
is_finite = \
lambda lim: all(getattr(i, 'is_finite', True) for i in lim)
# reduce code repetition
def check_and_set(t_name, t):
if t:
if not is_real(t):
raise ValueError(
"All numbers from {}={} must be real".format(t_name, t))
if not is_finite(t):
raise ValueError(
"All numbers from {}={} must be finite".format(t_name, t))
setattr(self, t_name, (float(t[0]), float(t[1])))
self.xlim = None
check_and_set("xlim", xlim)
self.ylim = None
check_and_set("ylim", ylim)
self.size = None
check_and_set("size", size)
def show(self):
# TODO move this to the backend (also for save)
if hasattr(self, '_backend'):
self._backend.close()
self._backend = self.backend(self)
self._backend.show()
def save(self, path):
if hasattr(self, '_backend'):
self._backend.close()
self._backend = self.backend(self)
self._backend.save(path)
def __str__(self):
series_strs = [('[%d]: ' % i) + str(s)
for i, s in enumerate(self._series)]
return 'Plot object containing:\n' + '\n'.join(series_strs)
def __getitem__(self, index):
return self._series[index]
def __setitem__(self, index, *args):
if len(args) == 1 and isinstance(args[0], BaseSeries):
self._series[index] = args
def __delitem__(self, index):
del self._series[index]
def append(self, arg):
"""Adds an element from a plot's series to an existing plot.
Examples
========
Consider two ``Plot`` objects, ``p1`` and ``p2``. To add the
second plot's first series object to the first, use the
``append`` method, like so:
.. plot::
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.plotting import plot
>>> x = symbols('x')
>>> p1 = plot(x*x, show=False)
>>> p2 = plot(x, show=False)
>>> p1.append(p2[0])
>>> p1
Plot object containing:
[0]: cartesian line: x**2 for x over (-10.0, 10.0)
[1]: cartesian line: x for x over (-10.0, 10.0)
>>> p1.show()
See Also
========
extend
"""
if isinstance(arg, BaseSeries):
self._series.append(arg)
else:
raise TypeError('Must specify element of plot to append.')
def extend(self, arg):
"""Adds all series from another plot.
Examples
========
Consider two ``Plot`` objects, ``p1`` and ``p2``. To add the
second plot to the first, use the ``extend`` method, like so:
.. plot::
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.plotting import plot
>>> x = symbols('x')
>>> p1 = plot(x**2, show=False)
>>> p2 = plot(x, -x, show=False)
>>> p1.extend(p2)
>>> p1
Plot object containing:
[0]: cartesian line: x**2 for x over (-10.0, 10.0)
[1]: cartesian line: x for x over (-10.0, 10.0)
[2]: cartesian line: -x for x over (-10.0, 10.0)
>>> p1.show()
"""
if isinstance(arg, Plot):
self._series.extend(arg._series)
elif is_sequence(arg):
self._series.extend(arg)
else:
raise TypeError('Expecting Plot or sequence of BaseSeries')
class PlotGrid:
"""This class helps to plot subplots from already created SymPy plots
in a single figure.
Examples
========
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.plotting import plot, plot3d, PlotGrid
>>> x, y = symbols('x, y')
>>> p1 = plot(x, x**2, x**3, (x, -5, 5))
>>> p2 = plot((x**2, (x, -6, 6)), (x, (x, -5, 5)))
>>> p3 = plot(x**3, (x, -5, 5))
>>> p4 = plot3d(x*y, (x, -5, 5), (y, -5, 5))
Plotting vertically in a single line:
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> PlotGrid(2, 1, p1, p2)
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: x for x over (-5.0, 5.0)
[1]: cartesian line: x**2 for x over (-5.0, 5.0)
[2]: cartesian line: x**3 for x over (-5.0, 5.0)
Plot[1]:Plot object containing:
[0]: cartesian line: x**2 for x over (-6.0, 6.0)
[1]: cartesian line: x for x over (-5.0, 5.0)
Plotting horizontally in a single line:
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> PlotGrid(1, 3, p2, p3, p4)
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: x**2 for x over (-6.0, 6.0)
[1]: cartesian line: x for x over (-5.0, 5.0)
Plot[1]:Plot object containing:
[0]: cartesian line: x**3 for x over (-5.0, 5.0)
Plot[2]:Plot object containing:
[0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
Plotting in a grid form:
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> PlotGrid(2, 2, p1, p2, p3, p4)
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: x for x over (-5.0, 5.0)
[1]: cartesian line: x**2 for x over (-5.0, 5.0)
[2]: cartesian line: x**3 for x over (-5.0, 5.0)
Plot[1]:Plot object containing:
[0]: cartesian line: x**2 for x over (-6.0, 6.0)
[1]: cartesian line: x for x over (-5.0, 5.0)
Plot[2]:Plot object containing:
[0]: cartesian line: x**3 for x over (-5.0, 5.0)
Plot[3]:Plot object containing:
[0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
"""
def __init__(self, nrows, ncolumns, *args, show=True, size=None, **kwargs):
"""
Parameters
==========
nrows :
The number of rows that should be in the grid of the
required subplot.
ncolumns :
The number of columns that should be in the grid
of the required subplot.
nrows and ncolumns together define the required grid.
Arguments
=========
A list of predefined plot objects entered in a row-wise sequence
i.e. plot objects which are to be in the top row of the required
grid are written first, then the second row objects and so on
Keyword arguments
=================
show : Boolean
The default value is set to ``True``. Set show to ``False`` and
the function will not display the subplot. The returned instance
of the ``PlotGrid`` class can then be used to save or display the
plot by calling the ``save()`` and ``show()`` methods
respectively.
size : (float, float), optional
A tuple in the form (width, height) in inches to specify the size of
the overall figure. The default value is set to ``None``, meaning
the size will be set by the default backend.
"""
self.nrows = nrows
self.ncolumns = ncolumns
self._series = []
self.args = args
for arg in args:
self._series.append(arg._series)
self.backend = DefaultBackend
self.size = size
if show:
self.show()
def show(self):
if hasattr(self, '_backend'):
self._backend.close()
self._backend = self.backend(self)
self._backend.show()
def save(self, path):
if hasattr(self, '_backend'):
self._backend.close()
self._backend = self.backend(self)
self._backend.save(path)
def __str__(self):
plot_strs = [('Plot[%d]:' % i) + str(plot)
for i, plot in enumerate(self.args)]
return 'PlotGrid object containing:\n' + '\n'.join(plot_strs)
##############################################################################
# Data Series
##############################################################################
#TODO more general way to calculate aesthetics (see get_color_array)
### The base class for all series
class BaseSeries:
"""Base class for the data objects containing stuff to be plotted.
Explanation
===========
The backend should check if it supports the data series that it's given.
(eg TextBackend supports only LineOver1DRange).
It's the backend responsibility to know how to use the class of
data series that it's given.
Some data series classes are grouped (using a class attribute like is_2Dline)
according to the api they present (based only on convention). The backend is
not obliged to use that api (eg. The LineOver1DRange belongs to the
is_2Dline group and presents the get_points method, but the
TextBackend does not use the get_points method).
"""
# Some flags follow. The rationale for using flags instead of checking base
# classes is that setting multiple flags is simpler than multiple
# inheritance.
is_2Dline = False
# Some of the backends expect:
# - get_points returning 1D np.arrays list_x, list_y
# - get_color_array returning 1D np.array (done in Line2DBaseSeries)
# with the colors calculated at the points from get_points
is_3Dline = False
# Some of the backends expect:
    # - get_points returning 1D np.arrays list_x, list_y, list_z
# - get_color_array returning 1D np.array (done in Line2DBaseSeries)
# with the colors calculated at the points from get_points
is_3Dsurface = False
# Some of the backends expect:
# - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays)
# - get_points an alias for get_meshes
is_contour = False
# Some of the backends expect:
# - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays)
# - get_points an alias for get_meshes
is_implicit = False
# Some of the backends expect:
    # - get_meshes returning mesh_x (1D array), mesh_y (1D array),
    #   mesh_z (2D np.array)
# - get_points an alias for get_meshes
# Different from is_contour as the colormap in backend will be
# different
is_parametric = False
# The calculation of aesthetics expects:
# - get_parameter_points returning one or two np.arrays (1D or 2D)
# used for calculation aesthetics
def __init__(self):
super().__init__()
@property
def is_3D(self):
flags3D = [
self.is_3Dline,
self.is_3Dsurface
]
return any(flags3D)
@property
def is_line(self):
flagslines = [
self.is_2Dline,
self.is_3Dline
]
return any(flagslines)
### 2D lines
class Line2DBaseSeries(BaseSeries):
"""A base class for 2D lines.
- adding the label, steps and only_integers options
- making is_2Dline true
- defining get_segments and get_color_array
"""
is_2Dline = True
_dim = 2
def __init__(self):
super().__init__()
self.label = None
self.steps = False
self.only_integers = False
self.line_color = None
def get_data(self):
""" Return lists of coordinates for plotting the line.
Returns
=======
x: list
List of x-coordinates
y: list
List of y-coordinates
        z: list
            List of z-coordinates in case of Parametric3DLineSeries
"""
np = import_module('numpy')
points = self.get_points()
if self.steps is True:
if len(points) == 2:
x = np.array((points[0], points[0])).T.flatten()[1:]
y = np.array((points[1], points[1])).T.flatten()[:-1]
points = (x, y)
else:
x = np.repeat(points[0], 3)[2:]
y = np.repeat(points[1], 3)[:-2]
z = np.repeat(points[2], 3)[1:-1]
points = (x, y, z)
return points
def get_segments(self):
sympy_deprecation_warning(
"""
The Line2DBaseSeries.get_segments() method is deprecated.
Instead, use the MatplotlibBackend.get_segments() method, or use
The get_points() or get_data() methods.
""",
deprecated_since_version="1.9",
active_deprecations_target="deprecated-get-segments")
np = import_module('numpy')
points = type(self).get_data(self)
points = np.ma.array(points).T.reshape(-1, 1, self._dim)
return np.ma.concatenate([points[:-1], points[1:]], axis=1)
def get_color_array(self):
np = import_module('numpy')
c = self.line_color
if hasattr(c, '__call__'):
f = np.vectorize(c)
nargs = arity(c)
if nargs == 1 and self.is_parametric:
x = self.get_parameter_points()
return f(centers_of_segments(x))
else:
variables = list(map(centers_of_segments, self.get_points()))
if nargs == 1:
return f(variables[0])
elif nargs == 2:
return f(*variables[:2])
else: # only if the line is 3D (otherwise raises an error)
return f(*variables)
else:
return c*np.ones(self.nb_of_points)
class List2DSeries(Line2DBaseSeries):
"""Representation for a line consisting of list of points."""
def __init__(self, list_x, list_y):
np = import_module('numpy')
super().__init__()
self.list_x = np.array(list_x)
self.list_y = np.array(list_y)
self.label = 'list'
def __str__(self):
return 'list plot'
def get_points(self):
return (self.list_x, self.list_y)
class LineOver1DRangeSeries(Line2DBaseSeries):
"""Representation for a line consisting of a SymPy expression over a range."""
def __init__(self, expr, var_start_end, **kwargs):
super().__init__()
self.expr = sympify(expr)
self.label = kwargs.get('label', None) or self.expr
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.adaptive = kwargs.get('adaptive', True)
self.depth = kwargs.get('depth', 12)
self.line_color = kwargs.get('line_color', None)
self.xscale = kwargs.get('xscale', 'linear')
def __str__(self):
return 'cartesian line: %s for %s over %s' % (
str(self.expr), str(self.var), str((self.start, self.end)))
def get_points(self):
""" Return lists of coordinates for plotting. Depending on the
`adaptive` option, this function will either use an adaptive algorithm
or it will uniformly sample the expression over the provided range.
Returns
=======
x: list
List of x-coordinates
y: list
List of y-coordinates
Explanation
===========
The adaptive sampling is done by recursively checking if three
points are almost collinear. If they are not collinear, then more
points are added between those points.
References
==========
.. [1] Adaptive polygonal approximation of parametric curves,
Luiz Henrique de Figueiredo.
"""
if self.only_integers or not self.adaptive:
return self._uniform_sampling()
else:
f = lambdify([self.var], self.expr)
x_coords = []
y_coords = []
np = import_module('numpy')
def sample(p, q, depth):
""" Samples recursively if three points are almost collinear.
For depth < 6, points are added irrespective of whether they
satisfy the collinearity condition or not. The maximum depth
allowed is 12.
"""
# Randomly sample to avoid aliasing.
random = 0.45 + np.random.rand() * 0.1
if self.xscale == 'log':
xnew = 10**(np.log10(p[0]) + random * (np.log10(q[0]) -
np.log10(p[0])))
else:
xnew = p[0] + random * (q[0] - p[0])
ynew = f(xnew)
new_point = np.array([xnew, ynew])
# Maximum depth
if depth > self.depth:
x_coords.append(q[0])
y_coords.append(q[1])
# Sample irrespective of whether the line is flat till the
# depth of 6. We are not using linspace to avoid aliasing.
elif depth < 6:
sample(p, new_point, depth + 1)
sample(new_point, q, depth + 1)
# Sample ten points if complex values are encountered
# at both ends. If there is a real value in between, then
# sample those points further.
elif p[1] is None and q[1] is None:
if self.xscale == 'log':
xarray = np.logspace(p[0], q[0], 10)
else:
xarray = np.linspace(p[0], q[0], 10)
yarray = list(map(f, xarray))
if not all(y is None for y in yarray):
for i in range(len(yarray) - 1):
if not (yarray[i] is None and yarray[i + 1] is None):
sample([xarray[i], yarray[i]],
[xarray[i + 1], yarray[i + 1]], depth + 1)
# Sample further if one of the end points in None (i.e. a
# complex value) or the three points are not almost collinear.
elif (p[1] is None or q[1] is None or new_point[1] is None
or not flat(p, new_point, q)):
sample(p, new_point, depth + 1)
sample(new_point, q, depth + 1)
else:
x_coords.append(q[0])
y_coords.append(q[1])
f_start = f(self.start)
f_end = f(self.end)
x_coords.append(self.start)
y_coords.append(f_start)
sample(np.array([self.start, f_start]),
np.array([self.end, f_end]), 0)
return (x_coords, y_coords)
def _uniform_sampling(self):
np = import_module('numpy')
if self.only_integers is True:
if self.xscale == 'log':
list_x = np.logspace(int(self.start), int(self.end),
num=int(self.end) - int(self.start) + 1)
else:
list_x = np.linspace(int(self.start), int(self.end),
num=int(self.end) - int(self.start) + 1)
else:
if self.xscale == 'log':
list_x = np.logspace(self.start, self.end, num=self.nb_of_points)
else:
list_x = np.linspace(self.start, self.end, num=self.nb_of_points)
f = vectorized_lambdify([self.var], self.expr)
list_y = f(list_x)
return (list_x, list_y)
class Parametric2DLineSeries(Line2DBaseSeries):
"""Representation for a line consisting of two parametric SymPy expressions
over a range."""
is_parametric = True
def __init__(self, expr_x, expr_y, var_start_end, **kwargs):
super().__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.label = kwargs.get('label', None) or \
Tuple(self.expr_x, self.expr_y)
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.adaptive = kwargs.get('adaptive', True)
self.depth = kwargs.get('depth', 12)
self.line_color = kwargs.get('line_color', None)
def __str__(self):
return 'parametric cartesian line: (%s, %s) for %s over %s' % (
str(self.expr_x), str(self.expr_y), str(self.var),
str((self.start, self.end)))
def get_parameter_points(self):
np = import_module('numpy')
return np.linspace(self.start, self.end, num=self.nb_of_points)
def _uniform_sampling(self):
param = self.get_parameter_points()
fx = vectorized_lambdify([self.var], self.expr_x)
fy = vectorized_lambdify([self.var], self.expr_y)
list_x = fx(param)
list_y = fy(param)
return (list_x, list_y)
def get_points(self):
""" Return lists of coordinates for plotting. Depending on the
`adaptive` option, this function will either use an adaptive algorithm
or it will uniformly sample the expression over the provided range.
Returns
=======
x: list
List of x-coordinates
y: list
List of y-coordinates
Explanation
===========
The adaptive sampling is done by recursively checking if three
points are almost collinear. If they are not collinear, then more
points are added between those points.
References
==========
.. [1] Adaptive polygonal approximation of parametric curves,
Luiz Henrique de Figueiredo.
"""
if not self.adaptive:
return self._uniform_sampling()
f_x = lambdify([self.var], self.expr_x)
f_y = lambdify([self.var], self.expr_y)
x_coords = []
y_coords = []
def sample(param_p, param_q, p, q, depth):
""" Samples recursively if three points are almost collinear.
For depth < 6, points are added irrespective of whether they
satisfy the collinearity condition or not. The maximum depth
allowed is 12.
"""
# Randomly sample to avoid aliasing.
np = import_module('numpy')
random = 0.45 + np.random.rand() * 0.1
param_new = param_p + random * (param_q - param_p)
xnew = f_x(param_new)
ynew = f_y(param_new)
new_point = np.array([xnew, ynew])
# Maximum depth
if depth > self.depth:
x_coords.append(q[0])
y_coords.append(q[1])
# Sample irrespective of whether the line is flat till the
# depth of 6. We are not using linspace to avoid aliasing.
elif depth < 6:
sample(param_p, param_new, p, new_point, depth + 1)
sample(param_new, param_q, new_point, q, depth + 1)
# Sample ten points if complex values are encountered
# at both ends. If there is a real value in between, then
# sample those points further.
            elif ((p[0] is None and q[0] is None) or
                    (p[1] is None and q[1] is None)):
param_array = np.linspace(param_p, param_q, 10)
x_array = list(map(f_x, param_array))
y_array = list(map(f_y, param_array))
if not all(x is None and y is None
for x, y in zip(x_array, y_array)):
for i in range(len(y_array) - 1):
if ((x_array[i] is not None and y_array[i] is not None) or
(x_array[i + 1] is not None and y_array[i + 1] is not None)):
point_a = [x_array[i], y_array[i]]
point_b = [x_array[i + 1], y_array[i + 1]]
                            sample(param_array[i], param_array[i + 1], point_a,
                                   point_b, depth + 1)
# Sample further if one of the end points in None (i.e. a complex
# value) or the three points are not almost collinear.
elif (p[0] is None or p[1] is None
or q[1] is None or q[0] is None
or not flat(p, new_point, q)):
sample(param_p, param_new, p, new_point, depth + 1)
sample(param_new, param_q, new_point, q, depth + 1)
else:
x_coords.append(q[0])
y_coords.append(q[1])
f_start_x = f_x(self.start)
f_start_y = f_y(self.start)
start = [f_start_x, f_start_y]
f_end_x = f_x(self.end)
f_end_y = f_y(self.end)
end = [f_end_x, f_end_y]
x_coords.append(f_start_x)
y_coords.append(f_start_y)
sample(self.start, self.end, start, end, 0)
return x_coords, y_coords
### 3D lines
class Line3DBaseSeries(Line2DBaseSeries):
"""A base class for 3D lines.
Most of the stuff is derived from Line2DBaseSeries."""
is_2Dline = False
is_3Dline = True
_dim = 3
def __init__(self):
super().__init__()
class Parametric3DLineSeries(Line3DBaseSeries):
"""Representation for a 3D line consisting of three parametric SymPy
expressions and a range."""
is_parametric = True
def __init__(self, expr_x, expr_y, expr_z, var_start_end, **kwargs):
super().__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.expr_z = sympify(expr_z)
        self.label = kwargs.get('label', None) or \
            Tuple(self.expr_x, self.expr_y, self.expr_z)
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.line_color = kwargs.get('line_color', None)
self._xlim = None
self._ylim = None
self._zlim = None
def __str__(self):
return '3D parametric cartesian line: (%s, %s, %s) for %s over %s' % (
str(self.expr_x), str(self.expr_y), str(self.expr_z),
str(self.var), str((self.start, self.end)))
def get_parameter_points(self):
np = import_module('numpy')
return np.linspace(self.start, self.end, num=self.nb_of_points)
def get_points(self):
np = import_module('numpy')
param = self.get_parameter_points()
fx = vectorized_lambdify([self.var], self.expr_x)
fy = vectorized_lambdify([self.var], self.expr_y)
fz = vectorized_lambdify([self.var], self.expr_z)
list_x = fx(param)
list_y = fy(param)
list_z = fz(param)
list_x = np.array(list_x, dtype=np.float64)
list_y = np.array(list_y, dtype=np.float64)
list_z = np.array(list_z, dtype=np.float64)
list_x = np.ma.masked_invalid(list_x)
list_y = np.ma.masked_invalid(list_y)
list_z = np.ma.masked_invalid(list_z)
self._xlim = (np.amin(list_x), np.amax(list_x))
self._ylim = (np.amin(list_y), np.amax(list_y))
self._zlim = (np.amin(list_z), np.amax(list_z))
return list_x, list_y, list_z
### Surfaces
class SurfaceBaseSeries(BaseSeries):
"""A base class for 3D surfaces."""
is_3Dsurface = True
def __init__(self):
super().__init__()
self.surface_color = None
def get_color_array(self):
np = import_module('numpy')
c = self.surface_color
if isinstance(c, Callable):
f = np.vectorize(c)
nargs = arity(c)
if self.is_parametric:
variables = list(map(centers_of_faces, self.get_parameter_meshes()))
if nargs == 1:
return f(variables[0])
elif nargs == 2:
return f(*variables)
variables = list(map(centers_of_faces, self.get_meshes()))
if nargs == 1:
return f(variables[0])
elif nargs == 2:
return f(*variables[:2])
else:
return f(*variables)
else:
if isinstance(self, SurfaceOver2DRangeSeries):
return c*np.ones(min(self.nb_of_points_x, self.nb_of_points_y))
else:
return c*np.ones(min(self.nb_of_points_u, self.nb_of_points_v))
class SurfaceOver2DRangeSeries(SurfaceBaseSeries):
"""Representation for a 3D surface consisting of a SymPy expression and 2D
range."""
def __init__(self, expr, var_start_end_x, var_start_end_y, **kwargs):
super().__init__()
self.expr = sympify(expr)
self.var_x = sympify(var_start_end_x[0])
self.start_x = float(var_start_end_x[1])
self.end_x = float(var_start_end_x[2])
self.var_y = sympify(var_start_end_y[0])
self.start_y = float(var_start_end_y[1])
self.end_y = float(var_start_end_y[2])
self.nb_of_points_x = kwargs.get('nb_of_points_x', 50)
self.nb_of_points_y = kwargs.get('nb_of_points_y', 50)
self.surface_color = kwargs.get('surface_color', None)
self._xlim = (self.start_x, self.end_x)
self._ylim = (self.start_y, self.end_y)
def __str__(self):
return ('cartesian surface: %s for'
' %s over %s and %s over %s') % (
str(self.expr),
str(self.var_x),
str((self.start_x, self.end_x)),
str(self.var_y),
str((self.start_y, self.end_y)))
def get_meshes(self):
np = import_module('numpy')
mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x,
num=self.nb_of_points_x),
np.linspace(self.start_y, self.end_y,
num=self.nb_of_points_y))
f = vectorized_lambdify((self.var_x, self.var_y), self.expr)
mesh_z = f(mesh_x, mesh_y)
mesh_z = np.array(mesh_z, dtype=np.float64)
mesh_z = np.ma.masked_invalid(mesh_z)
self._zlim = (np.amin(mesh_z), np.amax(mesh_z))
return mesh_x, mesh_y, mesh_z
class ParametricSurfaceSeries(SurfaceBaseSeries):
"""Representation for a 3D surface consisting of three parametric SymPy
expressions and a range."""
is_parametric = True
def __init__(
self, expr_x, expr_y, expr_z, var_start_end_u, var_start_end_v,
**kwargs):
super().__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.expr_z = sympify(expr_z)
self.var_u = sympify(var_start_end_u[0])
self.start_u = float(var_start_end_u[1])
self.end_u = float(var_start_end_u[2])
self.var_v = sympify(var_start_end_v[0])
self.start_v = float(var_start_end_v[1])
self.end_v = float(var_start_end_v[2])
self.nb_of_points_u = kwargs.get('nb_of_points_u', 50)
self.nb_of_points_v = kwargs.get('nb_of_points_v', 50)
self.surface_color = kwargs.get('surface_color', None)
def __str__(self):
return ('parametric cartesian surface: (%s, %s, %s) for'
' %s over %s and %s over %s') % (
str(self.expr_x),
str(self.expr_y),
str(self.expr_z),
str(self.var_u),
str((self.start_u, self.end_u)),
str(self.var_v),
str((self.start_v, self.end_v)))
def get_parameter_meshes(self):
np = import_module('numpy')
return np.meshgrid(np.linspace(self.start_u, self.end_u,
num=self.nb_of_points_u),
np.linspace(self.start_v, self.end_v,
num=self.nb_of_points_v))
def get_meshes(self):
np = import_module('numpy')
mesh_u, mesh_v = self.get_parameter_meshes()
fx = vectorized_lambdify((self.var_u, self.var_v), self.expr_x)
fy = vectorized_lambdify((self.var_u, self.var_v), self.expr_y)
fz = vectorized_lambdify((self.var_u, self.var_v), self.expr_z)
mesh_x = fx(mesh_u, mesh_v)
mesh_y = fy(mesh_u, mesh_v)
mesh_z = fz(mesh_u, mesh_v)
mesh_x = np.array(mesh_x, dtype=np.float64)
mesh_y = np.array(mesh_y, dtype=np.float64)
mesh_z = np.array(mesh_z, dtype=np.float64)
mesh_x = np.ma.masked_invalid(mesh_x)
mesh_y = np.ma.masked_invalid(mesh_y)
mesh_z = np.ma.masked_invalid(mesh_z)
self._xlim = (np.amin(mesh_x), np.amax(mesh_x))
self._ylim = (np.amin(mesh_y), np.amax(mesh_y))
self._zlim = (np.amin(mesh_z), np.amax(mesh_z))
return mesh_x, mesh_y, mesh_z
### Contours
class ContourSeries(BaseSeries):
"""Representation for a contour plot."""
# The code is mostly repetition of SurfaceOver2DRange.
# Presently used in contour_plot function
is_contour = True
def __init__(self, expr, var_start_end_x, var_start_end_y):
super().__init__()
self.nb_of_points_x = 50
self.nb_of_points_y = 50
self.expr = sympify(expr)
self.var_x = sympify(var_start_end_x[0])
self.start_x = float(var_start_end_x[1])
self.end_x = float(var_start_end_x[2])
self.var_y = sympify(var_start_end_y[0])
self.start_y = float(var_start_end_y[1])
self.end_y = float(var_start_end_y[2])
self.get_points = self.get_meshes
self._xlim = (self.start_x, self.end_x)
self._ylim = (self.start_y, self.end_y)
def __str__(self):
return ('contour: %s for '
'%s over %s and %s over %s') % (
str(self.expr),
str(self.var_x),
str((self.start_x, self.end_x)),
str(self.var_y),
str((self.start_y, self.end_y)))
def get_meshes(self):
np = import_module('numpy')
mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x,
num=self.nb_of_points_x),
np.linspace(self.start_y, self.end_y,
num=self.nb_of_points_y))
f = vectorized_lambdify((self.var_x, self.var_y), self.expr)
return (mesh_x, mesh_y, f(mesh_x, mesh_y))
##############################################################################
# Backends
##############################################################################
class BaseBackend:
"""Base class for all backends. A backend represents the plotting library,
which implements the necessary functionalities in order to use SymPy
plotting functions.
How the plotting module works:
1. Whenever a plotting function is called, the provided expressions are
processed and a list of instances of the `BaseSeries` class is created,
containing the necessary information to plot the expressions (eg the
expression, ranges, series name, ...). Eventually, these objects will
generate the numerical data to be plotted.
2. A Plot object is instantiated, which stores the list of series and the
main attributes of the plot (eg axis labels, title, ...).
3. When the "show" command is executed, a new backend is instantiated,
which loops through each series object to generate and plot the
numerical data. The backend is also going to set the axis labels, title,
..., according to the values stored in the Plot instance.
The backend should check if it supports the data series that it's given
(eg TextBackend supports only LineOver1DRange).
It's the backend responsibility to know how to use the class of data series
that it's given. Note that the current implementation of the `*Series`
classes is "matplotlib-centric": the numerical data returned by the
`get_points` and `get_meshes` methods is meant to be used directly by
Matplotlib. Therefore, the new backend will have to pre-process the
numerical data to make it compatible with the chosen plotting library.
Keep in mind that future SymPy versions may improve the `*Series` classes in
order to return numerical data "non-matplotlib-centric", hence if you code
a new backend you have the responsibility to check if its working on each
SymPy release.
Please, explore the `MatplotlibBackend` source code to understand how a
backend should be coded.
Methods
=======
In order to be used by SymPy plotting functions, a backend must implement
the following methods:
* `show(self)`: used to loop over the data series, generate the numerical
data, plot it and set the axis labels, title, ...
* save(self, path): used to save the current plot to the specified file
path.
* close(self): used to close the current plot backend (note: some plotting
library does not support this functionality. In that case, just raise a
warning).
See also
========
MatplotlibBackend
"""
def __init__(self, parent):
super().__init__()
self.parent = parent
def show(self):
raise NotImplementedError
def save(self, path):
raise NotImplementedError
def close(self):
raise NotImplementedError
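# A minimal custom-backend sketch following the contract above (hypothetical,
# for illustration only -- it prints each series instead of drawing it):
#
#     class EchoBackend(BaseBackend):
#         def show(self):
#             for s in self.parent._series:
#                 print(s)
#         def save(self, path):
#             pass
#         def close(self):
#             pass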
# Don't have to check for the success of importing matplotlib in each case;
# we will only be using this backend if we can successfully import matplotlib
class MatplotlibBackend(BaseBackend):
""" This class implements the functionalities to use Matplotlib with SymPy
plotting functions.
"""
def __init__(self, parent):
super().__init__(parent)
self.matplotlib = import_module('matplotlib',
import_kwargs={'fromlist': ['pyplot', 'cm', 'collections']},
min_module_version='1.1.0', catch=(RuntimeError,))
self.plt = self.matplotlib.pyplot
self.cm = self.matplotlib.cm
self.LineCollection = self.matplotlib.collections.LineCollection
aspect = getattr(self.parent, 'aspect_ratio', 'auto')
if aspect != 'auto':
aspect = float(aspect[1]) / aspect[0]
if isinstance(self.parent, Plot):
nrows, ncolumns = 1, 1
series_list = [self.parent._series]
elif isinstance(self.parent, PlotGrid):
nrows, ncolumns = self.parent.nrows, self.parent.ncolumns
series_list = self.parent._series
self.ax = []
self.fig = self.plt.figure(figsize=parent.size)
for i, series in enumerate(series_list):
are_3D = [s.is_3D for s in series]
if any(are_3D) and not all(are_3D):
raise ValueError('The matplotlib backend cannot mix 2D and 3D.')
elif all(are_3D):
# mpl_toolkits.mplot3d is necessary for
# projection='3d'
mpl_toolkits = import_module('mpl_toolkits', # noqa
import_kwargs={'fromlist': ['mplot3d']})
self.ax.append(self.fig.add_subplot(nrows, ncolumns, i + 1, projection='3d', aspect=aspect))
elif not any(are_3D):
self.ax.append(self.fig.add_subplot(nrows, ncolumns, i + 1, aspect=aspect))
self.ax[i].spines['left'].set_position('zero')
self.ax[i].spines['right'].set_color('none')
self.ax[i].spines['bottom'].set_position('zero')
self.ax[i].spines['top'].set_color('none')
self.ax[i].xaxis.set_ticks_position('bottom')
self.ax[i].yaxis.set_ticks_position('left')
@staticmethod
def get_segments(x, y, z=None):
""" Convert two list of coordinates to a list of segments to be used
with Matplotlib's LineCollection.
Parameters
==========
x: list
List of x-coordinates
y: list
List of y-coordinates
z: list
List of z-coordinates for a 3D line.
"""
np = import_module('numpy')
if z is not None:
dim = 3
points = (x, y, z)
else:
dim = 2
points = (x, y)
points = np.ma.array(points).T.reshape(-1, 1, dim)
return np.ma.concatenate([points[:-1], points[1:]], axis=1)
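    # For example (a sketch): x = [0, 1, 2], y = [0, 1, 4] produces the two
    # segments [[(0, 0), (1, 1)], [(1, 1), (2, 4)]] as a masked array of
    # shape (2, 2, 2).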
def _process_series(self, series, ax, parent):
np = import_module('numpy')
mpl_toolkits = import_module(
'mpl_toolkits', import_kwargs={'fromlist': ['mplot3d']})
# XXX Workaround for matplotlib issue
# https://github.com/matplotlib/matplotlib/issues/17130
xlims, ylims, zlims = [], [], []
for s in series:
# Create the collections
if s.is_2Dline:
x, y = s.get_data()
if (isinstance(s.line_color, (int, float)) or
callable(s.line_color)):
segments = self.get_segments(x, y)
collection = self.LineCollection(segments)
collection.set_array(s.get_color_array())
ax.add_collection(collection)
else:
lbl = _str_or_latex(s.label)
line, = ax.plot(x, y, label=lbl, color=s.line_color)
elif s.is_contour:
ax.contour(*s.get_meshes())
elif s.is_3Dline:
x, y, z = s.get_data()
if (isinstance(s.line_color, (int, float)) or
callable(s.line_color)):
art3d = mpl_toolkits.mplot3d.art3d
segments = self.get_segments(x, y, z)
collection = art3d.Line3DCollection(segments)
collection.set_array(s.get_color_array())
ax.add_collection(collection)
else:
lbl = _str_or_latex(s.label)
ax.plot(x, y, z, label=lbl, color=s.line_color)
xlims.append(s._xlim)
ylims.append(s._ylim)
zlims.append(s._zlim)
elif s.is_3Dsurface:
x, y, z = s.get_meshes()
collection = ax.plot_surface(x, y, z,
cmap=getattr(self.cm, 'viridis', self.cm.jet),
rstride=1, cstride=1, linewidth=0.1)
if isinstance(s.surface_color, (float, int, Callable)):
color_array = s.get_color_array()
color_array = color_array.reshape(color_array.size)
collection.set_array(color_array)
else:
collection.set_color(s.surface_color)
xlims.append(s._xlim)
ylims.append(s._ylim)
zlims.append(s._zlim)
elif s.is_implicit:
points = s.get_raster()
if len(points) == 2:
# interval math plotting
x, y = _matplotlib_list(points[0])
ax.fill(x, y, facecolor=s.line_color, edgecolor='None')
else:
# use contourf or contour depending on whether it is
# an inequality or equality.
# XXX: ``contour`` plots multiple lines. Should be fixed.
ListedColormap = self.matplotlib.colors.ListedColormap
colormap = ListedColormap(["white", s.line_color])
xarray, yarray, zarray, plot_type = points
if plot_type == 'contour':
ax.contour(xarray, yarray, zarray, cmap=colormap,
label=_str_or_latex(s.label))
else:
ax.contourf(xarray, yarray, zarray, cmap=colormap,
label=_str_or_latex(s.label))
else:
raise NotImplementedError(
'{} is not supported in the SymPy plotting module '
'with matplotlib backend. Please report this issue.'
                    .format(s))
Axes3D = mpl_toolkits.mplot3d.Axes3D
if not isinstance(ax, Axes3D):
ax.autoscale_view(
scalex=ax.get_autoscalex_on(),
scaley=ax.get_autoscaley_on())
else:
# XXX Workaround for matplotlib issue
# https://github.com/matplotlib/matplotlib/issues/17130
if xlims:
xlims = np.array(xlims)
xlim = (np.amin(xlims[:, 0]), np.amax(xlims[:, 1]))
ax.set_xlim(xlim)
else:
ax.set_xlim([0, 1])
if ylims:
ylims = np.array(ylims)
ylim = (np.amin(ylims[:, 0]), np.amax(ylims[:, 1]))
ax.set_ylim(ylim)
else:
ax.set_ylim([0, 1])
            if zlims:
                zlims = np.array(zlims)
                zlim = (np.amin(zlims[:, 0]), np.amax(zlims[:, 1]))
                ax.set_zlim(zlim)
            else:
                ax.set_zlim([0, 1])
# Set global options.
# TODO The 3D stuff
# XXX The order of those is important.
if parent.xscale and not isinstance(ax, Axes3D):
ax.set_xscale(parent.xscale)
if parent.yscale and not isinstance(ax, Axes3D):
ax.set_yscale(parent.yscale)
if not isinstance(ax, Axes3D) or self.matplotlib.__version__ >= '1.2.0': # XXX in the distant future remove this check
ax.set_autoscale_on(parent.autoscale)
if parent.axis_center:
val = parent.axis_center
if isinstance(ax, Axes3D):
pass
elif val == 'center':
ax.spines['left'].set_position('center')
ax.spines['bottom'].set_position('center')
elif val == 'auto':
xl, xh = ax.get_xlim()
yl, yh = ax.get_ylim()
pos_left = ('data', 0) if xl*xh <= 0 else 'center'
pos_bottom = ('data', 0) if yl*yh <= 0 else 'center'
ax.spines['left'].set_position(pos_left)
ax.spines['bottom'].set_position(pos_bottom)
else:
ax.spines['left'].set_position(('data', val[0]))
ax.spines['bottom'].set_position(('data', val[1]))
if not parent.axis:
ax.set_axis_off()
if parent.legend:
if ax.legend():
ax.legend_.set_visible(parent.legend)
if parent.margin:
ax.set_xmargin(parent.margin)
ax.set_ymargin(parent.margin)
if parent.title:
ax.set_title(parent.title)
if parent.xlabel:
xlbl = _str_or_latex(parent.xlabel)
ax.set_xlabel(xlbl, position=(1, 0))
if parent.ylabel:
ylbl = _str_or_latex(parent.ylabel)
ax.set_ylabel(ylbl, position=(0, 1))
if isinstance(ax, Axes3D) and parent.zlabel:
zlbl = _str_or_latex(parent.zlabel)
ax.set_zlabel(zlbl, position=(0, 1))
if parent.annotations:
for a in parent.annotations:
ax.annotate(**a)
if parent.markers:
for marker in parent.markers:
# make a copy of the marker dictionary
# so that it doesn't get altered
m = marker.copy()
args = m.pop('args')
ax.plot(*args, **m)
if parent.rectangles:
for r in parent.rectangles:
rect = self.matplotlib.patches.Rectangle(**r)
ax.add_patch(rect)
if parent.fill:
ax.fill_between(**parent.fill)
        # xlim and ylim should always be set last so that the plot limits
        # don't get altered during the process.
if parent.xlim:
ax.set_xlim(parent.xlim)
if parent.ylim:
ax.set_ylim(parent.ylim)
def process_series(self):
"""
Iterates over every ``Plot`` object and further calls
_process_series()
"""
parent = self.parent
if isinstance(parent, Plot):
series_list = [parent._series]
else:
series_list = parent._series
for i, (series, ax) in enumerate(zip(series_list, self.ax)):
if isinstance(self.parent, PlotGrid):
parent = self.parent.args[i]
self._process_series(series, ax, parent)
def show(self):
self.process_series()
#TODO after fixing https://github.com/ipython/ipython/issues/1255
# you can uncomment the next line and remove the pyplot.show() call
#self.fig.show()
if _show:
self.fig.tight_layout()
self.plt.show()
else:
self.close()
def save(self, path):
self.process_series()
self.fig.savefig(path)
def close(self):
self.plt.close(self.fig)
class TextBackend(BaseBackend):
def __init__(self, parent):
super().__init__(parent)
def show(self):
if not _show:
return
if len(self.parent._series) != 1:
raise ValueError(
'The TextBackend supports only one graph per Plot.')
elif not isinstance(self.parent._series[0], LineOver1DRangeSeries):
raise ValueError(
'The TextBackend supports only expressions over a 1D range')
else:
ser = self.parent._series[0]
textplot(ser.expr, ser.start, ser.end)
def close(self):
pass
class DefaultBackend(BaseBackend):
def __new__(cls, parent):
matplotlib = import_module('matplotlib', min_module_version='1.1.0', catch=(RuntimeError,))
if matplotlib:
return MatplotlibBackend(parent)
else:
return TextBackend(parent)
plot_backends = {
'matplotlib': MatplotlibBackend,
'text': TextBackend,
'default': DefaultBackend
}
##############################################################################
# Finding the centers of line segments or mesh faces
##############################################################################
def centers_of_segments(array):
np = import_module('numpy')
return np.mean(np.vstack((array[:-1], array[1:])), 0)
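# For example (a sketch): centers_of_segments(np.array([0., 1., 3.]))
# returns array([0.5, 2.]), the midpoints of the two segments.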
def centers_of_faces(array):
np = import_module('numpy')
    return np.mean(np.dstack((array[:-1, :-1],
                              array[1:, :-1],
                              array[:-1, 1:],
                              array[1:, 1:],
                              )), 2)
def flat(x, y, z, eps=1e-3):
"""Checks whether three points are almost collinear"""
np = import_module('numpy')
# Workaround plotting piecewise (#8577):
# workaround for `lambdify` in `.experimental_lambdify` fails
# to return numerical values in some cases. Lower-level fix
# in `lambdify` is possible.
vector_a = (x - y).astype(np.float64)
vector_b = (z - y).astype(np.float64)
dot_product = np.dot(vector_a, vector_b)
vector_a_norm = np.linalg.norm(vector_a)
vector_b_norm = np.linalg.norm(vector_b)
cos_theta = dot_product / (vector_a_norm * vector_b_norm)
return abs(cos_theta + 1) < eps
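# For example (a sketch): flat(np.array([0., 0.]), np.array([1., 1.]),
# np.array([2., 2.])) is True: the vectors from y to x and from y to z are
# anti-parallel (cos_theta == -1), so the three points are collinear.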
def _matplotlib_list(interval_list):
"""
Returns lists for matplotlib ``fill`` command from a list of bounding
rectangular intervals
"""
xlist = []
ylist = []
if len(interval_list):
for intervals in interval_list:
intervalx = intervals[0]
intervaly = intervals[1]
xlist.extend([intervalx.start, intervalx.start,
intervalx.end, intervalx.end, None])
ylist.extend([intervaly.start, intervaly.end,
intervaly.end, intervaly.start, None])
else:
#XXX Ugly hack. Matplotlib does not accept empty lists for ``fill``
xlist.extend((None, None, None, None))
ylist.extend((None, None, None, None))
return xlist, ylist
#### New API for plotting module ####
# TODO: Add color arrays for plots.
# TODO: Add more plotting options for 3d plots.
# TODO: Adaptive sampling for 3D plots.
def plot(*args, show=True, **kwargs):
"""Plots a function of a single variable as a curve.
Parameters
==========
args :
The first argument is the expression representing the function
        of a single variable to be plotted.
The last argument is a 3-tuple denoting the range of the free
variable. e.g. ``(x, 0, 5)``
        Typical usage examples are the following:
- Plotting a single expression with a single range.
``plot(expr, range, **kwargs)``
- Plotting a single expression with the default range (-10, 10).
``plot(expr, **kwargs)``
- Plotting multiple expressions with a single range.
``plot(expr1, expr2, ..., range, **kwargs)``
- Plotting multiple expressions with multiple ranges.
``plot((expr1, range1), (expr2, range2), ..., **kwargs)``
It is best practice to specify range explicitly because default
range may change in the future if a more advanced default range
detection algorithm is implemented.
show : bool, optional
The default value is set to ``True``. Set show to ``False`` and
the function will not display the plot. The returned instance of
the ``Plot`` class can then be used to save or display the plot
by calling the ``save()`` and ``show()`` methods respectively.
line_color : string, or float, or function, optional
Specifies the color for the plot.
See ``Plot`` to see how to set color for the plots.
Note that setting ``line_color`` applies it simultaneously to all of
the series.
title : str, optional
Title of the plot. It is set to the latex representation of
the expression, if the plot has only one expression.
label : str, optional
The label of the expression in the plot. It will be used when
called with ``legend``. Default is the name of the expression.
e.g. ``sin(x)``
xlabel : str or expression, optional
Label for the x-axis.
ylabel : str or expression, optional
Label for the y-axis.
xscale : 'linear' or 'log', optional
Sets the scaling of the x-axis.
yscale : 'linear' or 'log', optional
Sets the scaling of the y-axis.
axis_center : (float, float), optional
Tuple of two floats denoting the coordinates of the center, or one
of the strings 'center' or 'auto'.
xlim : (float, float), optional
Denotes the x-axis limits, ``(min, max)``.
ylim : (float, float), optional
Denotes the y-axis limits, ``(min, max)``.
annotations : list, optional
A list of dictionaries specifying the type of annotation
required. The keys in the dictionary should be equivalent
to the arguments of the matplotlib's annotate() function.
markers : list, optional
A list of dictionaries specifying the type of markers required.
The keys in the dictionary should be equivalent to the arguments
of the matplotlib's plot() function along with the marker
related keyword arguments.
rectangles : list, optional
A list of dictionaries specifying the dimensions of the
rectangles to be plotted. The keys in the dictionary should be
equivalent to the arguments of the matplotlib's
patches.Rectangle class.
fill : dict, optional
A dictionary specifying the type of color filling required in
the plot. The keys in the dictionary should be equivalent to the
arguments of the matplotlib's fill_between() function.
adaptive : bool, optional
The default value is set to ``True``. Set adaptive to ``False``
and specify ``nb_of_points`` if uniform sampling is required.
The plotting uses an adaptive algorithm which samples
recursively to accurately plot. The adaptive algorithm uses a
random point near the midpoint of two points that has to be
further sampled. Hence the same plots can appear slightly
different.
depth : int, optional
Recursion depth of the adaptive algorithm. A depth of value
``n`` samples a maximum of `2^{n}` points.
If the ``adaptive`` flag is set to ``False``, this will be
ignored.
nb_of_points : int, optional
Used when the ``adaptive`` is set to ``False``. The function
is uniformly sampled at ``nb_of_points`` number of points.
If the ``adaptive`` flag is set to ``True``, this will be
ignored.
size : (float, float), optional
A tuple in the form (width, height) in inches to specify the size of
the overall figure. The default value is set to ``None``, meaning
the size will be set by the default backend.
Examples
========
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.plotting import plot
>>> x = symbols('x')
Single Plot
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot(x**2, (x, -5, 5))
Plot object containing:
[0]: cartesian line: x**2 for x over (-5.0, 5.0)
Multiple plots with single range.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot(x, x**2, x**3, (x, -5, 5))
Plot object containing:
[0]: cartesian line: x for x over (-5.0, 5.0)
[1]: cartesian line: x**2 for x over (-5.0, 5.0)
[2]: cartesian line: x**3 for x over (-5.0, 5.0)
Multiple plots with different ranges.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot((x**2, (x, -6, 6)), (x, (x, -5, 5)))
Plot object containing:
[0]: cartesian line: x**2 for x over (-6.0, 6.0)
[1]: cartesian line: x for x over (-5.0, 5.0)
No adaptive sampling.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot(x**2, adaptive=False, nb_of_points=400)
Plot object containing:
[0]: cartesian line: x**2 for x over (-10.0, 10.0)
See Also
========
Plot, LineOver1DRangeSeries
"""
args = list(map(sympify, args))
free = set()
for a in args:
if isinstance(a, Expr):
free |= a.free_symbols
if len(free) > 1:
raise ValueError(
'The same variable should be used in all '
'univariate expressions being plotted.')
x = free.pop() if free else Symbol('x')
kwargs.setdefault('xlabel', x)
kwargs.setdefault('ylabel', Function('f')(x))
series = []
plot_expr = check_arguments(args, 1, 1)
series = [LineOver1DRangeSeries(*arg, **kwargs) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
def plot_parametric(*args, show=True, **kwargs):
"""
Plots a 2D parametric curve.
Parameters
==========
args
Common specifications are:
- Plotting a single parametric curve with a range
``plot_parametric((expr_x, expr_y), range)``
- Plotting multiple parametric curves with the same range
``plot_parametric((expr_x, expr_y), ..., range)``
- Plotting multiple parametric curves with different ranges
``plot_parametric((expr_x, expr_y, range), ...)``
``expr_x`` is the expression representing $x$ component of the
parametric function.
``expr_y`` is the expression representing $y$ component of the
parametric function.
``range`` is a 3-tuple denoting the parameter symbol, start and
stop. For example, ``(u, 0, 5)``.
If the range is not specified, then a default range of (-10, 10)
is used.
However, if the arguments are specified as
``(expr_x, expr_y, range), ...``, you must specify the ranges
for each expression manually.
Default range may change in the future if a more advanced
algorithm is implemented.
adaptive : bool, optional
Specifies whether to use the adaptive sampling or not.
The default value is set to ``True``. Set adaptive to ``False``
and specify ``nb_of_points`` if uniform sampling is required.
depth : int, optional
The recursion depth of the adaptive algorithm. A depth of
value $n$ samples a maximum of $2^n$ points.
nb_of_points : int, optional
Used when the ``adaptive`` flag is set to ``False``.
Specifies the number of the points used for the uniform
sampling.
line_color : string, or float, or function, optional
Specifies the color for the plot.
See ``Plot`` to see how to set color for the plots.
Note that setting ``line_color`` applies it simultaneously to all of
the series.
label : str, optional
The label of the expression in the plot. It will be used when
called with ``legend``. Default is the name of the expression.
e.g. ``sin(x)``
xlabel : str, optional
Label for the x-axis.
ylabel : str, optional
Label for the y-axis.
xscale : 'linear' or 'log', optional
Sets the scaling of the x-axis.
yscale : 'linear' or 'log', optional
Sets the scaling of the y-axis.
axis_center : (float, float), optional
Tuple of two floats denoting the coordinates of the center, or one
of the strings 'center' or 'auto'.
xlim : (float, float), optional
Denotes the x-axis limits, ``(min, max)``.
ylim : (float, float), optional
Denotes the y-axis limits, ``(min, max)``.
size : (float, float), optional
A tuple in the form (width, height) in inches to specify the size of
the overall figure. The default value is set to ``None``, meaning
the size will be set by the default backend.
Examples
========
.. plot::
:context: reset
:format: doctest
:include-source: True
>>> from sympy import plot_parametric, symbols, cos, sin
>>> u = symbols('u')
A parametric plot with a single expression:
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot_parametric((cos(u), sin(u)), (u, -5, 5))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-5.0, 5.0)
A parametric plot with multiple expressions with the same range:
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot_parametric((cos(u), sin(u)), (u, cos(u)), (u, -10, 10))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-10.0, 10.0)
[1]: parametric cartesian line: (u, cos(u)) for u over (-10.0, 10.0)
A parametric plot with multiple expressions with different ranges
for each curve:
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot_parametric((cos(u), sin(u), (u, -5, 5)),
... (cos(u), u, (u, -5, 5)))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-5.0, 5.0)
[1]: parametric cartesian line: (cos(u), u) for u over (-5.0, 5.0)
Notes
=====
The plotting uses an adaptive algorithm which samples recursively to
accurately plot the curve. The adaptive algorithm uses a random point
near the midpoint of two points that has to be further sampled.
Hence, repeating the same plot command can give slightly different
results because of the random sampling.
If there are multiple plots, then the same optional arguments are
applied to all the plots drawn in the same canvas. If you want to
set these options separately, you can index the returned ``Plot``
object and set it.
For example, when you specify ``line_color`` once, it would be
applied simultaneously to both series.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import pi
>>> expr1 = (u, cos(2*pi*u)/2 + 1/2)
>>> expr2 = (u, sin(2*pi*u)/2 + 1/2)
>>> p = plot_parametric(expr1, expr2, (u, 0, 1), line_color='blue')
If you want to specify the line color for the specific series, you
should index each item and apply the property manually.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> p[0].line_color = 'red'
>>> p.show()
See Also
========
Plot, Parametric2DLineSeries
"""
args = list(map(sympify, args))
series = []
plot_expr = check_arguments(args, 2, 1)
series = [Parametric2DLineSeries(*arg, **kwargs) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
def plot3d_parametric_line(*args, show=True, **kwargs):
"""
Plots a 3D parametric line plot.
Usage
=====
Single plot:
``plot3d_parametric_line(expr_x, expr_y, expr_z, range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots.
``plot3d_parametric_line((expr_x, expr_y, expr_z, range), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr_x`` : Expression representing the function along x.
``expr_y`` : Expression representing the function along y.
``expr_z`` : Expression representing the function along z.
``range``: ``(u, 0, 5)``, A 3-tuple denoting the range of the parameter
variable.
Keyword Arguments
=================
Arguments for ``Parametric3DLineSeries`` class.
``nb_of_points``: The range is uniformly sampled at ``nb_of_points``
number of points.
Aesthetics:
``line_color``: string, or float, or function, optional
Specifies the color for the plot.
See ``Plot`` to see how to set color for the plots.
Note that setting ``line_color`` applies it simultaneously to all of
the series.
``label``: str
The label to the plot. It will be used when called with ``legend=True``
to denote the function with the given label in the plot.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class.
``title`` : str. Title of the plot.
``size`` : (float, float), optional
A tuple in the form (width, height) in inches to specify the size of
the overall figure. The default value is set to ``None``, meaning
the size will be set by the default backend.
Examples
========
.. plot::
:context: reset
:format: doctest
:include-source: True
>>> from sympy import symbols, cos, sin
>>> from sympy.plotting import plot3d_parametric_line
>>> u = symbols('u')
Single plot.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot3d_parametric_line(cos(u), sin(u), u, (u, -5, 5))
Plot object containing:
[0]: 3D parametric cartesian line: (cos(u), sin(u), u) for u over (-5.0, 5.0)
Multiple plots.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot3d_parametric_line((cos(u), sin(u), u, (u, -5, 5)),
... (sin(u), u**2, u, (u, -5, 5)))
Plot object containing:
[0]: 3D parametric cartesian line: (cos(u), sin(u), u) for u over (-5.0, 5.0)
[1]: 3D parametric cartesian line: (sin(u), u**2, u) for u over (-5.0, 5.0)
See Also
========
Plot, Parametric3DLineSeries
"""
args = list(map(sympify, args))
series = []
plot_expr = check_arguments(args, 3, 1)
series = [Parametric3DLineSeries(*arg, **kwargs) for arg in plot_expr]
kwargs.setdefault("xlabel", "x")
kwargs.setdefault("ylabel", "y")
kwargs.setdefault("zlabel", "z")
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
def plot3d(*args, show=True, **kwargs):
"""
Plots a 3D surface plot.
Usage
=====
Single plot
``plot3d(expr, range_x, range_y, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plot with the same range.
``plot3d(expr1, expr2, range_x, range_y, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plots with different ranges.
``plot3d((expr1, range_x, range_y), (expr2, range_x, range_y), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr`` : Expression representing the function along x.
``range_x``: (x, 0, 5), A 3-tuple denoting the range of the x
variable.
``range_y``: (y, 0, 5), A 3-tuple denoting the range of the y
variable.
Keyword Arguments
=================
Arguments for ``SurfaceOver2DRangeSeries`` class:
``nb_of_points_x``: int. The x range is sampled uniformly at
``nb_of_points_x`` points.
``nb_of_points_y``: int. The y range is sampled uniformly at
``nb_of_points_y`` points.
Aesthetics:
``surface_color``: Function which returns a float. Specifies the color for
the surface of the plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot.
``size`` : (float, float), optional
A tuple in the form (width, height) in inches to specify the size of the
overall figure. The default value is set to ``None``, meaning the size will
be set by the default backend.
Examples
========
.. plot::
:context: reset
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.plotting import plot3d
>>> x, y = symbols('x y')
Single plot
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot3d(x*y, (x, -5, 5), (y, -5, 5))
Plot object containing:
[0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
Multiple plots with same range
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot3d(x*y, -x*y, (x, -5, 5), (y, -5, 5))
Plot object containing:
[0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
[1]: cartesian surface: -x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
Multiple plots with different ranges.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot3d((x**2 + y**2, (x, -5, 5), (y, -5, 5)),
... (x*y, (x, -3, 3), (y, -3, 3)))
Plot object containing:
[0]: cartesian surface: x**2 + y**2 for x over (-5.0, 5.0) and y over (-5.0, 5.0)
[1]: cartesian surface: x*y for x over (-3.0, 3.0) and y over (-3.0, 3.0)
See Also
========
Plot, SurfaceOver2DRangeSeries
"""
args = list(map(sympify, args))
series = []
plot_expr = check_arguments(args, 1, 2)
series = [SurfaceOver2DRangeSeries(*arg, **kwargs) for arg in plot_expr]
kwargs.setdefault("xlabel", series[0].var_x)
kwargs.setdefault("ylabel", series[0].var_y)
kwargs.setdefault("zlabel", Function('f')(series[0].var_x, series[0].var_y))
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
def plot3d_parametric_surface(*args, show=True, **kwargs):
"""
Plots a 3D parametric surface plot.
Explanation
===========
Single plot.
``plot3d_parametric_surface(expr_x, expr_y, expr_z, range_u, range_v, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plots.
``plot3d_parametric_surface((expr_x, expr_y, expr_z, range_u, range_v), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr_x``: Expression representing the function along ``x``.
``expr_y``: Expression representing the function along ``y``.
``expr_z``: Expression representing the function along ``z``.
``range_u``: ``(u, 0, 5)``, A 3-tuple denoting the range of the ``u``
variable.
``range_v``: ``(v, 0, 5)``, A 3-tuple denoting the range of the ``v``
variable.
Keyword Arguments
=================
Arguments for ``ParametricSurfaceSeries`` class:
``nb_of_points_u``: int. The ``u`` range is sampled uniformly at
``nb_of_points_u`` points.
``nb_of_points_v``: int. The ``v`` range is sampled uniformly at
``nb_of_points_v`` points.
Aesthetics:
``surface_color``: Function which returns a float. Specifies the color for
the surface of the plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied for
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot.
``size`` : (float, float), optional
A tuple in the form (width, height) in inches to specify the size of the
overall figure. The default value is set to ``None``, meaning the size will
be set by the default backend.
Examples
========
.. plot::
:context: reset
:format: doctest
:include-source: True
>>> from sympy import symbols, cos, sin
>>> from sympy.plotting import plot3d_parametric_surface
>>> u, v = symbols('u v')
Single plot.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> plot3d_parametric_surface(cos(u + v), sin(u - v), u - v,
... (u, -5, 5), (v, -5, 5))
Plot object containing:
[0]: parametric cartesian surface: (cos(u + v), sin(u - v), u - v) for u over (-5.0, 5.0) and v over (-5.0, 5.0)
See Also
========
Plot, ParametricSurfaceSeries
"""
args = list(map(sympify, args))
series = []
plot_expr = check_arguments(args, 3, 2)
series = [ParametricSurfaceSeries(*arg, **kwargs) for arg in plot_expr]
kwargs.setdefault("xlabel", "x")
kwargs.setdefault("ylabel", "y")
kwargs.setdefault("zlabel", "z")
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
def plot_contour(*args, show=True, **kwargs):
"""
Draws contour plot of a function
Usage
=====
Single plot
``plot_contour(expr, range_x, range_y, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plot with the same range.
``plot_contour(expr1, expr2, range_x, range_y, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plots with different ranges.
``plot_contour((expr1, range_x, range_y), (expr2, range_x, range_y), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr`` : Expression representing the function along x.
``range_x``: (x, 0, 5), A 3-tuple denoting the range of the x
variable.
``range_y``: (y, 0, 5), A 3-tuple denoting the range of the y
variable.
Keyword Arguments
=================
Arguments for ``ContourSeries`` class:
``nb_of_points_x``: int. The x range is sampled uniformly at
``nb_of_points_x`` points.
``nb_of_points_y``: int. The y range is sampled uniformly at
``nb_of_points_y`` points.
Aesthetics:
``surface_color``: Function which returns a float. Specifies the color for
the surface of the plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot.
``size`` : (float, float), optional
A tuple in the form (width, height) in inches to specify the size of
the overall figure. The default value is set to ``None``, meaning
the size will be set by the default backend.
See Also
========
Plot, ContourSeries
"""
args = list(map(sympify, args))
plot_expr = check_arguments(args, 1, 2)
series = [ContourSeries(*arg) for arg in plot_expr]
plot_contours = Plot(*series, **kwargs)
if len(plot_expr[0].free_symbols) > 2:
raise ValueError('Contour plots cannot be created for more than two variables.')
if show:
plot_contours.show()
return plot_contours
def check_arguments(args, expr_len, nb_of_free_symbols):
"""
Checks the arguments and converts into tuples of the
form (exprs, ranges).
Examples
========
.. plot::
:context: reset
:format: doctest
:include-source: True
>>> from sympy import cos, sin, symbols
>>> from sympy.plotting.plot import check_arguments
>>> x = symbols('x')
>>> check_arguments([cos(x), sin(x)], 2, 1)
[(cos(x), sin(x), (x, -10, 10))]
>>> check_arguments([x, x**2], 1, 1)
[(x, (x, -10, 10)), (x**2, (x, -10, 10))]
"""
if not args:
return []
if expr_len > 1 and isinstance(args[0], Expr):
# Multiple expressions same range.
# The arguments are tuples when the expression length is
# greater than 1.
if len(args) < expr_len:
raise ValueError("len(args) should not be less than expr_len")
for i in range(len(args)):
if isinstance(args[i], Tuple):
break
else:
i = len(args) + 1
exprs = Tuple(*args[:i])
free_symbols = list(set().union(*[e.free_symbols for e in exprs]))
if len(args) == expr_len + nb_of_free_symbols:
#Ranges given
plots = [exprs + Tuple(*args[expr_len:])]
else:
default_range = Tuple(-10, 10)
ranges = []
for symbol in free_symbols:
ranges.append(Tuple(symbol) + default_range)
for i in range(len(free_symbols) - nb_of_free_symbols):
ranges.append(Tuple(Dummy()) + default_range)
plots = [exprs + Tuple(*ranges)]
return plots
if isinstance(args[0], Expr) or (isinstance(args[0], Tuple) and
len(args[0]) == expr_len and
expr_len != 3):
# Cannot handle the case of three expressions, since it is
# not possible to differentiate between expressions and ranges.
#Series of plots with same range
for i in range(len(args)):
if isinstance(args[i], Tuple) and len(args[i]) != expr_len:
break
if not isinstance(args[i], Tuple):
args[i] = Tuple(args[i])
else:
i = len(args) + 1
exprs = args[:i]
assert all(isinstance(e, Expr) for expr in exprs for e in expr)
free_symbols = list(set().union(*[e.free_symbols for expr in exprs
for e in expr]))
if len(free_symbols) > nb_of_free_symbols:
raise ValueError("The number of free_symbols in the expression "
"is greater than %d" % nb_of_free_symbols)
if len(args) == i + nb_of_free_symbols and isinstance(args[i], Tuple):
ranges = Tuple(*[range_expr for range_expr in args[
i:i + nb_of_free_symbols]])
plots = [expr + ranges for expr in exprs]
return plots
else:
# Use default ranges.
default_range = Tuple(-10, 10)
ranges = []
for symbol in free_symbols:
ranges.append(Tuple(symbol) + default_range)
for i in range(nb_of_free_symbols - len(free_symbols)):
ranges.append(Tuple(Dummy()) + default_range)
ranges = Tuple(*ranges)
plots = [expr + ranges for expr in exprs]
return plots
elif isinstance(args[0], Tuple) and len(args[0]) == expr_len + nb_of_free_symbols:
# Multiple plots with different ranges.
for arg in args:
for i in range(expr_len):
if not isinstance(arg[i], Expr):
raise ValueError("Expected an expression, given %s" %
str(arg[i]))
for i in range(nb_of_free_symbols):
if not len(arg[i + expr_len]) == 3:
raise ValueError("The ranges should be a tuple of "
"length 3, got %s" % str(arg[i + expr_len]))
return args | zlims = np.array(zlims)
zlim = (np.amin(zlims[:, 0]), np.amax(zlims[:, 1]))
ax.set_zlim(zlim)
else:
|
discount.py | from datetime import date
from functools import wraps
from django.contrib import messages
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.utils.translation import pgettext
from django.views.decorators.http import require_POST
from ...discount.models import Voucher
from ..forms import CheckoutVoucherForm
from ..models import Checkout
from ..utils import (
get_or_empty_db_checkout,
get_taxes_for_checkout,
recalculate_checkout_discount,
remove_voucher_from_checkout,
)
def add_voucher_form(view):
"""Decorate a view injecting a voucher form and handling its submission."""
@wraps(view)
def func(request, checkout):
prefix = "discount"
data = {k: v for k, v in request.POST.items() if k.startswith(prefix)}
voucher_form = CheckoutVoucherForm(
data or None, prefix=prefix, instance=checkout
)
if voucher_form.is_bound:
if voucher_form.is_valid():
voucher_form.save()
next_url = request.GET.get("next", request.META["HTTP_REFERER"])
return redirect(next_url)
else:
remove_voucher_from_checkout(checkout)
# if only the discount form was used, clear POST data for the other forms
request.POST = {}
else:
taxes = get_taxes_for_checkout(checkout, request.taxes)
recalculate_checkout_discount(checkout, request.discounts, taxes)
response = view(request, checkout)
if isinstance(response, TemplateResponse):
response.context_data["voucher_form"] = voucher_form
return response
return func
def validate_voucher(view):
"""Decorate a view making it check whether a discount voucher is valid.
If the voucher is invalid it will be removed and the user will be
redirected to the checkout summary view.
"""
@wraps(view)
def func(request, checkout):
if checkout.voucher_code:
try:
Voucher.objects.active(date=date.today()).get(
code=checkout.voucher_code
)
except Voucher.DoesNotExist:
remove_voucher_from_checkout(checkout)
msg = pgettext(
"Checkout warning",
"This voucher has expired. Please review your checkout.",
)
messages.warning(request, msg)
return redirect("checkout:summary")
return view(request, checkout)
return func
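# Hypothetical usage sketch (the view name and template path are assumptions,
# not part of this module): the decorators above are meant to be stacked on
# checkout views, e.g.
#
#   @get_or_empty_db_checkout(Checkout.objects.for_display())
#   @validate_voucher
#   @add_voucher_form
#   def summary_view(request, checkout):
#       return TemplateResponse(request, "checkout/summary.html", {})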
@require_POST
@get_or_empty_db_checkout(Checkout.objects.for_display())
def remove_voucher_view(request, checkout):
"""Clear the discount and remove the voucher."""
next_url = request.GET.get("next", request.META["HTTP_REFERER"])
remove_voucher_from_checkout(checkout) | return redirect(next_url) |
|
nabirds.py | import os
import pandas as pd
import warnings
from torchvision.datasets import VisionDataset
from torchvision.datasets.folder import default_loader
from torchvision.datasets.utils import check_integrity, extract_archive
class NABirds(VisionDataset):
"""`NABirds <https://dl.allaboutbirds.org/nabirds>`_ Dataset.
Args:
root (string): Root directory of the dataset.
train (bool, optional): If True, creates dataset from training set, otherwise
creates from test set.
transform (callable, optional): A function/transform that takes in an PIL image
and returns a transformed version. E.g, ``transforms.RandomCrop``
target_transform (callable, optional): A function/transform that takes in the
target and transforms it.
download (bool, optional): Deprecated. The dataset is no longer publicly
available; download the archives externally and place them in the
root directory. Passing ``True`` raises a ``RuntimeError``.
"""
base_folder = 'nabirds/images'
filename = 'nabirds.tar.gz'
md5 = 'df21a9e4db349a14e2b08adfd45873bd'
def __init__(self, root, train=True, transform=None, target_transform=None, download=None):
super(NABirds, self).__init__(root, transform=transform, target_transform=target_transform)
if download is True:
msg = ("The dataset is no longer publicly accessible. You need to "
"download the archives externally and place them in the root "
"directory.")
raise RuntimeError(msg)
elif download is False:
msg = ("The use of the download flag is deprecated, since the dataset "
"is no longer publicly accessible.")
warnings.warn(msg, RuntimeWarning)
dataset_path = os.path.join(root, 'nabirds')
if not os.path.isdir(dataset_path):
if not check_integrity(os.path.join(root, self.filename), self.md5):
raise RuntimeError('Dataset not found or corrupted.')
extract_archive(os.path.join(root, self.filename))
self.loader = default_loader
self.train = train
image_paths = pd.read_csv(os.path.join(dataset_path, 'images.txt'),
sep=' ', names=['img_id', 'filepath'])
image_class_labels = pd.read_csv(os.path.join(dataset_path, 'image_class_labels.txt'),
sep=' ', names=['img_id', 'target'])
# Since the raw labels are non-continuous, map them to new ones
self.label_map = get_continuous_class_map(image_class_labels['target'])
train_test_split = pd.read_csv(os.path.join(dataset_path, 'train_test_split.txt'),
sep=' ', names=['img_id', 'is_training_img'])
data = image_paths.merge(image_class_labels, on='img_id')
self.data = data.merge(train_test_split, on='img_id')
# Load in the train / test split
if self.train:
self.data = self.data[self.data.is_training_img == 1]
else:
self.data = self.data[self.data.is_training_img == 0]
# Load in the class data
self.class_names = load_class_names(dataset_path)
self.class_hierarchy = load_hierarchy(dataset_path)
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
sample = self.data.iloc[idx]
path = os.path.join(self.root, self.base_folder, sample.filepath)
target = self.label_map[sample.target]
img = self.loader(path)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def get_continuous_class_map(class_labels):
label_set = set(class_labels)
return {k: i for i, k in enumerate(label_set)}
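# Illustrative sketch (hypothetical ids): raw labels such as {3, 7, 9} are
# remapped to contiguous indices, e.g. {3: 0, 7: 1, 9: 2}; the exact mapping
# follows set iteration order.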
def load_class_names(dataset_path=''):
names = {}
with open(os.path.join(dataset_path, 'classes.txt')) as f:
for line in f:
pieces = line.strip().split()
class_id = pieces[0]
names[class_id] = ' '.join(pieces[1:])
return names
def | (dataset_path=''):
parents = {}
with open(os.path.join(dataset_path, 'hierarchy.txt')) as f:
for line in f:
pieces = line.strip().split()
child_id, parent_id = pieces
parents[child_id] = parent_id
return parents
if __name__ == '__main__':
train_dataset = NABirds('./nabirds', train=True, download=False)
test_dataset = NABirds('./nabirds', train=False, download=False)
| load_hierarchy |
model_list_member_response.go | /*
* Twilio - Ip_messaging
*
* This is the public Twilio REST API.
*
* API version: 1.27.1
* Contact: [email protected]
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package openapi
// ListMemberResponse struct for ListMemberResponse
type ListMemberResponse struct {
Members []IpMessagingV1Member `json:"members,omitempty"` | Meta ListCredentialResponseMeta `json:"meta,omitempty"`
} |
|
index.ts | import { constants, providers } from 'ethers';
import {
BaseDebtToken,
BaseDebtTokenInterface,
} from '../baseDebtToken-contract';
import BaseService from '../commons/BaseService';
import {
eEthereumTxType,
EthereumTransactionTypeExtended,
InterestRate,
ProtocolAction,
tEthereumAddress,
transactionType,
} from '../commons/types';
import { valueToWei } from '../commons/utils';
import { WETHValidator } from '../commons/validators/methodValidators';
import {
is0OrPositiveAmount,
isEthAddress,
isPositiveAmount,
isPositiveOrMinusOneAmount,
} from '../commons/validators/paramValidators';
import { IERC20ServiceInterface } from '../erc20-contract';
import { IWETHGateway } from './typechain/IWETHGateway';
import { IWETHGateway__factory } from './typechain/IWETHGateway__factory';
export type WETHDepositParamsType = {
lendingPool: tEthereumAddress;
user: tEthereumAddress;
amount: string; // amount in normal (human-readable) units; converted to wei internally
onBehalfOf?: tEthereumAddress;
referralCode?: string;
};
export type WETHWithdrawParamsType = {
lendingPool: tEthereumAddress;
user: tEthereumAddress;
amount: string;
lTokenAddress: tEthereumAddress;
onBehalfOf?: tEthereumAddress;
};
export type WETHRepayParamsType = {
lendingPool: tEthereumAddress;
user: tEthereumAddress;
amount: string;
interestRateMode: InterestRate;
onBehalfOf?: tEthereumAddress;
};
export type WETHBorrowParamsType = {
lendingPool: tEthereumAddress;
user: tEthereumAddress;
amount: string;
debtTokenAddress: tEthereumAddress;
interestRateMode: InterestRate;
referralCode?: string;
};
export interface WETHGatewayInterface {
depositETH: (
args: WETHDepositParamsType,
) => EthereumTransactionTypeExtended[];
withdrawETH: (
args: WETHWithdrawParamsType,
) => Promise<EthereumTransactionTypeExtended[]>;
repayETH: (args: WETHRepayParamsType) => EthereumTransactionTypeExtended[];
borrowETH: (
args: WETHBorrowParamsType,
) => Promise<EthereumTransactionTypeExtended[]>;
}
export class |
extends BaseService<IWETHGateway>
implements WETHGatewayInterface
{
readonly wethGatewayAddress: string;
readonly baseDebtTokenService: BaseDebtTokenInterface;
readonly erc20Service: IERC20ServiceInterface;
constructor(
provider: providers.Provider,
erc20Service: IERC20ServiceInterface,
wethGatewayAddress?: string,
) {
super(provider, IWETHGateway__factory);
this.erc20Service = erc20Service;
this.baseDebtTokenService = new BaseDebtToken(
this.provider,
this.erc20Service,
);
this.wethGatewayAddress = wethGatewayAddress ?? '';
this.depositETH = this.depositETH.bind(this);
this.withdrawETH = this.withdrawETH.bind(this);
this.repayETH = this.repayETH.bind(this);
this.borrowETH = this.borrowETH.bind(this);
}
@WETHValidator
public depositETH(
@isEthAddress('lendingPool')
@isEthAddress('user')
@isEthAddress('onBehalfOf')
@isPositiveAmount('amount')
@is0OrPositiveAmount('referralCode')
{
lendingPool,
user,
amount,
onBehalfOf,
referralCode,
}: WETHDepositParamsType,
): EthereumTransactionTypeExtended[] {
const convertedAmount: string = valueToWei(amount, 18);
const wethGatewayContract: IWETHGateway = this.getContractInstance(
this.wethGatewayAddress,
);
const txCallback: () => Promise<transactionType> = this.generateTxCallback({
rawTxMethod: async () =>
wethGatewayContract.populateTransaction.depositETH(
lendingPool,
onBehalfOf ?? user,
referralCode ?? '0',
),
from: user,
value: convertedAmount,
});
return [
{
tx: txCallback,
txType: eEthereumTxType.DLP_ACTION,
gas: this.generateTxPriceEstimation([], txCallback),
},
];
}
@WETHValidator
public async borrowETH(
@isEthAddress('lendingPool')
@isEthAddress('user')
@isPositiveAmount('amount')
@isEthAddress('debtTokenAddress')
@is0OrPositiveAmount('referralCode')
{
lendingPool,
user,
amount,
debtTokenAddress,
interestRateMode,
referralCode,
}: WETHBorrowParamsType,
): Promise<EthereumTransactionTypeExtended[]> {
const txs: EthereumTransactionTypeExtended[] = [];
const convertedAmount: string = valueToWei(amount, 18);
const numericRateMode = interestRateMode === InterestRate.Variable ? 2 : 1;
const delegationApproved: boolean =
await this.baseDebtTokenService.isDelegationApproved({
debtTokenAddress,
allowanceGiver: user,
allowanceReceiver: this.wethGatewayAddress,
amount,
});
if (!delegationApproved) {
const approveDelegationTx: EthereumTransactionTypeExtended =
this.baseDebtTokenService.approveDelegation({
user,
delegatee: this.wethGatewayAddress,
debtTokenAddress,
amount: constants.MaxUint256.toString(),
});
txs.push(approveDelegationTx);
}
const wethGatewayContract: IWETHGateway = this.getContractInstance(
this.wethGatewayAddress,
);
const txCallback: () => Promise<transactionType> = this.generateTxCallback({
rawTxMethod: async () =>
wethGatewayContract.populateTransaction.borrowETH(
lendingPool,
convertedAmount,
numericRateMode,
referralCode ?? '0',
),
from: user,
});
txs.push({
tx: txCallback,
txType: eEthereumTxType.DLP_ACTION,
gas: this.generateTxPriceEstimation(
txs,
txCallback,
ProtocolAction.borrowETH,
),
});
return txs;
}
@WETHValidator
public async withdrawETH(
@isEthAddress('lendingPool')
@isEthAddress('user')
@isEthAddress('onBehalfOf')
@isPositiveOrMinusOneAmount('amount')
@isEthAddress('lTokenAddress')
{
lendingPool,
user,
amount,
onBehalfOf,
lTokenAddress,
}: WETHWithdrawParamsType,
): Promise<EthereumTransactionTypeExtended[]> {
const txs: EthereumTransactionTypeExtended[] = [];
const { isApproved, approve }: IERC20ServiceInterface = this.erc20Service;
const convertedAmount: string =
amount === '-1'
? constants.MaxUint256.toString()
: valueToWei(amount, 18);
const approved: boolean = await isApproved({
token: lTokenAddress,
user,
spender: this.wethGatewayAddress,
amount,
});
if (!approved) {
const approveTx: EthereumTransactionTypeExtended = approve({
user,
token: lTokenAddress,
spender: this.wethGatewayAddress,
amount: constants.MaxUint256.toString(),
});
txs.push(approveTx);
}
const wethGatewayContract: IWETHGateway = this.getContractInstance(
this.wethGatewayAddress,
);
const txCallback: () => Promise<transactionType> = this.generateTxCallback({
rawTxMethod: async () =>
wethGatewayContract.populateTransaction.withdrawETH(
lendingPool,
convertedAmount,
onBehalfOf ?? user,
),
from: user,
});
txs.push({
tx: txCallback,
txType: eEthereumTxType.DLP_ACTION,
gas: this.generateTxPriceEstimation(
txs,
txCallback,
ProtocolAction.withdrawETH,
),
});
return txs;
}
@WETHValidator
public repayETH(
@isEthAddress('lendingPool')
@isEthAddress('user')
@isEthAddress('onBehalfOf')
@isPositiveAmount('amount')
{
lendingPool,
user,
amount,
interestRateMode,
onBehalfOf,
}: WETHRepayParamsType,
): EthereumTransactionTypeExtended[] {
const convertedAmount: string = valueToWei(amount, 18);
const numericRateMode = interestRateMode === InterestRate.Variable ? 2 : 1;
const wethGatewayContract: IWETHGateway = this.getContractInstance(
this.wethGatewayAddress,
);
const txCallback: () => Promise<transactionType> = this.generateTxCallback({
rawTxMethod: async () =>
wethGatewayContract.populateTransaction.repayETH(
lendingPool,
convertedAmount,
numericRateMode,
onBehalfOf ?? user,
),
gasSurplus: 30,
from: user,
value: convertedAmount,
});
return [
{
tx: txCallback,
txType: eEthereumTxType.DLP_ACTION,
gas: this.generateTxPriceEstimation([], txCallback),
},
];
}
}
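// Hypothetical usage sketch (all addresses and the amount are placeholders,
// not real deployments):
//   const gateway = new WETHGatewayService(provider, erc20Service, '0xGateway...');
//   const txs = await gateway.borrowETH({
//     lendingPool: '0xPool...',
//     user: '0xUser...',
//     amount: '1.5',
//     debtTokenAddress: '0xDebtToken...',
//     interestRateMode: InterestRate.Variable,
//   });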
| WETHGatewayService |
BlogController.py | """ A BlogController Module """
from masonite.controllers import Controller
from masonite.request import Request
from app.Blog import Blog
class BlogController(Controller):
def __init__(self, request: Request):
self.request = request
def show(self):
id = self.request.param("id")
return Blog.find(id)
def index(self):
return Blog.all()
def create(self):
subject = self.request.input("title")
details = self.request.input("body")
blog = Blog.create({"title": subject, "body": details})
return blog
def update(self):
title = self.request.input("title")
body = self.request.input("body")
id = self.request.param("id")
Blog.where("id", id).update({"title": title, "body": body})
return Blog.where("id", id).get()
def destroy(self):
| id = self.request.param("id")
blog = Blog.where("id", id).get()
Blog.where("id", id).delete()
return blog |
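# Hypothetical routing sketch (paths and file layout are assumptions based on
# Masonite conventions, not part of this controller):
#   from masonite.routes import Get, Post, Put, Delete
#   ROUTES = [
#       Get('/blog', 'BlogController@index'),
#       Get('/blog/@id', 'BlogController@show'),
#       Post('/blog', 'BlogController@create'),
#       Put('/blog/@id', 'BlogController@update'),
#       Delete('/blog/@id', 'BlogController@destroy'),
#   ]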
|
serviceDLLs.py | #Copyright 2014 Center for Internet Security - Computer Emergency Response Team (CIS-CERT)
#This is part of the CIS Enumeration and Scanning Program (CIS-ESP)
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import _winreg
from modules import support
def getServiceDLLs(computerName,objRegistry,hostPath):
print computerName + " - checking service DLLs"
outFile = open(hostPath + "\\SERVICEDLLS-" + computerName + ".csv", "w")
outFile.write("service,display_name,service_path,service_dll\n")
key = "SYSTEM\CurrentControlSet\Services"
result,subkeys = objRegistry.EnumKey(hDefKey=_winreg.HKEY_LOCAL_MACHINE,sSubKeyName=key)
if result == 0:
for subkey in subkeys:
display_name = "NULL"
service_path = "NULL"
service_dll = "NULL"
result,valueNames,valueTypes = objRegistry.EnumValues(hDefKey=_winreg.HKEY_LOCAL_MACHINE,sSubKeyName=key+"\\"+subkey)
if result == 0:
if valueNames != None and len(valueNames) > 0:
for value in valueNames:
if value.upper() == "DisplayName".upper():
result,display_name = objRegistry.GetStringValue(hDefKey=_winreg.HKEY_LOCAL_MACHINE,sSubKeyName=key+"\\"+subkey,sValueName=value)
if result != 0:
display_name = "NULL"
elif value.upper() == "ImagePath".upper():
result,service_path = objRegistry.GetStringValue(hDefKey=_winreg.HKEY_LOCAL_MACHINE,sSubKeyName=key+"\\"+subkey,sValueName=value)
if result != 0:
service_path = "NULL"
result,service_dll = objRegistry.GetStringValue(hDefKey=_winreg.HKEY_LOCAL_MACHINE,sSubKeyName=key+"\\"+subkey+"\\Parameters",sValueName="ServiceDll")
if result != 0:
|
display_name = support.convert_to_string(display_name)
service_path = support.convert_to_string(service_path)
service_dll = support.convert_to_string(service_dll)
outFile.write(subkey.replace(","," ") + "," + display_name.replace(","," ") + "," + service_path.replace(","," ") + "," + service_dll.replace(","," ") + "\n")
outFile.close()
| service_dll = "NULL" |
azure_namespace_import.py | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
import sys
import shutil
import asyncio
from azure_functions_worker import protos
from azure_functions_worker import testutils
async def verify_nested_namespace_import():
|
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(verify_nested_namespace_import())
loop.close()
| test_env = {}
request = protos.FunctionEnvironmentReloadRequest(
environment_variables=test_env)
request_msg = protos.StreamingMessage(
request_id='0',
function_environment_reload_request=request)
disp = testutils.create_dummy_dispatcher()
# Mock intepreter starts in placeholder mode
import azure.module_a as mod_a # noqa: F401
# Mock function specialization: load the customer's libraries and function apps
ns_root = os.path.join(
testutils.UNIT_TESTS_ROOT,
'azure_namespace_import',
'namespace_location_b')
test_path = os.path.join(ns_root, 'azure', 'namespace_b', 'module_b')
test_mod_path = os.path.join(test_path, 'test_module.py')
os.makedirs(test_path)
with open(test_mod_path, 'w') as f:
f.write('MESSAGE = "module_b is imported"')
try:
# Mock a customer using test_module
if sys.argv[1].lower() == 'true':
await disp._handle__function_environment_reload_request(
request_msg)
from azure.namespace_b.module_b import test_module
print(test_module.MESSAGE)
except ModuleNotFoundError:
print('module_b fails to import')
finally:
# Cleanup
shutil.rmtree(ns_root) |
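# Usage sketch (flag semantics per the sys.argv check above): pass 'true' to
# exercise the environment-reload path before importing the new namespace:
#   python azure_namespace_import.py true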
tx.go | package utils
import (
"bufio"
"bytes"
"fmt"
"io/ioutil"
"os"
"github.com/pkg/errors"
"github.com/spf13/viper"
"github.com/tendermint/go-amino-x"
"github.com/tendermint/classic/sdk/client/context"
"github.com/tendermint/classic/sdk/client/flags"
"github.com/tendermint/classic/sdk/client/input"
"github.com/tendermint/classic/sdk/client/keys"
sdk "github.com/tendermint/classic/sdk/types"
authtypes "github.com/tendermint/classic/sdk/x/auth/types"
)
// GasEstimateResponse defines a response definition for tx gas estimation.
type GasEstimateResponse struct {
GasEstimate uint64 `json:"gas_estimate" yaml:"gas_estimate"`
}
func (gr GasEstimateResponse) String() string {
return fmt.Sprintf("gas estimate: %d", gr.GasEstimate)
}
// GenerateOrBroadcastMsgs creates a StdTx given a series of messages. If
// the provided context has generate-only enabled, the tx will only be printed
// to STDOUT in a fully offline manner. Otherwise, the tx will be signed and
// broadcasted.
func GenerateOrBroadcastMsgs(cliCtx context.CLIContext, txBldr authtypes.TxBuilder, msgs []sdk.Msg) error {
if cliCtx.GenerateOnly {
return PrintUnsignedStdTx(txBldr, cliCtx, msgs)
}
return CompleteAndBroadcastTxCLI(txBldr, cliCtx, msgs)
}
// CompleteAndBroadcastTxCLI implements a utility function that facilitates
// sending a series of messages in a signed transaction given a TxBuilder and a
// QueryContext. It ensures that the account exists, has a proper number and
// sequence set. In addition, it builds and signs a transaction with the
// supplied messages. Finally, it broadcasts the signed transaction to a node.
func CompleteAndBroadcastTxCLI(txBldr authtypes.TxBuilder, cliCtx context.CLIContext, msgs []sdk.Msg) error {
txBldr, err := PrepareTxBuilder(txBldr, cliCtx)
if err != nil {
return err
}
fromName := cliCtx.GetFromName()
if txBldr.SimulateAndExecute() || cliCtx.Simulate {
txBldr, err = EnrichWithGas(txBldr, cliCtx, msgs)
if err != nil {
return err
}
gasEst := GasEstimateResponse{GasEstimate: txBldr.Gas()}
_, _ = fmt.Fprintf(os.Stderr, "%s\n", gasEst.String())
}
if cliCtx.Simulate {
return nil
}
if !cliCtx.SkipConfirm {
stdSignMsg, err := txBldr.BuildSignMsg(msgs)
if err != nil {
return err
}
var json []byte
if viper.GetBool(flags.FlagIndentResponse) {
json, err = amino.MarshalJSONIndent(stdSignMsg, "", " ")
if err != nil {
panic(err)
}
} else {
json = amino.MustMarshalJSON(stdSignMsg)
}
_, _ = fmt.Fprintf(os.Stderr, "%s\n\n", json)
buf := bufio.NewReader(os.Stdin)
ok, err := input.GetConfirmation("confirm transaction before signing and broadcasting", buf)
if err != nil || !ok {
_, _ = fmt.Fprintf(os.Stderr, "%s\n", "cancelled transaction")
return err
}
} | return err
}
// build and sign the transaction
txBytes, err := txBldr.BuildAndSign(fromName, passphrase, msgs)
if err != nil {
return err
}
// broadcast to a Tendermint node
res, err := cliCtx.BroadcastTx(txBytes)
if err != nil {
return err
}
return cliCtx.PrintOutput(res)
}
// EnrichWithGas calculates the gas estimate that would be consumed by the
// transaction and set the transaction's respective value accordingly.
func EnrichWithGas(txBldr authtypes.TxBuilder, cliCtx context.CLIContext, msgs []sdk.Msg) (authtypes.TxBuilder, error) {
_, adjusted, err := simulateMsgs(txBldr, cliCtx, msgs)
if err != nil {
return txBldr, err
}
return txBldr.WithGas(adjusted), nil
}
// CalculateGas simulates the execution of a transaction and returns
// both the estimate obtained by the query and the adjusted amount.
func CalculateGas(
queryFunc func(string, []byte) ([]byte, int64, error),
txBytes []byte, adjustment float64,
) (estimate, adjusted uint64, err error) {
// run a simulation (via /app/simulate query) to
// estimate gas and update TxBuilder accordingly
rawRes, _, err := queryFunc("/app/simulate", txBytes)
if err != nil {
return estimate, adjusted, err
}
estimate, err = parseQueryResponse(rawRes)
if err != nil {
return
}
adjusted = adjustGasEstimate(estimate, adjustment)
return estimate, adjusted, nil
}
// PrintUnsignedStdTx builds an unsigned StdTx and prints it to os.Stdout.
func PrintUnsignedStdTx(txBldr authtypes.TxBuilder, cliCtx context.CLIContext, msgs []sdk.Msg) error {
stdTx, err := buildUnsignedStdTxOffline(txBldr, cliCtx, msgs)
if err != nil {
return err
}
json, err := amino.MarshalJSON(stdTx)
if err != nil {
return err
}
_, _ = fmt.Fprintf(cliCtx.Output, "%s\n", json)
return nil
}
// SignStdTx appends a signature to a StdTx and returns a copy of it. If appendSig
// is false, it replaces the signatures already attached with the new signature.
// Don't perform online validation or lookups if offline is true.
func SignStdTx(
txBldr authtypes.TxBuilder, cliCtx context.CLIContext, name string,
stdTx authtypes.StdTx, appendSig bool, offline bool,
) (authtypes.StdTx, error) {
var signedStdTx authtypes.StdTx
info, err := txBldr.Keybase().Get(name)
if err != nil {
return signedStdTx, err
}
addr := info.GetPubKey().Address()
// check whether the address is a signer
if !isTxSigner(sdk.AccAddress(addr), stdTx.GetSigners()) {
return signedStdTx, fmt.Errorf("%s: %s", errInvalidSigner, name)
}
if !offline {
txBldr, err = populateAccountFromState(txBldr, cliCtx, sdk.AccAddress(addr))
if err != nil {
return signedStdTx, err
}
}
passphrase, err := keys.GetPassphrase(name)
if err != nil {
return signedStdTx, err
}
return txBldr.SignStdTx(name, passphrase, stdTx, appendSig)
}
// SignStdTxWithSignerAddress attaches a signature to a StdTx and returns a copy of it.
// Don't perform online validation or lookups if offline is true, else
// populate account and sequence numbers from a foreign account.
func SignStdTxWithSignerAddress(txBldr authtypes.TxBuilder, cliCtx context.CLIContext,
addr sdk.AccAddress, name string, stdTx authtypes.StdTx,
offline bool) (signedStdTx authtypes.StdTx, err error) {
// check whether the address is a signer
if !isTxSigner(addr, stdTx.GetSigners()) {
return signedStdTx, fmt.Errorf("%s: %s", errInvalidSigner, name)
}
if !offline {
txBldr, err = populateAccountFromState(txBldr, cliCtx, addr)
if err != nil {
return signedStdTx, err
}
}
passphrase, err := keys.GetPassphrase(name)
if err != nil {
return signedStdTx, err
}
return txBldr.SignStdTx(name, passphrase, stdTx, false)
}
// Read and decode a StdTx from the given filename. Can pass "-" to read from stdin.
func ReadStdTxFromFile(filename string) (stdTx authtypes.StdTx, err error) {
var bytes []byte
if filename == "-" {
bytes, err = ioutil.ReadAll(os.Stdin)
} else {
bytes, err = ioutil.ReadFile(filename)
}
if err != nil {
return
}
if err = amino.UnmarshalJSON(bytes, &stdTx); err != nil {
return
}
return
}
func populateAccountFromState(
txBldr authtypes.TxBuilder, cliCtx context.CLIContext, addr sdk.AccAddress,
) (authtypes.TxBuilder, error) {
num, seq, err := authtypes.NewAccountRetriever(cliCtx).GetAccountNumberSequence(addr)
if err != nil {
return txBldr, err
}
return txBldr.WithAccountNumber(num).WithSequence(seq), nil
}
// GetTxEncoder return tx encoder from global sdk configuration if ones is defined.
// Otherwise returns encoder with default logic.
func GetTxEncoder() (encoder sdk.TxEncoder) {
encoder = sdk.GetConfig().GetTxEncoder()
if encoder == nil {
encoder = authtypes.DefaultTxEncoder()
}
return encoder
}
// nolint
// SimulateMsgs simulates the transaction and returns the gas estimate and the adjusted value.
func simulateMsgs(txBldr authtypes.TxBuilder, cliCtx context.CLIContext, msgs []sdk.Msg) (estimated, adjusted uint64, err error) {
txBytes, err := txBldr.BuildTxForSim(msgs)
if err != nil {
return
}
estimated, adjusted, err = CalculateGas(cliCtx.QueryWithData, txBytes, txBldr.GasAdjustment())
return
}
func adjustGasEstimate(estimate uint64, adjustment float64) uint64 {
return uint64(adjustment * float64(estimate))
}
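// Illustrative: adjustGasEstimate(100000, 1.5) == 150000; an adjustment
// above 1 pads the simulated estimate against state changes between
// simulation and actual execution.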
func parseQueryResponse(rawRes []byte) (uint64, error) {
var simulationResult sdk.Result
if err := amino.UnmarshalSized(rawRes, &simulationResult); err != nil {
return 0, err
}
return simulationResult.GasUsed, nil
}
// PrepareTxBuilder populates a TxBuilder in preparation for the build of a Tx.
func PrepareTxBuilder(txBldr authtypes.TxBuilder, cliCtx context.CLIContext) (authtypes.TxBuilder, error) {
from := cliCtx.GetFromAddress()
accGetter := authtypes.NewAccountRetriever(cliCtx)
if err := accGetter.EnsureExists(from); err != nil {
return txBldr, err
}
txbldrAccNum, txbldrAccSeq := txBldr.AccountNumber(), txBldr.Sequence()
// TODO: (ref #1903) Allow for user supplied account number without
// automatically doing a manual lookup.
if txbldrAccNum == 0 || txbldrAccSeq == 0 {
num, seq, err := authtypes.NewAccountRetriever(cliCtx).GetAccountNumberSequence(from)
if err != nil {
return txBldr, err
}
if txbldrAccNum == 0 {
txBldr = txBldr.WithAccountNumber(num)
}
if txbldrAccSeq == 0 {
txBldr = txBldr.WithSequence(seq)
}
}
return txBldr, nil
}
func buildUnsignedStdTxOffline(txBldr authtypes.TxBuilder, cliCtx context.CLIContext, msgs []sdk.Msg) (stdTx authtypes.StdTx, err error) {
if txBldr.SimulateAndExecute() {
if cliCtx.GenerateOnly {
return stdTx, errors.New("cannot estimate gas with generate-only")
}
txBldr, err = EnrichWithGas(txBldr, cliCtx, msgs)
if err != nil {
return stdTx, err
}
_, _ = fmt.Fprintf(os.Stderr, "estimated gas = %v\n", txBldr.Gas())
}
stdSignMsg, err := txBldr.BuildSignMsg(msgs)
if err != nil {
return stdTx, err
}
return authtypes.NewStdTx(stdSignMsg.Msgs, stdSignMsg.Fee, nil, stdSignMsg.Memo), nil
}
func isTxSigner(user sdk.AccAddress, signers []sdk.AccAddress) bool {
for _, s := range signers {
if bytes.Equal(user.Bytes(), s.Bytes()) {
return true
}
}
return false
} |
passphrase, err := keys.GetPassphrase(fromName)
if err != nil { |
clean_prediction.py | from gensim.models import Word2Vec
from sklearn.decomposition import PCA
from matplotlib import pyplot
import string
import fnmatch
# define training data
#sentences = open('new_file_sentence.txt', 'r', encoding='utf-8')
path = 'predictions_v11_1500_clean.txt'
output_file = open("predictions_v11_500.txt", "w")
input_texts = []
with open(path) as f:
lines = f.read().split('\n')
for line in lines[: min(1000000, len(lines) - 1)]:
line = line.replace(' ','').split(',')
track_ids = ''  # avoid shadowing the built-in str
#print(line)
#x = 'spotify*'
for i in range(2000):
if 'spotify:track:' in line[i]:
track_ids += line[i]
| output_file.write(track_ids)
output_file.write('\n')
#y = not (fnmatch.filter(line, x))
# print(y)
#print(line[i])
#print(line)
#print(x for x in line if 'spotify' in x)
#if "spotify" not in line:
# print(line)
# line=line[i].replace(line[i], '')
#print(line)
#input_texts.append(line)
#output_file.write(input_texts)
#output_file.write('\n')
#import fnmatch
#l = ['RT07010534.txt', 'RT07010533.txt', 'RT02010534.txt']
#pattern = 'RT0701*.txt'
#matching = fnmatch.filter(l, pattern)
#print(matching)
#print(sample1) | track_ids += ','
print(line[i])
|
deploy-client.ts | import * as chalk from 'chalk';
import { concat } from 'rxjs/observable/concat';
import { factory } from './execute';
import { isRootDir } from './is-root-dir';
import { failWith } from './fail-with';
import { stdout, spawnShell } from '@machinelabs/core';
export function | (project, env) {
if (!isRootDir()) {
failWith('Command needs to be run from root dir');
}
return concat(
stdout(chalk.green(`Deploying client to ${project} with env=${env}`)),
spawnShell(`(cd ./client &&
npm run node_modules &&
npx ng build --prod --environment=${env} &&
npx firebase use ${project} &&
npx firebase deploy)`),
stdout(chalk.green('Client successfully deployed!'))
);
}
| deployClient |
gopro-request-status.ts | export enum GoproRequestStatus {
GOPRO_REQUEST_SUCCESS = 0, // The write message with ID indicated succeeded. | GOPRO_REQUEST_FAILED = 1, // The write message with ID indicated failed.
GOPRO_REQUEST_STATUS_ENUM_END = 2, //
} |
|
find.go | // Copyright 2021 Matrix Origin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package fastmap
var (
Find func([]uint64, uint64) int
)
func find(xs []uint64, v uint64) int | {
for i, x := range xs {
if x == v {
return i
}
}
return -1
} |
|
ccl.py | import logging
import os.path as op
import numpy as np
import pycuda.driver as cuda
import pycuda.gpuarray as gpuarray
import pycuda.autoinit
from pycuda.compiler import SourceModule
from ..improc_types import int3
from ..utils import gpuregion, cpuregion
from ..cuda import asgpuarray, grid_kernel_config
from ._ccl import _remap, _relabel2d, _relabel3d, _merge_small3d
__dirname__ = op.dirname(__file__)
@gpuregion
def ccl3d(labels, remap=True):
assert labels.ndim == 3
assert labels.dtype == np.uint32
with open(op.join(__dirname__, "kernels", "ccl3d.cu"), "r") as f:
_mod_conv = SourceModule(f.read())
gpu_ccl_local = _mod_conv.get_function("uf_local")
gpu_ccl_global = _mod_conv.get_function("uf_global")
gpu_ccl_final = _mod_conv.get_function("uf_final")
labels_gpu = asgpuarray(labels, dtype=np.uint32)
result_gpu = gpuarray.zeros_like(labels_gpu)
shape = np.asarray(tuple(labels.shape[::-1]), dtype=int3)
| block, grid = grid_kernel_config(gpu_ccl_local, labels.shape)
shared = int(np.prod(block) * 8)
gpu_ccl_local(labels_gpu, result_gpu, shape, block=block, grid=grid, shared=shared)
gpu_ccl_global(labels_gpu, result_gpu, shape, block=block, grid=grid)
gpu_ccl_final(result_gpu, shape, block=block, grid=grid)
if remap:
return remap_labels(result_gpu.get())
return result_gpu
def remap_labels(labels):
assert labels.dtype == np.uint32
new_labels = _remap(labels.ravel())
new_labels.shape = labels.shape
return new_labels
def relabel(labels):
assert labels.dtype == np.uint32
if labels.ndim == 2:
new_labels = _relabel2d(labels.ravel(), labels.shape[1])
elif labels.ndim == 3:
new_labels = _relabel3d(labels.ravel(), labels.shape[1], labels.shape[2])
else:
raise ValueError(
"Input array has to be 2 or 3 dimensional: {}".format(labels.ndim)
)
new_labels.shape = labels.shape
return new_labels
@cpuregion
def merge_small(data, labels, min_size=1, **kwargs):
if data.ndim != labels.ndim + 1:
data = data[..., None]
assert data.ndim == labels.ndim + 1
return _merge_small3d(data, labels, labels.max() + 1, min_size)
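A minimal usage sketch for ccl3d, assuming a CUDA-capable device and that this module is importable (the import path, shapes, and values below are illustrative only):

import numpy as np
# Hypothetical import path; adjust to wherever this module lives:
# from improc.ccl import ccl3d

volume = np.zeros((8, 8, 8), dtype=np.uint32)
volume[0, 0, 0:2] = 1    # one small foreground blob
volume[7, 7, 6:8] = 1    # a second, disconnected blob
labeled = ccl3d(volume)  # the two blobs come back with distinct labels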
test_attach_interfaces.py | # Copyright 2012 SINA Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova.api.openstack.compute.contrib import attach_interfaces
from nova.compute import api as compute_api
from nova import context
from nova import exception
from nova.network import api as network_api
from nova.openstack.common import jsonutils
from nova import test
import webob
from webob import exc
CONF = cfg.CONF
FAKE_UUID1 = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
FAKE_UUID2 = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
FAKE_PORT_ID1 = '11111111-1111-1111-1111-111111111111'
FAKE_PORT_ID2 = '22222222-2222-2222-2222-222222222222'
FAKE_PORT_ID3 = '33333333-3333-3333-3333-333333333333'
FAKE_NET_ID1 = '44444444-4444-4444-4444-444444444444'
FAKE_NET_ID2 = '55555555-5555-5555-5555-555555555555'
FAKE_NET_ID3 = '66666666-6666-6666-6666-666666666666'
port_data1 = {
"id": FAKE_PORT_ID1,
"network_id": FAKE_NET_ID1,
"admin_state_up": True,
"status": "ACTIVE",
"mac_address": "aa:aa:aa:aa:aa:aa",
"fixed_ips": ["10.0.1.2"],
"device_id": FAKE_UUID1,
}
port_data2 = {
"id": FAKE_PORT_ID2,
"network_id": FAKE_NET_ID2,
"admin_state_up": True,
"status": "ACTIVE",
"mac_address": "bb:bb:bb:bb:bb:bb",
"fixed_ips": ["10.0.2.2"],
"device_id": FAKE_UUID1,
}
port_data3 = {
"id": FAKE_PORT_ID3,
"network_id": FAKE_NET_ID3,
"admin_state_up": True,
"status": "ACTIVE",
"mac_address": "bb:bb:bb:bb:bb:bb",
"fixed_ips": ["10.0.2.2"],
"device_id": '',
}
fake_networks = [FAKE_NET_ID1, FAKE_NET_ID2]
ports = [port_data1, port_data2, port_data3]
def fake_list_ports(self, *args, **kwargs):
result = []
for port in ports:
if port['device_id'] == kwargs['device_id']:
result.append(port)
return {'ports': result}
def fake_show_port(self, context, port_id, **kwargs):
for port in ports:
if port['id'] == port_id:
return {'port': port}
def fake_attach_interface(self, context, instance, network_id, port_id,
requested_ip='192.168.1.3'):
if not network_id:
# if no network_id is given when adding a port to an instance, use the
# first default network.
network_id = fake_networks[0]
if not port_id:
port_id = ports[fake_networks.index(network_id)]['id']
network_info = [
{'bridge': 'br-100',
'id': network_id,
'cidr': '192.168.1.0/24',
'vlan': '101',
'injected': 'False',
'multi_host': 'False',
'bridge_interface': 'bridge_interface'
},
{'label': 'fake_network',
'broadcast': '192.168.1.255',
'mac': '11:22:33:11:22:33',
'vif_uuid': port_id,
'rxtx_cap': 0,
'dns': '8.8.8.8',
'dhcp_server': '192.168.1.1',
'ips': {'ip': requested_ip,
'enabled': 1,
'netmask': '255.255.255.0',
'gateway': '192.168.1.254'}
}
]
return network_info
def fake_detach_interface(self, context, instance, port_id):
for port in ports:
if port['id'] == port_id:
return
raise exception.PortNotFound(port_id=port_id)
def fake_get_instance(self, context, instance_id):
return {}
class InterfaceAttachTests(test.TestCase):
def setUp(self):
super(InterfaceAttachTests, self).setUp()
self.flags(quantum_auth_strategy=None)
self.flags(quantum_url='http://anyhost/')
self.flags(quantum_url_timeout=30)
self.stubs.Set(network_api.API, 'show_port', fake_show_port)
self.stubs.Set(network_api.API, 'list_ports', fake_list_ports)
self.stubs.Set(compute_api.API, 'get', fake_get_instance)
self.context = context.get_admin_context()
self.expected_show = {'interfaceAttachment':
{'net_id': FAKE_NET_ID1,
'port_id': FAKE_PORT_ID1,
'mac_addr': port_data1['mac_address'],
'port_state': port_data1['status'],
'fixed_ips': port_data1['fixed_ips'],
}}
def test_show(self):
attachments = attach_interfaces.InterfaceAttachmentController()
req = webob.Request.blank('/v2/fake/os-interfaces/show')
req.method = 'POST'
req.body = jsonutils.dumps({})
req.headers['content-type'] = 'application/json'
req.environ['nova.context'] = self.context
result = attachments.show(req, FAKE_UUID1, FAKE_PORT_ID1)
self.assertEqual(self.expected_show, result)
def test_show_invalid(self):
attachments = attach_interfaces.InterfaceAttachmentController()
req = webob.Request.blank('/v2/fake/os-interfaces/show')
req.method = 'POST'
req.body = jsonutils.dumps({})
req.headers['content-type'] = 'application/json'
req.environ['nova.context'] = self.context
self.assertRaises(exc.HTTPNotFound,
attachments.show, req, FAKE_UUID2, FAKE_PORT_ID1)
def test_delete(self):
self.stubs.Set(compute_api.API, 'detach_interface',
fake_detach_interface)
attachments = attach_interfaces.InterfaceAttachmentController()
req = webob.Request.blank('/v2/fake/os-interfaces/delete')
req.method = 'POST'
req.body = jsonutils.dumps({})
req.headers['content-type'] = 'application/json'
req.environ['nova.context'] = self.context
result = attachments.delete(req, FAKE_UUID1, FAKE_PORT_ID1)
self.assertEqual('202 Accepted', result.status)
def test_delete_interface_not_found(self):
self.stubs.Set(compute_api.API, 'detach_interface',
fake_detach_interface)
attachments = attach_interfaces.InterfaceAttachmentController()
req = webob.Request.blank('/v2/fake/os-interfaces/delete')
req.method = 'POST'
req.body = jsonutils.dumps({})
req.headers['content-type'] = 'application/json'
req.environ['nova.context'] = self.context
self.assertRaises(exc.HTTPNotFound,
attachments.delete,
req,
FAKE_UUID1,
'invalid-port-id')
def test_attach_interface_without_network_id(self):
self.stubs.Set(compute_api.API, 'attach_interface',
fake_attach_interface)
attachments = attach_interfaces.InterfaceAttachmentController()
req = webob.Request.blank('/v2/fake/os-interfaces/attach')
req.method = 'POST'
body = jsonutils.dumps({'port_id': FAKE_PORT_ID1})
req.body = body
req.headers['content-type'] = 'application/json'
req.environ['nova.context'] = self.context
result = attachments.create(req, FAKE_UUID1, jsonutils.loads(req.body))
self.assertEqual(result['interfaceAttachment']['net_id'],
FAKE_NET_ID1)
def test_attach_interface_with_network_id(self):
self.stubs.Set(compute_api.API, 'attach_interface',
fake_attach_interface)
attachments = attach_interfaces.InterfaceAttachmentController()
req = webob.Request.blank('/v2/fake/os-interfaces/attach')
req.method = 'POST'
req.body = jsonutils.dumps({'interfaceAttachment':
{'net_id': FAKE_NET_ID2}})
req.headers['content-type'] = 'application/json'
req.environ['nova.context'] = self.context
result = attachments.create(req, FAKE_UUID1, jsonutils.loads(req.body))
self.assertEqual(result['interfaceAttachment']['net_id'],
FAKE_NET_ID2)
def test_attach_interface_with_port_and_network_id(self):
self.stubs.Set(compute_api.API, 'attach_interface',
fake_attach_interface)
attachments = attach_interfaces.InterfaceAttachmentController()
req = webob.Request.blank('/v2/fake/os-interfaces/attach')
req.method = 'POST'
req.body = jsonutils.dumps({'interfaceAttachment':
{'port_id': FAKE_PORT_ID1,
'net_id': FAKE_NET_ID2}})
req.headers['content-type'] = 'application/json'
req.environ['nova.context'] = self.context
self.assertRaises(exc.HTTPBadRequest,
attachments.create, req, FAKE_UUID1,
jsonutils.loads(req.body))
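The pattern used throughout these tests — monkey-patching real API methods with module-level fakes via self.stubs.Set(...) — does not depend on nova; a minimal self-contained sketch with made-up names:

class ComputeAPI(object):
    def get(self, context, instance_id):
        raise RuntimeError('would call a real service')

def fake_get(self, context, instance_id):
    return {}

# Same idea as self.stubs.Set(compute_api.API, 'get', fake_get_instance):
ComputeAPI.get = fake_get
assert ComputeAPI().get(None, 'some-uuid') == {}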
object_utils_test.js | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var objectUtils = require('utils/object_utils');
describe('utils/object_utils', function() {
describe('#recursiveTree()', function() {
var testObj = {
a1: {
a2: 'v1',
a3: {
a4: {
a5: {
a6: 'v2',
a7: 'v3'
}
}
}
}
};
it('should return correct tree of children', function(){
var result = objectUtils.recursiveTree(testObj);
expect(result).to.be.equal('a2 (/a1)<br/>a5 (/a1/a3/a4)<br/>');
});
it('should return `null` for non-object input', function() {
var result = objectUtils.recursiveTree('{ a1: "v1"}');
expect(result).to.be.null;
});
});
describe('#recursiveKeysCount()', function() {
var tests = [
{
m: 'should return 3',
e: 3,
obj: {
a1: {
a2: 'v1',
a3: 'v2',
a4: {
a5: 'v3'
}
}
}
},
{
m: 'should return 1',
e: 1,
obj: {
a1: 'c1'
}
},
{
m: 'should return `null`',
e: null,
obj: 'a1'
}
];
tests.forEach(function(test){
it(test.m, function() {
expect(objectUtils.recursiveKeysCount(test.obj)).to.be.eql(test.e);
});
});
});
describe('#deepEqual', function() {
it('simple values', function() {
expect(objectUtils.deepEqual(true, true)).to.true;
});
it('simple values strict', function() {
expect(objectUtils.deepEqual(true, 1)).to.false;
});
it('simple with complex', function() {
expect(objectUtils.deepEqual(true, {})).to.false;
});
it('complex with simple', function() {
expect(objectUtils.deepEqual({}, 2)).to.false;
});
it('simple objects', function() {
var a = {
value: 1
};
var b = {
value: 1
};
expect(objectUtils.deepEqual(a, b)).to.true;
});
it('simple objects failed', function() {
var a = {
value: 1,
c: 1
};
var b = {
value: 1
};
expect(objectUtils.deepEqual(a, b)).to.false;
});
it('complex objects', function() {
var a = {
value: 1,
c: {
d: {
x: {
val: 1
}
}
}
};
var b = {
value: 1,
c: {
d: {
x: {
val: 1
}
}
}
};
expect(objectUtils.deepEqual(a, b)).to.true;
});
it('complex objects failed', function() {
var a = {
value: 1,
c: {
d: {
x: {
val: 1
}
}
}
};
var b = {
value: 1,
c: {
d: {
x: {
val: 2
}
}
}
};
expect(objectUtils.deepEqual(a, b)).to.false;
});
it('complex array', function() {
var a = [1,2,{a: 2}, 4, {b:{}}];
var b = [1,2,{a: 2}, 4, {b:{}}];
expect(objectUtils.deepEqual(a, b)).to.true;
});
it('complex array failed', function() {
var a = [1,3,{a: 2}, 4, {b:{}}];
var b = [1,2,{a: 2}, 4, {b:{}}];
expect(objectUtils.deepEqual(a, b)).to.false;
});
it('simple array', function() {
var a = [1,3];
var b = [1,3];
expect(objectUtils.deepEqual(a, b)).to.true;
});
it('simple array failed', function() {
var a = [3,1];
var b = [1,3];
expect(objectUtils.deepEqual(a, b)).to.false;
});
});
describe('#deepMerge', function() {
var tests = [
{
target: {
a: [
{
c: 3
}
]
},
source: {
a: [
{
b: 2
}
]
},
e: {
a: [
{
c: 3
},
{
b: 2
}
]
}
},
{
target: {
a: {}
},
source: {
a: {
b: 2,
c: [1,2,3]
}
},
e: {
a: {
b: 2,
c: [1,2,3]
}
}
},
{
target: {
artifact_data: {
services: [
{
name: "HIVE",
configurations: [
{
"hive-site": {
hive_prop1: "hive_val1"
}
}
]
}
]
}
},
source: {
artifact_data: {
services: [
{
name: "HDFS",
configurations: [
{
"hdfs-site": {
hdfs_prop1: "hdfs_val1"
}
}
]
}
]
}
},
e: {
artifact_data: {
services: [
{
name: "HIVE",
configurations: [
{
"hive-site": {
hive_prop1: "hive_val1"
}
}
]
},
{
name: "HDFS",
configurations: [
{
"hdfs-site": {
hdfs_prop1: "hdfs_val1"
}
}
]
}
]
}
}
},
{
source: {
"artifact_data" : {
"identities" : [
{
"principal" : {
"value" : "HTTP/_HOST@${realm}",
"type" : "service"
},
"name" : "spnego",
"keytab" : {
"file" : "${keytab_dir}/spnego.service.keytab",
"owner" : {
"name" : "root",
"access" : "r"
},
"group" : {
"name" : "${cluster-env/user_group}",
"access" : "r"
}
}
},
{
"principal" : {
"value" : "${cluster-env/smokeuser}-----@${realm}",
"local_username" : "${cluster-env/smokeuser}",
"configuration" : "cluster-env/smokeuser_principal_name",
"type" : "user"
},
"name" : "smokeuser",
"keytab" : {
"file" : "${keytab_dir}/smokeuser.headless.keytab",
"owner" : {
"name" : "${cluster-env/smokeuser}",
"access" : "r"
},
"configuration" : "cluster-env/smokeuser_keytab",
"group" : { | }
]
}
},
target: {
"artifact_data" : {
"identities" : [
{
"principal" : {
"value" : "${cluster-env/smokeuser}@${realm}",
"local_username" : "${cluster-env/smokeuser}",
"configuration" : "cluster-env/smokeuser_principal_name",
"type" : "user"
},
"name" : "smokeuser",
"keytab" : {
"file" : "${keytab_dir}/smokeuser.headless.keytab",
"owner" : {
"name" : "${cluster-env/smokeuser}",
"access" : "r"
},
"configuration" : "cluster-env/smokeuser_keytab",
"group" : {
"name" : "${cluster-env/user_group}",
"access" : "r"
}
}
},
{
"principal" : {
"value" : "HTTP/_HOST@${realm}",
"local_username" : null,
"configuration" : null,
"type" : "service"
},
"name" : "spnego",
"keytab" : {
"file" : "${keytab_dir}/spnego.service.keytab",
"owner" : {
"name" : "root",
"access" : "r"
},
"configuration" : null,
"group" : {
"name" : "${cluster-env/user_group}",
"access" : "d"
}
}
},
{
"name": "anotherOne"
}
]
}
},
e: {
"artifact_data" : {
"identities" : [
{
"principal" : {
"value" : "${cluster-env/smokeuser}-----@${realm}",
"local_username" : "${cluster-env/smokeuser}",
"configuration" : "cluster-env/smokeuser_principal_name",
"type" : "user"
},
"name" : "smokeuser",
"keytab" : {
"file" : "${keytab_dir}/smokeuser.headless.keytab",
"owner" : {
"name" : "${cluster-env/smokeuser}",
"access" : "r"
},
"configuration" : "cluster-env/smokeuser_keytab",
"group" : {
"name" : "${cluster-env/user_group}",
"access" : "r"
}
}
},
{
"principal" : {
"value" : "HTTP/_HOST@${realm}",
"local_username" : null,
"configuration" : null,
"type" : "service"
},
"name" : "spnego",
"keytab" : {
"file" : "${keytab_dir}/spnego.service.keytab",
"owner" : {
"name" : "root",
"access" : "r"
},
"configuration" : null,
"group" : {
"name" : "${cluster-env/user_group}",
"access" : "r"
}
}
},
{
"name": "anotherOne"
}
]
}
}
}
];
tests.forEach(function(test) {
it("Should merge objects `{0}`, `{1}`".format(JSON.stringify(test.target), JSON.stringify(test.source)), function() {
expect(objectUtils.deepMerge(test.target, test.source, test.handler)).to.be.eql(test.e);
});
});
});
describe('#detectIndexedKey', function() {
var tests = [
{
target: [
{
a: 1,
b: []
},
{
a: 3,
b: 2
},
{
a: 2,
b: {}
}
],
e: 'a',
m: 'should detect uniq key as `a`'
},
{
target: [
{
"principal" : {
"value" : "HTTP/_HOST@${realm}",
"local_username" : null,
"configuration" : null,
"type" : "service"
},
"name" : "spnego",
"keytab" : {
"file" : "${keytab_dir}/spnego.service.keytab",
"owner" : {
"name" : "root",
"access" : "r"
},
"configuration" : null,
"group" : {
"name" : "${cluster-env/user_group}",
"access" : "r"
}
}
},
{
"principal" : {
"value" : "${cluster-env/smokeuser}-${cluster_name|toLower()}@${realm}",
"local_username" : "${cluster-env/smokeuser}",
"configuration" : "cluster-env/smokeuser_principal_name",
"type" : "user"
},
"name" : "smokeuser",
"keytab" : {
"file" : "${keytab_dir}/smokeuser.headless.keytab",
"owner" : {
"name" : "${cluster-env/smokeuser}",
"access" : "r"
},
"configuration" : "cluster-env/smokeuser_keytab",
"group" : {
"name" : "${cluster-env/user_group}",
"access" : "r"
}
}
}
],
e: 'name',
m: 'should detect uniq key as `name`'
},
];
tests.forEach(function(test) {
it(test.m, function() {
expect(objectUtils.detectIndexedKey(test.target)).to.eql(test.e);
});
});
});
describe('#smartArrayObjectMerge', function() {
var tests = [
{
target: [
{
a: 2,
B: 2
}
],
source: [
{
a: 3,
c: 4
},
],
m: 'should merge {0} {1}, into {2}',
e: [
{
a: 2,
B: 2
},
{
a: 3,
c: 4
}
]
},
{
target: [
{
a: 2,
B: 2
}
],
source: [
{
a: 2,
B: 3,
b: 4
},
{
a: 3,
c: 4
}
],
m: 'should merge {0} {1}, into {2}',
e: [
{
a: 2,
B: 3,
b: 4
},
{
a: 3,
c: 4
}
]
},
{
target: [
{
"spark-defaults" : {
"spark.history.kerberos.enabled" : "true",
"spark.history.enabled" : "no"
}
}
],
source: [
{
"spark-defaults" : {
"spark.history.kerberos.enabled" : "false"
}
},
{
"spark-site" : {
"spark.property" : "false"
}
}
],
m: 'should merge {0} {1}, into {2}',
e: [
{
"spark-defaults" : {
"spark.history.kerberos.enabled" : "true",
"spark.history.enabled" : "no"
}
},
{
"spark-site" : {
"spark.property" : "false"
}
}
]
}
];
tests.forEach(function(test) {
it(test.m.format(JSON.stringify(test.target), JSON.stringify(test.source), JSON.stringify(test.e)), function() {
expect(objectUtils.smartArrayObjectMerge(test.target, test.source).toArray()).to.be.eql(test.e);
});
});
});
});
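For orientation, the behavior pinned down by the #deepMerge and #smartArrayObjectMerge fixtures — dictionaries merged recursively, arrays of objects matched on a detected identifying key — can be sketched as follows. This is Python with illustrative names, not the module's API, and it simplifies the conflict-resolution details the fixtures above encode:

def deep_merge(target, source):
    # Recursively fold source into target; dicts merge key-wise.
    for key, value in source.items():
        if isinstance(value, dict) and isinstance(target.get(key), dict):
            deep_merge(target[key], value)
        else:
            target[key] = value
    return target

def merge_keyed_arrays(target, source, key):
    # Merge two lists of dicts, matching elements that share `key`.
    by_key = {item[key]: item for item in target if key in item}
    for item in source:
        if item.get(key) in by_key:
            deep_merge(by_key[item[key]], item)
        else:
            target.append(item)
    return target

merged = merge_keyed_arrays(
    [{'name': 'HIVE', 'port': 1}], [{'name': 'HIVE', 'user': 'x'}], 'name')
# -> [{'name': 'HIVE', 'port': 1, 'user': 'x'}]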
test_derivatives.py | from functools import partial
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
from pandas.testing import assert_frame_equal
from scipy.optimize._numdiff import approx_derivative
from estimagic.differentiation.derivatives import _consolidate_one_step_derivatives
from estimagic.differentiation.derivatives import _convert_evaluation_data_to_frame
from estimagic.differentiation.derivatives import (
_convert_richardson_candidates_to_frame,
)
from estimagic.differentiation.derivatives import _nan_skipping_batch_evaluator
from estimagic.differentiation.derivatives import _select_minimizer_along_axis
from estimagic.differentiation.derivatives import first_derivative
from estimagic.examples.numdiff_functions import logit_loglike
from estimagic.examples.numdiff_functions import logit_loglike_gradient
from estimagic.examples.numdiff_functions import logit_loglikeobs
from estimagic.examples.numdiff_functions import logit_loglikeobs_jacobian
from estimagic.utilities import namedtuple_from_kwargs
@pytest.fixture
def binary_choice_inputs():
fix_path = Path(__file__).resolve().parent / "binary_choice_inputs.pickle"
inputs = pd.read_pickle(fix_path)
return inputs
methods = ["forward", "backward", "central"]
@pytest.mark.parametrize("method", methods)
def test_first_derivative_jacobian(binary_choice_inputs, method):
fix = binary_choice_inputs
func = partial(logit_loglikeobs, y=fix["y"], x=fix["x"])
calculated = first_derivative(
func=func,
method=method,
params=fix["params_np"],
n_steps=1,
base_steps=None,
lower_bounds=np.full(fix["params_np"].shape, -np.inf),
upper_bounds=np.full(fix["params_np"].shape, np.inf),
min_steps=1e-8,
step_ratio=2.0,
f0=func(fix["params_np"]),
n_cores=1,
)
expected = logit_loglikeobs_jacobian(fix["params_np"], fix["y"], fix["x"])
aaae(calculated["derivative"], expected, decimal=6)
def test_first_derivative_jacobian_works_at_defaults(binary_choice_inputs):
fix = binary_choice_inputs
func = partial(logit_loglikeobs, y=fix["y"], x=fix["x"])
calculated = first_derivative(func=func, params=fix["params_np"], n_cores=1)
expected = logit_loglikeobs_jacobian(fix["params_np"], fix["y"], fix["x"])
aaae(calculated["derivative"], expected, decimal=6)
@pytest.mark.parametrize("method", methods)
def test_first_derivative_gradient(binary_choice_inputs, method):
fix = binary_choice_inputs
func = partial(logit_loglike, y=fix["y"], x=fix["x"])
calculated = first_derivative(
func=func,
method=method,
params=fix["params_np"],
n_steps=1,
f0=func(fix["params_np"]),
n_cores=1,
)
expected = logit_loglike_gradient(fix["params_np"], fix["y"], fix["x"])
aaae(calculated["derivative"], expected, decimal=4)
@pytest.mark.parametrize("method", methods)
def test_first_derivative_scalar(method):
def f(x):
return x ** 2
calculated = first_derivative(f, 3.0, n_cores=1)
expected = 6.0
assert calculated["derivative"] == expected
@pytest.mark.parametrize("method", methods)
def test_first_derivative_scalar_with_return_func_value(method):
def f(x):
return x ** 2
calculated = first_derivative(
f, 3.0, return_func_value=True, return_info=False, n_cores=1
)
expected = {"derivative": 6.0, "func_value": 9.0}
assert calculated == expected
def test_nan_skipping_batch_evaluator():
arglist = [np.nan, np.ones(2), np.array([3, 4]), np.nan, np.array([1, 2])]
expected = [
np.full(2, np.nan),
np.ones(2),
np.array([9, 16]),
np.full(2, np.nan),
np.array([1, 4]),
]
calculated = _nan_skipping_batch_evaluator(
func=lambda x: x ** 2,
arguments=arglist,
n_cores=1,
error_handling="continue",
batch_evaluator="joblib",
)
for arr_calc, arr_exp in zip(calculated, expected):
if np.isnan(arr_exp).all():
assert np.isnan(arr_calc).all()
else:
aaae(arr_calc, arr_exp)
def test_consolidate_one_step_derivatives():
forward = np.ones((1, 4, 3))
forward[:, :, 0] = np.nan
backward = np.zeros_like(forward)
calculated = _consolidate_one_step_derivatives(
{"forward": forward, "backward": backward}, ["forward", "backward"]
)
expected = np.array([[0, 1, 1]] * 4)
aaae(calculated, expected)
@pytest.fixture()
def example_function_gradient_fixtures():
def f(x):
"""f:R^3 -> R"""
x1, x2, x3 = x[0], x[1], x[2]
y1 = np.sin(x1) + np.cos(x2) + x3 - x3
return y1
def fprime(x):
"""Gradient(f)(x):R^3 -> R^3"""
x1, x2, x3 = x[0], x[1], x[2]
grad = np.array([np.cos(x1), -np.sin(x2), x3 - x3])
return grad
return {"func": f, "func_prime": fprime}
@pytest.fixture()
def example_function_jacobian_fixtures():
    def f(x):
        """f:R^3 -> R^2"""
        x1, x2, x3 = x[0], x[1], x[2]
        y1, y2 = np.sin(x1) + np.cos(x2), np.exp(x3)
        return np.array([y1, y2])

    def fprime(x):
        """Jacobian(f)(x):R^3 -> R^(2x3)"""
        x1, x2, x3 = x[0], x[1], x[2]
        jac = np.array([[np.cos(x1), -np.sin(x2), 0], [0, 0, np.exp(x3)]])
        return jac

    return {"func": f, "func_prime": fprime}
def test_first_derivative_gradient_richardson(example_function_gradient_fixtures):
f = example_function_gradient_fixtures["func"]
fprime = example_function_gradient_fixtures["func_prime"]
true_fprime = fprime(np.ones(3))
scipy_fprime = approx_derivative(f, np.ones(3))
our_fprime = first_derivative(f, np.ones(3), n_steps=3, method="central", n_cores=1)
aaae(scipy_fprime, our_fprime["derivative"])
aaae(true_fprime, our_fprime["derivative"])
def test_first_derivative_jacobian_richardson(example_function_jacobian_fixtures):
f = example_function_jacobian_fixtures["func"]
fprime = example_function_jacobian_fixtures["func_prime"]
true_fprime = fprime(np.ones(3))
scipy_fprime = approx_derivative(f, np.ones(3))
our_fprime = first_derivative(f, np.ones(3), n_steps=3, method="central", n_cores=1)
aaae(scipy_fprime, our_fprime["derivative"])
aaae(true_fprime, our_fprime["derivative"])
def test_convert_evaluation_data_to_frame():
arr = np.arange(4).reshape(2, 2)
arr2 = arr.reshape(2, 1, 2)
steps = namedtuple_from_kwargs(pos=arr, neg=-arr)
evals = namedtuple_from_kwargs(pos=arr2, neg=-arr2)
expected = [
[1, 0, 0, 0, 0, 0],
[1, 0, 1, 0, 1, 1],
[1, 1, 0, 0, 2, 2],
[1, 1, 1, 0, 3, 3],
[-1, 0, 0, 0, 0, 0],
[-1, 0, 1, 0, 1, -1],
[-1, 1, 0, 0, 2, -2],
[-1, 1, 1, 0, 3, -3],
]
expected = pd.DataFrame(
expected, columns=["sign", "step_number", "dim_x", "dim_f", "step", "eval"]
)
got = _convert_evaluation_data_to_frame(steps, evals)
assert_frame_equal(expected, got.reset_index(), check_dtype=False)
def test__convert_richardson_candidates_to_frame():
jac = {
"forward1": np.array([[0, 1], [2, 3]]),
"forward2": np.array([[0.5, 1], [2, 3]]),
}
err = {
"forward1": np.array([[0, 0], [0, 1]]),
"forward2": np.array([[1, 0], [0, 0]]),
}
expected = [
["forward", 1, 0, 0, 0, 0],
["forward", 1, 1, 0, 1, 0],
["forward", 1, 0, 1, 2, 0],
["forward", 1, 1, 1, 3, 1],
["forward", 2, 0, 0, 0.5, 1],
["forward", 2, 1, 0, 1, 0],
["forward", 2, 0, 1, 2, 0],
["forward", 2, 1, 1, 3, 0],
]
expected = pd.DataFrame(
expected, columns=["method", "num_term", "dim_x", "dim_f", "der", "err"]
)
expected = expected.set_index(["method", "num_term", "dim_x", "dim_f"])
got = _convert_richardson_candidates_to_frame(jac, err)
assert_frame_equal(got, expected, check_dtype=False)
def test__select_minimizer_along_axis():
der = np.array([[[0, 1], [2, 3]], [[4, 5], [6, 7]]])
err = np.array([[[0, 1], [1, 0]], [[1, 0], [0, 1]]])
expected = (np.array([[0, 5], [6, 3]]), np.array([[0, 0], [0, 0]]))
got = _select_minimizer_along_axis(der, err)
aaae(expected, got)
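For orientation, the one-step schemes these tests exercise approximate f'(x) by (f(x+h) - f(x))/h (forward), (f(x) - f(x-h))/h (backward), or (f(x+h) - f(x-h))/(2h) (central). A self-contained sketch of the central scheme, independent of estimagic:

import numpy as np

def central_difference_gradient(func, x, h=1e-6):
    # Central-difference gradient of a scalar-valued function of a vector.
    x = np.asarray(x, dtype=float)
    grad = np.empty_like(x)
    for i in range(x.size):
        step = np.zeros_like(x)
        step[i] = h
        grad[i] = (func(x + step) - func(x - step)) / (2 * h)
    return grad

g = central_difference_gradient(lambda x: np.sin(x[0]) + np.cos(x[1]), np.ones(2))
# g is close to the analytic gradient [cos(1), -sin(1)]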
script.js | //modal close button
(function(){
//π.modalCloseButton = function(closingFunction){
// return π.button('pi-modal-close-button', null, null, closingFunction);
//};
})();
// globals
var body;
//helper functions
function copyCode(elem){
if (document.getElementById(elem)) {
// create hidden text element, if it doesn't already exist
var targetId = "_hiddenCopyText_";
// must use a temporary form element for the selection and copy
target = document.getElementById(targetId);
if (!target) {
var target = document.createElement("textarea");
target.style.position = "absolute";
target.style.left = "-9999px";
target.style.top = "0";
target.id = targetId;
document.body.appendChild(target);
}
target.value = document.getElementById(elem).innerText;
// select the content
target.setSelectionRange(0, target.value.length);
// copy the selection
var succeed;
try {
succeed = document.execCommand("copy");
} catch(e) {
sweetAlert("Oh, no...","Sorry, your browser doesn't support document.execCommand('copy'), so we can't copy this code to your clipboard.");
succeed = false;
}
if (succeed) sweetAlert("Copied to clipboard:",target.value);
return succeed;
} else {
sweetAlert("Oops!",elem + " not found when trying to copy code");
return false;
}
}
function booleanAttributeValue(element, attribute, defaultValue){
// returns true if an attribute is present with no value
// e.g. booleanAttributeValue(element, 'data-modal', false);
if (element.hasAttribute(attribute)) {
var value = element.getAttribute(attribute);
if (value === '' || value === 'true') {
return true;
} else if (value === 'false') {
return false;
}
}
return defaultValue;
}
function classOnCondition(element, className, condition) {
if (condition)
$(element).addClass(className);
else
$(element).removeClass(className);
}
function highestZ() {
var Z = 1000;
$("*").each(function(){
var thisZ = parseInt($(this).css('z-index'), 10);
if (!isNaN(thisZ) && thisZ > Z) Z = thisZ + 1;
});
return Z;
}
function newDOMElement(tag, className, id){
var el = document.createElement(tag);
if (className) el.className = className;
if (id) el.id = id;
return el;
}
function px(n){
return n + 'px';
}
var kub = (function () {
var HEADER_HEIGHT;
var html, header, mainNav, quickstartButton, hero, encyclopedia, footer, wishField, headlineWrapper;
$(document).ready(function () {
html = $('html');
body = $('body');
header = $('header');
mainNav = $('#mainNav');
wishField = $('#wishField');
quickstartButton = $('#quickstartButton');
hero = $('#hero');
encyclopedia = $('#encyclopedia');
footer = $('footer');
headlineWrapper = $('#headlineWrapper');
HEADER_HEIGHT = header.outerHeight();
resetTheView();
window.addEventListener('resize', resetTheView);
window.addEventListener('scroll', resetTheView);
window.addEventListener('keydown', handleKeystrokes);
wishField[0].addEventListener('keydown', handleKeystrokes);
document.onunload = function(){
window.removeEventListener('resize', resetTheView);
window.removeEventListener('scroll', resetTheView);
window.removeEventListener('keydown', handleKeystrokes);
wishField[0].removeEventListener('keydown', handleKeystrokes);
};
setInterval(setFooterType, 10);
});
function setFooterType() {
var windowHeight = window.innerHeight;
var bodyHeight;
switch (html[0].id) {
case 'docs': {
bodyHeight = hero.outerHeight() + encyclopedia.outerHeight();
break;
}
case 'home':
bodyHeight = windowHeight;
break;
default: {
bodyHeight = hero.outerHeight() + $('#mainContent').outerHeight();
}
}
var footerHeight = footer.outerHeight();
classOnCondition(body, 'fixed', windowHeight - footerHeight > bodyHeight);
}
function resetTheView() {
if (html.hasClass('open-nav')) {
toggleMenu();
} else {
HEADER_HEIGHT = header.outerHeight();
}
if (html.hasClass('open-toc')) {
toggleToc();
}
classOnCondition(html, 'flip-nav', window.pageYOffset > 0);
if (html[0].id == 'home') {
setHomeHeaderStyles();
}
}
function setHomeHeaderStyles() {
var Y = window.pageYOffset;
var quickstartBottom = quickstartButton[0].getBoundingClientRect().bottom;
classOnCondition(html[0], 'y-enough', Y > quickstartBottom);
}
function toggleMenu() {
if (window.innerWidth < 800) {
pushmenu.show('primary');
}
else {
var newHeight = HEADER_HEIGHT;
if (!html.hasClass('open-nav')) {
newHeight = mainNav.outerHeight();
}
header.css({height: px(newHeight)});
html.toggleClass('open-nav');
}
}
function submitWish(textfield) {
window.location.replace("https://github.com/kubernetes/kubernetes.github.io/issues/new?title=I%20wish%20" +
window.location.pathname + "%20" + textfield.value + "&body=I%20wish%20" +
window.location.pathname + "%20" + textfield.value);
textfield.value = '';
textfield.blur();
}
function handleKeystrokes(e) {
switch (e.which) {
case 13: {
if (e.currentTarget === wishField[0]) {
submitWish(wishField[0]);
}
break;
}
case 27: {
if (html.hasClass('open-nav')) {
toggleMenu();
}
break;
}
}
}
function showVideo() {
$('body').css({overflow: 'hidden'});
var videoPlayer = $("#videoPlayer");
var videoIframe = videoPlayer.find("iframe")[0];
videoIframe.src = videoIframe.getAttribute("data-url");
videoPlayer.css({zIndex: highestZ()});
videoPlayer.fadeIn(300);
videoPlayer.click(function(){
$('body').css({overflow: 'auto'});
videoPlayer.fadeOut(300, function(){
videoIframe.src = '';
});
});
}
function tocWasClicked(e) {
var target = $(e.target);
var docsToc = $("#docsToc");
return (target[0] === docsToc[0] || target.parents("#docsToc").length > 0);
}
function listenForTocClick(e) {
if (!tocWasClicked(e)) toggleToc();
}
function toggleToc() {
html.toggleClass('open-toc');
setTimeout(function () {
if (html.hasClass('open-toc')) {
window.addEventListener('click', listenForTocClick);
} else {
window.removeEventListener('click', listenForTocClick);
}
}, 100);
}
return {
toggleToc: toggleToc,
toggleMenu: toggleMenu,
showVideo: showVideo
};
})();
// accordion
(function(){
var yah = true;
var moving = false;
var CSS_BROWSER_HACK_DELAY = 25;
$(document).ready(function(){
// Safari chokes on the animation here, so...
if (navigator.userAgent.indexOf('Chrome') == -1 && navigator.userAgent.indexOf('Safari') != -1){
var hackStyle = newDOMElement('style');
hackStyle.innerHTML = '.pi-accordion .wrapper{transition: none}';
body.append(hackStyle);
}
// Gross.
$('.pi-accordion').each(function () {
var accordion = this;
var content = this.innerHTML;
var container = newDOMElement('div', 'container');
container.innerHTML = content;
$(accordion).empty();
accordion.appendChild(container);
CollapseBox($(container));
});
setYAH();
setTimeout(function () {
yah = false;
}, 500);
});
function CollapseBox(container){
container.children('.item').each(function(){
// build the TOC DOM
// the animated open/close is enabled by having each item's content exist in the flow, at its natural height,
// enclosed in a wrapper with height = 0 when closed, and height = contentHeight when open.
var item = this;
// only add content wrappers to containers, not to links
var isContainer = item.tagName === 'DIV';
var titleText = item.getAttribute('data-title');
var title = newDOMElement('div', 'title');
title.innerHTML = titleText;
var wrapper, content;
if (isContainer) {
wrapper = newDOMElement('div', 'wrapper');
content = newDOMElement('div', 'content');
content.innerHTML = item.innerHTML;
wrapper.appendChild(content);
}
item.innerHTML = '';
item.appendChild(title);
if (wrapper) {
item.appendChild(wrapper);
$(wrapper).css({height: 0});
}
$(title).click(function(){
if (!yah) {
if (moving) return;
moving = true;
}
if (container[0].getAttribute('data-single')) {
var openSiblings = $(item).siblings('.on').toArray();
openSiblings.forEach(function(sibling){
toggleItem(sibling);
});
}
setTimeout(function(){
if (!isContainer) {
moving = false;
return;
}
toggleItem(item);
}, CSS_BROWSER_HACK_DELAY);
});
function toggleItem(thisItem){
var thisWrapper = $(thisItem).find('.wrapper').eq(0);
if (!thisWrapper) return;
var contentHeight = thisWrapper.find('.content').eq(0).innerHeight() + 'px';
if ($(thisItem).hasClass('on')) {
thisWrapper.css({height: contentHeight});
$(thisItem).removeClass('on');
setTimeout(function(){
thisWrapper.css({height: 0});
moving = false;
}, CSS_BROWSER_HACK_DELAY);
} else {
$(thisItem).addClass('on');
thisWrapper.css({height: contentHeight});
var duration = parseFloat(getComputedStyle(thisWrapper[0]).transitionDuration) * 1000;
setTimeout(function(){
thisWrapper.css({height: ''});
moving = false;
}, duration);
}
}
if (content) {
var innerContainers = $(content).children('.container');
if (innerContainers.length > 0) {
innerContainers.each(function(){
CollapseBox($(this));
});
}
}
});
}
function setYAH() {
var pathname = location.href.split('#')[0]; // on page load, make sure the page is YAH even if there's a hash
var currentLinks = [];
$('.pi-accordion a').each(function () {
if (pathname === this.href) currentLinks.push(this);
});
currentLinks.forEach(function (yahLink) {
$(yahLink).parents('.item').each(function(){
$(this).addClass('on');
$(this).find('.wrapper').eq(0).css({height: 'auto'});
$(this).find('.content').eq(0).css({opacity: 1});
});
$(yahLink).addClass('yah');
yahLink.onclick = function(e){e.preventDefault();};
});
}
})();
var pushmenu = (function(){
var allPushMenus = {};
$(document).ready(function(){
$('[data-auto-burger]').each(function(){
var container = this;
var id = container.getAttribute('data-auto-burger');
var autoBurger = document.getElementById(id) || newDOMElement('div', 'pi-pushmenu', id);
var ul = autoBurger.querySelector('ul') || newDOMElement('ul');
$(container).find('a[href], button').each(function () {
if (!booleanAttributeValue(this, 'data-auto-burger-exclude', false)) {
var clone = this.cloneNode(true);
clone.id = '';
if (clone.tagName == "BUTTON") {
var aTag = newDOMElement('a');
aTag.href = '';
aTag.innerHTML = clone.innerHTML;
aTag.onclick = clone.onclick;
clone = aTag;
}
var li = newDOMElement('li');
li.appendChild(clone);
ul.appendChild(li);
}
});
autoBurger.appendChild(ul);
body.append(autoBurger);
});
$(".pi-pushmenu").each(function(){
allPushMenus[this.id] = PushMenu(this);
});
});
function show(objId) {
allPushMenus[objId].expose();
}
function PushMenu(el) {
var html = document.querySelector('html');
var overlay = newDOMElement('div', 'overlay');
var content = newDOMElement('div', 'content');
content.appendChild(el.querySelector('*'));
var side = el.getAttribute("data-side") || "right";
var sled = newDOMElement('div', 'sled');
$(sled).css(side, 0);
sled.appendChild(content);
var closeButton = newDOMElement('button', 'push-menu-close-button');
closeButton.onclick = closeMe;
sled.appendChild(closeButton);
overlay.appendChild(sled);
el.innerHTML = '';
el.appendChild(overlay);
sled.onclick = function(e){
e.stopPropagation();
};
overlay.onclick = closeMe;
window.addEventListener('resize', closeMe);
function closeMe(e) {
if (e.target == sled) return;
$(el).removeClass('on');
setTimeout(function(){
$(el).css({display: 'none'});
$(body).removeClass('overlay-on');
}, 300);
}
function exposeMe(){
$(body).addClass('overlay-on'); // in the default config, kills body scrolling
$(el).css({
display: 'block',
zIndex: highestZ()
});
setTimeout(function(){
$(el).addClass('on');
}, 10);
}
return {
expose: exposeMe
};
}
return {
show: show
};
})();
build.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi;
use ast::{P, Ident, Generics, NodeId, Expr};
use ast;
use ast_util;
use attr;
use codemap::{Span, respan, Spanned, DUMMY_SP, Pos};
use ext::base::ExtCtxt;
use fold::Folder;
use owned_slice::OwnedSlice;
use parse::token::special_idents;
use parse::token::InternedString;
use parse::token;
use std::gc::{Gc, GC};
// Transitional reexports so qquote can find the paths it is looking for
mod syntax {
pub use ext;
pub use parse;
}
pub trait AstBuilder {
// paths
fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;
fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path;
fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;
fn path_all(&self, sp: Span,
global: bool,
idents: Vec<ast::Ident> ,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>> )
-> ast::Path;
// types
fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy;
fn ty(&self, span: Span, ty: ast::Ty_) -> P<ast::Ty>;
fn ty_path(&self, path: ast::Path, bounds: Option<OwnedSlice<ast::TyParamBound>>) -> P<ast::Ty>;
fn ty_ident(&self, span: Span, idents: ast::Ident) -> P<ast::Ty>;
fn ty_rptr(&self, span: Span,
ty: P<ast::Ty>,
lifetime: Option<ast::Lifetime>,
mutbl: ast::Mutability) -> P<ast::Ty>;
fn ty_uniq(&self, span: Span, ty: P<ast::Ty>) -> P<ast::Ty>;
fn ty_option(&self, ty: P<ast::Ty>) -> P<ast::Ty>;
fn ty_infer(&self, sp: Span) -> P<ast::Ty>;
fn ty_nil(&self) -> P<ast::Ty>;
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField;
fn strip_bounds(&self, bounds: &Generics) -> Generics;
fn typaram(&self,
span: Span,
id: ast::Ident,
bounds: OwnedSlice<ast::TyParamBound>,
unbound: Option<ast::TyParamBound>,
default: Option<P<ast::Ty>>) -> ast::TyParam;
fn trait_ref(&self, path: ast::Path) -> ast::TraitRef;
fn typarambound(&self, path: ast::Path) -> ast::TyParamBound;
fn lifetime(&self, span: Span, ident: ast::Name) -> ast::Lifetime;
fn lifetime_def(&self,
span: Span,
name: ast::Name,
bounds: Vec<ast::Lifetime>)
-> ast::LifetimeDef;
// statements
fn stmt_expr(&self, expr: Gc<ast::Expr>) -> Gc<ast::Stmt>;
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident,
ex: Gc<ast::Expr>) -> Gc<ast::Stmt>;
fn stmt_let_typed(&self,
sp: Span,
mutbl: bool,
ident: ast::Ident,
typ: P<ast::Ty>,
ex: Gc<ast::Expr>)
-> Gc<ast::Stmt>;
fn stmt_item(&self, sp: Span, item: Gc<ast::Item>) -> Gc<ast::Stmt>;
// blocks
fn block(&self, span: Span, stmts: Vec<Gc<ast::Stmt>>,
expr: Option<Gc<ast::Expr>>) -> P<ast::Block>;
fn block_expr(&self, expr: Gc<ast::Expr>) -> P<ast::Block>;
fn block_all(&self, span: Span,
view_items: Vec<ast::ViewItem> ,
stmts: Vec<Gc<ast::Stmt>> ,
expr: Option<Gc<ast::Expr>>) -> P<ast::Block>;
// expressions
fn expr(&self, span: Span, node: ast::Expr_) -> Gc<ast::Expr>;
fn expr_path(&self, path: ast::Path) -> Gc<ast::Expr>;
fn expr_ident(&self, span: Span, id: ast::Ident) -> Gc<ast::Expr>;
fn expr_self(&self, span: Span) -> Gc<ast::Expr>;
fn expr_binary(&self, sp: Span, op: ast::BinOp,
lhs: Gc<ast::Expr>, rhs: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_deref(&self, sp: Span, e: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_unary(&self, sp: Span, op: ast::UnOp, e: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_managed(&self, sp: Span, e: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_addr_of(&self, sp: Span, e: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_mut_addr_of(&self, sp: Span, e: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_field_access(&self, span: Span, expr: Gc<ast::Expr>,
ident: ast::Ident) -> Gc<ast::Expr>;
fn expr_call(&self, span: Span, expr: Gc<ast::Expr>,
args: Vec<Gc<ast::Expr>>) -> Gc<ast::Expr>;
fn expr_call_ident(&self, span: Span, id: ast::Ident,
args: Vec<Gc<ast::Expr>>) -> Gc<ast::Expr>;
fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> ,
args: Vec<Gc<ast::Expr>>) -> Gc<ast::Expr>;
fn expr_method_call(&self, span: Span,
expr: Gc<ast::Expr>, ident: ast::Ident,
args: Vec<Gc<ast::Expr>> ) -> Gc<ast::Expr>;
fn expr_block(&self, b: P<ast::Block>) -> Gc<ast::Expr>;
fn expr_cast(&self, sp: Span, expr: Gc<ast::Expr>,
ty: P<ast::Ty>) -> Gc<ast::Expr>;
fn field_imm(&self, span: Span, name: Ident, e: Gc<ast::Expr>) -> ast::Field;
fn expr_struct(&self, span: Span, path: ast::Path,
fields: Vec<ast::Field> ) -> Gc<ast::Expr>;
fn expr_struct_ident(&self, span: Span, id: ast::Ident,
fields: Vec<ast::Field> ) -> Gc<ast::Expr>;
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> Gc<ast::Expr>;
fn expr_uint(&self, span: Span, i: uint) -> Gc<ast::Expr>;
fn expr_int(&self, sp: Span, i: int) -> Gc<ast::Expr>;
fn expr_u8(&self, sp: Span, u: u8) -> Gc<ast::Expr>;
fn expr_bool(&self, sp: Span, value: bool) -> Gc<ast::Expr>;
fn expr_vstore(&self, sp: Span, expr: Gc<ast::Expr>, vst: ast::ExprVstore) -> Gc<ast::Expr>;
fn expr_vec(&self, sp: Span, exprs: Vec<Gc<ast::Expr>> ) -> Gc<ast::Expr>;
fn expr_vec_ng(&self, sp: Span) -> Gc<ast::Expr>;
fn expr_vec_slice(&self, sp: Span, exprs: Vec<Gc<ast::Expr>> ) -> Gc<ast::Expr>;
fn expr_str(&self, sp: Span, s: InternedString) -> Gc<ast::Expr>;
fn expr_str_uniq(&self, sp: Span, s: InternedString) -> Gc<ast::Expr>;
fn expr_some(&self, sp: Span, expr: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_none(&self, sp: Span) -> Gc<ast::Expr>;
fn expr_tuple(&self, sp: Span, exprs: Vec<Gc<ast::Expr>>) -> Gc<ast::Expr>;
fn expr_fail(&self, span: Span, msg: InternedString) -> Gc<ast::Expr>;
fn expr_unreachable(&self, span: Span) -> Gc<ast::Expr>;
fn expr_ok(&self, span: Span, expr: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_err(&self, span: Span, expr: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn expr_try(&self, span: Span, head: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn pat(&self, span: Span, pat: ast::Pat_) -> Gc<ast::Pat>;
fn pat_wild(&self, span: Span) -> Gc<ast::Pat>;
fn pat_lit(&self, span: Span, expr: Gc<ast::Expr>) -> Gc<ast::Pat>;
fn pat_ident(&self, span: Span, ident: ast::Ident) -> Gc<ast::Pat>;
fn pat_ident_binding_mode(&self,
span: Span,
ident: ast::Ident,
bm: ast::BindingMode) -> Gc<ast::Pat>;
fn pat_enum(&self, span: Span, path: ast::Path,
subpats: Vec<Gc<ast::Pat>>) -> Gc<ast::Pat>;
fn pat_struct(&self, span: Span,
path: ast::Path, field_pats: Vec<ast::FieldPat> ) -> Gc<ast::Pat>;
fn pat_tuple(&self, span: Span, pats: Vec<Gc<ast::Pat>>) -> Gc<ast::Pat>;
fn pat_some(&self, span: Span, pat: Gc<ast::Pat>) -> Gc<ast::Pat>;
fn pat_none(&self, span: Span) -> Gc<ast::Pat>;
fn pat_ok(&self, span: Span, pat: Gc<ast::Pat>) -> Gc<ast::Pat>;
fn pat_err(&self, span: Span, pat: Gc<ast::Pat>) -> Gc<ast::Pat>;
fn arm(&self, span: Span, pats: Vec<Gc<ast::Pat>> , expr: Gc<ast::Expr>) -> ast::Arm;
fn arm_unreachable(&self, span: Span) -> ast::Arm;
fn expr_match(&self, span: Span, arg: Gc<ast::Expr>, arms: Vec<ast::Arm> ) -> Gc<ast::Expr>;
fn expr_if(&self, span: Span,
cond: Gc<ast::Expr>, then: Gc<ast::Expr>,
els: Option<Gc<ast::Expr>>) -> Gc<ast::Expr>;
fn expr_loop(&self, span: Span, block: P<ast::Block>) -> Gc<ast::Expr>;
fn lambda_fn_decl(&self, span: Span,
fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> Gc<ast::Expr>;
fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> Gc<ast::Expr>;
fn lambda0(&self, span: Span, blk: P<ast::Block>) -> Gc<ast::Expr>;
fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> Gc<ast::Expr>;
fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , blk: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn lambda_expr_0(&self, span: Span, expr: Gc<ast::Expr>) -> Gc<ast::Expr>;
fn lambda_expr_1(&self, span: Span, expr: Gc<ast::Expr>, ident: ast::Ident) -> Gc<ast::Expr>;
fn lambda_stmts(&self, span: Span, ids: Vec<ast::Ident>,
blk: Vec<Gc<ast::Stmt>>) -> Gc<ast::Expr>;
fn lambda_stmts_0(&self, span: Span,
stmts: Vec<Gc<ast::Stmt>>) -> Gc<ast::Expr>;
fn lambda_stmts_1(&self, span: Span,
stmts: Vec<Gc<ast::Stmt>>, ident: ast::Ident) -> Gc<ast::Expr>;
// items
fn item(&self, span: Span,
name: Ident, attrs: Vec<ast::Attribute>,
node: ast::Item_) -> Gc<ast::Item>;
fn arg(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::Arg;
// FIXME unused self
fn fn_decl(&self, inputs: Vec<ast::Arg> , output: P<ast::Ty>) -> P<ast::FnDecl>;
fn item_fn_poly(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
generics: Generics,
body: P<ast::Block>) -> Gc<ast::Item>;
fn item_fn(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
body: P<ast::Block>) -> Gc<ast::Item>;
fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant;
fn item_enum_poly(&self,
span: Span,
name: Ident,
enum_definition: ast::EnumDef,
generics: Generics) -> Gc<ast::Item>;
fn item_enum(&self, span: Span, name: Ident,
enum_def: ast::EnumDef) -> Gc<ast::Item>;
fn item_struct_poly(&self,
span: Span,
name: Ident,
struct_def: ast::StructDef,
generics: Generics) -> Gc<ast::Item>;
fn item_struct(&self, span: Span, name: Ident,
struct_def: ast::StructDef) -> Gc<ast::Item>;
fn item_mod(&self, span: Span, inner_span: Span,
name: Ident, attrs: Vec<ast::Attribute>,
vi: Vec<ast::ViewItem>,
items: Vec<Gc<ast::Item>>) -> Gc<ast::Item>;
fn item_static(&self,
span: Span,
name: Ident,
ty: P<ast::Ty>,
mutbl: ast::Mutability,
expr: Gc<ast::Expr>)
-> Gc<ast::Item>;
fn item_ty_poly(&self,
span: Span,
name: Ident,
ty: P<ast::Ty>,
generics: Generics) -> Gc<ast::Item>;
fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> Gc<ast::Item>;
fn attribute(&self, sp: Span, mi: Gc<ast::MetaItem>) -> ast::Attribute;
fn meta_word(&self, sp: Span, w: InternedString) -> Gc<ast::MetaItem>;
fn meta_list(&self,
sp: Span,
name: InternedString,
mis: Vec<Gc<ast::MetaItem>>)
-> Gc<ast::MetaItem>;
fn meta_name_value(&self,
sp: Span,
name: InternedString,
value: ast::Lit_)
-> Gc<ast::MetaItem>;
fn view_use(&self, sp: Span,
vis: ast::Visibility, vp: Gc<ast::ViewPath>) -> ast::ViewItem;
fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem;
fn view_use_simple_(&self, sp: Span, vis: ast::Visibility,
ident: ast::Ident, path: ast::Path) -> ast::ViewItem;
fn view_use_list(&self, sp: Span, vis: ast::Visibility,
path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem;
fn view_use_glob(&self, sp: Span,
vis: ast::Visibility, path: Vec<ast::Ident> ) -> ast::ViewItem;
}
impl<'a> AstBuilder for ExtCtxt<'a> {
fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
self.path_all(span, false, strs, Vec::new(), Vec::new())
}
fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path {
self.path(span, vec!(id))
}
fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
self.path_all(span, true, strs, Vec::new(), Vec::new())
}
fn path_all(&self,
sp: Span,
global: bool,
mut idents: Vec<ast::Ident> ,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>> )
-> ast::Path {
let last_identifier = idents.pop().unwrap();
let mut segments: Vec<ast::PathSegment> = idents.move_iter()
.map(|ident| {
ast::PathSegment {
identifier: ident,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
}).collect();
segments.push(ast::PathSegment {
identifier: last_identifier,
lifetimes: lifetimes,
types: OwnedSlice::from_vec(types),
});
ast::Path {
span: sp,
global: global,
segments: segments,
}
}
fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy {
ast::MutTy {
ty: ty,
mutbl: mutbl
}
}
fn ty(&self, span: Span, ty: ast::Ty_) -> P<ast::Ty> {
P(ast::Ty {
id: ast::DUMMY_NODE_ID,
span: span,
node: ty
})
}
fn ty_path(&self, path: ast::Path, bounds: Option<OwnedSlice<ast::TyParamBound>>)
-> P<ast::Ty> {
self.ty(path.span,
ast::TyPath(path, bounds, ast::DUMMY_NODE_ID))
}
// Might need to take bounds as an argument in the future, if you ever want
// to generate a bounded existential trait type.
fn ty_ident(&self, span: Span, ident: ast::Ident)
-> P<ast::Ty> {
self.ty_path(self.path_ident(span, ident), None)
}
fn ty_rptr(&self,
span: Span,
ty: P<ast::Ty>,
lifetime: Option<ast::Lifetime>,
mutbl: ast::Mutability)
-> P<ast::Ty> {
self.ty(span,
ast::TyRptr(lifetime, self.ty_mt(ty, mutbl)))
}
fn ty_uniq(&self, span: Span, ty: P<ast::Ty>) -> P<ast::Ty> {
self.ty(span, ast::TyUniq(ty))
}
fn ty_option(&self, ty: P<ast::Ty>) -> P<ast::Ty> {
self.ty_path(
self.path_all(DUMMY_SP,
true,
vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("Option")
),
Vec::new(),
vec!( ty )), None)
}
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField {
ast::TypeField {
ident: name,
mt: ast::MutTy { ty: ty, mutbl: ast::MutImmutable },
span: span,
}
}
fn ty_infer(&self, span: Span) -> P<ast::Ty> {
self.ty(span, ast::TyInfer)
}
fn ty_nil(&self) -> P<ast::Ty> {
P(ast::Ty {
id: ast::DUMMY_NODE_ID,
node: ast::TyNil,
span: DUMMY_SP,
})
}
fn typaram(&self,
span: Span,
id: ast::Ident,
bounds: OwnedSlice<ast::TyParamBound>,
unbound: Option<ast::TyParamBound>,
default: Option<P<ast::Ty>>) -> ast::TyParam {
ast::TyParam {
ident: id,
id: ast::DUMMY_NODE_ID,
bounds: bounds,
unbound: unbound,
default: default,
span: span
}
}
// these are strange, and probably shouldn't be used outside of
// pipes. Specifically, the global version possibly generates
// incorrect code.
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> {
ty_params.iter().map(|p| self.ty_ident(DUMMY_SP, p.ident)).collect()
}
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> {
ty_params.iter().map(|p| self.ty_path(
self.path_global(DUMMY_SP, vec!(p.ident)), None)).collect()
}
fn strip_bounds(&self, generics: &Generics) -> Generics {
let new_params = generics.ty_params.map(|ty_param| {
ast::TyParam { bounds: OwnedSlice::empty(), unbound: None, ..*ty_param }
});
Generics {
ty_params: new_params,
.. (*generics).clone()
}
}
fn trait_ref(&self, path: ast::Path) -> ast::TraitRef {
ast::TraitRef {
path: path,
ref_id: ast::DUMMY_NODE_ID
}
}
fn typarambound(&self, path: ast::Path) -> ast::TyParamBound {
ast::TraitTyParamBound(self.trait_ref(path))
}
fn lifetime(&self, span: Span, name: ast::Name) -> ast::Lifetime {
ast::Lifetime { id: ast::DUMMY_NODE_ID, span: span, name: name }
}
fn lifetime_def(&self,
span: Span,
name: ast::Name,
bounds: Vec<ast::Lifetime>)
-> ast::LifetimeDef {
ast::LifetimeDef {
lifetime: self.lifetime(span, name),
bounds: bounds
}
}
fn stmt_expr(&self, expr: Gc<ast::Expr>) -> Gc<ast::Stmt> {
box(GC) respan(expr.span, ast::StmtSemi(expr, ast::DUMMY_NODE_ID))
}
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident,
ex: Gc<ast::Expr>) -> Gc<ast::Stmt> {
let pat = if mutbl {
self.pat_ident_binding_mode(sp, ident, ast::BindByValue(ast::MutMutable))
} else {
self.pat_ident(sp, ident)
};
let local = box(GC) ast::Local {
ty: self.ty_infer(sp),
pat: pat,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
span: sp,
source: ast::LocalLet,
};
let decl = respan(sp, ast::DeclLocal(local));
box(GC) respan(sp, ast::StmtDecl(box(GC) decl, ast::DUMMY_NODE_ID))
}
fn stmt_let_typed(&self,
sp: Span,
mutbl: bool,
ident: ast::Ident,
typ: P<ast::Ty>,
ex: Gc<ast::Expr>)
-> Gc<ast::Stmt> {
let pat = if mutbl {
self.pat_ident_binding_mode(sp, ident, ast::BindByValue(ast::MutMutable))
} else {
self.pat_ident(sp, ident)
};
let local = box(GC) ast::Local {
ty: typ,
pat: pat,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
span: sp,
source: ast::LocalLet,
};
let decl = respan(sp, ast::DeclLocal(local));
box(GC) respan(sp, ast::StmtDecl(box(GC) decl, ast::DUMMY_NODE_ID))
}
fn block(&self,
span: Span,
stmts: Vec<Gc<ast::Stmt>>,
expr: Option<Gc<Expr>>)
-> P<ast::Block> {
self.block_all(span, Vec::new(), stmts, expr)
}
fn stmt_item(&self, sp: Span, item: Gc<ast::Item>) -> Gc<ast::Stmt> {
let decl = respan(sp, ast::DeclItem(item));
box(GC) respan(sp, ast::StmtDecl(box(GC) decl, ast::DUMMY_NODE_ID))
}
fn block_expr(&self, expr: Gc<ast::Expr>) -> P<ast::Block> {
self.block_all(expr.span, Vec::new(), Vec::new(), Some(expr))
}
fn block_all(&self,
span: Span,
view_items: Vec<ast::ViewItem> ,
stmts: Vec<Gc<ast::Stmt>>,
expr: Option<Gc<ast::Expr>>) -> P<ast::Block> {
P(ast::Block {
view_items: view_items,
stmts: stmts,
expr: expr,
id: ast::DUMMY_NODE_ID,
rules: ast::DefaultBlock,
span: span,
})
}
fn expr(&self, span: Span, node: ast::Expr_) -> Gc<ast::Expr> {
box(GC) ast::Expr {
id: ast::DUMMY_NODE_ID,
node: node,
span: span,
}
}
fn expr_path(&self, path: ast::Path) -> Gc<ast::Expr> {
self.expr(path.span, ast::ExprPath(path))
}
fn expr_ident(&self, span: Span, id: ast::Ident) -> Gc<ast::Expr> {
self.expr_path(self.path_ident(span, id))
}
fn expr_self(&self, span: Span) -> Gc<ast::Expr> {
self.expr_ident(span, special_idents::self_)
}
fn expr_binary(&self, sp: Span, op: ast::BinOp,
lhs: Gc<ast::Expr>, rhs: Gc<ast::Expr>) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprBinary(op, lhs, rhs))
}
fn expr_deref(&self, sp: Span, e: Gc<ast::Expr>) -> Gc<ast::Expr> {
self.expr_unary(sp, ast::UnDeref, e)
}
fn expr_unary(&self, sp: Span, op: ast::UnOp, e: Gc<ast::Expr>) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprUnary(op, e))
}
fn expr_managed(&self, sp: Span, e: Gc<ast::Expr>) -> Gc<ast::Expr> {
self.expr_unary(sp, ast::UnBox, e)
}
fn expr_field_access(&self, sp: Span, expr: Gc<ast::Expr>, ident: ast::Ident) -> Gc<ast::Expr> {
let field_name = token::get_ident(ident);
let field_span = Span {
lo: sp.lo - Pos::from_uint(field_name.get().len()),
hi: sp.hi,
expn_info: sp.expn_info,
};
let id = Spanned { node: ident, span: field_span };
self.expr(sp, ast::ExprField(expr, id, Vec::new()))
}
fn expr_addr_of(&self, sp: Span, e: Gc<ast::Expr>) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprAddrOf(ast::MutImmutable, e))
}
fn expr_mut_addr_of(&self, sp: Span, e: Gc<ast::Expr>) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprAddrOf(ast::MutMutable, e))
}
fn expr_call(&self, span: Span, expr: Gc<ast::Expr>,
args: Vec<Gc<ast::Expr>>) -> Gc<ast::Expr> {
self.expr(span, ast::ExprCall(expr, args))
}
fn expr_call_ident(&self, span: Span, id: ast::Ident,
args: Vec<Gc<ast::Expr>>) -> Gc<ast::Expr> {
self.expr(span, ast::ExprCall(self.expr_ident(span, id), args))
}
fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> ,
args: Vec<Gc<ast::Expr>> ) -> Gc<ast::Expr> {
let pathexpr = self.expr_path(self.path_global(sp, fn_path));
self.expr_call(sp, pathexpr, args)
}
fn expr_method_call(&self, span: Span,
expr: Gc<ast::Expr>,
ident: ast::Ident,
mut args: Vec<Gc<ast::Expr>> ) -> Gc<ast::Expr> {
let id = Spanned { node: ident, span: span };
args.unshift(expr);
self.expr(span, ast::ExprMethodCall(id, Vec::new(), args))
}
fn expr_block(&self, b: P<ast::Block>) -> Gc<ast::Expr> {
self.expr(b.span, ast::ExprBlock(b))
}
fn field_imm(&self, span: Span, name: Ident, e: Gc<ast::Expr>) -> ast::Field {
ast::Field { ident: respan(span, name), expr: e, span: span }
}
fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field> ) -> Gc<ast::Expr> {
self.expr(span, ast::ExprStruct(path, fields, None))
}
fn expr_struct_ident(&self, span: Span,
id: ast::Ident, fields: Vec<ast::Field> ) -> Gc<ast::Expr> {
self.expr_struct(span, self.path_ident(span, id), fields)
}
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprLit(box(GC) respan(sp, lit)))
}
fn expr_uint(&self, span: Span, i: uint) -> Gc<ast::Expr> {
self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyU)))
}
fn expr_int(&self, sp: Span, i: int) -> Gc<ast::Expr> {
self.expr_lit(sp, ast::LitInt(i as u64, ast::SignedIntLit(ast::TyI, ast::Sign::new(i))))
}
fn expr_u8(&self, sp: Span, u: u8) -> Gc<ast::Expr> {
self.expr_lit(sp, ast::LitInt(u as u64, ast::UnsignedIntLit(ast::TyU8)))
}
fn expr_bool(&self, sp: Span, value: bool) -> Gc<ast::Expr> {
self.expr_lit(sp, ast::LitBool(value))
}
fn expr_vstore(&self, sp: Span, expr: Gc<ast::Expr>, vst: ast::ExprVstore) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprVstore(expr, vst))
}
fn expr_vec(&self, sp: Span, exprs: Vec<Gc<ast::Expr>> ) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprVec(exprs))
}
fn expr_vec_ng(&self, sp: Span) -> Gc<ast::Expr> {
self.expr_call_global(sp,
vec!(self.ident_of("std"),
self.ident_of("vec"),
self.ident_of("Vec"),
self.ident_of("new")),
Vec::new())
}
fn expr_vec_slice(&self, sp: Span, exprs: Vec<Gc<ast::Expr>> ) -> Gc<ast::Expr> {
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice)
}
fn expr_str(&self, sp: Span, s: InternedString) -> Gc<ast::Expr> {
self.expr_lit(sp, ast::LitStr(s, ast::CookedStr))
}
fn expr_str_uniq(&self, sp: Span, s: InternedString) -> Gc<ast::Expr> {
self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq)
}
fn expr_cast(&self, sp: Span, expr: Gc<ast::Expr>, ty: P<ast::Ty>) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprCast(expr, ty))
}
fn expr_some(&self, sp: Span, expr: Gc<ast::Expr>) -> Gc<ast::Expr> {
let some = vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("Some"));
self.expr_call_global(sp, some, vec!(expr))
}
fn expr_none(&self, sp: Span) -> Gc<ast::Expr> {
let none = self.path_global(sp, vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("None")));
self.expr_path(none)
}
fn expr_tuple(&self, sp: Span, exprs: Vec<Gc<ast::Expr>>) -> Gc<ast::Expr> {
self.expr(sp, ast::ExprTup(exprs))
}
fn expr_fail(&self, span: Span, msg: InternedString) -> Gc<ast::Expr> {
let loc = self.codemap().lookup_char_pos(span.lo);
let expr_file = self.expr_str(span,
token::intern_and_get_ident(loc.file
.name
.as_slice()));
let expr_line = self.expr_uint(span, loc.line);
let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
self.expr_call_global(
span,
vec!(
self.ident_of("std"),
self.ident_of("rt"),
self.ident_of("begin_unwind")),
vec!(
self.expr_str(span, msg),
expr_file_line_ptr))
}
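    // A sketch (not part of the original source) of the expression built by
    // `expr_fail` above; the message and location are illustrative:
    //
    //     ::std::rt::begin_unwind("some message", &("lib.rs", 7))
    //
    // i.e. the same shape the `fail!()` macro expands to at this point.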
fn expr_unreachable(&self, span: Span) -> Gc<ast::Expr> {
self.expr_fail(span,
InternedString::new(
"internal error: entered unreachable code"))
}
fn expr_ok(&self, sp: Span, expr: Gc<ast::Expr>) -> Gc<ast::Expr> {
let ok = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Ok"));
self.expr_call_global(sp, ok, vec!(expr))
}
fn expr_err(&self, sp: Span, expr: Gc<ast::Expr>) -> Gc<ast::Expr> {
let err = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Err"));
self.expr_call_global(sp, err, vec!(expr))
}
fn expr_try(&self, sp: Span, head: Gc<ast::Expr>) -> Gc<ast::Expr> {
let ok = self.ident_of("Ok");
let ok_path = self.path_ident(sp, ok);
let err = self.ident_of("Err");
let err_path = self.path_ident(sp, err);
let binding_variable = self.ident_of("__try_var");
let binding_pat = self.pat_ident(sp, binding_variable);
let binding_expr = self.expr_ident(sp, binding_variable);
// Ok(__try_var) pattern
let ok_pat = self.pat_enum(sp, ok_path, vec!(binding_pat));
// Err(__try_var) (pattern and expression resp.)
let err_pat = self.pat_enum(sp, err_path, vec!(binding_pat));
let err_inner_expr = self.expr_call_ident(sp, err, vec!(binding_expr));
// return Err(__try_var)
let err_expr = self.expr(sp, ast::ExprRet(Some(err_inner_expr)));
// Ok(__try_var) => __try_var
let ok_arm = self.arm(sp, vec!(ok_pat), binding_expr);
// Err(__try_var) => return Err(__try_var)
let err_arm = self.arm(sp, vec!(err_pat), err_expr);
// match head { Ok() => ..., Err() => ... }
self.expr_match(sp, head, vec!(ok_arm, err_arm))
}
fn pat(&self, span: Span, pat: ast::Pat_) -> Gc<ast::Pat> {
box(GC) ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span }
}
fn pat_wild(&self, span: Span) -> Gc<ast::Pat> {
self.pat(span, ast::PatWild(ast::PatWildSingle))
}
fn pat_lit(&self, span: Span, expr: Gc<ast::Expr>) -> Gc<ast::Pat> {
self.pat(span, ast::PatLit(expr))
}
fn pat_ident(&self, span: Span, ident: ast::Ident) -> Gc<ast::Pat> {
self.pat_ident_binding_mode(span, ident, ast::BindByValue(ast::MutImmutable))
}
fn pat_ident_binding_mode(&self,
span: Span,
ident: ast::Ident,
bm: ast::BindingMode) -> Gc<ast::Pat> {
let pat = ast::PatIdent(bm, Spanned{span: span, node: ident}, None);
self.pat(span, pat)
}
fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<Gc<ast::Pat>> ) -> Gc<ast::Pat> {
let pat = ast::PatEnum(path, Some(subpats));
self.pat(span, pat)
}
fn pat_struct(&self, span: Span,
path: ast::Path, field_pats: Vec<ast::FieldPat> ) -> Gc<ast::Pat> {
let pat = ast::PatStruct(path, field_pats, false);
self.pat(span, pat)
}
fn pat_tuple(&self, span: Span, pats: Vec<Gc<ast::Pat>>) -> Gc<ast::Pat> {
let pat = ast::PatTup(pats);
self.pat(span, pat)
}
fn pat_some(&self, span: Span, pat: Gc<ast::Pat>) -> Gc<ast::Pat> {
let some = vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("Some"));
let path = self.path_global(span, some);
self.pat_enum(span, path, vec!(pat))
}
fn pat_none(&self, span: Span) -> Gc<ast::Pat> {
let some = vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("None"));
let path = self.path_global(span, some);
self.pat_enum(span, path, vec!())
}
fn pat_ok(&self, span: Span, pat: Gc<ast::Pat>) -> Gc<ast::Pat> {
let some = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Ok"));
let path = self.path_global(span, some);
self.pat_enum(span, path, vec!(pat))
}
fn pat_err(&self, span: Span, pat: Gc<ast::Pat>) -> Gc<ast::Pat> {
let some = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Err"));
let path = self.path_global(span, some);
self.pat_enum(span, path, vec!(pat))
}
fn arm(&self, _span: Span, pats: Vec<Gc<ast::Pat>> , expr: Gc<ast::Expr>) -> ast::Arm {
ast::Arm {
attrs: vec!(),
pats: pats,
guard: None,
body: expr
}
}
    fn arm_unreachable(&self, span: Span) -> ast::Arm {
self.arm(span, vec!(self.pat_wild(span)), self.expr_unreachable(span))
}
fn expr_match(&self, span: Span, arg: Gc<ast::Expr>,
arms: Vec<ast::Arm>) -> Gc<Expr> {
self.expr(span, ast::ExprMatch(arg, arms))
}
fn expr_if(&self, span: Span,
cond: Gc<ast::Expr>, then: Gc<ast::Expr>,
els: Option<Gc<ast::Expr>>) -> Gc<ast::Expr> {
let els = els.map(|x| self.expr_block(self.block_expr(x)));
self.expr(span, ast::ExprIf(cond, self.block_expr(then), els))
}
fn expr_loop(&self, span: Span, block: P<ast::Block>) -> Gc<ast::Expr> {
self.expr(span, ast::ExprLoop(block, None))
}
fn lambda_fn_decl(&self, span: Span,
fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> Gc<ast::Expr> {
self.expr(span, ast::ExprFnBlock(ast::CaptureByRef, fn_decl, blk))
}
fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> Gc<ast::Expr> {
let fn_decl = self.fn_decl(
ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(),
self.ty_infer(span));
self.expr(span, ast::ExprFnBlock(ast::CaptureByRef, fn_decl, blk))
}
fn lambda0(&self, span: Span, blk: P<ast::Block>) -> Gc<ast::Expr> {
self.lambda(span, Vec::new(), blk)
}
fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> Gc<ast::Expr> {
self.lambda(span, vec!(ident), blk)
}
fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , expr: Gc<ast::Expr>) -> Gc<ast::Expr> {
self.lambda(span, ids, self.block_expr(expr))
}
fn lambda_expr_0(&self, span: Span, expr: Gc<ast::Expr>) -> Gc<ast::Expr> {
self.lambda0(span, self.block_expr(expr))
}
fn lambda_expr_1(&self, span: Span, expr: Gc<ast::Expr>, ident: ast::Ident) -> Gc<ast::Expr> {
self.lambda1(span, self.block_expr(expr), ident)
}
fn lambda_stmts(&self,
span: Span,
ids: Vec<ast::Ident>,
stmts: Vec<Gc<ast::Stmt>>)
-> Gc<ast::Expr> {
self.lambda(span, ids, self.block(span, stmts, None))
}
fn lambda_stmts_0(&self, span: Span,
stmts: Vec<Gc<ast::Stmt>>) -> Gc<ast::Expr> {
self.lambda0(span, self.block(span, stmts, None))
}
fn lambda_stmts_1(&self, span: Span, stmts: Vec<Gc<ast::Stmt>>,
ident: ast::Ident) -> Gc<ast::Expr> {
self.lambda1(span, self.block(span, stmts, None), ident)
}
fn arg(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Arg {
let arg_pat = self.pat_ident(span, ident);
ast::Arg {
ty: ty,
pat: arg_pat,
id: ast::DUMMY_NODE_ID
}
}
// FIXME unused self
fn fn_decl(&self, inputs: Vec<ast::Arg> , output: P<ast::Ty>) -> P<ast::FnDecl> {
P(ast::FnDecl {
inputs: inputs,
output: output,
cf: ast::Return,
variadic: false
})
}
fn item(&self, span: Span,
name: Ident, attrs: Vec<ast::Attribute>,
node: ast::Item_) -> Gc<ast::Item> {
// FIXME: Would be nice if our generated code didn't violate
// Rust coding conventions
box(GC) ast::Item { ident: name,
attrs: attrs,
id: ast::DUMMY_NODE_ID,
node: node,
vis: ast::Inherited,
span: span }
}
fn item_fn_poly(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
generics: Generics,
body: P<ast::Block>) -> Gc<ast::Item> {
self.item(span,
name,
Vec::new(),
ast::ItemFn(self.fn_decl(inputs, output),
ast::NormalFn,
abi::Rust,
generics,
body))
}
fn item_fn(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
body: P<ast::Block>
) -> Gc<ast::Item> {
self.item_fn_poly(
span,
name,
inputs,
output,
ast_util::empty_generics(),
body)
}
fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant {
let args = tys.move_iter().map(|ty| {
ast::VariantArg { ty: ty, id: ast::DUMMY_NODE_ID }
}).collect();
respan(span,
ast::Variant_ {
name: name,
attrs: Vec::new(),
kind: ast::TupleVariantKind(args),
id: ast::DUMMY_NODE_ID,
disr_expr: None,
vis: ast::Public
})
}
fn item_enum_poly(&self, span: Span, name: Ident,
enum_definition: ast::EnumDef,
generics: Generics) -> Gc<ast::Item> {
self.item(span, name, Vec::new(), ast::ItemEnum(enum_definition, generics))
}
fn item_enum(&self, span: Span, name: Ident,
enum_definition: ast::EnumDef) -> Gc<ast::Item> {
self.item_enum_poly(span, name, enum_definition,
ast_util::empty_generics())
}
fn item_struct(&self, span: Span, name: Ident,
struct_def: ast::StructDef) -> Gc<ast::Item> {
self.item_struct_poly(
span,
name,
struct_def,
ast_util::empty_generics()
)
}
fn item_struct_poly(&self, span: Span, name: Ident,
struct_def: ast::StructDef, generics: Generics) -> Gc<ast::Item> {
self.item(span, name, Vec::new(), ast::ItemStruct(box(GC) struct_def, generics))
}
fn item_mod(&self, span: Span, inner_span: Span, name: Ident,
attrs: Vec<ast::Attribute> ,
vi: Vec<ast::ViewItem> ,
items: Vec<Gc<ast::Item>>) -> Gc<ast::Item> {
self.item(
span,
name,
attrs,
ast::ItemMod(ast::Mod {
inner: inner_span,
view_items: vi,
items: items,
})
)
}
fn item_static(&self,
span: Span,
name: Ident,
ty: P<ast::Ty>,
mutbl: ast::Mutability,
expr: Gc<ast::Expr>)
-> Gc<ast::Item> {
self.item(span, name, Vec::new(), ast::ItemStatic(ty, mutbl, expr))
}
fn item_ty_poly(&self, span: Span, name: Ident, ty: P<ast::Ty>,
generics: Generics) -> Gc<ast::Item> {
self.item(span, name, Vec::new(), ast::ItemTy(ty, generics))
}
fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> Gc<ast::Item> {
self.item_ty_poly(span, name, ty, ast_util::empty_generics())
}
fn attribute(&self, sp: Span, mi: Gc<ast::MetaItem>) -> ast::Attribute {
respan(sp, ast::Attribute_ {
id: attr::mk_attr_id(),
style: ast::AttrOuter,
value: mi,
is_sugared_doc: false,
})
}
fn meta_word(&self, sp: Span, w: InternedString) -> Gc<ast::MetaItem> {
box(GC) respan(sp, ast::MetaWord(w))
}
fn meta_list(&self,
sp: Span,
name: InternedString,
mis: Vec<Gc<ast::MetaItem>> )
-> Gc<ast::MetaItem> {
box(GC) respan(sp, ast::MetaList(name, mis))
}
fn meta_name_value(&self,
sp: Span,
name: InternedString,
value: ast::Lit_)
-> Gc<ast::MetaItem> {
box(GC) respan(sp, ast::MetaNameValue(name, respan(sp, value)))
}
fn view_use(&self, sp: Span,
vis: ast::Visibility, vp: Gc<ast::ViewPath>) -> ast::ViewItem {
ast::ViewItem {
node: ast::ViewItemUse(vp),
attrs: Vec::new(),
vis: vis,
span: sp
}
}
fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem {
let last = path.segments.last().unwrap().identifier;
self.view_use_simple_(sp, vis, last, path)
}
fn view_use_simple_(&self, sp: Span, vis: ast::Visibility,
ident: ast::Ident, path: ast::Path) -> ast::ViewItem {
self.view_use(sp, vis,
box(GC) respan(sp,
ast::ViewPathSimple(ident,
path,
ast::DUMMY_NODE_ID)))
}
fn view_use_list(&self, sp: Span, vis: ast::Visibility,
path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem {
let imports = imports.iter().map(|id| {
respan(sp, ast::PathListIdent { name: *id, id: ast::DUMMY_NODE_ID })
}).collect();
self.view_use(sp, vis,
box(GC) respan(sp,
ast::ViewPathList(self.path(sp, path),
imports,
ast::DUMMY_NODE_ID)))
}
fn view_use_glob(&self, sp: Span,
vis: ast::Visibility, path: Vec<ast::Ident> ) -> ast::ViewItem {
self.view_use(sp, vis,
box(GC) respan(sp,
ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID)))
}
}
struct Duplicator<'a>;
impl<'a> Folder for Duplicator<'a> {
fn new_id(&mut self, _: NodeId) -> NodeId {
ast::DUMMY_NODE_ID
}
}
pub trait Duplicate {
//
// Duplication functions
//
// These functions just duplicate AST nodes.
//
fn duplicate(&self, cx: &ExtCtxt) -> Self;
}
impl Duplicate for Gc<ast::Expr> {
fn duplicate(&self, _: &ExtCtxt) -> Gc<ast::Expr> {
let mut folder = Duplicator;
folder.fold_expr(*self)
}
}
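// Illustrative (hypothetical) use of `Duplicate`: fold an expression through
// `Duplicator` so every node id is reset to DUMMY_NODE_ID, letting the copy
// be spliced into the AST independently of the original:
//
//     let copy = expr.duplicate(cx);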
test_item_option.py
# coding: utf-8
"""
UltraCart Rest API V2
UltraCart REST API Version 2
OpenAPI spec version: 2.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import ultracart
from ultracart.rest import ApiException
from ultracart.models.item_option import ItemOption
class TestItemOption(unittest.TestCase):
""" ItemOption unit test stubs """
def setUp(self):
pass
    def tearDown(self):
pass
def testItemOption(self):
"""
Test ItemOption
"""
# FIXME: construct object with mandatory attributes with example values
#model = ultracart.models.item_option.ItemOption()
pass
if __name__ == '__main__':
unittest.main()
build_test.go
/*
Copyright 2019 The Skaffold Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package bazel
import (
"context"
"io/ioutil"
"testing"
"github.com/GoogleContainerTools/skaffold/pkg/skaffold/docker"
latest_v1 "github.com/GoogleContainerTools/skaffold/pkg/skaffold/schema/latest/v1"
"github.com/GoogleContainerTools/skaffold/pkg/skaffold/util"
"github.com/GoogleContainerTools/skaffold/testutil"
)
func TestBuildBazel(t *testing.T) {
testutil.Run(t, "", func(t *testutil.T) {
t.NewTempDir().Mkdir("bin").Chdir()
t.Override(&util.DefaultExecCommand, testutil.CmdRun("bazel build //:app.tar --color=no").AndRunOut("bazel info bazel-bin", "bin"))
testutil.CreateFakeImageTar("bazel:app", "bin/app.tar")
artifact := &latest_v1.Artifact{
Workspace: ".",
ArtifactType: latest_v1.ArtifactType{
BazelArtifact: &latest_v1.BazelArtifact{
BuildTarget: "//:app.tar",
},
},
}
builder := NewArtifactBuilder(fakeLocalDaemon(), &mockConfig{}, false)
_, err := builder.Build(context.Background(), ioutil.Discard, artifact, "img:tag")
t.CheckNoError(err)
})
}
func TestBuildBazelFailInvalidTarget(t *testing.T) {
testutil.Run(t, "", func(t *testutil.T) {
artifact := &latest_v1.Artifact{
ArtifactType: latest_v1.ArtifactType{
BazelArtifact: &latest_v1.BazelArtifact{
BuildTarget: "//:invalid-target",
},
},
}
builder := NewArtifactBuilder(nil, &mockConfig{}, false)
_, err := builder.Build(context.Background(), ioutil.Discard, artifact, "img:tag")
t.CheckErrorContains("the bazel build target should end with .tar", err)
})
}
func TestBazelBin(t *testing.T) {
testutil.Run(t, "", func(t *testutil.T) {
t.Override(&util.DefaultExecCommand, testutil.CmdRunOut(
"bazel info bazel-bin --arg1 --arg2",
"/absolute/path/bin\n",
))
bazelBin, err := bazelBin(context.Background(), ".", &latest_v1.BazelArtifact{
BuildArgs: []string{"--arg1", "--arg2"},
})
t.CheckNoError(err)
t.CheckDeepEqual("/absolute/path/bin", bazelBin)
})
}
func TestBuildTarPath(t *testing.T) {
buildTarget := "//:skaffold_example.tar"
tarPath := buildTarPath(buildTarget)
testutil.CheckDeepEqual(t, "skaffold_example.tar", tarPath)
}
func TestBuildImageTag(t *testing.T) {
	buildTarget := "//:skaffold_example.tar"
	imageTag := buildImageTag(buildTarget)
	testutil.CheckDeepEqual(t, "bazel:skaffold_example", imageTag)
}
func fakeLocalDaemon() docker.LocalDaemon {
return docker.NewLocalDaemon(&testutil.FakeAPIClient{}, nil, false, nil)
}
type mockConfig struct {
docker.Config
}
func (c *mockConfig) GetInsecureRegistries() map[string]bool { return nil }
extension.py
"""
Shared methods for Index subclasses backed by ExtensionArray.
"""
from typing import (
Hashable,
List,
Type,
TypeVar,
Union,
)
import numpy as np
from pandas.compat.numpy import function as nv
from pandas.errors import AbstractMethodError
from pandas.util._decorators import (
cache_readonly,
doc,
)
from pandas.core.dtypes.cast import (
find_common_type,
infer_dtype_from,
)
from pandas.core.dtypes.common import (
is_dtype_equal,
is_object_dtype,
pandas_dtype,
)
from pandas.core.dtypes.generic import (
ABCDataFrame,
ABCSeries,
)
from pandas.core.arrays import (
Categorical,
DatetimeArray,
IntervalArray,
PeriodArray,
TimedeltaArray,
)
from pandas.core.arrays._mixins import NDArrayBackedExtensionArray
from pandas.core.indexers import deprecate_ndim_indexing
from pandas.core.indexes.base import Index
from pandas.core.ops import get_op_result_name
_T = TypeVar("_T", bound="NDArrayBackedExtensionIndex")
def inherit_from_data(name: str, delegate, cache: bool = False, wrap: bool = False):
"""
Make an alias for a method of the underlying ExtensionArray.
Parameters
----------
name : str
Name of an attribute the class should inherit from its EA parent.
delegate : class
cache : bool, default False
Whether to convert wrapped properties into cache_readonly
wrap : bool, default False
Whether to wrap the inherited result in an Index.
Returns
-------
attribute, method, property, or cache_readonly
"""
attr = getattr(delegate, name)
if isinstance(attr, property) or type(attr).__name__ == "getset_descriptor":
# getset_descriptor i.e. property defined in cython class
if cache:
def cached(self):
return getattr(self._data, name)
cached.__name__ = name
cached.__doc__ = attr.__doc__
method = cache_readonly(cached)
else:
def fget(self):
result = getattr(self._data, name)
if wrap:
if isinstance(result, type(self._data)):
return type(self)._simple_new(result, name=self.name)
elif isinstance(result, ABCDataFrame):
return result.set_index(self)
return Index(result, name=self.name)
return result
def fset(self, value):
setattr(self._data, name, value)
fget.__name__ = name
fget.__doc__ = attr.__doc__
method = property(fget, fset)
elif not callable(attr):
# just a normal attribute, no wrapping
method = attr
else:
def method(self, *args, **kwargs):
result = attr(self._data, *args, **kwargs)
if wrap:
if isinstance(result, type(self._data)):
return type(self)._simple_new(result, name=self.name)
elif isinstance(result, ABCDataFrame):
return result.set_index(self)
return Index(result, name=self.name)
return result
method.__name__ = name
method.__doc__ = attr.__doc__
return method
def inherit_names(names: List[str], delegate, cache: bool = False, wrap: bool = False):
"""
    Class decorator to pin attributes from an ExtensionArray to an Index subclass.
Parameters
----------
names : List[str]
delegate : class
cache : bool, default False
wrap : bool, default False
Whether to wrap the inherited result in an Index.
"""
def wrapper(cls):
for name in names:
meth = inherit_from_data(name, delegate, cache=cache, wrap=wrap)
setattr(cls, name, meth)
return cls
return wrapper
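# Illustrative (hypothetical) usage of the decorator above; the attribute
# names here are examples only, not the ones pandas actually pins:
#
#   @inherit_names(["freq", "tz"], DatetimeArray, cache=True)
#   class MyDatetimeIndex(ExtensionIndex):
#       ...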
def _make_wrapped_comparison_op(opname: str):
"""
Create a comparison method that dispatches to ``._data``.
"""
def wrapper(self, other):
if isinstance(other, ABCSeries):
# the arrays defer to Series for comparison ops but the indexes
# don't, so we have to unwrap here.
other = other._values
other = _maybe_unwrap_index(other)
op = getattr(self._data, opname)
return op(other)
wrapper.__name__ = opname
return wrapper
def make_wrapped_arith_op(opname: str):
def method(self, other):
if (
isinstance(other, Index)
and is_object_dtype(other.dtype)
and type(other) is not Index
):
# We return NotImplemented for object-dtype index *subclasses* so they have
# a chance to implement ops before we unwrap them.
# See https://github.com/pandas-dev/pandas/issues/31109
return NotImplemented
meth = getattr(self._data, opname)
result = meth(_maybe_unwrap_index(other))
return _wrap_arithmetic_op(self, other, result)
method.__name__ = opname
return method
def _wrap_arithmetic_op(self, other, result):
if result is NotImplemented:
return NotImplemented
if isinstance(result, tuple):
# divmod, rdivmod
assert len(result) == 2
return (
_wrap_arithmetic_op(self, other, result[0]),
_wrap_arithmetic_op(self, other, result[1]),
)
if not isinstance(result, Index):
# Index.__new__ will choose appropriate subclass for dtype
result = Index(result)
res_name = get_op_result_name(self, other)
result.name = res_name
return result
def _maybe_unwrap_index(obj):
"""
If operating against another Index object, we need to unwrap the underlying
data before deferring to the DatetimeArray/TimedeltaArray/PeriodArray
implementation, otherwise we will incorrectly return NotImplemented.
Parameters
----------
obj : object
Returns
-------
unwrapped object
"""
if isinstance(obj, Index):
return obj._data
return obj
class ExtensionIndex(Index):
"""
Index subclass for indexes backed by ExtensionArray.
"""
# The base class already passes through to _data:
# size, __len__, dtype
_data: Union[IntervalArray, NDArrayBackedExtensionArray]
__eq__ = _make_wrapped_comparison_op("__eq__")
__ne__ = _make_wrapped_comparison_op("__ne__")
__lt__ = _make_wrapped_comparison_op("__lt__")
__gt__ = _make_wrapped_comparison_op("__gt__")
__le__ = _make_wrapped_comparison_op("__le__")
__ge__ = _make_wrapped_comparison_op("__ge__")
@property
def _has_complex_internals(self) -> bool:
# used to avoid libreduction code paths, which raise or require conversion
return True
# ---------------------------------------------------------------------
# NDarray-Like Methods
def __getitem__(self, key):
result = self._data[key]
if isinstance(result, type(self._data)):
if result.ndim == 1:
return type(self)(result, name=self.name)
# Unpack to ndarray for MPL compat
result = result._ndarray
# Includes cases where we get a 2D ndarray back for MPL compat
deprecate_ndim_indexing(result)
return result
def searchsorted(self, value, side="left", sorter=None) -> np.ndarray:
# overriding IndexOpsMixin improves performance GH#38083
return self._data.searchsorted(value, side=side, sorter=sorter)
# ---------------------------------------------------------------------
def _get_engine_target(self) -> np.ndarray:
return np.asarray(self._data)
def delete(self, loc):
"""
Make new Index with passed location(-s) deleted
Returns
-------
new_index : Index
"""
arr = self._data.delete(loc)
return type(self)._simple_new(arr, name=self.name)
def repeat(self, repeats, axis=None):
nv.validate_repeat((), {"axis": axis})
result = self._data.repeat(repeats, axis=axis)
return type(self)._simple_new(result, name=self.name)
def insert(self, loc: int, item):
# ExtensionIndex subclasses must override Index.insert
raise AbstractMethodError(self)
def _validate_fill_value(self, value):
"""
Convert value to be insertable to underlying array.
"""
return self._data._validate_setitem_value(value)
def _get_unique_index(self):
if self.is_unique:
return self
result = self._data.unique()
return self._shallow_copy(result)
@doc(Index.map)
def map(self, mapper, na_action=None):
# Try to run function on index first, and then on elements of index
# Especially important for group-by functionality
try:
result = mapper(self)
# Try to use this result if we can
if isinstance(result, np.ndarray):
result = Index(result)
if not isinstance(result, Index):
raise TypeError("The map function must return an Index object")
return result
except Exception:
return self.astype(object).map(mapper)
@doc(Index.astype)
def astype(self, dtype, copy=True):
dtype = pandas_dtype(dtype)
if is_dtype_equal(self.dtype, dtype):
if not copy:
# Ensure that self.astype(self.dtype) is self
return self
return self.copy()
if isinstance(dtype, np.dtype) and dtype.kind == "M" and dtype != "M8[ns]":
# For now Datetime supports this by unwrapping ndarray, but DTI doesn't
raise TypeError(f"Cannot cast {type(self._data).__name__} to dtype")
new_values = self._data.astype(dtype, copy=copy)
# pass copy=False because any copying will be done in the
# _data.astype call above
return Index(new_values, dtype=new_values.dtype, name=self.name, copy=False)
@cache_readonly
def _isnan(self) -> np.ndarray:
# error: Incompatible return value type (got "ExtensionArray", expected
# "ndarray")
return self._data.isna() # type: ignore[return-value]
@doc(Index.equals)
def equals(self, other) -> bool:
# Dispatch to the ExtensionArray's .equals method.
        if self.is_(other):
            return True
        if not isinstance(other, type(self)):
            return False
        return self._data.equals(other._data)
class NDArrayBackedExtensionIndex(ExtensionIndex):
"""
Index subclass for indexes backed by NDArrayBackedExtensionArray.
"""
_data: NDArrayBackedExtensionArray
_data_cls: Union[
Type[Categorical],
Type[DatetimeArray],
Type[TimedeltaArray],
Type[PeriodArray],
]
@classmethod
def _simple_new(
cls,
values: NDArrayBackedExtensionArray,
name: Hashable = None,
):
assert isinstance(values, cls._data_cls), type(values)
result = object.__new__(cls)
result._data = values
result._name = name
result._cache = {}
# For groupby perf. See note in indexes/base about _index_data
result._index_data = values._ndarray
result._reset_identity()
return result
def _get_engine_target(self) -> np.ndarray:
return self._data._ndarray
def insert(self: _T, loc: int, item) -> _T:
"""
Make new Index inserting new item at location. Follows
Python list.append semantics for negative values.
Parameters
----------
loc : int
item : object
Returns
-------
new_index : Index
Raises
------
ValueError if the item is not valid for this dtype.
"""
arr = self._data
try:
code = arr._validate_scalar(item)
except (ValueError, TypeError):
# e.g. trying to insert an integer into a DatetimeIndex
# We cannot keep the same dtype, so cast to the (often object)
# minimal shared dtype before doing the insert.
dtype, _ = infer_dtype_from(item, pandas_dtype=True)
dtype = find_common_type([self.dtype, dtype])
return self.astype(dtype).insert(loc, item)
else:
new_vals = np.concatenate(
(
arr._ndarray[:loc],
np.asarray([code], dtype=arr._ndarray.dtype),
arr._ndarray[loc:],
)
)
new_arr = arr._from_backing_data(new_vals)
return type(self)._simple_new(new_arr, name=self.name)
def putmask(self, mask, value) -> Index:
res_values = self._data.copy()
try:
res_values.putmask(mask, value)
except (TypeError, ValueError):
return self.astype(object).putmask(mask, value)
return type(self)._simple_new(res_values, name=self.name)
def _wrap_joined_index(self: _T, joined: np.ndarray, other: _T) -> _T:
name = get_op_result_name(self, other)
arr = self._data._from_backing_data(joined)
return type(self)._simple_new(arr, name=name)
index.tsx
import styles from './readyToTalk.module.css';
export default function ReadyToTalk() {
return (
<>
<section className={styles.ready_to_talk}>
<div>
<h3>Ready to talk?</h3>
<p>Google Assistant can answer most of your questions.</p>
<div className={styles.icon}>
<div
className={styles.bar}
style={{ backgroundColor: '#3498db', marginLeft: -60 }}></div>
<div
className={styles.bar}
style={{ backgroundColor: '#e74c3c', marginLeft: -20 }}></div>
<div
className={styles.bar}
style={{ backgroundColor: '#f1c40f', marginLeft: 20 }}></div>
<div
className={styles.bar}
style={{ backgroundColor: '#27ae60', marginLeft: 60 }}></div>
</div>
<a
href="https://assistant.google.com/services/invoke/uid/0000006c15659a07"
target="_blank"
rel="noreferrer noopener"
className={` ${styles.btn} ${styles.talkDSC}`}
style={{ color: 'grey', backgroundColor: 'white' }}>
Talk to DSC KIIT
</a>
<br />
<br />
<div className={styles.centered}>
<a
              href="#contact"
              className={`${styles.btn} ${styles.contactAssist}`}
              style={{ color: 'grey', backgroundColor: 'white' }}>
              Contact
            </a>
          </div>
</div>
</section>
</>
);
}
color.rs
//! Color conversions and types.
//!
//! If you want a compact color representation, use [`Color32`].
//! If you want to manipulate RGBA colors use [`Rgba`].
//! If you want to manipulate colors in a way closer to how humans think about colors, use [`HsvaGamma`].
/// This format is used for space-efficient color representation (32 bits).
///
/// Instead of manipulating this directly it is often better
/// to first convert it to either [`Rgba`] or [`Hsva`].
///
/// Internally this uses 0-255 gamma space `sRGBA` color with premultiplied alpha.
/// Alpha channel is in linear space.
#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
#[cfg_attr(feature = "persistence", derive(serde::Deserialize, serde::Serialize))]
pub struct Color32(pub(crate) [u8; 4]);
impl std::ops::Index<usize> for Color32 {
type Output = u8;
#[inline(always)]
fn index(&self, index: usize) -> &u8 {
&self.0[index]
}
}
impl std::ops::IndexMut<usize> for Color32 {
#[inline(always)]
fn index_mut(&mut self, index: usize) -> &mut u8 {
&mut self.0[index]
}
}
#[deprecated = "Replaced by Color32::from_rgb… family of functions."]
pub const fn srgba(r: u8, g: u8, b: u8, a: u8) -> Color32 {
Color32::from_rgba_premultiplied(r, g, b, a)
}
impl Color32 {
pub const TRANSPARENT: Color32 = Color32::from_rgba_premultiplied(0, 0, 0, 0);
pub const BLACK: Color32 = Color32::from_rgb(0, 0, 0);
pub const LIGHT_GRAY: Color32 = Color32::from_rgb(220, 220, 220);
pub const GRAY: Color32 = Color32::from_rgb(160, 160, 160);
pub const WHITE: Color32 = Color32::from_rgb(255, 255, 255);
pub const RED: Color32 = Color32::from_rgb(255, 0, 0);
pub const YELLOW: Color32 = Color32::from_rgb(255, 255, 0);
pub const GREEN: Color32 = Color32::from_rgb(0, 255, 0);
pub const BLUE: Color32 = Color32::from_rgb(0, 0, 255);
pub const LIGHT_BLUE: Color32 = Color32::from_rgb(140, 160, 255);
pub const GOLD: Color32 = Color32::from_rgb(255, 215, 0);
#[inline(always)]
pub const fn from_rgb(r: u8, g: u8, b: u8) -> Self {
Self([r, g, b, 255])
}
#[inline(always)]
pub const fn from_rgb_additive(r: u8, g: u8, b: u8) -> Self {
Self([r, g, b, 0])
}
/// From `sRGBA` with premultiplied alpha.
#[inline(always)]
pub const fn from_rgba_premultiplied(r: u8, g: u8, b: u8, a: u8) -> Self {
Self([r, g, b, a])
}
/// From `sRGBA` WITHOUT premultiplied alpha.
pub fn from_rgba_unmultiplied(r: u8, g: u8, b: u8, a: u8) -> Self {
if a == 255 {
Self::from_rgba_premultiplied(r, g, b, 255) // common-case optimization
} else if a == 0 {
Self::TRANSPARENT // common-case optimization
} else {
let r_lin = linear_f32_from_gamma_u8(r);
let g_lin = linear_f32_from_gamma_u8(g);
let b_lin = linear_f32_from_gamma_u8(b);
let a_lin = linear_f32_from_linear_u8(a);
let r = gamma_u8_from_linear_f32(r_lin * a_lin);
let g = gamma_u8_from_linear_f32(g_lin * a_lin);
let b = gamma_u8_from_linear_f32(b_lin * a_lin);
Self::from_rgba_premultiplied(r, g, b, a)
}
}
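    // Illustrative example (not in the original file): straight (unmultiplied)
    // half-transparent red; the constructor premultiplies r/g/b by alpha in
    // linear space before storing:
    //
    //     let c = Color32::from_rgba_unmultiplied(255, 0, 0, 128);
    //     assert_eq!(c.a(), 128);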
#[deprecated = "Use from_rgb(..), from_rgba_premultiplied(..) or from_srgba_unmultiplied(..)"]
pub const fn new(r: u8, g: u8, b: u8, a: u8) -> Self {
Self([r, g, b, a])
}
#[inline(always)]
pub const fn from_gray(l: u8) -> Self {
Self([l, l, l, 255])
}
#[inline(always)]
pub const fn from_black_alpha(a: u8) -> Self {
Self([0, 0, 0, a])
}
pub fn from_white_alpha(a: u8) -> Self {
Rgba::from_white_alpha(linear_f32_from_linear_u8(a)).into()
}
#[inline(always)]
pub const fn from_additive_luminance(l: u8) -> Self {
Self([l, l, l, 0])
}
#[inline(always)]
pub fn is_opaque(&self) -> bool {
self.a() == 255
}
#[inline(always)]
pub fn r(&self) -> u8 {
self.0[0]
}
#[inline(always)]
pub fn g(&self) -> u8 {
self.0[1]
}
#[inline(always)]
pub fn b(&self) -> u8 {
self.0[2]
}
#[inline(always)]
pub fn a(&self) -> u8 {
self.0[3]
}
/// Returns an opaque version of self
pub fn to_opaque(self) -> Self {
Rgba::from(self).to_opaque().into()
}
/// Returns an additive version of self
#[inline(always)]
pub fn additive(self) -> Self {
let [r, g, b, _] = self.to_array();
Self([r, g, b, 0])
}
/// Premultiplied RGBA
#[inline(always)]
pub fn to_array(&self) -> [u8; 4] {
[self.r(), self.g(), self.b(), self.a()]
}
/// Premultiplied RGBA
#[inline(always)]
pub fn to_tuple(&self) -> (u8, u8, u8, u8) {
(self.r(), self.g(), self.b(), self.a())
}
/// Multiply with 0.5 to make color half as opaque.
pub fn linear_multiply(self, factor: f32) -> Color32 {
crate::epaint_assert!(0.0 <= factor && factor <= 1.0);
// As an unfortunate side-effect of using premultiplied alpha
// we need a somewhat expensive conversion to linear space and back.
Rgba::from(self).multiply(factor).into()
}
}
// ----------------------------------------------------------------------------
/// 0-1 linear space `RGBA` color with premultiplied alpha.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
#[cfg_attr(feature = "persistence", derive(serde::Deserialize, serde::Serialize))]
pub struct Rgba(pub(crate) [f32; 4]);
impl std::ops::Index<usize> for Rgba {
type Output = f32;
#[inline(always)]
fn index(&self, index: usize) -> &f32 {
&self.0[index]
}
}
impl std::ops::IndexMut<usize> for Rgba {
#[inline(always)]
fn index_mut(&mut self, index: usize) -> &mut f32 {
&mut self.0[index]
}
}
impl Rgba {
pub const TRANSPARENT: Rgba = Rgba::from_rgba_premultiplied(0.0, 0.0, 0.0, 0.0);
pub const BLACK: Rgba = Rgba::from_rgb(0.0, 0.0, 0.0);
pub const WHITE: Rgba = Rgba::from_rgb(1.0, 1.0, 1.0);
pub const RED: Rgba = Rgba::from_rgb(1.0, 0.0, 0.0);
pub const GREEN: Rgba = Rgba::from_rgb(0.0, 1.0, 0.0);
pub const BLUE: Rgba = Rgba::from_rgb(0.0, 0.0, 1.0);
#[inline(always)]
pub const fn from_rgba_premultiplied(r: f32, g: f32, b: f32, a: f32) -> Self {
Self([r, g, b, a])
}
#[inline(always)]
pub const fn from_rgb(r: f32, g: f32, b: f32) -> Self {
Self([r, g, b, 1.0])
}
#[inline(always)]
pub const fn from_gray(l: f32) -> Self {
Self([l, l, l, 1.0])
}
    pub fn from_luminance_alpha(l: f32, a: f32) -> Self {
crate::epaint_assert!(0.0 <= l && l <= 1.0);
crate::epaint_assert!(0.0 <= a && a <= 1.0);
Self([l * a, l * a, l * a, a])
}
/// Transparent black
#[inline(always)]
pub fn from_black_alpha(a: f32) -> Self {
crate::epaint_assert!(0.0 <= a && a <= 1.0);
Self([0.0, 0.0, 0.0, a])
}
/// Transparent white
#[inline(always)]
pub fn from_white_alpha(a: f32) -> Self {
crate::epaint_assert!(0.0 <= a && a <= 1.0);
Self([a, a, a, a])
}
/// Return an additive version of this color (alpha = 0)
#[inline(always)]
pub fn additive(self) -> Self {
let [r, g, b, _] = self.0;
Self([r, g, b, 0.0])
}
/// Multiply with e.g. 0.5 to make us half transparent
#[inline(always)]
pub fn multiply(self, alpha: f32) -> Self {
Self([
alpha * self[0],
alpha * self[1],
alpha * self[2],
alpha * self[3],
])
}
#[inline(always)]
pub fn r(&self) -> f32 {
self.0[0]
}
#[inline(always)]
pub fn g(&self) -> f32 {
self.0[1]
}
#[inline(always)]
pub fn b(&self) -> f32 {
self.0[2]
}
#[inline(always)]
pub fn a(&self) -> f32 {
self.0[3]
}
/// How perceptually intense (bright) is the color?
#[inline]
pub fn intensity(&self) -> f32 {
0.3 * self.r() + 0.59 * self.g() + 0.11 * self.b()
}
/// Returns an opaque version of self
pub fn to_opaque(&self) -> Self {
if self.a() == 0.0 {
// Additive or fully transparent black.
Self::from_rgba_premultiplied(self.r(), self.g(), self.b(), 1.0)
} else {
// un-multiply alpha:
Self::from_rgba_premultiplied(
self.r() / self.a(),
self.g() / self.a(),
self.b() / self.a(),
1.0,
)
}
}
/// Premultiplied RGBA
#[inline(always)]
pub fn to_array(&self) -> [f32; 4] {
[self.r(), self.g(), self.b(), self.a()]
}
/// Premultiplied RGBA
#[inline(always)]
pub fn to_tuple(&self) -> (f32, f32, f32, f32) {
(self.r(), self.g(), self.b(), self.a())
}
}
impl std::ops::Add for Rgba {
type Output = Rgba;
#[inline(always)]
fn add(self, rhs: Rgba) -> Rgba {
Rgba([
self[0] + rhs[0],
self[1] + rhs[1],
self[2] + rhs[2],
self[3] + rhs[3],
])
}
}
impl std::ops::Mul<Rgba> for Rgba {
type Output = Rgba;
#[inline(always)]
fn mul(self, other: Rgba) -> Rgba {
Rgba([
self[0] * other[0],
self[1] * other[1],
self[2] * other[2],
self[3] * other[3],
])
}
}
impl std::ops::Mul<f32> for Rgba {
type Output = Rgba;
#[inline(always)]
fn mul(self, factor: f32) -> Rgba {
Rgba([
self[0] * factor,
self[1] * factor,
self[2] * factor,
self[3] * factor,
])
}
}
impl std::ops::Mul<Rgba> for f32 {
type Output = Rgba;
#[inline(always)]
fn mul(self, rgba: Rgba) -> Rgba {
Rgba([
self * rgba[0],
self * rgba[1],
self * rgba[2],
self * rgba[3],
])
}
}
// ----------------------------------------------------------------------------
// Color conversion:
impl From<Color32> for Rgba {
fn from(srgba: Color32) -> Rgba {
Rgba([
linear_f32_from_gamma_u8(srgba.0[0]),
linear_f32_from_gamma_u8(srgba.0[1]),
linear_f32_from_gamma_u8(srgba.0[2]),
linear_f32_from_linear_u8(srgba.0[3]),
])
}
}
impl From<Rgba> for Color32 {
fn from(rgba: Rgba) -> Color32 {
Color32([
gamma_u8_from_linear_f32(rgba.0[0]),
gamma_u8_from_linear_f32(rgba.0[1]),
gamma_u8_from_linear_f32(rgba.0[2]),
linear_u8_from_linear_f32(rgba.0[3]),
])
}
}
/// gamma [0, 255] -> linear [0, 1].
pub fn linear_f32_from_gamma_u8(s: u8) -> f32 {
if s <= 10 {
s as f32 / 3294.6
} else {
((s as f32 + 14.025) / 269.025).powf(2.4)
}
}
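// Spot check (illustrative, mirrors `test_srgba_conversion` below): mid-gray
// sRGB 128 maps to roughly 0.216 in linear space, not 0.5, because the sRGB
// transfer curve is strongly nonlinear:
//
//     assert!((linear_f32_from_gamma_u8(128) - 0.216).abs() < 0.005);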
/// linear [0, 255] -> linear [0, 1].
/// Useful for alpha-channel.
#[inline(always)]
pub fn linear_f32_from_linear_u8(a: u8) -> f32 {
a as f32 / 255.0
}
/// linear [0, 1] -> gamma [0, 255] (clamped).
/// Values outside this range will be clamped to the range.
pub fn gamma_u8_from_linear_f32(l: f32) -> u8 {
if l <= 0.0 {
0
} else if l <= 0.0031308 {
(3294.6 * l).round() as u8
} else if l <= 1.0 {
(269.025 * l.powf(1.0 / 2.4) - 14.025).round() as u8
} else {
255
}
}
/// linear [0, 1] -> linear [0, 255] (clamped).
/// Useful for alpha-channel.
#[inline(always)]
pub fn linear_u8_from_linear_f32(a: f32) -> u8 {
(a * 255.0).round() as u8 // rust does a saturating cast since 1.45
}
#[test]
pub fn test_srgba_conversion() {
for b in 0..=255 {
let l = linear_f32_from_gamma_u8(b);
assert!(0.0 <= l && l <= 1.0);
assert_eq!(gamma_u8_from_linear_f32(l), b);
}
}
/// gamma [0, 1] -> linear [0, 1] (not clamped).
/// Works for numbers outside this range (e.g. negative numbers).
pub fn linear_from_gamma(gamma: f32) -> f32 {
if gamma < 0.0 {
-linear_from_gamma(-gamma)
} else if gamma <= 0.04045 {
gamma / 12.92
} else {
((gamma + 0.055) / 1.055).powf(2.4)
}
}
/// linear [0, 1] -> gamma [0, 1] (not clamped).
/// Works for numbers outside this range (e.g. negative numbers).
pub fn gamma_from_linear(linear: f32) -> f32 {
if linear < 0.0 {
-gamma_from_linear(-linear)
} else if linear <= 0.0031308 {
12.92 * linear
} else {
1.055 * linear.powf(1.0 / 2.4) - 0.055
}
}
// ----------------------------------------------------------------------------
/// Hue, saturation, value, alpha. All in the range [0, 1].
/// No premultiplied alpha.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct Hsva {
/// hue 0-1
pub h: f32,
/// saturation 0-1
pub s: f32,
/// value 0-1
pub v: f32,
/// alpha 0-1. A negative value signifies an additive color (and alpha is ignored).
pub a: f32,
}
impl Hsva {
pub fn new(h: f32, s: f32, v: f32, a: f32) -> Self {
Self { h, s, v, a }
}
/// From `sRGBA` with premultiplied alpha
pub fn from_srgba_premultiplied(srgba: [u8; 4]) -> Self {
Self::from_rgba_premultiplied([
linear_f32_from_gamma_u8(srgba[0]),
linear_f32_from_gamma_u8(srgba[1]),
linear_f32_from_gamma_u8(srgba[2]),
linear_f32_from_linear_u8(srgba[3]),
])
}
/// From `sRGBA` without premultiplied alpha
pub fn from_srgba_unmultiplied(srgba: [u8; 4]) -> Self {
Self::from_rgba_unmultiplied([
linear_f32_from_gamma_u8(srgba[0]),
linear_f32_from_gamma_u8(srgba[1]),
linear_f32_from_gamma_u8(srgba[2]),
linear_f32_from_linear_u8(srgba[3]),
])
}
/// From linear RGBA with premultiplied alpha
pub fn from_rgba_premultiplied([r, g, b, a]: [f32; 4]) -> Self {
#![allow(clippy::many_single_char_names)]
if a == 0.0 {
if r == 0.0 && b == 0.0 && a == 0.0 {
Hsva::default()
} else {
Hsva::from_additive_rgb([r, g, b])
}
} else {
let (h, s, v) = hsv_from_rgb([r / a, g / a, b / a]);
Hsva { h, s, v, a }
}
}
/// From linear RGBA without premultiplied alpha
pub fn from_rgba_unmultiplied([r, g, b, a]: [f32; 4]) -> Self {
#![allow(clippy::many_single_char_names)]
let (h, s, v) = hsv_from_rgb([r, g, b]);
Hsva { h, s, v, a }
}
pub fn from_additive_rgb(rgb: [f32; 3]) -> Self {
let (h, s, v) = hsv_from_rgb(rgb);
Hsva {
h,
s,
v,
a: -0.5, // anything negative is treated as additive
}
}
pub fn from_rgb(rgb: [f32; 3]) -> Self {
let (h, s, v) = hsv_from_rgb(rgb);
Hsva { h, s, v, a: 1.0 }
}
pub fn from_srgb([r, g, b]: [u8; 3]) -> Self {
Self::from_rgb([
linear_f32_from_gamma_u8(r),
linear_f32_from_gamma_u8(g),
linear_f32_from_gamma_u8(b),
])
}
// ------------------------------------------------------------------------
pub fn to_opaque(self) -> Self {
Self { a: 1.0, ..self }
}
pub fn to_rgb(&self) -> [f32; 3] {
rgb_from_hsv((self.h, self.s, self.v))
}
pub fn to_srgb(&self) -> [u8; 3] {
let [r, g, b] = self.to_rgb();
[
gamma_u8_from_linear_f32(r),
gamma_u8_from_linear_f32(g),
gamma_u8_from_linear_f32(b),
]
}
pub fn to_rgba_premultiplied(&self) -> [f32; 4] {
let [r, g, b, a] = self.to_rgba_unmultiplied();
let additive = a < 0.0;
if additive {
[r, g, b, 0.0]
} else {
[a * r, a * g, a * b, a]
}
}
/// Represents additive colors using a negative alpha.
pub fn to_rgba_unmultiplied(&self) -> [f32; 4] {
let Hsva { h, s, v, a } = *self;
let [r, g, b] = rgb_from_hsv((h, s, v));
[r, g, b, a]
}
pub fn to_srgba_premultiplied(&self) -> [u8; 4] {
let [r, g, b, a] = self.to_rgba_premultiplied();
[
gamma_u8_from_linear_f32(r),
gamma_u8_from_linear_f32(g),
gamma_u8_from_linear_f32(b),
linear_u8_from_linear_f32(a),
]
}
pub fn to_srgba_unmultiplied(&self) -> [u8; 4] {
let [r, g, b, a] = self.to_rgba_unmultiplied();
[
gamma_u8_from_linear_f32(r),
gamma_u8_from_linear_f32(g),
gamma_u8_from_linear_f32(b),
linear_u8_from_linear_f32(a.abs()),
]
}
}
impl From<Hsva> for Rgba {
fn from(hsva: Hsva) -> Rgba {
Rgba(hsva.to_rgba_premultiplied())
}
}
impl From<Rgba> for Hsva {
fn from(rgba: Rgba) -> Hsva {
Self::from_rgba_premultiplied(rgba.0)
}
}
impl From<Hsva> for Color32 {
fn from(hsva: Hsva) -> Color32 {
Color32::from(Rgba::from(hsva))
}
}
impl From<Color32> for Hsva {
fn from(srgba: Color32) -> Hsva {
Hsva::from(Rgba::from(srgba))
}
}
/// All ranges in 0-1, rgb is linear.
pub fn hsv_from_rgb([r, g, b]: [f32; 3]) -> (f32, f32, f32) {
#![allow(clippy::many_single_char_names)]
let min = r.min(g.min(b));
let max = r.max(g.max(b)); // value
let range = max - min;
let h = if max == min {
0.0 // hue is undefined
} else if max == r {
(g - b) / (6.0 * range)
} else if max == g {
(b - r) / (6.0 * range) + 1.0 / 3.0
} else {
// max == b
(r - g) / (6.0 * range) + 2.0 / 3.0
};
let h = (h + 1.0).fract(); // wrap
let s = if max == 0.0 { 0.0 } else { 1.0 - min / max };
(h, s, max)
}
/// All ranges in 0-1, rgb is linear.
pub fn rgb_from_hsv((h, s, v): (f32, f32, f32)) -> [f32; 3] {
#![allow(clippy::many_single_char_names)]
let h = (h.fract() + 1.0).fract(); // wrap
let s = s.clamp(0.0, 1.0);
let f = h * 6.0 - (h * 6.0).floor();
let p = v * (1.0 - s);
let q = v * (1.0 - f * s);
let t = v * (1.0 - (1.0 - f) * s);
match (h * 6.0).floor() as i32 % 6 {
0 => [v, t, p],
1 => [q, v, p],
2 => [p, v, t],
3 => [p, q, v],
4 => [t, p, v],
5 => [v, p, q],
_ => unreachable!(),
}
}
#[test]
#[ignore] // a bit expensive
fn test_hsv_roundtrip() {
for r in 0..=255 {
for g in 0..=255 {
for b in 0..=255 {
let srgba = Color32::from_rgb(r, g, b);
let hsva = Hsva::from(srgba);
assert_eq!(srgba, Color32::from(hsva));
}
}
}
}
// ----------------------------------------------------------------------------
/// Like Hsva but with the `v` value (brightness) being gamma corrected
/// so that it is somewhat perceptually even.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct HsvaGamma {
/// hue 0-1
pub h: f32,
/// saturation 0-1
pub s: f32,
/// value 0-1, in gamma-space (~perceptually even)
pub v: f32,
/// alpha 0-1. A negative value signifies an additive color (and alpha is ignored).
pub a: f32,
}
impl From<HsvaGamma> for Rgba {
fn from(hsvag: HsvaGamma) -> Rgba {
Hsva::from(hsvag).into()
}
}
impl From<HsvaGamma> for Color32 {
fn from(hsvag: HsvaGamma) -> Color32 {
Rgba::from(hsvag).into()
}
}
impl From<HsvaGamma> for Hsva {
fn from(hsvag: HsvaGamma) -> Hsva {
let HsvaGamma { h, s, v, a } = hsvag;
Hsva {
h,
s,
v: linear_from_gamma(v),
a,
}
}
}
impl From<Rgba> for HsvaGamma {
fn from(rgba: Rgba) -> HsvaGamma {
Hsva::from(rgba).into()
}
}
impl From<Color32> for HsvaGamma {
fn from(srgba: Color32) -> HsvaGamma {
Hsva::from(srgba).into()
}
}
impl From<Hsva> for HsvaGamma {
fn from(hsva: Hsva) -> HsvaGamma {
let Hsva { h, s, v, a } = hsva;
HsvaGamma {
h,
s,
v: gamma_from_linear(v),
a,
}
}
}
// ----------------------------------------------------------------------------
/// Cheap and ugly.
/// Made for graying out disabled `Ui`s.
pub fn tint_color_towards(color: Color32, target: Color32) -> Color32 {
let [mut r, mut g, mut b, mut a] = color.to_array();
if a == 0 {
r /= 2;
g /= 2;
b /= 2;
} else if a < 170 {
// Cheapish and looks ok.
// Works for e.g. grid stripes.
let div = (2 * 255 / a as i32) as u8;
r = r / 2 + target.r() / div;
g = g / 2 + target.g() / div;
b = b / 2 + target.b() / div;
a /= 2;
} else {
r = r / 2 + target.r() / 2;
g = g / 2 + target.g() / 2;
b = b / 2 + target.b() / 2;
}
Color32::from_rgba_premultiplied(r, g, b, a)
}
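// Illustrative call (hypothetical variables): pull a widget's foreground
// color partway toward the background to render it as disabled:
//
//     let faded = tint_color_towards(fg_color, bg_color);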
#[cfg(feature = "cint")]
mod impl_cint {
use super::*;
use cint::{Alpha, ColorInterop, EncodedSrgb, Hsv, LinearSrgb, PremultipliedAlpha};
// ---- Color32 ----
impl From<Alpha<EncodedSrgb<u8>>> for Color32 {
fn from(srgba: Alpha<EncodedSrgb<u8>>) -> Self {
let Alpha {
color: EncodedSrgb { r, g, b },
alpha: a,
} = srgba;
Color32::from_rgba_unmultiplied(r, g, b, a)
}
}
// No From<Color32> for Alpha<_> because Color32 is premultiplied
impl From<PremultipliedAlpha<EncodedSrgb<u8>>> for Color32 {
fn from(srgba: PremultipliedAlpha<EncodedSrgb<u8>>) -> Self {
let PremultipliedAlpha {
color: EncodedSrgb { r, g, b },
alpha: a,
} = srgba;
Color32::from_rgba_premultiplied(r, g, b, a)
}
}
impl From<Color32> for PremultipliedAlpha<EncodedSrgb<u8>> {
fn from(col: Color32) -> Self {
let (r, g, b, a) = col.to_tuple();
PremultipliedAlpha {
color: EncodedSrgb { r, g, b },
alpha: a,
}
}
}
impl From<PremultipliedAlpha<EncodedSrgb<f32>>> for Color32 {
fn from(srgba: PremultipliedAlpha<EncodedSrgb<f32>>) -> Self {
let PremultipliedAlpha {
color: EncodedSrgb { r, g, b },
alpha: a,
} = srgba;
// This is a bit of an abuse of the function name but it does what we want.
let r = linear_u8_from_linear_f32(r);
let g = linear_u8_from_linear_f32(g);
let b = linear_u8_from_linear_f32(b);
let a = linear_u8_from_linear_f32(a);
Color32::from_rgba_premultiplied(r, g, b, a)
}
}
impl From<Color32> for PremultipliedAlpha<EncodedSrgb<f32>> {
fn from(col: Color32) -> Self {
let (r, g, b, a) = col.to_tuple();
// This is a bit of an abuse of the function name but it does what we want.
let r = linear_f32_from_linear_u8(r);
let g = linear_f32_from_linear_u8(g);
let b = linear_f32_from_linear_u8(b);
let a = linear_f32_from_linear_u8(a);
PremultipliedAlpha {
color: EncodedSrgb { r, g, b },
alpha: a,
}
}
}
impl ColorInterop for Color32 {
type CintTy = PremultipliedAlpha<EncodedSrgb<u8>>;
}
// ---- Rgba ----
impl From<PremultipliedAlpha<LinearSrgb<f32>>> for Rgba {
fn from(srgba: PremultipliedAlpha<LinearSrgb<f32>>) -> Self {
let PremultipliedAlpha {
color: LinearSrgb { r, g, b },
alpha: a,
} = srgba;
Rgba([r, g, b, a])
}
}
impl From<Rgba> for PremultipliedAlpha<LinearSrgb<f32>> {
fn from(col: Rgba) -> Self {
let (r, g, b, a) = col.to_tuple();
PremultipliedAlpha {
color: LinearSrgb { r, g, b },
alpha: a,
}
}
}
impl ColorInterop for Rgba {
type CintTy = PremultipliedAlpha<LinearSrgb<f32>>;
}
// ---- Hsva ----
impl From<Alpha<Hsv<f32>>> for Hsva {
fn from(srgba: Alpha<Hsv<f32>>) -> Self {
let Alpha {
color: Hsv { h, s, v },
alpha: a,
} = srgba;
Hsva::new(h, s, v, a)
}
}
impl From<Hsva> for Alpha<Hsv<f32>> {
fn from(col: Hsva) -> Self {
let Hsva { h, s, v, a } = col;
Alpha {
color: Hsv { h, s, v },
alpha: a,
}
}
}
impl ColorInterop for Hsva {
type CintTy = Alpha<Hsv<f32>>;
}
// ---- HsvaGamma ----
impl ColorInterop for HsvaGamma {
type CintTy = Alpha<Hsv<f32>>;
}
impl From<Alpha<Hsv<f32>>> for HsvaGamma {
fn from(srgba: Alpha<Hsv<f32>>) -> Self {
let Alpha {
color: Hsv { h, s, v },
alpha: a,
} = srgba;
Hsva::new(h, s, v, a).into()
}
}
impl From<HsvaGamma> for Alpha<Hsv<f32>> {
fn from(col: HsvaGamma) -> Self {
let Hsva { h, s, v, a } = col.into();
Alpha {
color: Hsv { h, s, v },
alpha: a,
}
}
}
}
mod.rs
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use async_trait::async_trait;
use cita_trie::MemoryDB;
use framework::binding::sdk::{DefalutServiceSDK, DefaultChainQuerier};
use framework::binding::state::{GeneralServiceState, MPTTrie};
use protocol::traits::{Context, NoopDispatcher, Storage};
use protocol::types::{
Address, Block, Hash, Proof, Receipt, ServiceContext, ServiceContextParams, SignedTransaction,
};
use protocol::{types::Bytes, ProtocolResult};
use crate::types::{
ApprovePayload, CreateAssetPayload, GetAllowancePayload, GetAssetPayload, GetBalancePayload,
TransferFromPayload, TransferPayload,
};
use crate::AssetService;
#[test]
fn test_create_asset() {
let cycles_limit = 1024 * 1024 * 1024; // 1073741824
let caller = Address::from_hex("0x755cdba6ae4f479f7164792b318b2a06c759833b").unwrap();
let context = mock_context(cycles_limit, caller.clone());
let mut service = new_asset_service();
let supply = 1024 * 1024;
// test create_asset
let asset = service
.create_asset(context.clone(), CreateAssetPayload {
name: "test".to_owned(),
symbol: "test".to_owned(),
supply,
})
.succeed_data;
let new_asset = service
.get_asset(context.clone(), GetAssetPayload {
id: asset.id.clone(),
})
.succeed_data;
assert_eq!(asset, new_asset);
let balance_res = service
.get_balance(context, GetBalancePayload {
asset_id: asset.id.clone(),
user: caller,
})
.succeed_data;
assert_eq!(balance_res.balance, supply);
assert_eq!(balance_res.asset_id, asset.id);
}
#[test]
fn test_transfer() {
let cycles_limit = 1024 * 1024 * 1024; // 1073741824
let caller = Address::from_hex("0x755cdba6ae4f479f7164792b318b2a06c759833b").unwrap();
let context = mock_context(cycles_limit, caller.clone());
let mut service = new_asset_service();
let supply = 1024 * 1024;
// test create_asset
let asset = service
.create_asset(context.clone(), CreateAssetPayload {
name: "test".to_owned(),
symbol: "test".to_owned(),
supply,
})
.succeed_data;
let to_address = Address::from_hex("0x666cdba6ae4f479f7164792b318b2a06c759833b").unwrap();
service.transfer(context.clone(), TransferPayload {
asset_id: asset.id.clone(),
to: to_address.clone(),
value: 1024,
});
let balance_res = service
.get_balance(context, GetBalancePayload {
asset_id: asset.id.clone(),
user: caller,
})
.succeed_data;
assert_eq!(balance_res.balance, supply - 1024);
let context = mock_context(cycles_limit, to_address.clone());
let balance_res = service
.get_balance(context, GetBalancePayload {
asset_id: asset.id,
user: to_address,
})
.succeed_data;
assert_eq!(balance_res.balance, 1024);
}
#[test]
fn test_approve() {
let cycles_limit = 1024 * 1024 * 1024; // 1073741824
let caller = Address::from_hex("0x755cdba6ae4f479f7164792b318b2a06c759833b").unwrap();
let context = mock_context(cycles_limit, caller.clone());
let mut service = new_asset_service();
let supply = 1024 * 1024;
let asset = service
.create_asset(context.clone(), CreateAssetPayload {
name: "test".to_owned(),
symbol: "test".to_owned(),
supply,
})
.succeed_data;
let to_address = Address::from_hex("0x666cdba6ae4f479f7164792b318b2a06c759833b").unwrap();
service.approve(context.clone(), ApprovePayload {
asset_id: asset.id.clone(),
to: to_address.clone(),
value: 1024,
});
let allowance_res = service
.get_allowance(context, GetAllowancePayload {
asset_id: asset.id.clone(),
grantor: caller,
grantee: to_address.clone(),
})
.succeed_data;
assert_eq!(allowance_res.asset_id, asset.id);
assert_eq!(allowance_res.grantee, to_address);
assert_eq!(allowance_res.value, 1024);
}
#[test]
fn test_transfer_from() {
let cycles_limit = 1024 * 1024 * 1024; // 1073741824
let caller = Address::from_hex("0x755cdba6ae4f479f7164792b318b2a06c759833b").unwrap();
let context = mock_context(cycles_limit, caller.clone());
let mut service = new_asset_service();
let supply = 1024 * 1024;
let asset = service
.create_asset(context.clone(), CreateAssetPayload {
name: "test".to_owned(),
symbol: "test".to_owned(),
supply,
})
.succeed_data;
let to_address = Address::from_hex("0x666cdba6ae4f479f7164792b318b2a06c759833b").unwrap();
service.approve(context.clone(), ApprovePayload {
asset_id: asset.id.clone(),
to: to_address.clone(),
value: 1024,
});
let to_context = mock_context(cycles_limit, to_address.clone());
service.transfer_from(to_context.clone(), TransferFromPayload {
asset_id: asset.id.clone(),
sender: caller.clone(),
recipient: to_address.clone(),
value: 24,
});
let allowance_res = service
.get_allowance(context.clone(), GetAllowancePayload {
asset_id: asset.id.clone(),
grantor: caller.clone(),
grantee: to_address.clone(),
})
.succeed_data;
assert_eq!(allowance_res.asset_id, asset.id.clone());
assert_eq!(allowance_res.grantee, to_address.clone());
assert_eq!(allowance_res.value, 1000);
let balance_res = service
.get_balance(context, GetBalancePayload {
asset_id: asset.id.clone(),
user: caller,
})
.succeed_data;
assert_eq!(balance_res.balance, supply - 24);
let balance_res = service
.get_balance(to_context, GetBalancePayload {
asset_id: asset.id,
user: to_address,
})
.succeed_data;
assert_eq!(balance_res.balance, 24);
}
fn new_asset_service() -> AssetService<
DefalutServiceSDK<
GeneralServiceState<MemoryDB>,
DefaultChainQuerier<MockStorage>,
NoopDispatcher,
>,
> {
let chain_db = DefaultChainQuerier::new(Arc::new(MockStorage {}));
let trie = MPTTrie::new(Arc::new(MemoryDB::new(false)));
let state = GeneralServiceState::new(trie);
let sdk = DefalutServiceSDK::new(
Rc::new(RefCell::new(state)),
Rc::new(chain_db),
NoopDispatcher {},
);
AssetService::new(sdk)
}
fn mock_context(cycles_limit: u64, caller: Address) -> ServiceContext {
let params = ServiceContextParams {
tx_hash: None,
nonce: None,
cycles_limit,
cycles_price: 1,
cycles_used: Rc::new(RefCell::new(0)),
caller,
height: 1,
timestamp: 0,
service_name: "service_name".to_owned(),
service_method: "service_method".to_owned(),
service_payload: "service_payload".to_owned(),
extra: None,
events: Rc::new(RefCell::new(vec![])),
};
ServiceContext::new(params)
}
struct MockStorage;
#[async_trait]
impl Storage for MockStorage {
async fn insert_transactions(
&self,
_: Context,
_: Vec<SignedTransaction>,
) -> ProtocolResult<()> {
unimplemented!()
}
    async fn insert_block(&self, _: Context, _: Block) -> ProtocolResult<()> {
unimplemented!()
}
async fn insert_receipts(&self, _: Context, _: Vec<Receipt>) -> ProtocolResult<()> {
unimplemented!()
}
async fn update_latest_proof(&self, _: Context, _: Proof) -> ProtocolResult<()> {
unimplemented!()
}
async fn get_transaction_by_hash(
&self,
_: Context,
_: Hash,
) -> ProtocolResult<SignedTransaction> {
unimplemented!()
}
async fn get_transactions(
&self,
_: Context,
_: Vec<Hash>,
) -> ProtocolResult<Vec<SignedTransaction>> {
unimplemented!()
}
async fn get_latest_block(&self, _: Context) -> ProtocolResult<Block> {
unimplemented!()
}
async fn get_block_by_height(&self, _: Context, _: u64) -> ProtocolResult<Block> {
unimplemented!()
}
async fn get_block_by_hash(&self, _: Context, _: Hash) -> ProtocolResult<Block> {
unimplemented!()
}
async fn get_receipt(&self, _: Context, _: Hash) -> ProtocolResult<Receipt> {
unimplemented!()
}
async fn get_receipts(&self, _: Context, _: Vec<Hash>) -> ProtocolResult<Vec<Receipt>> {
unimplemented!()
}
async fn get_latest_proof(&self, _: Context) -> ProtocolResult<Proof> {
unimplemented!()
}
async fn update_overlord_wal(&self, _: Context, _info: Bytes) -> ProtocolResult<()> {
unimplemented!()
}
async fn load_overlord_wal(&self, _: Context) -> ProtocolResult<Bytes> {
unimplemented!()
}
}
index.ts
import { addBabelPlugin } from 'customize-cra'
import { IPlugin } from '@peajs/types'
export default class AntdProPlugin implements IPlugin {
constructor(private options: any = {}) {}
updateWebpackConfig(config: any) {
const { style = true } = this.options
return addBabelPlugin([
'import',
{
libraryName: 'ant-design-pro',
libraryDirectory: 'lib',
style,
camel2DashComponentName: false,
},
'ant-design-pro',
])(config)
}
}
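// Illustrative usage (hypothetical config shape -- consult the peajs docs for
// the real plugin registration):
//
//   // pea.config.ts
//   export default {
//     plugins: [new AntdProPlugin({ style: true })],
//   }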
vdso_in_none.go
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux,!386,!amd64,!arm !linux
package runtime
// A dummy version of inVDSOPage for targets that don't use a VDSO.
func inVDSOPage(pc uintptr) bool | {
return false
} |
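// For contrast, a VDSO-aware target would bound-check pc against the mapped
// vdso page. A rough sketch only; vdsoStart and vdsoEnd are illustrative
// parameters, not the runtime's actual symbols.
func inVDSOPageSketch(pc, vdsoStart, vdsoEnd uintptr) bool {
	return pc >= vdsoStart && pc < vdsoEnd
}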
|
mixture.py | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The Mixture distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow as tf
from tensorflow_probability.python.distributions import categorical
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import seed_stream
from tensorflow_probability.python.internal import distribution_util
from tensorflow_probability.python.internal import reparameterization
from tensorflow.python.framework import tensor_util
class Mixture(distribution.Distribution):
"""Mixture distribution.
The `Mixture` object implements batched mixture distributions.
The mixture model is defined by a `Categorical` distribution (the mixture)
and a python list of `Distribution` objects.
Methods supported include `log_prob`, `prob`, `mean`, `sample`, and
`entropy_lower_bound`.
#### Examples
```python
# Create a mixture of two Gaussians:
tfd = tfp.distributions
mix = 0.3
bimix_gauss = tfd.Mixture(
cat=tfd.Categorical(probs=[mix, 1.-mix]),
components=[
tfd.Normal(loc=-1., scale=0.1),
tfd.Normal(loc=+1., scale=0.5),
])
# Plot the PDF.
import matplotlib.pyplot as plt
x = tf.linspace(-2., 3., int(1e4)).eval()
plt.plot(x, bimix_gauss.prob(x).eval());
```
"""
def __init__(self,
cat,
components,
validate_args=False,
allow_nan_stats=True,
use_static_graph=False,
name="Mixture"):
"""Initialize a Mixture distribution.
A `Mixture` is defined by a `Categorical` (`cat`, representing the
mixture probabilities) and a list of `Distribution` objects
all having matching dtype, batch shape, event shape, and continuity
properties (the components).
The `num_classes` of `cat` must be possible to infer at graph construction
time and match `len(components)`.
Args:
cat: A `Categorical` distribution instance, representing the probabilities
of `components`.
components: A list or tuple of `Distribution` instances.
Each instance must have the same type, be defined on the same domain,
and have matching `event_shape` and `batch_shape`.
validate_args: Python `bool`, default `False`. If `True`, raise a runtime
error if batch or event ranks are inconsistent between cat and any of
the distributions. This is only checked if the ranks cannot be
determined statically at graph construction time.
allow_nan_stats: Boolean, default `True`. If `False`, raise an
exception if a statistic (e.g. mean/mode/etc...) is undefined for any
batch member. If `True`, batch members with valid parameters leading to
undefined statistics will return NaN for this statistic.
use_static_graph: Calls to `sample` will not rely on dynamic tensor
indexing, allowing for some static graph compilation optimizations, but
at the expense of sampling all underlying distributions in the mixture.
(Possibly useful when running on TPUs).
Default value: `False` (i.e., use dynamic indexing).
name: A name for this distribution (optional).
Raises:
TypeError: If cat is not a `Categorical`, or `components` is not
a list or tuple, or the elements of `components` are not
instances of `Distribution`, or do not have matching `dtype`.
ValueError: If `components` is an empty list or tuple, or its
elements do not have a statically known event rank.
If `cat.num_classes` cannot be inferred at graph creation time,
or the constant value of `cat.num_classes` is not equal to
`len(components)`, or all `components` and `cat` do not have
matching static batch shapes, or all components do not
have matching static event shapes.
"""
parameters = dict(locals())
# TODO(b/117098119): Remove tf.distribution references once they're gone.
if not isinstance(cat, categorical.Categorical) and not isinstance(
cat, tf.distributions.Categorical):
raise TypeError("cat must be a Categorical distribution, but saw: %s" %
cat)
if not components:
raise ValueError("components must be a non-empty list or tuple")
if not isinstance(components, (list, tuple)):
raise TypeError("components must be a list or tuple, but saw: %s" %
components)
# TODO(b/117098119): Remove tf.distribution references once they're gone.
if not all(
isinstance(c, distribution.Distribution) or
isinstance(c, tf.distributions.Distribution) for c in components):
raise TypeError(
"all entries in components must be Distribution instances"
" but saw: %s" % components)
dtype = components[0].dtype
if not all(d.dtype == dtype for d in components):
raise TypeError("All components must have the same dtype, but saw "
"dtypes: %s" % [(d.name, d.dtype) for d in components])
static_event_shape = components[0].event_shape
static_batch_shape = cat.batch_shape
for d in components:
static_event_shape = static_event_shape.merge_with(d.event_shape)
static_batch_shape = static_batch_shape.merge_with(d.batch_shape)
if static_event_shape.ndims is None:
raise ValueError(
"Expected to know rank(event_shape) from components, but "
"none of the components provide a static number of ndims")
# Ensure that all batch and event ndims are consistent.
with tf.name_scope(name, values=[cat.logits]) as name:
num_components = cat.event_size
static_num_components = tensor_util.constant_value(num_components)
if static_num_components is None:
raise ValueError(
"Could not infer number of classes from cat and unable "
"to compare this value to the number of components passed in.")
# Possibly convert from numpy 0-D array.
static_num_components = int(static_num_components)
if static_num_components != len(components):
raise ValueError("cat.num_classes != len(components): %d vs. %d" %
(static_num_components, len(components)))
cat_batch_shape = cat.batch_shape_tensor()
cat_batch_rank = tf.size(cat_batch_shape)
if validate_args:
batch_shapes = [d.batch_shape_tensor() for d in components]
batch_ranks = [tf.size(bs) for bs in batch_shapes]
check_message = ("components[%d] batch shape must match cat "
"batch shape")
self._assertions = [
tf.assert_equal(
cat_batch_rank, batch_ranks[di], message=check_message % di)
for di in range(len(components))
]
self._assertions += [
tf.assert_equal(
cat_batch_shape, batch_shapes[di], message=check_message % di)
for di in range(len(components))
]
else:
self._assertions = []
self._cat = cat
self._components = list(components)
self._num_components = static_num_components
self._static_event_shape = static_event_shape
self._static_batch_shape = static_batch_shape
self._use_static_graph = use_static_graph
if use_static_graph and static_num_components is None:
raise ValueError("Number of categories must be known statically when "
"`static_sample=True`.")
# We let the Mixture distribution access _graph_parents since its arguably
# more like a baseclass.
graph_parents = self._cat._graph_parents # pylint: disable=protected-access
for c in self._components:
graph_parents += c._graph_parents # pylint: disable=protected-access
super(Mixture, self).__init__(
dtype=dtype,
reparameterization_type=reparameterization.NOT_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
graph_parents=graph_parents,
name=name)
@property
def cat(self):
return self._cat
@property
def components(self):
return self._components
@property
def num_components(self):
return self._num_components
def _batch_shape_tensor(self):
return self._cat.batch_shape_tensor()
def _batch_shape(self):
return self._static_batch_shape
def _event_shape_tensor(self):
return self._components[0].event_shape_tensor()
def _event_shape(self):
return self._static_event_shape
def _expand_to_event_rank(self, x):
"""Expand the rank of x up to static_event_rank times for broadcasting.
The static event rank was checked to not be None at construction time.
Args:
x: A tensor to expand.
Returns:
The expanded tensor.
"""
expanded_x = x
for _ in range(self.event_shape.ndims):
expanded_x = tf.expand_dims(expanded_x, -1)
return expanded_x
def _mean(self):
with tf.control_dependencies(self._assertions):
distribution_means = [d.mean() for d in self.components]
cat_probs = self._cat_probs(log_probs=False)
cat_probs = [self._expand_to_event_rank(c_p) for c_p in cat_probs]
partial_means = [
c_p * m for (c_p, m) in zip(cat_probs, distribution_means)
]
# These should all be the same shape by virtue of matching
# batch_shape and event_shape.
return tf.add_n(partial_means)
def _stddev(self):
with tf.control_dependencies(self._assertions):
distribution_means = [d.mean() for d in self.components]
distribution_devs = [d.stddev() for d in self.components]
cat_probs = self._cat_probs(log_probs=False)
stacked_means = tf.stack(distribution_means, axis=-1)
stacked_devs = tf.stack(distribution_devs, axis=-1)
cat_probs = [self._expand_to_event_rank(c_p) for c_p in cat_probs]
broadcasted_cat_probs = (
tf.stack(cat_probs, axis=-1) * tf.ones_like(stacked_means))
batched_dev = distribution_util.mixture_stddev(
tf.reshape(broadcasted_cat_probs, [-1, len(self.components)]),
tf.reshape(stacked_means, [-1, len(self.components)]),
tf.reshape(stacked_devs, [-1, len(self.components)]))
# I.e. re-shape to list(batch_shape) + list(event_shape).
return tf.reshape(batched_dev, tf.shape(broadcasted_cat_probs)[:-1])
def _log_prob(self, x):
with tf.control_dependencies(self._assertions):
x = tf.convert_to_tensor(x, name="x")
distribution_log_probs = [d.log_prob(x) for d in self.components]
cat_log_probs = self._cat_probs(log_probs=True)
final_log_probs = [
cat_lp + d_lp
for (cat_lp, d_lp) in zip(cat_log_probs, distribution_log_probs)
]
concat_log_probs = tf.stack(final_log_probs, 0)
log_sum_exp = tf.reduce_logsumexp(concat_log_probs, [0])
return log_sum_exp
def _log_cdf(self, x):
with tf.control_dependencies(self._assertions):
x = tf.convert_to_tensor(x, name="x")
distribution_log_cdfs = [d.log_cdf(x) for d in self.components]
cat_log_probs = self._cat_probs(log_probs=True)
final_log_cdfs = [
cat_lp + d_lcdf
for (cat_lp, d_lcdf) in zip(cat_log_probs, distribution_log_cdfs)
]
concatted_log_cdfs = tf.stack(final_log_cdfs, axis=0)
mixture_log_cdf = tf.reduce_logsumexp(concatted_log_cdfs, [0])
return mixture_log_cdf
def | (self, n, seed=None):
if self._use_static_graph:
# This sampling approach is almost the same as the approach used by
# `MixtureSameFamily`. The differences are due to having a list of
# `Distribution` objects rather than a single object, and maintaining
# random seed management that is consistent with the non-static code path.
samples = []
cat_samples = self.cat.sample(n, seed=seed)
stream = seed_stream.SeedStream(seed, salt="Mixture")
for c in range(self.num_components):
samples.append(self.components[c].sample(n, seed=stream()))
x = tf.stack(samples, -self._static_event_shape.ndims - 1) # [n, B, k, E]
npdt = x.dtype.as_numpy_dtype
mask = tf.one_hot(
indices=cat_samples, # [n, B]
depth=self._num_components, # == k
on_value=np.ones([], dtype=npdt),
off_value=np.zeros([], dtype=npdt)) # [n, B, k]
mask = distribution_util.pad_mixture_dimensions(
mask, self, self._cat,
self._static_event_shape.ndims) # [n, B, k, [1]*e]
return tf.reduce_sum(
x * mask, axis=-1 - self._static_event_shape.ndims) # [n, B, E]
with tf.control_dependencies(self._assertions):
n = tf.convert_to_tensor(n, name="n")
static_n = tensor_util.constant_value(n)
n = int(static_n) if static_n is not None else n
cat_samples = self.cat.sample(n, seed=seed)
static_samples_shape = cat_samples.shape
if static_samples_shape.is_fully_defined():
samples_shape = static_samples_shape.as_list()
samples_size = static_samples_shape.num_elements()
else:
samples_shape = tf.shape(cat_samples)
samples_size = tf.size(cat_samples)
static_batch_shape = self.batch_shape
if static_batch_shape.is_fully_defined():
batch_shape = static_batch_shape.as_list()
batch_size = static_batch_shape.num_elements()
else:
batch_shape = self.batch_shape_tensor()
batch_size = tf.reduce_prod(batch_shape)
static_event_shape = self.event_shape
if static_event_shape.is_fully_defined():
event_shape = np.array(static_event_shape.as_list(), dtype=np.int32)
else:
event_shape = self.event_shape_tensor()
# Get indices into the raw cat sampling tensor. We will
# need these to stitch sample values back out after sampling
# within the component partitions.
samples_raw_indices = tf.reshape(tf.range(0, samples_size), samples_shape)
# Partition the raw indices so that we can use
# dynamic_stitch later to reconstruct the samples from the
# known partitions.
partitioned_samples_indices = tf.dynamic_partition(
data=samples_raw_indices,
partitions=cat_samples,
num_partitions=self.num_components)
# Copy the batch indices n times, as we will need to know
# these to pull out the appropriate rows within the
# component partitions.
batch_raw_indices = tf.reshape(
tf.tile(tf.range(0, batch_size), [n]), samples_shape)
# Explanation of the dynamic partitioning below:
# batch indices are, e.g., [0, 1, 0, 1, 0, 1]
# Suppose partitions are:
# [1 1 0 0 1 1]
# After partitioning, batch indices are cut as:
# [batch_indices[x] for x in 2, 3]
# [batch_indices[x] for x in 0, 1, 4, 5]
# i.e.
# [1 1] and [0 0 0 0]
# Now we sample n=2 from part 0 and n=4 from part 1.
# For part 0 we want samples from batch entries 1, 1 (samples 0, 1),
# and for part 1 we want samples from batch entries 0, 0, 0, 0
# (samples 0, 1, 2, 3).
partitioned_batch_indices = tf.dynamic_partition(
data=batch_raw_indices,
partitions=cat_samples,
num_partitions=self.num_components)
samples_class = [None for _ in range(self.num_components)]
stream = seed_stream.SeedStream(seed, salt="Mixture")
for c in range(self.num_components):
n_class = tf.size(partitioned_samples_indices[c])
samples_class_c = self.components[c].sample(
n_class, seed=stream())
# Pull out the correct batch entries from each index.
# To do this, we may have to flatten the batch shape.
# For sample s, batch element b of component c, we get the
# partitioned batch indices from
# partitioned_batch_indices[c]; and shift each element by
# the sample index. The final lookup can be thought of as
# a matrix gather along locations (s, b) in
# samples_class_c where the n_class rows correspond to
# samples within this component and the batch_size columns
# correspond to batch elements within the component.
#
# Thus the lookup index is
# lookup[c, i] = batch_size * s[i] + b[c, i]
# for i = 0 ... n_class[c] - 1.
lookup_partitioned_batch_indices = (
batch_size * tf.range(n_class) + partitioned_batch_indices[c])
samples_class_c = tf.reshape(
samples_class_c, tf.concat([[n_class * batch_size], event_shape],
0))
samples_class_c = tf.gather(
samples_class_c,
lookup_partitioned_batch_indices,
name="samples_class_c_gather")
samples_class[c] = samples_class_c
# Stitch back together the samples across the components.
lhs_flat_ret = tf.dynamic_stitch(
indices=partitioned_samples_indices, data=samples_class)
# Reshape back to proper sample, batch, and event shape.
ret = tf.reshape(
lhs_flat_ret, tf.concat(
[samples_shape, self.event_shape_tensor()], 0))
ret.set_shape(
tf.TensorShape(static_samples_shape).concatenate(self.event_shape))
return ret
def entropy_lower_bound(self, name="entropy_lower_bound"):
r"""A lower bound on the entropy of this mixture model.
The bound below is not always very tight, and its usefulness depends
on the mixture probabilities and the components in use.
A lower bound is useful for ELBO when the `Mixture` is the variational
distribution:
\\(
\log p(x) >= ELBO = \int q(z) \log p(x, z) dz + H[q]
\\)
where \\( p \\) is the prior distribution, \\( q \\) is the variational,
and \\( H[q] \\) is the entropy of \\( q \\). If there is a lower bound
\\( G[q] \\) such that \\( H[q] \geq G[q] \\) then it can be used in
place of \\( H[q] \\).
For a mixture of distributions \\( q(Z) = \sum_i c_i q_i(Z) \\) with
\\( \sum_i c_i = 1 \\), by the concavity of \\( f(x) = -x \log x \\), a
simple lower bound is:
\\(
\begin{align}
H[q] & = - \int q(z) \log q(z) dz \\\
& = - \int (\sum_i c_i q_i(z)) \log(\sum_i c_i q_i(z)) dz \\\
& \geq - \sum_i c_i \int q_i(z) \log q_i(z) dz \\\
& = \sum_i c_i H[q_i]
\end{align}
\\)
This is the term we calculate below for \\( G[q] \\).
Args:
name: A name for this operation (optional).
Returns:
A lower bound on the Mixture's entropy.
"""
with self._name_scope(name, values=[self.cat.logits]):
with tf.control_dependencies(self._assertions):
distribution_entropies = [d.entropy() for d in self.components]
cat_probs = self._cat_probs(log_probs=False)
partial_entropies = [
c_p * m for (c_p, m) in zip(cat_probs, distribution_entropies)
]
# These are all the same shape by virtue of matching batch_shape
return tf.add_n(partial_entropies)
def _cat_probs(self, log_probs):
"""Get a list of num_components batchwise probabilities."""
which_softmax = tf.nn.log_softmax if log_probs else tf.nn.softmax
cat_probs = which_softmax(self.cat.logits)
cat_probs = tf.unstack(cat_probs, num=self.num_components, axis=-1)
return cat_probs
| _sample_n |
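# Illustrative use of the entropy lower bound derived in the docstring above:
# G[q] = sum_i c_i H[q_i] <= H[q]. TF1-style session execution is assumed to
# match the era of this file; the mixture mirrors the class docstring example.
import tensorflow as tf
import tensorflow_probability as tfp

tfd = tfp.distributions
mix = tfd.Mixture(
    cat=tfd.Categorical(probs=[0.3, 0.7]),
    components=[tfd.Normal(loc=-1., scale=0.1),
                tfd.Normal(loc=+1., scale=0.5)])
bound = mix.entropy_lower_bound()
with tf.Session() as sess:
    print(sess.run(bound))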
api_call.go | // Copyright 2020 Coinbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated by: OpenAPI Generator (https://openapi-generator.tech)
package server
import (
"encoding/json"
"net/http"
"strings"
"github.com/coinbase/rosetta-sdk-go/asserter"
"github.com/coinbase/rosetta-sdk-go/types"
)
// A CallAPIController binds http requests to an api service and writes the service results to the
// http response
type CallAPIController struct {
service CallAPIServicer
asserter *asserter.Asserter
}
// NewCallAPIController creates a default api controller
func NewCallAPIController(
s CallAPIServicer,
asserter *asserter.Asserter,
) Router |
// Routes returns all of the api routes for the CallAPIController
func (c *CallAPIController) Routes() Routes {
return Routes{
{
"Call",
strings.ToUpper("Post"),
"/call",
c.Call,
},
}
}
// Call - Make a Network-Specific Procedure Call
func (c *CallAPIController) Call(w http.ResponseWriter, r *http.Request) {
callRequest := &types.CallRequest{}
if err := json.NewDecoder(r.Body).Decode(&callRequest); err != nil {
EncodeJSONResponse(&types.Error{
Message: err.Error(),
}, http.StatusInternalServerError, w)
return
}
// Assert that CallRequest is correct
if err := c.asserter.CallRequest(callRequest); err != nil {
EncodeJSONResponse(&types.Error{
Message: err.Error(),
}, http.StatusInternalServerError, w)
return
}
result, serviceErr := c.service.Call(r.Context(), callRequest)
if serviceErr != nil {
EncodeJSONResponse(serviceErr, http.StatusInternalServerError, w)
return
}
EncodeJSONResponse(result, http.StatusOK, w)
}
| {
return &CallAPIController{
service: s,
asserter: asserter,
}
} |
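// Hypothetical wiring of the controller above. NewRouter matches the helper
// rosetta-sdk-go's generated server package usually ships; the servicer and
// asserter values are placeholders the caller must provide.
func serveExample(s CallAPIServicer, a *asserter.Asserter) {
	router := NewRouter(NewCallAPIController(s, a))
	if err := http.ListenAndServe(":8080", router); err != nil {
		panic(err)
	}
}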
conftest.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import os
import random
import uuid
from google.api_core import client_options
import google.api_core.exceptions
import google.auth
from google.cloud import bigquery
from google.cloud import bigquery_datatransfer
from google.cloud import pubsub_v1
import pytest
RESOURCE_PREFIX = "python_bigquery_datatransfer_samples_snippets"
RESOURCE_DATE_FORMAT = "%Y%m%d%H%M%S"
RESOURCE_DATE_LENGTH = 4 + 2 + 2 + 2 + 2 + 2
def resource_prefix() -> str:
|
def resource_name_to_date(resource_name: str):
start_date = len(RESOURCE_PREFIX) + 1
date_string = resource_name[start_date : start_date + RESOURCE_DATE_LENGTH]
parsed_date = datetime.datetime.strptime(date_string, RESOURCE_DATE_FORMAT)
return parsed_date
@pytest.fixture(scope="session", autouse=True)
def cleanup_pubsub_topics(pubsub_client: pubsub_v1.PublisherClient, project_id):
yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=1)
for topic in pubsub_client.list_topics(project=f"projects/{project_id}"):
topic_id = topic.name.split("/")[-1]
if (
topic_id.startswith(RESOURCE_PREFIX)
and resource_name_to_date(topic_id) < yesterday
):
pubsub_client.delete_topic(topic=topic.name)
def temp_suffix():
now = datetime.datetime.now()
return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}"
@pytest.fixture(scope="session")
def bigquery_client(default_credentials):
credentials, project_id = default_credentials
return bigquery.Client(credentials=credentials, project=project_id)
@pytest.fixture(scope="session")
def pubsub_client(default_credentials):
credentials, _ = default_credentials
return pubsub_v1.PublisherClient(credentials=credentials)
@pytest.fixture(scope="session")
def pubsub_topic(pubsub_client: pubsub_v1.PublisherClient, project_id):
topic_id = resource_prefix()
topic_path = pubsub_v1.PublisherClient.topic_path(project_id, topic_id)
pubsub_client.create_topic(name=topic_path)
yield topic_path
pubsub_client.delete_topic(topic=topic_path)
@pytest.fixture(scope="session")
def dataset_id(bigquery_client, project_id):
dataset_id = f"bqdts_{temp_suffix()}"
bigquery_client.create_dataset(f"{project_id}.{dataset_id}")
yield dataset_id
bigquery_client.delete_dataset(dataset_id, delete_contents=True)
@pytest.fixture(scope="session")
def default_credentials():
return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])
@pytest.fixture(scope="session")
def project_id():
return os.environ["GOOGLE_CLOUD_PROJECT"]
@pytest.fixture(scope="session")
def service_account_name(default_credentials):
credentials, _ = default_credentials
# The service_account_email attribute is not available when running with
# user account credentials, but should be available when running from our
# continuous integration tests.
return getattr(credentials, "service_account_email", None)
@pytest.fixture(scope="session")
def transfer_client(default_credentials, project_id):
credentials, _ = default_credentials
options = client_options.ClientOptions(quota_project_id=project_id)
transfer_client = bigquery_datatransfer.DataTransferServiceClient(
credentials=credentials, client_options=options
)
# Ensure quota is always attributed to the correct project.
bigquery_datatransfer.DataTransferServiceClient = lambda: transfer_client
return transfer_client
@pytest.fixture(scope="session")
def transfer_config_name(transfer_client, project_id, dataset_id, service_account_name):
from . import manage_transfer_configs, scheduled_query
# Use the transfer_client fixture so we know quota is attributed to the
# correct project.
assert transfer_client is not None
# To conserve limited BQ-DTS quota, this fixture creates only one transfer
# config for a whole session and is used to test the scheduled_query.py and
# the delete operation in manage_transfer_configs.py.
transfer_config = scheduled_query.create_scheduled_query(
{
"project_id": project_id,
"dataset_id": dataset_id,
"service_account_name": service_account_name,
}
)
yield transfer_config.name
manage_transfer_configs.delete_config(
{"transfer_config_name": transfer_config.name}
)
@pytest.fixture
def to_delete_configs(transfer_client):
to_delete = []
yield to_delete
for config_name in to_delete:
try:
transfer_client.delete_transfer_config(name=config_name)
except google.api_core.exceptions.GoogleAPICallError:
pass
| timestamp = datetime.datetime.utcnow().strftime(RESOURCE_DATE_FORMAT)
random_string = hex(random.randrange(1000000))[2:]
return f"{RESOURCE_PREFIX}_{timestamp}_{random_string}" |
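# Illustrative round-trip of the two helpers above: the generated name embeds
# a UTC timestamp that resource_name_to_date recovers.
name = resource_prefix()  # e.g. "..._20240101123000_1a2b3c"
created = resource_name_to_date(name)
assert created <= datetime.datetime.utcnow()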
commands.rs | // rudis_async/src/commands.rs
use crate::RUDIS_DB;
use resp::Value;
use futures::{future, Future};
use std::io::Error;
pub fn handle_get(v: Vec<Value>) -> Result<Value, Value> {
let v = v.iter().skip(1).collect::<Vec<_>>();
let db_ref = RUDIS_DB.lock().unwrap();
let reply = if let Value::Bulk(ref s) = &v[0] {
db_ref.get(s).map(|e| Value::String(e.to_string())).unwrap_or(Value::Null)
} else {
Value::Null
};
Ok(reply)
}
pub fn handle_set(v: Vec<Value>) -> Result<Value, Value> {
let v = v.iter().skip(1).collect::<Vec<_>>();
if v.len() < 2 {
return Err(Value::Error("Expected 2 arguments for SET command".to_string()))
}
match (&v[0], &v[1]) {
(Value::Bulk(k), Value::Bulk(v)) => {
let _ = RUDIS_DB
.lock()
.unwrap()
.insert(k.to_string(), v.to_string());
} | }
pub fn process_client_request(decoded_msg: Value) -> impl Future<Item = Vec<u8>, Error = Error> {
let reply = if let Value::Array(v) = decoded_msg {
match &v[0] {
Value::Bulk(ref s) if s == "GET" || s == "get" => handle_get(v),
Value::Bulk(ref s) if s == "SET" || s == "set" => handle_set(v),
other => unimplemented!("{:?} is not supported as of now", other),
}
} else {
Err(Value::Error("Invalid Command".to_string()))
};
future::ok(match reply {
Ok(r) | Err(r) => r.encode(),
})
} | _ => unimplemented!("SET not implemented for {:?}", v),
}
Ok(Value::String("OK".to_string())) |
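// Illustrative check of the two handlers above; assumes resp::Value derives
// PartialEq, as recent versions of the resp crate do.
fn demo_set_then_get() {
    let set_cmd = vec![
        Value::Bulk("SET".to_string()),
        Value::Bulk("k".to_string()),
        Value::Bulk("v".to_string()),
    ];
    assert_eq!(handle_set(set_cmd), Ok(Value::String("OK".to_string())));
    let get_cmd = vec![Value::Bulk("GET".to_string()), Value::Bulk("k".to_string())];
    assert_eq!(handle_get(get_cmd), Ok(Value::String("v".to_string())));
}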
conftest.py | """
Fixtures defined in conftest.py can be accessed by multiple test files.
When a test function takes a fixture's name as a parameter, pytest calls the
fixture function and passes its result to the test.
Note: localhost.localdomain was added to /etc/hosts.
"""
import pytest
from cnf.main import setup_app
import pymongo
config_name = 'testing'
the_app = setup_app(config_name, dict(
TESTING=True,
LOGIN_DISABLED=False,
MAIL_SUPPRESS_SEND=True,
SERVER_NAME='localhost.localdomain',
WTF_CSRF_ENABLED=False,
))
# the_app = setup_app()
the_app.app_context().push()
@pytest.fixture(scope='session')
def app():
"""Makes app parameter available to test funcs"""
return the_app
@pytest.fixture(scope='session', autouse=True)
def db():
"""Create a test copy of cnf for session"""
client = pymongo.MongoClient("localhost", 27017)
if 'cnf_test' not in client.list_database_names():
client.admin.command('copydb', fromdb='cnf',
todb='cnf_test')
db = client['cnf_test']
#delete example_user from user collection
user_coll = db.users
myquery = {"username": "example_user"}
user_coll.delete_one(myquery)
return db
@pytest.fixture(scope='function')
def data():
pass
@pytest.fixture(scope='session') | def client(app):
return app.test_client() | |
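# Illustrative test consuming the fixtures above; the '/' route and the
# accepted status codes are assumptions about the cnf app, not known routes.
def test_homepage(client):
    response = client.get('/')
    assert response.status_code in (200, 302)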
create_trace_graphviz.py | """
Read in the output from the trace-inputlocator script and create a GraphViz file.
Pass as input the path to the yaml output of the trace-inputlocator script via config file.
The output is written to the trace-inputlocator location.
WHY? because the trace-inputlocator only has the GraphViz output of the last call to the script. This
version re-creates the trace-data from the (merged) yaml file (the yaml output is merged if pre-existing in the output
file). | import cea.config
from cea.tests.trace_inputlocator import create_graphviz_output
def main(config):
with open(config.trace_inputlocator.yaml_output_file, 'r') as f:
yaml_data = yaml.safe_load(f)
trace_data = []
for script in yaml_data.keys():
for direction in ('input', 'output'):
for locator, file in yaml_data[script][direction]:
trace_data.append((direction, script, locator, file))
create_graphviz_output(trace_data, config.trace_inputlocator.graphviz_output_file)
if __name__ == '__main__':
main(cea.config.Configuration()) | """
from __future__ import print_function
import yaml |
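# Assumed shape of the trace-inputlocator yaml this script consumes; the
# script, locator, and file names below are illustrative, not taken from the
# CEA codebase.
yaml_data = {
    'demand': {
        'input': [('get_zone_geometry', 'inputs/building-geometry/zone.shp')],
        'output': [('get_total_demand', 'outputs/data/demand/Total_demand.csv')],
    },
}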
ModifyAlertContactRequest.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkemr.endpoint import endpoint_data
class ModifyAlertContactRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Emr', '2016-04-08', 'ModifyAlertContact','emr')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_EmailVerificationCode(self):
return self.get_query_params().get('EmailVerificationCode')
def set_EmailVerificationCode(self,EmailVerificationCode):
self.add_query_param('EmailVerificationCode',EmailVerificationCode)
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_PhoneNumberVerificationCode(self):
return self.get_query_params().get('PhoneNumberVerificationCode')
def set_PhoneNumberVerificationCode(self,PhoneNumberVerificationCode):
|
def get_BizId(self):
return self.get_query_params().get('BizId')
def set_BizId(self,BizId):
self.add_query_param('BizId',BizId)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
def get_PhoneNumber(self):
return self.get_query_params().get('PhoneNumber')
def set_PhoneNumber(self,PhoneNumber):
self.add_query_param('PhoneNumber',PhoneNumber)
def get_Email(self):
return self.get_query_params().get('Email')
def set_Email(self,Email):
self.add_query_param('Email',Email) | self.add_query_param('PhoneNumberVerificationCode',PhoneNumberVerificationCode) |
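# Hypothetical usage sketch. AcsClient and do_action_with_exception follow
# aliyun-python-sdk-core conventions; the credentials, region, and field
# values are placeholders.
from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
request = ModifyAlertContactRequest()
request.set_BizId('12345')
request.set_Name('ops-team')
request.set_Email('ops@example.com')
print(client.do_action_with_exception(request))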
main.go | package main
import (
"context"
"encoding/json"
"fmt"
"os"
"os/exec"
"time"
)
const (
commandName = "setxkbmap"
timeout = time.Second * 3
icon = "keyboard"
)
var (
errNotEnoughArguments = fmt.Errorf("language argument not provided")
errCouldNotEncode = fmt.Errorf("could not encode json")
)
type jsonOutput struct {
Icon string `json:"icon,omitempty"`
State string `json:"state,omitempty"`
Text string `json:"text,omitempty"`
}
func main() | {
if len(os.Args) < 2 {
output := &jsonOutput{
Icon: icon,
State: "Critical",
Text: errNotEnoughArguments.Error(),
}
// Ignore error
_ = json.NewEncoder(os.Stdout).Encode(output)
os.Exit(1)
}
lang := os.Args[1]
ctx, cancel := context.WithTimeout(context.Background(), timeout)
defer cancel()
cmd := exec.CommandContext(ctx, commandName, lang)
err := cmd.Run()
if err != nil {
output := &jsonOutput{
Icon: icon,
State: "Critical",
Text: err.Error(),
}
// Ignore error
_ = json.NewEncoder(os.Stdout).Encode(output)
os.Exit(1)
}
output := &jsonOutput{
Icon: icon,
State: "",
Text: lang,
}
// Ignore error
_ = json.NewEncoder(os.Stdout).Encode(output)
os.Exit(0)
} |
|
log.go | package cmd
import (
"fmt"
"io" | log "github.com/sirupsen/logrus"
)
func init() {
now := time.Now().Unix()
logname := fmt.Sprintf("preflight-%d.log", now)
logFile, err := os.OpenFile(logname, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0700)
if err == nil {
mw := io.MultiWriter(os.Stdout, logFile)
log.SetOutput(mw)
} else {
log.Info("Failed to log to file, using default stderr")
}
log.SetFormatter(&log.TextFormatter{})
log.SetLevel(log.TraceLevel)
} | "os"
"time"
|
settings.models.ts | ///
/// Copyright © 2016-2020 The Thingsboard Authors
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS, | /// limitations under the License.
///
export const smtpPortPattern: RegExp = /^([0-9]{1,4}|[1-5][0-9]{4}|6[0-4][0-9]{3}|65[0-4][0-9]{2}|655[0-2][0-9]|6553[0-5])$/;
export interface AdminSettings<T> {
key: string;
jsonValue: T;
}
export declare type SmtpProtocol = 'smtp' | 'smtps';
export interface MailServerSettings {
mailFrom: string;
smtpProtocol: SmtpProtocol;
smtpHost: string;
smtpPort: number;
timeout: number;
enableTls: boolean;
username: string;
password: string;
enableProxy: boolean;
proxyHost: string;
proxyPort: number;
proxyUser: string;
proxyPassword: string;
}
export interface GeneralSettings {
baseUrl: string;
}
export interface UserPasswordPolicy {
minimumLength: number;
minimumUppercaseLetters: number;
minimumLowercaseLetters: number;
minimumDigits: number;
minimumSpecialCharacters: number;
passwordExpirationPeriodDays: number;
}
export interface SecuritySettings {
passwordPolicy: UserPasswordPolicy;
}
export interface UpdateMessage {
message: string;
updateAvailable: boolean;
} | /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and |
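// Quick check of the SMTP port pattern exported above, which accepts 0-65535:
console.log(smtpPortPattern.test('587'));   // true
console.log(smtpPortPattern.test('70000')); // false: exceeds 65535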
solar_monitor.py |
#!/usr/bin/python
from Subfact_ina219 import INA219
import time
import os
import glob
import Tkinter as tk
import math
import copy
from OneFifo import OneFifo
import json
import socket
import select
from SolarMonitor import SolarMonitor
from SolarSensors import SolarSensors
from SolarServer import SolarServer
from SolarDb import SolarDb
def orig_main():
ina = INA219()
result = ina.getBusVoltage_V()
print "Shunt : %.3f mV" % ina.getShuntVoltage_mV()
print "Bus : %.3f V" % ina.getBusVoltage_V()
print "Current : %.3f mA" % ina.getCurrent_mA()
class Solar:
def __init__(self, sensors, timestamper, filenamePrefix="solarLog_"):
self.m_SolarSensors = sensors;
self.m_SolarDb = SolarDb(filenamePrefix);
self.m_Timestamper = timestamper;
def gatherData(self):
data = self.m_SolarSensors.getData();
return data;
def formatPrintData(self, results):
returnValue = []
returnValue.append( "%-20s %-20s %-20s %-20s %-20s %-20s" % (results["names"][0],results["names"][1],results["names"][2],results["names"][4],results["names"][5],results["names"][3]));
returnValue.append( "%2.3f V %2.3f V %2.3f V %2.3f V %2.3f V %2.3f V" % (results["voltage"][0],results["voltage"][1],results["voltage"][2],results["voltage"][4],results["voltage"][5],results["voltage"][3]));
returnValue.append( "%5.0f mA %5.0f mA %5.0f mA %5.0f mA %5.0f mA %5.0f mA" % (results["current"][0],results["current"][1],results["current"][2],results["current"][4],results["current"][5],results["current"][3]));
returnValue.append( "%5.0f mW %5.0f mW %5.0f mW %5.0f mW %5.0f mW %5.0f mW" % (results["voltage"][0]*results["current"][0],results["voltage"][1]*results["current"][1],results["voltage"][2]*results["current"][2],results["voltage"][4]*results["current"][4],results["voltage"][5]*results["current"][5],results["voltage"][3]*results["current"][3]));
return returnValue;
def printResults(self, results):
text = self.formatPrintData(results)
print;
for index in xrange(len(text)):
print(text[index]);
def recordData(self,data):
rollOver = self.m_SolarDb.addEntry(self.m_Timestamper.getDate(), self.m_Timestamper.getTime(), data );
return rollOver
def getEmptyStatsDB(self):
results = []
for channelIndex in xrange(6):
tempVal = {}
tempVal["minEnergy"] = 0
tempVal["maxEnergy"] = 0
tempVal["cumulativeEnergy"] = 0
results.append(tempVal);
return results
def computeNetPower(self, data, prevPwr=None):
if prevPwr is None:
results = self.getEmptyStatsDB()
else:
results = prevPwr
for channelIndex in xrange(6):
for index in xrange( len(data[channelIndex]["voltage"])-1 ):
timeDelta = self.convertTimeString( data[channelIndex]["time"][index+1]) - self.convertTimeString(data[channelIndex]["time"][index])
if (timeDelta <= 12 ):
# power=data[channelIndex]["voltage"][index] * data[channelIndex]["current"][index]
power=data[channelIndex]["current"][index] # use mAHr for power.
energy = power*timeDelta
results[channelIndex]["cumulativeEnergy"] = results[channelIndex]["cumulativeEnergy"] + energy
if results[channelIndex]["cumulativeEnergy"] < results[channelIndex]["minEnergy"]:
results[channelIndex]["minEnergy"] = results[channelIndex]["cumulativeEnergy"];
elif results[channelIndex]["cumulativeEnergy"] > results[channelIndex]["maxEnergy"]:
results[channelIndex]["maxEnergy"] = results[channelIndex]["cumulativeEnergy"]
for channelIndex in xrange(6):
print("minEnergy=%.1f mAHr maxEnergy=%.1f mAHr cumulative=%.1f mAHr" % ( results[channelIndex]["minEnergy"]/3600.0, results[channelIndex]["maxEnergy"]/3600.0, results[channelIndex]["cumulativeEnergy"]/3600.0))
print
return results
def convertTimeString(self, time):
timeSec = 0;
timeSec = timeSec + int(time[0:2])*60*60
timeSec = timeSec + int(time[3:5])*60
timeSec = timeSec + int(time[6:8])
return timeSec
class | :
def getDate(self):
pass;
def getTime(self):
pass
class Timestamper(TimestamperInterface):
def getDate(self):
return (time.strftime("%Y_%m_%d"))
def getTime(self):
return (time.strftime("%H:%M:%S"))
#class Application(tk.Frame):
class Application():
def __init__(self, master=None):
#tk.Frame.__init__(self, master)
#self.grid(sticky=tk.N+tk.S+tk.E+tk.W)
#self.createWidgets()
self.plotData = None;
self.leftPad = 40
self.topPad = 10
self.bottomPad = 30
self.rightPad = 10
self.currentParm = -1;
self.currentFileIndex = 0; # most recent
self.firstPoint = 0
self.lastPoint = 0;
self.currentBatPwr = 0
self.currentPanelPwr = 0
self.currentLoadPwr = 0
self.currentBatPwrList = []
for index in xrange(4):
self.currentBatPwrList.append(0)
self.plotheight = 1; # dummy values.
self.plotwidth = 1; # dummy values.
self.todayStats = None
self.batmap = [1,2,4,5] # list of channels that are batteries
def setSolar(self, solar):
self.mySolar = solar
(plotData, filename) = self.mySolar.m_SolarDb.readDayLog(self.currentFileIndex);
self.todayStats = self.mySolar.computeNetPower(plotData)
self.prevStats = None
for index in xrange(1,-1,-1): # fixme put back to 4,-1,-1
(plotData, filename) = self.mySolar.m_SolarDb.readDayLog(self.currentFileIndex+index);
print("processing %s" % filename)
self.prevStats = self.mySolar.computeNetPower(plotData, prevPwr=self.prevStats)
#~ def createWidgets(self):
#~ #
#~ # set up frames for the 6 sensors
#~ #
#~ top=self.winfo_toplevel()
#~ top.rowconfigure(0, weight=1)
#~ top.columnconfigure(0, weight=1)
#~ #
#~ # set up overall window frame
#~ #
#~ self.energy_LabelFrame = tk.LabelFrame(top, text="System Summary")
#~ self.energy_LabelFrame.grid(column=0, row=0, sticky=tk.N+tk.S+tk.E+tk.W)
#~ #
#~ # set up frames for the 6 sensors
#~ #
#~ self.energy_Col_LabelFrame = []
#~ labels = ["Batt 1","Batt 2","Batt 3","Batt 4","Today","Now"]
#~ for sensorIndex in xrange(6):
#~ myField = tk.LabelFrame(self.energy_LabelFrame, text=labels[sensorIndex] )
#~ myField.grid(column=sensorIndex, row=0, sticky=tk.N+tk.S+tk.E+tk.W)
#~ myField.rowconfigure(0, weight=1)
#~ myField.rowconfigure(1, weight=0)
#~ myField.columnconfigure(0, weight=1)
#~ self.energy_LabelFrame.rowconfigure(0, weight=1, minsize=100)
#~ self.energy_LabelFrame.columnconfigure(sensorIndex, weight=1, minsize=70)
#~ self.energy_Col_LabelFrame.append( myField )
#~ #
#~ # set canvas for each bar graph
#~ #
#~ self.energy_Col_graph_canvas = []
#~ for sensorIndex in xrange(6):
#~ myField = tk.Canvas(self.energy_Col_LabelFrame[sensorIndex], width=70, height=200)
#~ myField.grid(column=0,row=0, sticky=tk.E + tk.W + tk.N + tk.S )
#~ self.energy_Col_graph_canvas.append( myField )
#~ # myTextField = myField.create_text(anchor=tk.SW)
#~ #
#~ # add resize handler
#~ #
#~ #self.energy_Col_graph_canvas[0].bind("<Configure>", self.on_resize)
#~ #
#~ # set text fields for each bottom
#~ #
#~ self.energy_Col_Label = []
#~ self.energy_Col_text = []
#~ for sensorIndex in xrange(6):
#~ myStringVar = tk.StringVar()
#~ myStringVar.set("0 mA")
#~ myField = tk.Label(self.energy_Col_LabelFrame[sensorIndex], textvariable=myStringVar)
#~ myField.grid(column=0,row=1, sticky=tk.E + tk.W + tk.N + tk.S )
#~ self.energy_Col_Label.append( myField )
#~ self.energy_Col_text.append( myStringVar )
def accumulateEnergy(self, solarData):
# 0-panel; 1-bat 1; 2-bat 2; 3-load; 4-bat 3; 5-bat 4
powerInts = []
for index in xrange(6):
value = int(solarData["current"][index])
powerInts.append(value)
#~ bat_1_pwr = int(solarData["current"][1])
#~ bat_2_pwr = int(solarData["current"][2])
#~ bat_3_pwr = int(solarData["current"][4])
#~ bat_4_pwr = int(solarData["current"][5])
#~ self.currentBatPwrList.append( bat_1_pwr )
#~ self.currentBatPwrList.append( bat_2_pwr )
#~ self.currentBatPwrList.append( bat_3_pwr )
#~ self.currentBatPwrList.append( bat_4_pwr )
self.currentBatPwr = 0;
#~ self.currentBatPwrList = []
for index in xrange(4):
self.currentBatPwrList[index] = powerInts[self.batmap[index]]
self.currentBatPwr = self.currentBatPwr + self.currentBatPwrList[index]
panelPwr = powerInts[0]
loadPwr = powerInts[3]
self.currentPanelPwr = int( panelPwr )
self.currentLoadPwr = int( loadPwr )
# add new readings to totals; assume 1 second integration window
for index in xrange(6):
self.todayStats[index]["cumulativeEnergy"] = self.todayStats[index]["cumulativeEnergy"] + powerInts[index]
self.prevStats[index]["cumulativeEnergy"] = self.prevStats[index]["cumulativeEnergy"] + powerInts[index]
if self.prevStats[index]["cumulativeEnergy"] < self.prevStats[index]["minEnergy"]:
self.prevStats[index]["minEnergy"] = self.prevStats[index]["cumulativeEnergy"];
elif self.prevStats[index]["cumulativeEnergy"] > self.prevStats[index]["maxEnergy"]:
self.prevStats[index]["maxEnergy"] = self.prevStats[index]["cumulativeEnergy"]
def periodicEventHandler(self):
#self.after(1000,self.periodicEventHandler);
data = self.mySolar.gatherData();
self.accumulateEnergy(data);
#~ self.plotGraph()
rollOver = self.mySolar.recordData(data);
if rollOver:
self.todayStats = self.mySolar.getEmptyStatsDB() # we had a day rollover. reset the daily stats
self.mySolar.printResults(data)
self.mySolarServer.sendUpdate(data, self)
def main(config):
#~ app = Application()
#~ app.setSolar( setupSolar() )
#~ app.mySolarServer = SolarServer()
#~ mySolarSensors = SolarSensors(config)
#~ mySolarServer = SolarServer()
mySolarMonitor = SolarMonitor(config)
mySolarMonitor.run()
#~ while True:
#~ # app.periodicEventHandler()
#~ live_data = mySolarSensors.getData()
#~ mySolarServer.sendUpdate(live_data, cumulative_data)
#~ print(live_data)
#~ time.sleep(1.0)
if __name__ == "__main__":
fp = open("config.json", "r")
config_string = fp.read()
fp.close()
config = json.loads(config_string)
length = len(config)
for index in range(length-1, -1, -1):
print('index=%d' % (index))
if 'enable' in config[index]:
if config[index]['enable'] != 1:
dropped_entry = config.pop(index)
print('dropping disabled entry from config')
print(dropped_entry)
main(config)
| TimestamperInterface |
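# Worked check of the energy bookkeeping in computeNetPower above: channel
# current is integrated over the sample interval and reported in mAHr via the
# /3600.0 conversion. Values are illustrative.
power = 100        # mA, as read from a channel's "current" field
time_delta = 10    # seconds between consecutive samples
energy = power * time_delta
print("%.2f mAHr" % (energy / 3600.0))  # 0.28 mAHr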
db_multitenant_database.go | // Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
//
package api
import (
"net/http"
"github.com/gorilla/mux"
"github.com/mattermost/mattermost-cloud/model"
)
| func initMultitenantDatabases(apiRouter *mux.Router, context *Context) {
addContext := func(handler contextHandlerFunc) *contextHandler {
return newContextHandler(context, handler)
}
MultitenantDatabasesRouter := apiRouter.PathPrefix("/multitenant_databases").Subrouter()
MultitenantDatabasesRouter.Handle("", addContext(handleGetMultitenantDatabases)).Methods("GET")
MultitenantDatabaseRouter := apiRouter.PathPrefix("/multitenant_database/{multitenant_database:[A-Za-z0-9]{26}}").Subrouter()
MultitenantDatabaseRouter.Handle("", addContext(handleGetMultitenantDatabase)).Methods("GET")
MultitenantDatabaseRouter.Handle("", addContext(handleUpdateMultitenantDatabase)).Methods("PUT")
}
// handleGetMultitenantDatabases responds to GET /api/databases/multitenant_databases,
// returning a list of multitenant databases.
func handleGetMultitenantDatabases(c *Context, w http.ResponseWriter, r *http.Request) {
paging, err := parsePaging(r.URL)
if err != nil {
c.Logger.WithError(err).Error("failed to parse paging parameters")
w.WriteHeader(http.StatusBadRequest)
return
}
filter := &model.MultitenantDatabaseFilter{
VpcID: parseString(r.URL, "vpc_id", ""),
DatabaseType: parseString(r.URL, "database_type", ""),
Paging: paging,
MaxInstallationsLimit: model.NoInstallationsLimit,
}
multitenantDatabases, err := c.Store.GetMultitenantDatabases(filter)
if err != nil {
c.Logger.WithError(err).Error("failed to query multitenant databases")
w.WriteHeader(http.StatusInternalServerError)
return
}
if multitenantDatabases == nil {
multitenantDatabases = []*model.MultitenantDatabase{}
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
outputJSON(c, w, multitenantDatabases)
}
// handleGetMultitenantDatabase responds to GET /api/databases/multitenant_database/{multitenant_database},
// returning the multitenant database in question.
func handleGetMultitenantDatabase(c *Context, w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
multitenantDatabaseID := vars["multitenant_database"]
c.Logger = c.Logger.WithField("multitenant_database", multitenantDatabaseID)
multitenantDatabase, err := c.Store.GetMultitenantDatabase(multitenantDatabaseID)
if err != nil {
c.Logger.WithError(err).Error("failed to query multitenant database")
w.WriteHeader(http.StatusInternalServerError)
return
}
if multitenantDatabase == nil {
w.WriteHeader(http.StatusNotFound)
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
outputJSON(c, w, multitenantDatabase)
}
// handleUpdateMultitenantDatabase responds to PUT /api/databases/multitenant_database/{multitenant_database},
// updating the database configuration values.
func handleUpdateMultitenantDatabase(c *Context, w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
multitenantDatabaseID := vars["multitenant_database"]
c.Logger = c.Logger.WithField("multitenant_database", multitenantDatabaseID)
patchDatabaseRequest, err := model.NewPatchMultitenantDatabaseRequestFromReader(r.Body)
if err != nil {
c.Logger.WithError(err).Error("failed to decode request")
w.WriteHeader(http.StatusBadRequest)
return
}
multitenantDatabase, status, unlockOnce := lockDatabase(c, multitenantDatabaseID)
if status != 0 {
w.WriteHeader(status)
return
}
defer unlockOnce()
if patchDatabaseRequest.Apply(multitenantDatabase) {
err = c.Store.UpdateMultitenantDatabase(multitenantDatabase)
if err != nil {
c.Logger.WithError(err).Error("failed to update multitenant database")
w.WriteHeader(http.StatusInternalServerError)
return
}
}
unlockOnce()
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
outputJSON(c, w, multitenantDatabase)
} | // initMultitenantDatabases registers multitenant database endpoints on the given router. |
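// Illustrative client call against the endpoint registered above; the host,
// port, and filter are placeholders, and the path follows the handler
// comment ("GET /api/databases/multitenant_databases").
func exampleListMultitenantDatabases() {
	resp, err := http.Get("http://localhost:8075/api/databases/multitenant_databases?database_type=postgres")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
}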
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.