file_name (string, 3–137 chars) | prefix (string, 0–918k chars) | suffix (string, 0–962k chars) | middle (string, 0–812k chars)
---|---|---|---|
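Each row pairs a file name with a prefix/suffix split of that file and the held-out middle span. A fill-in-the-middle (FIM) trainer typically rejoins the three pieces with sentinel tokens; here is a minimal sketch in Go, assuming hypothetical `<fim_prefix>`/`<fim_suffix>`/`<fim_middle>` sentinels (actual token names vary by tokenizer):

```go
package main

import "fmt"

// buildFIMSample assembles one training string in prefix-suffix-middle (PSM)
// order. The sentinel strings are placeholders, not any specific model's tokens.
func buildFIMSample(prefix, suffix, middle string) string {
	return "<fim_prefix>" + prefix + "<fim_suffix>" + suffix + "<fim_middle>" + middle
}

func main() {
	fmt.Println(buildFIMSample("func add(a, b int) int {\n\t", "\n}", "return a + b"))
}
```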
splitter.go | // Copyright 2012 The Walk Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build windows
package declarative
import (
"github.com/lxn/walk"
)
type HSplitter struct {
// Window
Accessibility Accessibility
Background Brush
ContextMenuItems []MenuItem
DoubleBuffering bool
Enabled Property
Font Font
MaxSize Size
MinSize Size
Name string
OnBoundsChanged walk.EventHandler
OnKeyDown walk.KeyEventHandler
OnKeyPress walk.KeyEventHandler
OnKeyUp walk.KeyEventHandler
OnMouseDown walk.MouseEventHandler
OnMouseMove walk.MouseEventHandler
OnMouseUp walk.MouseEventHandler
OnSizeChanged walk.EventHandler
Persistent bool
RightToLeftReading bool
ToolTipText Property
Visible Property
// Widget
Alignment Alignment2D
AlwaysConsumeSpace bool
Column int
ColumnSpan int
GraphicsEffects []walk.WidgetGraphicsEffect
Row int
RowSpan int
StretchFactor int
// Container
Children []Widget
DataBinder DataBinder
// Splitter
AssignTo **walk.Splitter
HandleWidth int
}
func (s HSplitter) Create(builder *Builder) error {
w, err := walk.NewHSplitter(builder.Parent())
if err != nil {
return err
}
if s.AssignTo != nil {
*s.AssignTo = w
}
w.SetSuspended(true)
builder.Defer(func() error {
w.SetSuspended(false)
return nil
})
return builder.InitWidget(s, w, func() error {
if s.HandleWidth > 0 {
if err := w.SetHandleWidth(s.HandleWidth); err != nil |
}
return nil
})
}
type VSplitter struct {
// Window
Accessibility Accessibility
Background Brush
ContextMenuItems []MenuItem
Enabled Property
Font Font
MaxSize Size
MinSize Size
Name string
OnBoundsChanged walk.EventHandler
OnKeyDown walk.KeyEventHandler
OnKeyPress walk.KeyEventHandler
OnKeyUp walk.KeyEventHandler
OnMouseDown walk.MouseEventHandler
OnMouseMove walk.MouseEventHandler
OnMouseUp walk.MouseEventHandler
OnSizeChanged walk.EventHandler
Persistent bool
RightToLeftReading bool
ToolTipText Property
Visible Property
// Widget
AlwaysConsumeSpace bool
Column int
ColumnSpan int
Row int
RowSpan int
StretchFactor int
// Container
Children []Widget
DataBinder DataBinder
// Splitter
AssignTo **walk.Splitter
HandleWidth int
}
func (s VSplitter) Create(builder *Builder) error {
w, err := walk.NewVSplitter(builder.Parent())
if err != nil {
return err
}
if s.AssignTo != nil {
*s.AssignTo = w
}
w.SetSuspended(true)
builder.Defer(func() error {
w.SetSuspended(false)
return nil
})
return builder.InitWidget(s, w, func() error {
if s.HandleWidth > 0 {
if err := w.SetHandleWidth(s.HandleWidth); err != nil {
return err
}
}
return nil
})
}
| {
return err
} |
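For context, here is a minimal sketch of how these declarative splitters are used, based on the walk declarative API imported above (the window setup details are assumptions, and walk is Windows-only):

```go
package main

import (
	"github.com/lxn/walk"
	. "github.com/lxn/walk/declarative"
)

func main() {
	var splitter *walk.Splitter
	// Two panes side by side, separated by a 6px draggable handle.
	MainWindow{
		Title:   "HSplitter demo",
		MinSize: Size{Width: 400, Height: 300},
		Layout:  VBox{},
		Children: []Widget{
			HSplitter{
				AssignTo:    &splitter,
				HandleWidth: 6,
				Children:    []Widget{TextEdit{}, TextEdit{}},
			},
		},
	}.Run()
}
```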
parser.rs | use super::cargo_cmds::{cargo_fmt, cargo_fmt_file, cargo_run, MAIN_FILE};
use super::highlight::highlight;
use crate::irust::format::format_eval_output;
use crate::irust::printer::{Printer, PrinterItem, PrinterItemType};
use crate::irust::{IRust, IRustError};
use crate::utils::{remove_main, stdout_and_stderr};
const SUCCESS: &str = "Ok!";
impl IRust {
pub fn parse(&mut self) -> Result<Printer, IRustError> {
match self.buffer.to_string().as_str() {
":help" => self.help(),
":reset" => self.reset(),
":show" => self.show(),
":pop" => self.pop(),
":irust" => self.irust(),
cmd if cmd.starts_with("::") => self.run_cmd(),
cmd if cmd.starts_with(":edit") => self.extern_edit(),
cmd if cmd.starts_with(":add") => self.add_dep(),
cmd if cmd.starts_with(":load") => self.load_script(),
cmd if cmd.starts_with(":type") => self.show_type(),
cmd if cmd.starts_with(":del") => self.del(),
_ => self.parse_second_order(),
}
}
fn reset(&mut self) -> Result<Printer, IRustError> {
self.repl.reset();
let mut outputs = Printer::new(PrinterItem::new(SUCCESS.to_string(), PrinterItemType::Ok));
outputs.add_new_line(1);
Ok(outputs)
}
fn pop(&mut self) -> Result<Printer, IRustError> {
self.repl.pop();
let mut outputs = Printer::new(PrinterItem::new(SUCCESS.to_string(), PrinterItemType::Ok));
outputs.add_new_line(1);
Ok(outputs)
}
fn del(&mut self) -> Result<Printer, IRustError> {
if let Some(line_num) = self.buffer.to_string().split_whitespace().last() {
self.repl.del(line_num)?;
}
let mut outputs = Printer::new(PrinterItem::new(SUCCESS.to_string(), PrinterItemType::Ok));
outputs.add_new_line(1);
Ok(outputs)
}
fn | (&mut self) -> Result<Printer, IRustError> {
let repl_code = highlight(&self.repl.show());
Ok(repl_code)
}
fn add_dep(&mut self) -> Result<Printer, IRustError> {
let dep: Vec<String> = self
.buffer
.to_string()
.split_whitespace()
.skip(1)
.map(ToOwned::to_owned)
.collect();
self.cursor.save_position()?;
self.wait_add(self.repl.add_dep(&dep)?, "Add")?;
self.wait_add(self.repl.build()?, "Build")?;
self.write_newline()?;
let mut outputs = Printer::new(PrinterItem::new(SUCCESS.to_string(), PrinterItemType::Ok));
outputs.add_new_line(1);
Ok(outputs)
}
fn load_script(&mut self) -> Result<Printer, IRustError> {
let buffer = self.buffer.to_string();
let script = buffer.split_whitespace().last().unwrap();
let script_code = std::fs::read(script)?;
if let Ok(s) = String::from_utf8(script_code) {
// Format script to make `remove_main` function work correctly
let s = cargo_fmt(&s)?;
let s = remove_main(&s);
self.repl.insert(s);
}
let mut outputs = Printer::new(PrinterItem::new(SUCCESS.to_string(), PrinterItemType::Ok));
outputs.add_new_line(1);
Ok(outputs)
}
fn show_type(&mut self) -> Result<Printer, IRustError> {
const TYPE_FOUND_MSG: &str = "found type `";
const EMPTY_TYPE_MSG: &str = "dev [unoptimized + debuginfo]";
let variable = self
.buffer
.to_string()
.trim_start_matches(":type")
.to_string();
let mut raw_out = String::new();
self.repl
.eval_in_tmp_repl(variable, || -> Result<(), IRustError> {
raw_out = cargo_run(false).unwrap();
Ok(())
})?;
let var_type = if raw_out.find(TYPE_FOUND_MSG).is_some() {
raw_out
.lines()
.find(|l| l.contains(TYPE_FOUND_MSG))
.unwrap()
.split('`')
.nth(1)
.unwrap()
.to_string()
} else if raw_out.find(EMPTY_TYPE_MSG).is_some() {
"()".into()
} else {
"Uknown".into()
};
Ok(Printer::new(PrinterItem::new(
var_type,
PrinterItemType::Ok,
)))
}
fn run_cmd(&mut self) -> Result<Printer, IRustError> {
// remove ::
let buffer = &self.buffer.to_string()[2..];
let mut cmd = buffer.split_whitespace();
let output = stdout_and_stderr(
std::process::Command::new(cmd.next().unwrap_or_default())
.args(&cmd.collect::<Vec<&str>>())
.output()?,
);
Ok(Printer::new(PrinterItem::new(
output,
PrinterItemType::Shell,
)))
}
fn parse_second_order(&mut self) -> Result<Printer, IRustError> {
if self.buffer.to_string().trim().is_empty() {
Ok(Printer::default())
} else if self.buffer.to_string().trim().ends_with(';') {
self.repl.insert(self.buffer.to_string());
let printer = Printer::default();
Ok(printer)
} else {
let mut outputs = Printer::default();
let mut eval_output = format_eval_output(&self.repl.eval(self.buffer.to_string())?);
outputs.append(&mut eval_output);
outputs.add_new_line(1);
Ok(outputs)
}
}
fn extern_edit(&mut self) -> Result<Printer, IRustError> {
// example: ":edit vi"
let editor: String = match self.buffer.to_string().split_whitespace().nth(1) {
Some(ed) => ed.to_string(),
None => return Err(IRustError::Custom("No editor specified".to_string())),
};
self.raw_terminal.write_with_color(
format!("waiting for {}...", editor),
crossterm::Color::Magenta,
)?;
self.write_newline()?;
// write current repl (to ensure eval leftover is cleaned)
self.repl.write()?;
// beautify code
if self.repl.body.len() > 2 {
let _ = cargo_fmt_file(&*MAIN_FILE);
}
std::process::Command::new(editor)
.arg(&*MAIN_FILE)
.spawn()?
.wait()?;
match self.repl.update_from_main_file() {
Ok(_) => Ok(Printer::new(PrinterItem::new(
SUCCESS.to_string(),
PrinterItemType::Ok,
))),
Err(e) => {
self.repl.reset();
Err(e)
}
}
}
fn irust(&mut self) -> Result<Printer, IRustError> {
let irust = self.ferris();
Ok(Printer::new(PrinterItem::new(
irust,
PrinterItemType::Custom(crossterm::Color::Red),
)))
}
}
| show |
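To illustrate the dispatch table above, a hypothetical IRust session (the outputs are illustrative, not captured from a real run):

```text
:add serde     -> cargo-adds the dependency, then rebuilds ("Ok!")
:type 4.2      -> prints the inferred type, e.g. f64
::ls           -> runs `ls` in a shell and prints stdout/stderr
:edit vi       -> hands the current buffer to an external editor
:reset         -> clears the repl state ("Ok!")
```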
voxelnet.py | import time
from enum import Enum
from functools import reduce
import contextlib
import numpy as np
import torch
from torch import nn
from torch.nn import functional as F
import torchplus
from second.pytorch.core import box_torch_ops
from second.pytorch.core.losses import (WeightedSigmoidClassificationLoss,
WeightedSmoothL1LocalizationLoss,
WeightedSoftmaxClassificationLoss)
from second.pytorch.models import middle, pointpillars, rpn, voxel_encoder
from torchplus import metrics
from second.pytorch.utils import torch_timer
from second.sphere.model import DepConvNet3, ConvNet
def _get_pos_neg_loss(cls_loss, labels):
# cls_loss: [N, num_anchors, num_class]
# labels: [N, num_anchors]
batch_size = cls_loss.shape[0]
if cls_loss.shape[-1] == 1 or len(cls_loss.shape) == 2:
cls_pos_loss = (labels > 0).type_as(cls_loss) * cls_loss.view(
batch_size, -1)
cls_neg_loss = (labels == 0).type_as(cls_loss) * cls_loss.view(
batch_size, -1)
cls_pos_loss = cls_pos_loss.sum() / batch_size
cls_neg_loss = cls_neg_loss.sum() / batch_size
else:
cls_pos_loss = cls_loss[..., 1:].sum() / batch_size
cls_neg_loss = cls_loss[..., 0].sum() / batch_size
return cls_pos_loss, cls_neg_loss
REGISTERED_NETWORK_CLASSES = {}
def register_voxelnet(cls, name=None):
global REGISTERED_NETWORK_CLASSES
if name is None:
name = cls.__name__
assert name not in REGISTERED_NETWORK_CLASSES, f"class {name} already registered; registered classes: {REGISTERED_NETWORK_CLASSES}"
REGISTERED_NETWORK_CLASSES[name] = cls
return cls
def get_voxelnet_class(name):
global REGISTERED_NETWORK_CLASSES
assert name in REGISTERED_NETWORK_CLASSES, f"unknown class {name}; available classes: {REGISTERED_NETWORK_CLASSES}"
return REGISTERED_NETWORK_CLASSES[name]
class LossNormType(Enum):
NormByNumPositives = "norm_by_num_positives"
NormByNumExamples = "norm_by_num_examples" | @register_voxelnet
class VoxelNet(nn.Module):
def __init__(self,
output_shape,
num_class=2,
num_input_features=4,
vfe_class_name="VoxelFeatureExtractor",
vfe_num_filters=[32, 128],
with_distance=False,
middle_class_name="SparseMiddleExtractor",
middle_num_input_features=-1,
middle_num_filters_d1=[64],
middle_num_filters_d2=[64, 64],
rpn_class_name="RPN",
rpn_num_input_features=-1,
rpn_layer_nums=[3, 5, 5],
rpn_layer_strides=[2, 2, 2],
rpn_num_filters=[128, 128, 256],
rpn_upsample_strides=[1, 2, 4],
rpn_num_upsample_filters=[256, 256, 256],
use_norm=True,
use_groupnorm=False,
num_groups=32,
use_direction_classifier=True,
use_sigmoid_score=False,
encode_background_as_zeros=True,
use_rotate_nms=True,
multiclass_nms=False,
nms_score_thresholds=None,
nms_pre_max_sizes=None,
nms_post_max_sizes=None,
nms_iou_thresholds=None,
target_assigner=None,
cls_loss_weight=1.0,
loc_loss_weight=1.0,
pos_cls_weight=1.0,
neg_cls_weight=1.0,
direction_loss_weight=1.0,
loss_norm_type=LossNormType.NormByNumPositives,
encode_rad_error_by_sin=False,
loc_loss_ftor=None,
cls_loss_ftor=None,
measure_time=False,
voxel_generator=None,
post_center_range=None,
dir_offset=0.0,
sin_error_factor=1.0,
nms_class_agnostic=False,
num_direction_bins=2,
direction_limit_offset=0,
name='voxelnet'):
super().__init__()
self.name = name
self._sin_error_factor = sin_error_factor
self._num_class = num_class
self._use_rotate_nms = use_rotate_nms
self._multiclass_nms = multiclass_nms
self._nms_score_thresholds = nms_score_thresholds
self._nms_pre_max_sizes = nms_pre_max_sizes
self._nms_post_max_sizes = nms_post_max_sizes
self._nms_iou_thresholds = nms_iou_thresholds
self._use_sigmoid_score = use_sigmoid_score
self._encode_background_as_zeros = encode_background_as_zeros
self._use_direction_classifier = use_direction_classifier
self._num_input_features = num_input_features
self._box_coder = target_assigner.box_coder
self.target_assigner = target_assigner
self.voxel_generator = voxel_generator
self._pos_cls_weight = pos_cls_weight
self._neg_cls_weight = neg_cls_weight
self._encode_rad_error_by_sin = encode_rad_error_by_sin
self._loss_norm_type = loss_norm_type
self._dir_loss_ftor = WeightedSoftmaxClassificationLoss()
self._diff_loc_loss_ftor = WeightedSmoothL1LocalizationLoss()
self._dir_offset = dir_offset
self._loc_loss_ftor = loc_loss_ftor
self._cls_loss_ftor = cls_loss_ftor
self._direction_loss_weight = direction_loss_weight
self._cls_loss_weight = cls_loss_weight
self._loc_loss_weight = loc_loss_weight
self._post_center_range = post_center_range or []
self.measure_time = measure_time
self._nms_class_agnostic = nms_class_agnostic
self._num_direction_bins = num_direction_bins
self._dir_limit_offset = direction_limit_offset
self.voxel_feature_extractor = voxel_encoder.get_vfe_class(vfe_class_name)(
num_input_features,
use_norm,
num_filters=vfe_num_filters,
with_distance=with_distance,
voxel_size=self.voxel_generator.voxel_size,
pc_range=self.voxel_generator.point_cloud_range,
)
self.middle_feature_extractor = middle.get_middle_class(middle_class_name)(
output_shape,
use_norm,
num_input_features=middle_num_input_features,
num_filters_down1=middle_num_filters_d1,
num_filters_down2=middle_num_filters_d2)
# self.feature_extractor = DepConvNet3(5)
# self.feature_extractor = ConvNet(5)
self.rpn = rpn.get_rpn_class(rpn_class_name)(
use_norm=True,
num_class=num_class,
layer_nums=rpn_layer_nums,
layer_strides=rpn_layer_strides,
num_filters=rpn_num_filters,
upsample_strides=rpn_upsample_strides,
num_upsample_filters=rpn_num_upsample_filters,
num_input_features=rpn_num_input_features,
num_anchor_per_loc=target_assigner.num_anchors_per_location,
encode_background_as_zeros=encode_background_as_zeros,
use_direction_classifier=use_direction_classifier,
use_groupnorm=use_groupnorm,
num_groups=num_groups,
box_code_size=target_assigner.box_coder.code_size,
num_direction_bins=self._num_direction_bins)
self.rpn_acc = metrics.Accuracy(
dim=-1, encode_background_as_zeros=encode_background_as_zeros)
self.rpn_precision = metrics.Precision(dim=-1)
self.rpn_recall = metrics.Recall(dim=-1)
self.rpn_metrics = metrics.PrecisionRecall(
dim=-1,
thresholds=[0.1, 0.3, 0.5, 0.7, 0.8, 0.9, 0.95],
use_sigmoid_score=use_sigmoid_score,
encode_background_as_zeros=encode_background_as_zeros)
self.rpn_cls_loss = metrics.Scalar()
self.rpn_loc_loss = metrics.Scalar()
self.rpn_total_loss = metrics.Scalar()
self.register_buffer("global_step", torch.LongTensor(1).zero_())
self._time_dict = {}
self._time_total_dict = {}
self._time_count_dict = {}
def start_timer(self, *names):
if not self.measure_time:
return
torch.cuda.synchronize()
for name in names:
self._time_dict[name] = time.time()
def end_timer(self, name):
if not self.measure_time:
return
torch.cuda.synchronize()
time_elapsed = time.time() - self._time_dict[name]
if name not in self._time_count_dict:
self._time_count_dict[name] = 1
self._time_total_dict[name] = time_elapsed
else:
self._time_count_dict[name] += 1
self._time_total_dict[name] += time_elapsed
self._time_dict[name] = 0
def clear_timer(self):
self._time_count_dict.clear()
self._time_dict.clear()
self._time_total_dict.clear()
@contextlib.contextmanager
def profiler(self):
old_measure_time = self.measure_time
self.measure_time = True
yield
self.measure_time = old_measure_time
def get_avg_time_dict(self):
ret = {}
for name, val in self._time_total_dict.items():
count = self._time_count_dict[name]
ret[name] = val / max(1, count)
return ret
def update_global_step(self):
self.global_step += 1
def get_global_step(self):
return int(self.global_step.cpu().numpy()[0])
def clear_global_step(self):
self.global_step.zero_()
def loss(self, example, preds_dict):
box_preds = preds_dict["box_preds"]
cls_preds = preds_dict["cls_preds"]
batch_size_dev = cls_preds.shape[0]
self.start_timer("loss forward")
labels = example['labels']
reg_targets = example['reg_targets']
importance = example['importance']
self.start_timer("prepare weight forward")
cls_weights, reg_weights, cared = prepare_loss_weights(
labels,
pos_cls_weight=self._pos_cls_weight,
neg_cls_weight=self._neg_cls_weight,
loss_norm_type=self._loss_norm_type,
dtype=box_preds.dtype)
cls_targets = labels * cared.type_as(labels)
cls_targets = cls_targets.unsqueeze(-1)
self.end_timer("prepare weight forward")
self.start_timer("create_loss forward")
loc_loss, cls_loss = create_loss(
self._loc_loss_ftor,
self._cls_loss_ftor,
box_preds=box_preds,
cls_preds=cls_preds,
cls_targets=cls_targets,
cls_weights=cls_weights * importance,
reg_targets=reg_targets,
reg_weights=reg_weights * importance,
num_class=self._num_class,
encode_rad_error_by_sin=self._encode_rad_error_by_sin,
encode_background_as_zeros=self._encode_background_as_zeros,
box_code_size=self._box_coder.code_size,
sin_error_factor=self._sin_error_factor,
num_direction_bins=self._num_direction_bins,
)
loc_loss_reduced = loc_loss.sum() / batch_size_dev
loc_loss_reduced *= self._loc_loss_weight
cls_pos_loss, cls_neg_loss = _get_pos_neg_loss(cls_loss, labels)
cls_pos_loss /= self._pos_cls_weight
cls_neg_loss /= self._neg_cls_weight
cls_loss_reduced = cls_loss.sum() / batch_size_dev
cls_loss_reduced *= self._cls_loss_weight
loss = loc_loss_reduced + cls_loss_reduced
self.end_timer("create_loss forward")
if self._use_direction_classifier:
dir_targets = get_direction_target(
example['anchors'],
reg_targets,
dir_offset=self._dir_offset,
num_bins=self._num_direction_bins)
dir_logits = preds_dict["dir_cls_preds"].view(
batch_size_dev, -1, self._num_direction_bins)
weights = (labels > 0).type_as(dir_logits) * importance
weights /= torch.clamp(weights.sum(-1, keepdim=True), min=1.0)
dir_loss = self._dir_loss_ftor(
dir_logits, dir_targets, weights=weights)
dir_loss = dir_loss.sum() / batch_size_dev
loss += dir_loss * self._direction_loss_weight
self.end_timer("loss forward")
res = {
"loss": loss,
"cls_loss": cls_loss,
"loc_loss": loc_loss,
"cls_pos_loss": cls_pos_loss,
"cls_neg_loss": cls_neg_loss,
"cls_preds": cls_preds,
"cls_loss_reduced": cls_loss_reduced,
"loc_loss_reduced": loc_loss_reduced,
"cared": cared,
}
if self._use_direction_classifier:
res["dir_loss_reduced"] = dir_loss
return res
# def network_forward(self, feature, batch_size):
def network_forward(self, voxels, num_points, coors, batch_size):
"""this function is used for subclass.
you can add custom network architecture by subclass VoxelNet class
and override this function.
Returns:
preds_dict: {
box_preds: ...
cls_preds: ...
dir_cls_preds: ...
}
"""
self.start_timer("voxel_feature_extractor")
voxel_features = self.voxel_feature_extractor(voxels, num_points,
coors)
self.end_timer("voxel_feature_extractor")
self.start_timer("middle forward")
spatial_features = self.middle_feature_extractor(
voxel_features, coors, batch_size)
# spatial_features = self.feature_extractor(feature)
self.end_timer("middle forward")
self.start_timer("rpn forward")
preds_dict = self.rpn(spatial_features)
self.end_timer("rpn forward")
return preds_dict
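# A hedged sketch of the override hook described above (illustrative only;
# `MyVoxelNet` is hypothetical):
#
# @register_voxelnet
# class MyVoxelNet(VoxelNet):
#     def network_forward(self, voxels, num_points, coors, batch_size):
#         voxel_features = self.voxel_feature_extractor(voxels, num_points, coors)
#         spatial_features = self.middle_feature_extractor(voxel_features, coors, batch_size)
#         return self.rpn(spatial_features)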
def forward(self, example):
"""module's forward should always accept dict and return loss.
"""
voxels = example["voxels"]
num_points = example["num_points"]
coors = example["coordinates"]
# feature = example["feature"]
# feature = torch.tensor(feature, device="cuda", dtype=torch.float32)
if len(num_points.shape) == 2: # multi-gpu
num_voxel_per_batch = example["num_voxels"].cpu().numpy().reshape(
-1)
voxel_list = []
num_points_list = []
coors_list = []
for i, num_voxel in enumerate(num_voxel_per_batch):
voxel_list.append(voxels[i, :num_voxel])
num_points_list.append(num_points[i, :num_voxel])
coors_list.append(coors[i, :num_voxel])
voxels = torch.cat(voxel_list, dim=0)
num_points = torch.cat(num_points_list, dim=0)
coors = torch.cat(coors_list, dim=0)
coors[:,0] -= coors[:,0].min()
# print("voxels shape = ", voxels.shape)
# print("coors shape = ", coors.shape)
# print("num_points shape = ", num_points.shape)
batch_anchors = example["anchors"]
# print("batch anchor shpae", batch_anchors.shape)
batch_size_dev = batch_anchors.shape[0]
# features: [num_voxels, max_num_points_per_voxel, 7]
# num_points: [num_voxels]
# coors: [num_voxels, 4]
preds_dict = self.network_forward(voxels, num_points, coors, batch_size_dev)
# preds_dict = self.network_forward(feature, batch_size_dev)
# need to check size.
box_preds = preds_dict["box_preds"].view(batch_size_dev, -1, self._box_coder.code_size)
err_msg = f"num_anchors={batch_anchors.shape[1]}, but num_output={box_preds.shape[1]}. please check size"
assert batch_anchors.shape[1] == box_preds.shape[1], err_msg
if self.training:
return self.loss(example, preds_dict)
else:
self.start_timer("predict")
with torch.no_grad():
res = self.predict(example, preds_dict)
self.end_timer("predict")
return res
def predict(self, example, preds_dict):
"""start with v1.6.0, this function don't contain any kitti-specific code.
Returns:
predict: list of pred_dict.
pred_dict: {
box3d_lidar: [N, 7] 3d box.
scores: [N]
label_preds: [N]
metadata: metadata with dataset-specific information.
For kitti it contains the image idx (label idx);
for nuscenes the sample_token is saved in it.
}
"""
batch_size = example['anchors'].shape[0]
if "metadata" not in example or len(example["metadata"]) == 0:
meta_list = [None] * batch_size
else:
meta_list = example["metadata"]
batch_anchors = example["anchors"].view(batch_size, -1,
example["anchors"].shape[-1])
if "anchors_mask" not in example:
batch_anchors_mask = [None] * batch_size
else:
batch_anchors_mask = example["anchors_mask"].view(batch_size, -1)
t = time.time()
batch_box_preds = preds_dict["box_preds"]
batch_cls_preds = preds_dict["cls_preds"]
batch_box_preds = batch_box_preds.view(batch_size, -1,
self._box_coder.code_size)
num_class_with_bg = self._num_class
if not self._encode_background_as_zeros:
num_class_with_bg = self._num_class + 1
batch_cls_preds = batch_cls_preds.view(batch_size, -1,
num_class_with_bg)
batch_box_preds = self._box_coder.decode_torch(batch_box_preds,
batch_anchors)
if self._use_direction_classifier:
batch_dir_preds = preds_dict["dir_cls_preds"]
batch_dir_preds = batch_dir_preds.view(batch_size, -1,
self._num_direction_bins)
else:
batch_dir_preds = [None] * batch_size
predictions_dicts = []
post_center_range = None
if len(self._post_center_range) > 0:
post_center_range = torch.tensor(
self._post_center_range,
dtype=batch_box_preds.dtype,
device=batch_box_preds.device).float()
for box_preds, cls_preds, dir_preds, a_mask, meta in zip(
batch_box_preds, batch_cls_preds, batch_dir_preds,
batch_anchors_mask, meta_list):
if a_mask is not None:
box_preds = box_preds[a_mask]
cls_preds = cls_preds[a_mask]
box_preds = box_preds.float()
cls_preds = cls_preds.float()
if self._use_direction_classifier:
if a_mask is not None:
dir_preds = dir_preds[a_mask]
dir_labels = torch.max(dir_preds, dim=-1)[1]
if self._encode_background_as_zeros:
# this doesn't support softmax
assert self._use_sigmoid_score is True
total_scores = torch.sigmoid(cls_preds)
else:
# encode background as first element in one-hot vector
if self._use_sigmoid_score:
total_scores = torch.sigmoid(cls_preds)[..., 1:]
else:
total_scores = F.softmax(cls_preds, dim=-1)[..., 1:]
# Apply NMS in birdeye view
if self._use_rotate_nms:
nms_func = box_torch_ops.rotate_nms
else:
nms_func = box_torch_ops.nms
feature_map_size_prod = batch_box_preds.shape[
1] // self.target_assigner.num_anchors_per_location
if self._multiclass_nms:
assert self._encode_background_as_zeros is True
boxes_for_nms = box_preds[:, [0, 1, 3, 4, 6]]
if not self._use_rotate_nms:
box_preds_corners = box_torch_ops.center_to_corner_box2d(
boxes_for_nms[:, :2], boxes_for_nms[:, 2:4],
boxes_for_nms[:, 4])
boxes_for_nms = box_torch_ops.corner_to_standup_nd(
box_preds_corners)
selected_boxes, selected_labels, selected_scores = [], [], []
selected_dir_labels = []
scores = total_scores
boxes = boxes_for_nms
selected_per_class = []
score_threshs = self._nms_score_thresholds
pre_max_sizes = self._nms_pre_max_sizes
post_max_sizes = self._nms_post_max_sizes
iou_thresholds = self._nms_iou_thresholds
for class_idx, score_thresh, pre_ms, post_ms, iou_th in zip(
range(self._num_class),
score_threshs,
pre_max_sizes, post_max_sizes, iou_thresholds):
if self._nms_class_agnostic:
class_scores = total_scores.view(
feature_map_size_prod, -1,
self._num_class)[..., class_idx]
class_scores = class_scores.contiguous().view(-1)
class_boxes_nms = boxes.view(-1,
boxes_for_nms.shape[-1])
class_boxes = box_preds
class_dir_labels = dir_labels
else:
anchors_range = self.target_assigner.anchors_range(class_idx)
class_scores = total_scores.view(
-1,
self._num_class)[anchors_range[0]:anchors_range[1], class_idx]
class_boxes_nms = boxes.view(-1,
boxes_for_nms.shape[-1])[anchors_range[0]:anchors_range[1], :]
class_scores = class_scores.contiguous().view(-1)
class_boxes_nms = class_boxes_nms.contiguous().view(
-1, boxes_for_nms.shape[-1])
class_boxes = box_preds.view(-1,
box_preds.shape[-1])[anchors_range[0]:anchors_range[1], :]
class_boxes = class_boxes.contiguous().view(
-1, box_preds.shape[-1])
if self._use_direction_classifier:
class_dir_labels = dir_labels.view(-1)[anchors_range[0]:anchors_range[1]]
class_dir_labels = class_dir_labels.contiguous(
).view(-1)
if score_thresh > 0.0:
class_scores_keep = class_scores >= score_thresh
if class_scores_keep.shape[0] == 0:
selected_per_class.append(None)
continue
class_scores = class_scores[class_scores_keep]
if class_scores.shape[0] != 0:
if score_thresh > 0.0:
class_boxes_nms = class_boxes_nms[
class_scores_keep]
class_boxes = class_boxes[class_scores_keep]
class_dir_labels = class_dir_labels[
class_scores_keep]
keep = nms_func(class_boxes_nms, class_scores, pre_ms,
post_ms, iou_th)
if keep.shape[0] != 0:
selected_per_class.append(keep)
else:
selected_per_class.append(None)
else:
selected_per_class.append(None)
selected = selected_per_class[-1]
if selected is not None:
selected_boxes.append(class_boxes[selected])
selected_labels.append(
torch.full([class_boxes[selected].shape[0]],
class_idx,
dtype=torch.int64,
device=box_preds.device))
if self._use_direction_classifier:
selected_dir_labels.append(
class_dir_labels[selected])
selected_scores.append(class_scores[selected])
selected_boxes = torch.cat(selected_boxes, dim=0)
selected_labels = torch.cat(selected_labels, dim=0)
selected_scores = torch.cat(selected_scores, dim=0)
if self._use_direction_classifier:
selected_dir_labels = torch.cat(selected_dir_labels, dim=0)
else:
# get highest score per prediction, then apply nms
# to remove overlapping boxes.
if num_class_with_bg == 1:
top_scores = total_scores.squeeze(-1)
top_labels = torch.zeros(
total_scores.shape[0],
device=total_scores.device,
dtype=torch.long)
else:
top_scores, top_labels = torch.max(
total_scores, dim=-1)
if self._nms_score_thresholds[0] > 0.0:
top_scores_keep = top_scores >= self._nms_score_thresholds[0]
top_scores = top_scores.masked_select(top_scores_keep)
if top_scores.shape[0] != 0:
if self._nms_score_thresholds[0] > 0.0:
box_preds = box_preds[top_scores_keep]
if self._use_direction_classifier:
dir_labels = dir_labels[top_scores_keep]
top_labels = top_labels[top_scores_keep]
boxes_for_nms = box_preds[:, [0, 1, 3, 4, 6]]
if not self._use_rotate_nms:
box_preds_corners = box_torch_ops.center_to_corner_box2d(
boxes_for_nms[:, :2], boxes_for_nms[:, 2:4],
boxes_for_nms[:, 4])
boxes_for_nms = box_torch_ops.corner_to_standup_nd(
box_preds_corners)
# the nms in 3d detection just removes overlapping boxes.
selected = nms_func(
boxes_for_nms,
top_scores,
pre_max_size=self._nms_pre_max_sizes[0],
post_max_size=self._nms_post_max_sizes[0],
iou_threshold=self._nms_iou_thresholds[0],
)
else:
selected = []
# if selected is not None:
selected_boxes = box_preds[selected]
if self._use_direction_classifier:
selected_dir_labels = dir_labels[selected]
selected_labels = top_labels[selected]
selected_scores = top_scores[selected]
# finally generate predictions.
if selected_boxes.shape[0] != 0:
box_preds = selected_boxes
scores = selected_scores
label_preds = selected_labels
if self._use_direction_classifier:
dir_labels = selected_dir_labels
period = (2 * np.pi / self._num_direction_bins)
dir_rot = box_torch_ops.limit_period(
box_preds[..., 6] - self._dir_offset,
self._dir_limit_offset, period)
box_preds[
...,
6] = dir_rot + self._dir_offset + period * dir_labels.to(
box_preds.dtype)
final_box_preds = box_preds
final_scores = scores
final_labels = label_preds
if post_center_range is not None:
mask = (final_box_preds[:, :3] >=
post_center_range[:3]).all(1)
mask &= (final_box_preds[:, :3] <=
post_center_range[3:]).all(1)
predictions_dict = {
"box3d_lidar": final_box_preds[mask],
"scores": final_scores[mask],
"label_preds": label_preds[mask],
"metadata": meta,
}
else:
predictions_dict = {
"box3d_lidar": final_box_preds,
"scores": final_scores,
"label_preds": label_preds,
"metadata": meta,
}
else:
dtype = batch_box_preds.dtype
device = batch_box_preds.device
predictions_dict = {
"box3d_lidar":
torch.zeros([0, box_preds.shape[-1]],
dtype=dtype,
device=device),
"scores":
torch.zeros([0], dtype=dtype, device=device),
"label_preds":
torch.zeros([0], dtype=top_labels.dtype, device=device),
"metadata":
meta,
}
predictions_dicts.append(predictions_dict)
return predictions_dicts
def metrics_to_float(self):
self.rpn_acc.float()
self.rpn_metrics.float()
self.rpn_cls_loss.float()
self.rpn_loc_loss.float()
self.rpn_total_loss.float()
def update_metrics(self, cls_loss, loc_loss, cls_preds, labels, sampled):
batch_size = cls_preds.shape[0]
num_class = self._num_class
if not self._encode_background_as_zeros:
num_class += 1
cls_preds = cls_preds.view(batch_size, -1, num_class)
rpn_acc = self.rpn_acc(labels, cls_preds, sampled).numpy()[0]
prec, recall = self.rpn_metrics(labels, cls_preds, sampled)
prec = prec.numpy()
recall = recall.numpy()
rpn_cls_loss = self.rpn_cls_loss(cls_loss).numpy()[0]
rpn_loc_loss = self.rpn_loc_loss(loc_loss).numpy()[0]
ret = {
"loss": {
"cls_loss": float(rpn_cls_loss),
"cls_loss_rt": float(cls_loss.data.cpu().numpy()),
'loc_loss': float(rpn_loc_loss),
"loc_loss_rt": float(loc_loss.data.cpu().numpy()),
},
"rpn_acc": float(rpn_acc),
"pr": {},
}
for i, thresh in enumerate(self.rpn_metrics.thresholds):
ret["pr"][f"prec@{int(thresh*100)}"] = float(prec[i])
ret["pr"][f"rec@{int(thresh*100)}"] = float(recall[i])
return ret
def clear_metrics(self):
self.rpn_acc.clear()
self.rpn_metrics.clear()
self.rpn_cls_loss.clear()
self.rpn_loc_loss.clear()
self.rpn_total_loss.clear()
@staticmethod
def convert_norm_to_float(net):
'''
Convert BatchNorm layers to have parameters in single precision.
Find all such layers and convert them back to float. This can't
be done with the built-in .apply, since that function applies
fn to all modules, parameters, and buffers, so we couldn't
guard the float conversion on the module type.
'''
if isinstance(net, torch.nn.modules.batchnorm._BatchNorm):
net.float()
for child in net.children():
VoxelNet.convert_norm_to_float(child)
return net
def add_sin_difference(boxes1, boxes2, boxes1_rot, boxes2_rot, factor=1.0):
if factor != 1.0:
boxes1_rot = factor * boxes1_rot
boxes2_rot = factor * boxes2_rot
rad_pred_encoding = torch.sin(boxes1_rot) * torch.cos(boxes2_rot)
rad_tg_encoding = torch.cos(boxes1_rot) * torch.sin(boxes2_rot)
boxes1 = torch.cat([boxes1[..., :6], rad_pred_encoding, boxes1[..., 7:]],
dim=-1)
boxes2 = torch.cat([boxes2[..., :6], rad_tg_encoding, boxes2[..., 7:]],
dim=-1)
return boxes1, boxes2
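# A quick numeric sanity check of the identity used above (illustrative only):
#   >>> import math
#   >>> a, b = 1.3, 0.9
#   >>> lhs = math.sin(a) * math.cos(b) - math.cos(a) * math.sin(b)
#   >>> math.isclose(lhs, math.sin(a - b))
#   True
# The loss on the encoded angle channel therefore penalizes sin(a - b), which
# avoids the 2*pi wrap-around ambiguity of comparing raw angles directly.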
def create_loss(loc_loss_ftor,
cls_loss_ftor,
box_preds,
cls_preds,
cls_targets,
cls_weights,
reg_targets,
reg_weights,
num_class,
encode_background_as_zeros=True,
encode_rad_error_by_sin=True,
sin_error_factor=1.0,
box_code_size=7,
num_direction_bins=2):
batch_size = int(box_preds.shape[0])
box_preds = box_preds.view(batch_size, -1, box_code_size)
if encode_background_as_zeros:
cls_preds = cls_preds.view(batch_size, -1, num_class)
else:
cls_preds = cls_preds.view(batch_size, -1, num_class + 1)
cls_targets = cls_targets.squeeze(-1)
one_hot_targets = torchplus.nn.one_hot(
cls_targets, depth=num_class + 1, dtype=box_preds.dtype)
if encode_background_as_zeros:
one_hot_targets = one_hot_targets[..., 1:]
if encode_rad_error_by_sin:
# sin(a - b) = sin(a)cos(b) - cos(a)sin(b)
# reg_tg_rot = box_torch_ops.limit_period(
# reg_targets[..., 6:7], 0.5, 2 * np.pi / num_direction_bins)
box_preds, reg_targets = add_sin_difference(box_preds, reg_targets,
box_preds[..., 6:7], reg_targets[..., 6:7], sin_error_factor)
loc_losses = loc_loss_ftor(
box_preds, reg_targets, weights=reg_weights) # [N, M]
cls_losses = cls_loss_ftor(
cls_preds, one_hot_targets, weights=cls_weights) # [N, M]
return loc_losses, cls_losses
def prepare_loss_weights(labels,
pos_cls_weight=1.0,
neg_cls_weight=1.0,
loss_norm_type=LossNormType.NormByNumPositives,
dtype=torch.float32):
"""get cls_weights and reg_weights from labels.
"""
cared = labels >= 0
# cared: [N, num_anchors]
positives = labels > 0
negatives = labels == 0
negative_cls_weights = negatives.type(dtype) * neg_cls_weight
cls_weights = negative_cls_weights + pos_cls_weight * positives.type(dtype)
reg_weights = positives.type(dtype)
if loss_norm_type == LossNormType.NormByNumExamples:
num_examples = cared.type(dtype).sum(1, keepdim=True)
num_examples = torch.clamp(num_examples, min=1.0)
cls_weights /= num_examples
bbox_normalizer = positives.sum(1, keepdim=True).type(dtype)
reg_weights /= torch.clamp(bbox_normalizer, min=1.0)
elif loss_norm_type == LossNormType.NormByNumPositives: # for focal loss
pos_normalizer = positives.sum(1, keepdim=True).type(dtype)
reg_weights /= torch.clamp(pos_normalizer, min=1.0)
cls_weights /= torch.clamp(pos_normalizer, min=1.0)
elif loss_norm_type == LossNormType.NormByNumPosNeg:
pos_neg = torch.stack([positives, negatives], dim=-1).type(dtype)
normalizer = pos_neg.sum(1, keepdim=True) # [N, 1, 2]
cls_normalizer = (pos_neg * normalizer).sum(-1) # [N, M]
cls_normalizer = torch.clamp(cls_normalizer, min=1.0)
# cls_normalizer will be pos_or_neg_weight/num_pos_or_neg
normalizer = torch.clamp(normalizer, min=1.0)
reg_weights /= normalizer[:, 0:1, 0]
cls_weights /= cls_normalizer
elif loss_norm_type == LossNormType.DontNorm: # support ghm loss
pos_normalizer = positives.sum(1, keepdim=True).type(dtype)
reg_weights /= torch.clamp(pos_normalizer, min=1.0)
else:
raise ValueError(
f"unknown loss norm type. available: {list(LossNormType)}")
return cls_weights, reg_weights, cared
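# Worked example (illustrative): labels = [[1, 0, -1]] with
# NormByNumPositives gives positives = [[1, 0, 0]], so pos_normalizer = 1,
# cls_weights = [[pos_cls_weight, neg_cls_weight, 0.0]] and
# reg_weights = [[1.0, 0.0, 0.0]]; the ignored (-1) anchor is masked out
# downstream via `cared`.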
def assign_weight_to_each_class(labels,
weight_per_class,
norm_by_num=True,
dtype=torch.float32):
weights = torch.zeros(labels.shape, dtype=dtype, device=labels.device)
for label, weight in weight_per_class:
positives = (labels == label).type(dtype)
weight_class = weight * positives
if norm_by_num:
normalizer = positives.sum()
normalizer = torch.clamp(normalizer, min=1.0)
weight_class /= normalizer
weights += weight_class
return weights
def get_direction_target(anchors,
reg_targets,
one_hot=True,
dir_offset=0,
num_bins=2):
batch_size = reg_targets.shape[0]
anchors = anchors.view(batch_size, -1, anchors.shape[-1])
rot_gt = reg_targets[..., 6] + anchors[..., 6]
offset_rot = box_torch_ops.limit_period(rot_gt - dir_offset, 0, 2 * np.pi)
dir_cls_targets = torch.floor(offset_rot / (2 * np.pi / num_bins)).long()
dir_cls_targets = torch.clamp(dir_cls_targets, min=0, max=num_bins - 1)
if one_hot:
dir_cls_targets = torchplus.nn.one_hot(
dir_cls_targets, num_bins, dtype=anchors.dtype)
return dir_cls_targets | NormByNumPosNeg = "norm_by_num_pos_neg"
DontNorm = "dont_norm"
|
index.d.ts | export interface Result {
/**
* All updated commands.
*/
commands: string[];
/**
* Human-readable logs.
*/
logs: string[];
/**
* `success` means the update completed without problems.
* `warning` means the commands are valid, but spu can't update them for some reason, e.g. Minecraft removed them entirely in later versions.
* `error` means the commands contain syntax error(s).
*/
state: 'success' | 'warning' | 'error';
}
/**
* Update command(s).
* @param commands The command(s) to update. Blank lines, comments, and leading slashes (`/`) before commands are supported.
* @param from The original version of the command(s). `X` stands for *Minecraft Java Edition 1.X*.
* @param to The target version. `X` stands for *Minecraft Java Edition 1.X*. | export declare function update(commands: string[], from: number, to: number): Result; | */ |
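A hedged invocation sketch in TypeScript, matching the declaration above (the module specifier `spu` and the concrete commands/versions are assumptions, not confirmed by this file):

```ts
import { update } from 'spu';

const result = update(['/say hi', '# a comment'], 12, 14);
if (result.state !== 'error') {
  console.log(result.commands.join('\n'));
}
```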
flight_placeholder_field.pb.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc v3.17.3
// source: google/ads/googleads/v7/enums/flight_placeholder_field.proto
package enums
import (
reflect "reflect"
sync "sync"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// Possible values for Flight placeholder fields.
type FlightPlaceholderFieldEnum_FlightPlaceholderField int32
const (
// Not specified.
FlightPlaceholderFieldEnum_UNSPECIFIED FlightPlaceholderFieldEnum_FlightPlaceholderField = 0
// Used for return value only. Represents value unknown in this version.
FlightPlaceholderFieldEnum_UNKNOWN FlightPlaceholderFieldEnum_FlightPlaceholderField = 1
// Data Type: STRING. Required. Destination id. Example: PAR, LON.
// For feed items that only have destination id, destination id must be a
// unique key. For feed items that have both destination id and origin id,
// then the combination must be a unique key.
FlightPlaceholderFieldEnum_DESTINATION_ID FlightPlaceholderFieldEnum_FlightPlaceholderField = 2
// Data Type: STRING. Origin id. Example: PAR, LON.
// Optional. Combination of destination id and origin id must be unique per
// offer.
FlightPlaceholderFieldEnum_ORIGIN_ID FlightPlaceholderFieldEnum_FlightPlaceholderField = 3
// Data Type: STRING. Required. Main headline with product name to be shown
// in dynamic ad.
FlightPlaceholderFieldEnum_FLIGHT_DESCRIPTION FlightPlaceholderFieldEnum_FlightPlaceholderField = 4
// Data Type: STRING. Shorter names are recommended.
FlightPlaceholderFieldEnum_ORIGIN_NAME FlightPlaceholderFieldEnum_FlightPlaceholderField = 5
// Data Type: STRING. Shorter names are recommended.
FlightPlaceholderFieldEnum_DESTINATION_NAME FlightPlaceholderFieldEnum_FlightPlaceholderField = 6
// Data Type: STRING. Price to be shown in the ad.
// Example: "100.00 USD"
FlightPlaceholderFieldEnum_FLIGHT_PRICE FlightPlaceholderFieldEnum_FlightPlaceholderField = 7
// Data Type: STRING. Formatted price to be shown in the ad.
// Example: "Starting at $100.00 USD", "$80 - $100"
FlightPlaceholderFieldEnum_FORMATTED_PRICE FlightPlaceholderFieldEnum_FlightPlaceholderField = 8
// Data Type: STRING. Sale price to be shown in the ad.
// Example: "80.00 USD"
FlightPlaceholderFieldEnum_FLIGHT_SALE_PRICE FlightPlaceholderFieldEnum_FlightPlaceholderField = 9
// Data Type: STRING. Formatted sale price to be shown in the ad.
// Example: "On sale for $80.00", "$60 - $80"
FlightPlaceholderFieldEnum_FORMATTED_SALE_PRICE FlightPlaceholderFieldEnum_FlightPlaceholderField = 10
// Data Type: URL. Image to be displayed in the ad.
FlightPlaceholderFieldEnum_IMAGE_URL FlightPlaceholderFieldEnum_FlightPlaceholderField = 11
// Data Type: URL_LIST. Required. Final URLs for the ad when using Upgraded
// URLs. User will be redirected to these URLs when they click on an ad, or
// when they click on a specific flight for ads that show multiple
// flights.
FlightPlaceholderFieldEnum_FINAL_URLS FlightPlaceholderFieldEnum_FlightPlaceholderField = 12
// Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded
// URLs.
FlightPlaceholderFieldEnum_FINAL_MOBILE_URLS FlightPlaceholderFieldEnum_FlightPlaceholderField = 13
// Data Type: URL. Tracking template for the ad when using Upgraded URLs.
FlightPlaceholderFieldEnum_TRACKING_URL FlightPlaceholderFieldEnum_FlightPlaceholderField = 14
// Data Type: STRING. Android app link. Must be formatted as:
// android-app://{package_id}/{scheme}/{host_path}.
// The components are defined as follows:
// package_id: app ID as specified in Google Play.
// scheme: the scheme to pass to the application. Can be HTTP, or a custom
// scheme.
// host_path: identifies the specific content within your application.
FlightPlaceholderFieldEnum_ANDROID_APP_LINK FlightPlaceholderFieldEnum_FlightPlaceholderField = 15
// Data Type: STRING_LIST. List of recommended destination IDs to show
// together with this item.
FlightPlaceholderFieldEnum_SIMILAR_DESTINATION_IDS FlightPlaceholderFieldEnum_FlightPlaceholderField = 16
// Data Type: STRING. iOS app link.
FlightPlaceholderFieldEnum_IOS_APP_LINK FlightPlaceholderFieldEnum_FlightPlaceholderField = 17
// Data Type: INT64. iOS app store ID.
FlightPlaceholderFieldEnum_IOS_APP_STORE_ID FlightPlaceholderFieldEnum_FlightPlaceholderField = 18
)
// Enum value maps for FlightPlaceholderFieldEnum_FlightPlaceholderField.
var (
FlightPlaceholderFieldEnum_FlightPlaceholderField_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "DESTINATION_ID",
3: "ORIGIN_ID",
4: "FLIGHT_DESCRIPTION",
5: "ORIGIN_NAME",
6: "DESTINATION_NAME",
7: "FLIGHT_PRICE",
8: "FORMATTED_PRICE",
9: "FLIGHT_SALE_PRICE",
10: "FORMATTED_SALE_PRICE",
11: "IMAGE_URL",
12: "FINAL_URLS",
13: "FINAL_MOBILE_URLS",
14: "TRACKING_URL",
15: "ANDROID_APP_LINK",
16: "SIMILAR_DESTINATION_IDS",
17: "IOS_APP_LINK",
18: "IOS_APP_STORE_ID",
}
FlightPlaceholderFieldEnum_FlightPlaceholderField_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"DESTINATION_ID": 2,
"ORIGIN_ID": 3,
"FLIGHT_DESCRIPTION": 4,
"ORIGIN_NAME": 5,
"DESTINATION_NAME": 6,
"FLIGHT_PRICE": 7,
"FORMATTED_PRICE": 8,
"FLIGHT_SALE_PRICE": 9,
"FORMATTED_SALE_PRICE": 10,
"IMAGE_URL": 11,
"FINAL_URLS": 12,
"FINAL_MOBILE_URLS": 13,
"TRACKING_URL": 14,
"ANDROID_APP_LINK": 15,
"SIMILAR_DESTINATION_IDS": 16,
"IOS_APP_LINK": 17,
"IOS_APP_STORE_ID": 18,
}
)
func (x FlightPlaceholderFieldEnum_FlightPlaceholderField) Enum() *FlightPlaceholderFieldEnum_FlightPlaceholderField {
p := new(FlightPlaceholderFieldEnum_FlightPlaceholderField)
*p = x
return p
}
func (x FlightPlaceholderFieldEnum_FlightPlaceholderField) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (FlightPlaceholderFieldEnum_FlightPlaceholderField) Descriptor() protoreflect.EnumDescriptor {
return file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_enumTypes[0].Descriptor()
}
func (FlightPlaceholderFieldEnum_FlightPlaceholderField) Type() protoreflect.EnumType {
return &file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_enumTypes[0]
}
func (x FlightPlaceholderFieldEnum_FlightPlaceholderField) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use FlightPlaceholderFieldEnum_FlightPlaceholderField.Descriptor instead.
func (FlightPlaceholderFieldEnum_FlightPlaceholderField) EnumDescriptor() ([]byte, []int) {
return file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescGZIP(), []int{0, 0}
}
// Values for Flight placeholder fields.
// For more information about dynamic remarketing feeds, see
// https://support.google.com/google-ads/answer/6053288.
type FlightPlaceholderFieldEnum struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *FlightPlaceholderFieldEnum) Reset() {
*x = FlightPlaceholderFieldEnum{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *FlightPlaceholderFieldEnum) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*FlightPlaceholderFieldEnum) ProtoMessage() {}
func (x *FlightPlaceholderFieldEnum) ProtoReflect() protoreflect.Message {
mi := &file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use FlightPlaceholderFieldEnum.ProtoReflect.Descriptor instead.
func (*FlightPlaceholderFieldEnum) Descriptor() ([]byte, []int) {
return file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescGZIP(), []int{0}
}
var File_google_ads_googleads_v7_enums_flight_placeholder_field_proto protoreflect.FileDescriptor
var file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDesc = []byte{
0x0a, 0x3c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x37, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x2f,
0x66, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x5f, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64,
0x65, 0x72, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1d,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x37, 0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x1a, 0x1c, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xb2, 0x03, 0x0a, 0x1a,
0x46, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x50, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65,
0x72, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x45, 0x6e, 0x75, 0x6d, 0x22, 0x93, 0x03, 0x0a, 0x16, 0x46,
0x6c, 0x69, 0x67, 0x68, 0x74, 0x50, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72,
0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49,
0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57,
0x4e, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x44, 0x45, 0x53, 0x54, 0x49, 0x4e, 0x41, 0x54, 0x49,
0x4f, 0x4e, 0x5f, 0x49, 0x44, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x4f, 0x52, 0x49, 0x47, 0x49,
0x4e, 0x5f, 0x49, 0x44, 0x10, 0x03, 0x12, 0x16, 0x0a, 0x12, 0x46, 0x4c, 0x49, 0x47, 0x48, 0x54,
0x5f, 0x44, 0x45, 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x04, 0x12, 0x0f,
0x0a, 0x0b, 0x4f, 0x52, 0x49, 0x47, 0x49, 0x4e, 0x5f, 0x4e, 0x41, 0x4d, 0x45, 0x10, 0x05, 0x12,
0x14, 0x0a, 0x10, 0x44, 0x45, 0x53, 0x54, 0x49, 0x4e, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x4e,
0x41, 0x4d, 0x45, 0x10, 0x06, 0x12, 0x10, 0x0a, 0x0c, 0x46, 0x4c, 0x49, 0x47, 0x48, 0x54, 0x5f,
0x50, 0x52, 0x49, 0x43, 0x45, 0x10, 0x07, 0x12, 0x13, 0x0a, 0x0f, 0x46, 0x4f, 0x52, 0x4d, 0x41,
0x54, 0x54, 0x45, 0x44, 0x5f, 0x50, 0x52, 0x49, 0x43, 0x45, 0x10, 0x08, 0x12, 0x15, 0x0a, 0x11,
0x46, 0x4c, 0x49, 0x47, 0x48, 0x54, 0x5f, 0x53, 0x41, 0x4c, 0x45, 0x5f, 0x50, 0x52, 0x49, 0x43,
0x45, 0x10, 0x09, 0x12, 0x18, 0x0a, 0x14, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x54, 0x45, 0x44,
0x5f, 0x53, 0x41, 0x4c, 0x45, 0x5f, 0x50, 0x52, 0x49, 0x43, 0x45, 0x10, 0x0a, 0x12, 0x0d, 0x0a,
0x09, 0x49, 0x4d, 0x41, 0x47, 0x45, 0x5f, 0x55, 0x52, 0x4c, 0x10, 0x0b, 0x12, 0x0e, 0x0a, 0x0a,
0x46, 0x49, 0x4e, 0x41, 0x4c, 0x5f, 0x55, 0x52, 0x4c, 0x53, 0x10, 0x0c, 0x12, 0x15, 0x0a, 0x11,
0x46, 0x49, 0x4e, 0x41, 0x4c, 0x5f, 0x4d, 0x4f, 0x42, 0x49, 0x4c, 0x45, 0x5f, 0x55, 0x52, 0x4c,
0x53, 0x10, 0x0d, 0x12, 0x10, 0x0a, 0x0c, 0x54, 0x52, 0x41, 0x43, 0x4b, 0x49, 0x4e, 0x47, 0x5f,
0x55, 0x52, 0x4c, 0x10, 0x0e, 0x12, 0x14, 0x0a, 0x10, 0x41, 0x4e, 0x44, 0x52, 0x4f, 0x49, 0x44,
0x5f, 0x41, 0x50, 0x50, 0x5f, 0x4c, 0x49, 0x4e, 0x4b, 0x10, 0x0f, 0x12, 0x1b, 0x0a, 0x17, 0x53,
0x49, 0x4d, 0x49, 0x4c, 0x41, 0x52, 0x5f, 0x44, 0x45, 0x53, 0x54, 0x49, 0x4e, 0x41, 0x54, 0x49,
0x4f, 0x4e, 0x5f, 0x49, 0x44, 0x53, 0x10, 0x10, 0x12, 0x10, 0x0a, 0x0c, 0x49, 0x4f, 0x53, 0x5f,
0x41, 0x50, 0x50, 0x5f, 0x4c, 0x49, 0x4e, 0x4b, 0x10, 0x11, 0x12, 0x14, 0x0a, 0x10, 0x49, 0x4f,
0x53, 0x5f, 0x41, 0x50, 0x50, 0x5f, 0x53, 0x54, 0x4f, 0x52, 0x45, 0x5f, 0x49, 0x44, 0x10, 0x12,
0x42, 0xf1, 0x01, 0x0a, 0x21, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x37,
0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x42, 0x1c, 0x46, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x73, 0x50,
0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x50,
0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x42, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67,
0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x65, 0x6e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x37, 0x2f, 0x65,
0x6e, 0x75, 0x6d, 0x73, 0x3b, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0xa2, 0x02, 0x03, 0x47, 0x41, 0x41,
0xaa, 0x02, 0x1d, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x41, 0x64, 0x73, 0x2e, 0x47, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x2e, 0x56, 0x37, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x73,
0xca, 0x02, 0x1d, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x5c, 0x41, 0x64, 0x73, 0x5c, 0x47, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x5c, 0x56, 0x37, 0x5c, 0x45, 0x6e, 0x75, 0x6d, 0x73,
0xea, 0x02, 0x21, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x41, 0x64, 0x73, 0x3a, 0x3a,
0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x56, 0x37, 0x3a, 0x3a, 0x45,
0x6e, 0x75, 0x6d, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescOnce sync.Once
file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescData = file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDesc
)
func | () []byte {
file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescOnce.Do(func() {
file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescData)
})
return file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescData
}
var file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_goTypes = []interface{}{
(FlightPlaceholderFieldEnum_FlightPlaceholderField)(0), // 0: google.ads.googleads.v7.enums.FlightPlaceholderFieldEnum.FlightPlaceholderField
(*FlightPlaceholderFieldEnum)(nil), // 1: google.ads.googleads.v7.enums.FlightPlaceholderFieldEnum
}
var file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_init() }
func file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_init() {
if File_google_ads_googleads_v7_enums_flight_placeholder_field_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*FlightPlaceholderFieldEnum); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDesc,
NumEnums: 1,
NumMessages: 1,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_goTypes,
DependencyIndexes: file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_depIdxs,
EnumInfos: file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_enumTypes,
MessageInfos: file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_msgTypes,
}.Build()
File_google_ads_googleads_v7_enums_flight_placeholder_field_proto = out.File
file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDesc = nil
file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_goTypes = nil
file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_depIdxs = nil
}
| file_google_ads_googleads_v7_enums_flight_placeholder_field_proto_rawDescGZIP |
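A minimal usage sketch for the generated enum (the import path follows the go_package option embedded in the raw descriptor above):

```go
package main

import (
	"fmt"

	"google.golang.org/genproto/googleapis/ads/googleads/v7/enums"
)

func main() {
	f := enums.FlightPlaceholderFieldEnum_DESTINATION_ID
	// String() and Number() come from the generated protoreflect plumbing.
	fmt.Println(f.String(), f.Number()) // DESTINATION_ID 2
}
```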
user.go | package model
import (
"github.com/gohouse/gorose/v2"
"github.com/pp553933054/micro-go-book/ch13-seckill/pkg/mysql"
"log"
)
type User struct {
UserId int64 `json:"user_id"` // user id
UserName string `json:"user_name"` // user name
Password string `json:"password"` // password
Age int `json:"age"` // age
}
type UserModel struct {
}
func NewUserModel() *UserModel {
return &UserModel{}
}
func (p *UserModel) getTableName() string {
return "user"
}
func (p *UserModel) GetUserList() ([]gorose.Data, error) {
conn := mysql.DB()
list, err := conn.Table(p.getTableName()).Get()
if err != nil {
log.Printf("Error : %v", err)
return nil, err
}
return list, nil
}
/*func (p *UserModel) GetUserByUsername(username string) (*User, error) {
conn := mysql.DB()
if result, err := conn.Table(p.getTableName()).Where(map[string]interface{}{"username": username}).First(); err == nil{
}else {
return nil, err
}
}*/
func (p *UserModel) CheckUser(username string, password string) (*User, error) {
conn := mysql.DB()
data, err := conn.Table(p.getTableName()).Where(map[string]interface{}{"user_name": username, "password": password}).First()
if err != nil { | return nil, err
}
user := &User{
UserId: data["user_id"].(int64),
UserName: data["user_name"].(string),
Password: data["password"].(string),
Age: int(data["age"].(int64)),
}
return user, nil
}
func (p *UserModel) CreateUser(user *User) error {
conn := mysql.DB()
_, err := conn.Table(p.getTableName()).Data(map[string]interface{}{
"user_id": user.UserId,
"user_name": user.UserName,
"password": user.Password,
"age": user.Age,
}).Insert()
if err != nil {
log.Printf("Error : %v", err)
return err
}
return nil
} | log.Printf("Error : %v", err) |
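A hedged usage sketch from within the same package (assumes mysql.DB() was initialized by the service bootstrap; demoUserFlow is hypothetical):

```go
func demoUserFlow() error {
	m := NewUserModel()
	if err := m.CreateUser(&User{UserId: 1, UserName: "alice", Password: "secret", Age: 30}); err != nil {
		return err
	}
	u, err := m.CheckUser("alice", "secret")
	if err != nil {
		return err
	}
	log.Printf("logged in: %s (id=%d)", u.UserName, u.UserId)
	return nil
}
```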
translate_value_test.go | // Copyright 2019 Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package translate
import (
"testing"
"github.com/ghodss/yaml"
"github.com/gogo/protobuf/jsonpb"
"github.com/kr/pretty"
"istio.io/istio/operator/pkg/apis/istio/v1alpha1"
"istio.io/istio/operator/pkg/util"
"istio.io/istio/operator/pkg/version"
)
func TestValueToProto(t *testing.T) {
tests := []struct {
desc string
valueYAML string
want string
wantErr string
}{
{
desc: "K8s resources translation",
valueYAML: `
pilot:
enabled: true
rollingMaxSurge: 100%
rollingMaxUnavailable: 25%
resources:
requests:
cpu: 1000m
memory: 1G
replicaCount: 1
nodeSelector:
beta.kubernetes.io/os: linux
tolerations:
- key: dedicated
operator: Exists
effect: NoSchedule
- key: CriticalAddonsOnly
operator: Exists
autoscaleEnabled: true
autoscaleMax: 3
autoscaleMin: 1
cpu:
targetAverageUtilization: 80
traceSampling: 1.0
image: pilot
env:
GODEBUG: gctrace=1
podAntiAffinityLabelSelector:
- key: istio
operator: In
values: pilot
topologyKey: "kubernetes.io/hostname"
global:
hub: docker.io/istio
istioNamespace: istio-system
policyNamespace: istio-policy
tag: 1.2.3
telemetryNamespace: istio-telemetry
proxy:
readinessInitialDelaySeconds: 2
controlPlaneSecurityEnabled: false
mixer:
policy:
enabled: true
image: mixer
replicaCount: 1
telemetry:
enabled: false
`,
want: `
hub: docker.io/istio
tag: 1.2.3
meshConfig:
rootNamespace: istio-system
components:
telemetry:
enabled: false
policy:
enabled: true
k8s:
replicaCount: 1
pilot:
enabled: true
k8s:
replicaCount: 1
env:
- name: GODEBUG
value: gctrace=1
hpaSpec:
maxReplicas: 3
minReplicas: 1
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: istio-pilot
metrics:
- resource:
name: cpu
targetAverageUtilization: 80
type: Resource
nodeSelector:
beta.kubernetes.io/os: linux
tolerations:
- key: dedicated
operator: Exists
effect: NoSchedule
- key: CriticalAddonsOnly
operator: Exists
resources:
requests:
cpu: 1000m
memory: 1G
strategy:
rollingUpdate:
maxSurge: 100%
maxUnavailable: 25%
values:
global:
controlPlaneSecurityEnabled: false
proxy:
readinessInitialDelaySeconds: 2
policyNamespace: istio-policy
telemetryNamespace: istio-telemetry
pilot:
image: pilot
autoscaleEnabled: true
traceSampling: 1
podAntiAffinityLabelSelector:
- key: istio
operator: In
values: pilot
topologyKey: "kubernetes.io/hostname"
mixer:
policy:
image: mixer
`,
},
{
desc: "All Enabled",
valueYAML: `
global:
hub: docker.io/istio
istioNamespace: istio-system
policyNamespace: istio-policy
tag: 1.2.3
telemetryNamespace: istio-telemetry
mixer:
policy:
enabled: true
telemetry:
enabled: true
pilot:
enabled: true
istiocoredns:
enabled: true
gateways:
enabled: true
istio-ingressgateway:
rollingMaxSurge: 4
rollingMaxUnavailable: 1
resources:
requests:
cpu: 1000m
memory: 1G
enabled: true
`,
want: `
hub: docker.io/istio
tag: 1.2.3
meshConfig:
rootNamespace: istio-system
components:
telemetry:
enabled: true
policy:
enabled: true
pilot:
enabled: true
ingressGateways:
- name: istio-ingressgateway
enabled: true
k8s:
resources:
requests:
cpu: 1000m
memory: 1G
strategy:
rollingUpdate:
maxSurge: 4
maxUnavailable: 1
addonComponents:
istiocoredns:
enabled: true
values:
global:
policyNamespace: istio-policy
telemetryNamespace: istio-telemetry
`,
},
{
desc: "Some components Disabled",
valueYAML: `
pilot:
enabled: true
global:
hub: docker.io/istio
istioNamespace: istio-system
policyNamespace: istio-policy
tag: 1.2.3
telemetryNamespace: istio-telemetry
mixer:
policy:
enabled: true
telemetry:
enabled: false
`,
want: `
hub: docker.io/istio
tag: 1.2.3
components:
telemetry:
enabled: false
policy:
enabled: true
pilot:
enabled: true
meshConfig:
rootNamespace: istio-system
values:
global:
telemetryNamespace: istio-telemetry
policyNamespace: istio-policy
`,
},
}
tr, err := NewReverseTranslator(version.NewMinorVersion(1, 5))
if err != nil {
t.Fatalf("fail to get helm value.yaml translator: %v", err)
}
for _, tt := range tests {
t.Run(tt.desc, func(t *testing.T) {
valueStruct := v1alpha1.Values{}
err = util.UnmarshalValuesWithJSONPB(tt.valueYAML, &valueStruct, false)
if err != nil {
t.Fatalf("unmarshal(%s): got error %s", tt.desc, err)
}
scope.Debugf("value struct: \n%s\n", pretty.Sprint(valueStruct))
gotSpec, err := tr.TranslateFromValueToSpec([]byte(tt.valueYAML), false)
if gotErr, wantErr := errToString(err), tt.wantErr; gotErr != wantErr {
t.Errorf("ValuesToProto(%s)(%v): gotErr:%s, wantErr:%s", tt.desc, tt.valueYAML, gotErr, wantErr)
}
if tt.wantErr == "" {
ms := jsonpb.Marshaler{}
gotString, err := ms.MarshalToString(gotSpec)
if err != nil {
t.Errorf("failed to marshal translated IstioOperatorSpec: %s", err)
}
cpYaml, _ := yaml.JSONToYAML([]byte(gotString))
if want := tt.want; !util.IsYAMLEqual(gotString, want) {
t.Errorf("ValuesToProto(%s): got:\n%s\n\nwant:\n%s\nDiff:\n%s\n", tt.desc, string(cpYaml), want, util.YAMLDiff(gotString, want))
}
}
})
}
}
func TestNewReverseTranslator(t *testing.T) {
tests := []struct {
name string
minorVersion version.MinorVersion
wantVer string
wantErr bool
}{
{
name: "version 1.4",
minorVersion: version.NewMinorVersion(1, 4),
wantVer: "1.4",
wantErr: false,
},
// TODO: implement 1.5 and fallback logic.
{
name: "version 1.99",
minorVersion: version.NewMinorVersion(1, 99),
wantVer: "",
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := NewReverseTranslator(tt.minorVersion)
if (err != nil) != tt.wantErr {
t.Errorf("NewReverseTranslator() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != nil && tt.wantVer != got.Version.String() {
t.Errorf("NewReverseTranslator() got = %v, want %v", got.Version.String(), tt.wantVer)
}
})
}
} |
MediaView.js | import React from 'react';
import { Link } from 'react-router';
class MediaView extends React.Component {
render() {
return (
<div className="single-photo">
<h1>
<Link to="/">picsame</Link>
</h1>
</div>
)
}
}
export default MediaView;
request.go | package grequests
import (
"bytes"
"crypto/tls"
"encoding/json"
"encoding/xml"
"errors"
"fmt"
"io"
"mime"
"mime/multipart"
"net"
"net/http"
"net/http/cookiejar"
"net/textproto"
"net/url"
"strconv"
"strings"
"time"
"github.com/google/go-querystring/query"
"context"
"golang.org/x/net/publicsuffix"
)
// RequestOptions is the location of where the request data and options can be found
type RequestOptions struct {
// Data is a map of key values that will eventually convert into
// the body of a POST request.
Data map[string]string
// Params is a map of query strings that may be used within a GET request
Params map[string]string
// QueryStruct is a struct that encapsulates a set of URL query params
// this parameter is mutually exclusive with `Params map[string]string` (they cannot be combined)
// for more information please see https://godoc.org/github.com/google/go-querystring/query
QueryStruct interface{}
// Files is where you can include files to upload. The use of this data
// structure is limited to POST requests
Files []FileUpload
// JSON can be used when you wish to send JSON within the request body
JSON interface{}
// XML can be used if you wish to send XML within the request body
XML interface{}
// Headers if you want to add custom HTTP headers to the request,
// this is your friend
Headers map[string]string
// InsecureSkipVerify is a flag that specifies if we should validate the
// server's TLS certificate. It should be noted that Go's TLS verify mechanism
// doesn't validate if a certificate has been revoked
InsecureSkipVerify bool
// DisableCompression will disable gzip compression on requests
DisableCompression bool
// UserAgent allows you to set an arbitrary custom user agent
UserAgent string
// Host allows you to set an arbitrary custom host
Host string
// Auth allows you to specify a user name and password that you wish to
// use when requesting the URL. It will use basic HTTP authentication
// formatting the username and password in base64 the format is:
// []string{username, password}
Auth []string
// IsAjax is a flag that can be set to make the request appear
// to be generated by browser Javascript
IsAjax bool
// Cookies is an array of `http.Cookie` that allows you to attach
// cookies to your request
Cookies []*http.Cookie
// UseCookieJar will create a custom HTTP client that will
// process and store HTTP cookies when they are sent down
UseCookieJar bool
// Proxies is a map in the following format
// *protocol* => proxy address e.g http => http://127.0.0.1:8080
Proxies map[string]*url.URL
// TLSHandshakeTimeout specifies the maximum amount of time waiting to
// wait for a TLS handshake. Zero means no timeout.
TLSHandshakeTimeout time.Duration
// DialTimeout is the maximum amount of time a dial will wait for
// a connect to complete.
DialTimeout time.Duration
// KeepAlive specifies the keep-alive period for an active
// network connection. If zero, keep-alive are not enabled.
DialKeepAlive time.Duration
// RequestTimeout is the maximum amount of time a whole request(include dial / request / redirect)
// will wait.
RequestTimeout time.Duration
// HTTPClient can be provided if you wish to supply a custom HTTP client
// this is useful if you want to use an OAUTH client with your request.
HTTPClient *http.Client
// SensitiveHTTPHeaders is a map of sensitive HTTP headers that a user
// doesn't want passed on a redirect.
SensitiveHTTPHeaders map[string]struct{}
// RedirectLimit is the acceptable amount of redirects that we should expect
// before returning an error. By default this is set to 30. You can change this
// globally by modifying the `RedirectLimit` variable.
RedirectLimit int
// RequestBody allows you to put anything matching an `io.Reader` into the request
// this option will take precedence over any other request option specified
RequestBody io.Reader
// CookieJar allows you to specify a special cookiejar to use with your request.
// this option will take precedence over the `UseCookieJar` option above.
CookieJar http.CookieJar
// Context can be used to maintain state between requests https://golang.org/pkg/context/#Context
Context context.Context
// BeforeRequest is a hook that can be used to modify the request object
// before the request has been fired. This is useful for adding authentication
// and other functionality not provided in this library
BeforeRequest func(req *http.Request) error
// LocalAddr allows you to send the request on any local interface
LocalAddr *net.TCPAddr
}
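// Example (hypothetical endpoint): issue a GET with query params and a custom
// header via the package-level helper below.
//
//	resp, err := DoRegularRequest("GET", "https://example.com/api",
//		&RequestOptions{
//			Params:  map[string]string{"q": "walk"},
//			Headers: map[string]string{"X-Trace": "1"},
//		})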
// DoRegularRequest adds generic test functionality
func DoRegularRequest(requestVerb, url string, ro *RequestOptions) (*Response, error) {
return buildResponse(buildRequest(requestVerb, url, ro, nil))
}
func doSessionRequest(requestVerb, url string, ro *RequestOptions, httpClient *http.Client) (*Response, error) {
return buildResponse(buildRequest(requestVerb, url, ro, httpClient))
}
var quoteEscaper = strings.NewReplacer("\\", "\\\\", `"`, "\\\"")
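// escapeQuotes backslash-escapes quotes and backslashes so field and file names
// remain valid inside a quoted Content-Disposition header value.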
func escapeQuotes(s string) string {
    return quoteEscaper.Replace(s)
}
// buildRequest is where most of the magic happens for request processing
func buildRequest(httpMethod, url string, ro *RequestOptions, httpClient *http.Client) (*http.Response, error) {
if ro == nil {
ro = &RequestOptions{}
}
if ro.CookieJar != nil {
ro.UseCookieJar = true
}
// Create our own HTTP client
if httpClient == nil {
httpClient = BuildHTTPClient(*ro)
}
var err error // we don't want to shadow url so we won't use :=
switch {
case len(ro.Params) != 0:
if url, err = buildURLParams(url, ro.Params); err != nil {
return nil, err
}
case ro.QueryStruct != nil:
if url, err = buildURLStruct(url, ro.QueryStruct); err != nil {
return nil, err
}
}
// Build the request
req, err := buildHTTPRequest(httpMethod, url, ro)
if err != nil {
return nil, err
}
// Do we need to add any HTTP headers or Basic Auth?
addHTTPHeaders(ro, req)
addCookies(ro, req)
addRedirectFunctionality(httpClient, ro)
if ro.Context != nil {
req = req.WithContext(ro.Context)
}
if ro.BeforeRequest != nil {
if err := ro.BeforeRequest(req); err != nil {
return nil, err
}
}
return httpClient.Do(req)
}
func buildHTTPRequest(httpMethod, userURL string, ro *RequestOptions) (*http.Request, error) {
if ro.RequestBody != nil {
return http.NewRequest(httpMethod, userURL, ro.RequestBody)
}
if ro.JSON != nil {
return createBasicJSONRequest(httpMethod, userURL, ro)
}
if ro.XML != nil {
return createBasicXMLRequest(httpMethod, userURL, ro)
}
if ro.Files != nil {
return createFileUploadRequest(httpMethod, userURL, ro)
}
if ro.Data != nil {
return createBasicRequest(httpMethod, userURL, ro)
}
return http.NewRequest(httpMethod, userURL, nil)
}
func createFileUploadRequest(httpMethod, userURL string, ro *RequestOptions) (*http.Request, error) {
if httpMethod == "POST" {
return createMultiPartPostRequest(httpMethod, userURL, ro)
}
// This may be a PUT or PATCH request so we will just put the raw
// io.ReadCloser in the request body
// and guess the MIME type from the file name
// At the moment, we will only support 1 file upload at a time
// when uploading using PUT or PATCH
req, err := http.NewRequest(httpMethod, userURL, ro.Files[0].FileContents)
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", mime.TypeByExtension(ro.Files[0].FileName))
return req, nil
}
func createBasicXMLRequest(httpMethod, userURL string, ro *RequestOptions) (*http.Request, error) {
var reader io.Reader
switch ro.XML.(type) {
case string:
reader = strings.NewReader(ro.XML.(string))
case []byte:
reader = bytes.NewReader(ro.XML.([]byte))
default:
byteSlice, err := xml.Marshal(ro.XML)
if err != nil {
return nil, err
}
reader = bytes.NewReader(byteSlice)
}
req, err := http.NewRequest(httpMethod, userURL, reader)
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/xml")
return req, nil
}
func createMultiPartPostRequest(httpMethod, userURL string, ro *RequestOptions) (*http.Request, error) {
requestBody := &bytes.Buffer{}
multipartWriter := multipart.NewWriter(requestBody)
for i, f := range ro.Files {
if f.FileContents == nil {
return nil, errors.New("grequests: Pointer FileContents cannot be nil")
}
fieldName := f.FieldName
if fieldName == "" {
if len(ro.Files) > 1 {
fieldName = strings.Join([]string{"file", strconv.Itoa(i + 1)}, "")
} else {
fieldName = "file"
}
}
var writer io.Writer
var err error
if f.FileMime != "" {
if f.FileName == "" {
f.FileName = "filename"
}
h := make(textproto.MIMEHeader)
h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, escapeQuotes(fieldName), escapeQuotes(f.FileName)))
h.Set("Content-Type", f.FileMime)
writer, err = multipartWriter.CreatePart(h)
} else {
writer, err = multipartWriter.CreateFormFile(fieldName, f.FileName)
}
if err != nil {
return nil, err
}
if _, err = io.Copy(writer, f.FileContents); err != nil && err != io.EOF {
return nil, err
}
if err := f.FileContents.Close(); err != nil {
return nil, err
}
}
// Populate the other parts of the form (if there are any)
for key, value := range ro.Data {
multipartWriter.WriteField(key, value)
}
if err := multipartWriter.Close(); err != nil {
return nil, err
}
req, err := http.NewRequest(httpMethod, userURL, requestBody)
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", multipartWriter.FormDataContentType())
return req, err
}
func createBasicJSONRequest(httpMethod, userURL string, ro *RequestOptions) (*http.Request, error) {
var reader io.Reader
switch ro.JSON.(type) {
case string:
reader = strings.NewReader(ro.JSON.(string))
case []byte:
reader = bytes.NewReader(ro.JSON.([]byte))
default:
byteSlice, err := json.Marshal(ro.JSON)
if err != nil {
return nil, err
}
reader = bytes.NewReader(byteSlice)
}
req, err := http.NewRequest(httpMethod, userURL, reader)
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
func createBasicRequest(httpMethod, userURL string, ro *RequestOptions) (*http.Request, error) {
req, err := http.NewRequest(httpMethod, userURL, strings.NewReader(encodePostValues(ro.Data)))
if err != nil {
return nil, err
}
// The content type must be set to a regular form
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
return req, nil
}
func encodePostValues(postValues map[string]string) string {
urlValues := &url.Values{}
for key, value := range postValues {
urlValues.Set(key, value)
}
return urlValues.Encode() // This will sort all of the string values
}
// proxySettings will default to the default proxy settings if none are provided
// if settings are provided – they will override the environment variables
func (ro RequestOptions) proxySettings(req *http.Request) (*url.URL, error) {
// No proxies – lets use the default
if len(ro.Proxies) == 0 {
return http.ProxyFromEnvironment(req)
}
// There was a proxy specified – do we support the protocol?
if _, ok := ro.Proxies[req.URL.Scheme]; ok {
return ro.Proxies[req.URL.Scheme], nil
}
// Proxies were specified but not for any protocol that we use
return http.ProxyFromEnvironment(req)
}
// dontUseDefaultClient will tell the "client creator" if a custom client is needed
// it checks the following items (and will create a custom client if any of these
// are true):
// 1. Do we want to accept invalid SSL certificates?
// 2. Do we want to disable compression?
// 3. Do we want a custom proxy?
// 4. Do we want to change the default timeout for TLS Handshake?
// 5. Do we want to change the default request timeout?
// 6. Do we want to change the default connection timeout?
// 7. Do you want to use the http.Client's cookieJar?
// 8. Do you want to change the request timeout?
// 9. Do you want to set a custom LocalAddr to send the request from
func (ro RequestOptions) dontUseDefaultClient() bool {
switch {
case ro.InsecureSkipVerify == true:
case ro.DisableCompression == true:
case len(ro.Proxies) != 0:
case ro.TLSHandshakeTimeout != 0:
case ro.DialTimeout != 0:
case ro.DialKeepAlive != 0:
case len(ro.Cookies) != 0:
case ro.UseCookieJar != false:
case ro.RequestTimeout != 0:
case ro.LocalAddr != nil:
default:
return false
}
return true
}
// BuildHTTPClient is a function that will return a custom HTTP client based on the request options provided
// the check is in UseDefaultClient
func BuildHTTPClient(ro RequestOptions) *http.Client {
if ro.HTTPClient != nil {
return ro.HTTPClient
}
// Does the user want to change the defaults?
if !ro.dontUseDefaultClient() {
return http.DefaultClient
}
// Using the user config for tls timeout or default
if ro.TLSHandshakeTimeout == 0 {
ro.TLSHandshakeTimeout = tlsHandshakeTimeout
}
// Using the user config for dial timeout or default
if ro.DialTimeout == 0 {
ro.DialTimeout = dialTimeout
}
// Using the user config for dial keep alive or default
if ro.DialKeepAlive == 0 {
ro.DialKeepAlive = dialKeepAlive
}
if ro.RequestTimeout == 0 {
ro.RequestTimeout = requestTimeout
}
var cookieJar http.CookieJar
if ro.UseCookieJar {
if ro.CookieJar != nil {
cookieJar = ro.CookieJar
} else {
// The function does not return an error ever... so we are just ignoring it
cookieJar, _ = cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List})
}
}
return &http.Client{
Jar: cookieJar,
Transport: createHTTPTransport(ro),
Timeout: ro.RequestTimeout,
}
}
func createHTTPTransport(ro RequestOptions) *http.Transport {
ourHTTPTransport := &http.Transport{
// These are borrowed from the default transporter
Proxy: ro.proxySettings,
Dial: (&net.Dialer{
Timeout: ro.DialTimeout,
KeepAlive: ro.DialKeepAlive,
LocalAddr: ro.LocalAddr,
}).Dial,
TLSHandshakeTimeout: ro.TLSHandshakeTimeout,
// Here comes the user settings
TLSClientConfig: &tls.Config{InsecureSkipVerify: ro.InsecureSkipVerify},
DisableCompression: ro.DisableCompression,
}
EnsureTransporterFinalized(ourHTTPTransport)
return ourHTTPTransport
}
// buildURLParams returns a URL with all of the params
// Note: This function will override current URL params if they contradict what is provided in the map
// That is what the "magic" is on the last line
func buildURLParams(userURL string, params map[string]string) (string, error) {
parsedURL, err := url.Parse(userURL)
if err != nil {
return "", err
}
parsedQuery, err := url.ParseQuery(parsedURL.RawQuery)
if err != nil {
return "", nil
}
for key, value := range params {
parsedQuery.Set(key, value)
}
return addQueryParams(parsedURL, parsedQuery), nil
}
// addHTTPHeaders adds any additional HTTP headers that need to be added are added here including:
// 1. Custom User agent
// 2. Authorization Headers
// 3. Any other header requested
func addHTTPHeaders(ro *RequestOptions, req *http.Request) {
for key, value := range ro.Headers {
req.Header.Set(key, value)
}
if ro.UserAgent != "" {
req.Header.Set("User-Agent", ro.UserAgent)
} else {
req.Header.Set("User-Agent", localUserAgent)
}
if ro.Host != "" {
req.Host = ro.Host
}
if ro.Auth != nil {
req.SetBasicAuth(ro.Auth[0], ro.Auth[1])
}
if ro.IsAjax == true {
req.Header.Set("X-Requested-With", "XMLHttpRequest")
}
}
func addCookies(ro *RequestOptions, req *http.Request) {
for _, c := range ro.Cookies {
req.AddCookie(c)
}
}
func addQueryParams(parsedURL *url.URL, parsedQuery url.Values) string {
return strings.Join([]string{strings.Replace(parsedURL.String(), "?"+parsedURL.RawQuery, "", -1), parsedQuery.Encode()}, "?")
}
func buildURLStruct(userURL string, URLStruct interface{}) (string, error) {
parsedURL, err := url.Parse(userURL)
if err != nil {
return "", err
}
parsedQuery, err := url.ParseQuery(parsedURL.RawQuery)
if err != nil {
return "", err
}
queryStruct, err := query.Values(URLStruct)
if err != nil {
return "", err
}
for key, value := range queryStruct {
for _, v := range value {
parsedQuery.Add(key, v)
}
}
return addQueryParams(parsedURL, parsedQuery), nil
}
ident.rs | use crate::typescript::TsTypeAnn;
use serde::Deserialize;
use serde::Serialize;
use swc_atoms::JsWord;
use swc_common::ast_node;
use swc_common::EqIgnoreSpan;
use swc_common::Span;
use swc_common::Spanned;
/// Identifier used as a pattern.
#[derive(Spanned, Clone, Debug, PartialEq, Eq, Hash, EqIgnoreSpan, Serialize, Deserialize)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
pub struct BindingIdent {
#[span]
#[serde(flatten)]
pub id: Ident,
#[serde(default, rename = "typeAnnotation")]
pub type_ann: Option<TsTypeAnn>,
}
impl From<Ident> for BindingIdent {
fn from(id: Ident) -> Self {
Self { id, type_ann: None }
}
}
/// Ident with span.
#[ast_node("Identifier")]
#[derive(Eq, Hash, EqIgnoreSpan)]
pub struct Ident {
pub span: Span,
#[serde(rename = "value")]
pub sym: JsWord,
/// TypeScript only. Used in case of an optional parameter.
#[serde(default)]
pub optional: bool,
}
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for Ident {
fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> {
let span = u.arbitrary()?;
let sym = u.arbitrary::<String>()?;
if sym.is_empty() {
return Err(arbitrary::Error::NotEnoughData);
}
let sym = sym.into();
let optional = u.arbitrary()?;
Ok(Self {
span,
sym,
optional,
})
}
}
#[ast_node("PrivateName")]
#[derive(Eq, Hash, EqIgnoreSpan)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
pub struct PrivateName {
pub span: Span,
pub id: Ident,
}
impl AsRef<str> for Ident {
fn as_ref(&self) -> &str {
    &self.sym
}
}
impl Ident {
pub const fn new(sym: JsWord, span: Span) -> Self {
Ident {
span,
sym,
optional: false,
}
}
}
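// Example (hypothetical): build a synthetic identifier with a dummy span.
//
//     let id = Ident::new("foo".into(), swc_common::DUMMY_SP);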
pub trait IdentExt: AsRef<str> {
fn is_reserved(&self) -> bool {
[
"break",
"case",
"catch",
"class",
"const",
"continue",
"debugger",
"default",
"delete",
"do",
"else",
"enum",
"export",
"extends",
"false",
"finally",
"for",
"function",
"if",
"import",
"in",
"instanceof",
"new",
"null",
"package",
"return",
"super",
"switch",
"this",
"throw",
"true",
"try",
"typeof",
"var",
"void",
"while",
"with",
]
.contains(&self.as_ref())
}
fn is_reserved_in_strict_mode(&self, is_module: bool) -> bool {
if is_module && self.as_ref() == "await" {
return true;
}
[
"implements",
"interface",
"let",
"package",
"private",
"protected",
"public",
"static",
"yield",
]
.contains(&self.as_ref())
}
fn is_reserved_in_strict_bind(&self) -> bool {
["eval", "arguments"].contains(&self.as_ref())
}
fn is_reserved_in_es3(&self) -> bool {
[
"abstract",
"boolean",
"byte",
"char",
"double",
"final",
"float",
"goto",
"int",
"long",
"native",
"short",
"synchronized",
"throws",
"transient",
"volatile",
]
.contains(&self.as_ref())
}
}
impl IdentExt for JsWord {}
impl IdentExt for Ident {}
impl IdentExt for &'_ str {}
impl IdentExt for String {}
image-analysis.py | from dotenv import load_dotenv
import os
from array import array
from PIL import Image, ImageDraw
import sys
import time
from matplotlib import pyplot as plt
import numpy as np
# import namespaces
from azure.cognitiveservices.vision.computervision import ComputerVisionClient
from azure.cognitiveservices.vision.computervision.models import VisualFeatureTypes
from msrest.authentication import CognitiveServicesCredentials
def main():
global cv_client
try:
# Get Configuration Settings
load_dotenv()
cog_endpoint = os.getenv('COG_SERVICE_ENDPOINT')
cog_key = os.getenv('COG_SERVICE_KEY')
# Get image
image_file = 'images/street.jpg'
if len(sys.argv) > 1:
image_file = sys.argv[1]
# Authenticate Computer Vision client
credential = CognitiveServicesCredentials(cog_key)
cv_client = ComputerVisionClient(cog_endpoint, credential)
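        # cv_client is declared global above so AnalyzeImage and GetThumbnail reuse it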
# Analyze image
AnalyzeImage(image_file)
# Generate thumbnail
GetThumbnail(image_file)
except Exception as ex:
print(ex)
def AnalyzeImage(image_file):
    print('Analyzing', image_file)

    # Specify features to be retrieved
    features = [VisualFeatureTypes.description,
                VisualFeatureTypes.tags,
                VisualFeatureTypes.categories,
                VisualFeatureTypes.brands,
                VisualFeatureTypes.objects,
                VisualFeatureTypes.adult]

    # Get image analysis
    with open(image_file, mode="rb") as image_data:
        analysis = cv_client.analyze_image_in_stream(image_data, features)

    # Get image description
    for caption in analysis.description.captions:
        print("Description: '{}' (confidence: {:.2f}%)".format(caption.text, caption.confidence * 100))

    # Get image tags
    if (len(analysis.tags) > 0):
        print("Tags: ")
        for tag in analysis.tags:
            print(" -'{}' (confidence: {:.2f}%)".format(tag.name, tag.confidence * 100))

    # Get image categories (including celebrities and landmarks)
    if (len(analysis.categories) > 0):
        print("Categories:")
        landmarks = []
        celebrities = []
        for category in analysis.categories:
            # Print the category
            print(" -'{}' (confidence: {:.2f}%)".format(category.name, category.score * 100))
            if category.detail:
                # Get landmarks in this category
                if category.detail.landmarks:
                    for landmark in category.detail.landmarks:
                        if landmark not in landmarks:
                            landmarks.append(landmark)
                # Get celebrities in this category
                if category.detail.celebrities:
                    for celebrity in category.detail.celebrities:
                        if celebrity not in celebrities:
                            celebrities.append(celebrity)
        # If there were landmarks, list them
        if len(landmarks) > 0:
            print("Landmarks:")
            for landmark in landmarks:
                print(" -'{}' (confidence: {:.2f}%)".format(landmark.name, landmark.confidence * 100))
        # If there were celebrities, list them
        if len(celebrities) > 0:
            print("Celebrities:")
            for celebrity in celebrities:
                print(" -'{}' (confidence: {:.2f}%)".format(celebrity.name, celebrity.confidence * 100))

    # Get brands in the image
    if (len(analysis.brands) > 0):
        print("Brands: ")
        for brand in analysis.brands:
            print(" -'{}' (confidence: {:.2f}%)".format(brand.name, brand.confidence * 100))

    # Get objects in the image
    if len(analysis.objects) > 0:
        print("Objects in image:")

        # Prepare image for drawing
        fig = plt.figure(figsize=(8, 8))
        plt.axis('off')
        image = Image.open(image_file)
        draw = ImageDraw.Draw(image)
        color = 'cyan'
        for detected_object in analysis.objects:
            # Print object name
            print(" -{} (confidence: {:.2f}%)".format(detected_object.object_property, detected_object.confidence * 100))
            # Draw object bounding box
            r = detected_object.rectangle
            bounding_box = ((r.x, r.y), (r.x + r.w, r.y + r.h))
            draw.rectangle(bounding_box, outline=color, width=3)
            plt.annotate(detected_object.object_property, (r.x, r.y), backgroundcolor=color)
        # Save annotated image
        plt.imshow(image)
        outputfile = 'objects.jpg'
        fig.savefig(outputfile)
        print(' Results saved in', outputfile)

    # Get moderation ratings
    ratings = 'Ratings:\n -Adult: {}\n -Racy: {}\n -Gore: {}'.format(analysis.adult.is_adult_content,
                                                                     analysis.adult.is_racy_content,
                                                                     analysis.adult.is_gory_content)
    print(ratings)
def GetThumbnail(image_file):
print('Generating thumbnail')
# Generate a thumbnail
with open(image_file, mode="rb") as image_data:
# Get thumbnail data
thumbnail_stream = cv_client.generate_thumbnail_in_stream(100, 100, image_data, True)
# Save thumbnail image
thumbnail_file_name = 'thumbnail.png'
with open(thumbnail_file_name, "wb") as thumbnail_file:
for chunk in thumbnail_stream:
thumbnail_file.write(chunk)
print('Thumbnail saved in', thumbnail_file_name)
if __name__ == "__main__":
    main()
hasher.rs | pub struct Hasher(u64);
// This is basically FNV, just extended to do 4 bytes at a time.
impl Hasher {
pub fn new() -> Self
{
Hasher(0xcbf29ce484222325)
}
pub fn write_u32(&mut self, value : u32)
{
let value = value as u64;
self.0 = (self.0 ^ value).wrapping_mul(0x100000001b3);
}
    pub fn finish(&self) -> u64
    {
        self.0
    }
}
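// Example usage (hypothetical values): feed two words, then read the digest.
//
//     let mut h = Hasher::new();
//     h.write_u32(0xdead_beef);
//     h.write_u32(42);
//     let digest = h.finish();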
evaluate.py | """
Evaluate
"""
import re
import math
import datetime
import random
import torch
from torch.nn import functional as F
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
from loss import iou_loss, HairMattingLoss, acc_loss, F1_loss
from utils import create_multi_figure
USE_CUDA = torch.cuda.is_available()
DEVICE = torch.device("cuda" if USE_CUDA else "cpu")
def evalTest(test_data, model, args):
testloader = DataLoader(test_data, batch_size=4, shuffle=False)
hairmat_loss = HairMattingLoss(args.grad_lambda)
total_loss, total_iou, total_acc, total_f1 = 0, 0, 0, 0
for batch in testloader:
image, mask = (i.to(DEVICE) for i in batch)
pred = model(image)
total_loss += hairmat_loss(pred, mask, image).item()
iloss = iou_loss(pred, mask).item()
total_iou += iloss
aloss = acc_loss(pred, mask).item()
total_acc += aloss
floss = F1_loss(pred, mask).item()
total_f1 += floss
print("Testing Loss: ", total_loss / len(testloader))
print("Testing IOU: ", total_iou / len(testloader))
print("Testing Acc: ", total_acc / len(testloader))
print("Testing F1: ", total_f1 / len(testloader))
def evaluateOne(img, model, absolute=True):
img = img.to(DEVICE).unsqueeze(0)
pred = model(img)
if absolute:
pred[pred > 0.5] = 1.0
pred[pred <= 0.5] = 0.0
else:
pred[pred < 0.4] = 0
# pred[pred < .90] = 0
rows = [[img[0], pred[0]]]
create_multi_figure(rows, dye=True)
plt.savefig("result.jpg")
def evaluate(test_data, model, num, absolute=True):
rows = [None] * num
for i in range(num):
idx = random.randint(0, len(test_data) - 1)
image, mask = (i.to(DEVICE).unsqueeze(0) for i in test_data[idx])
pred = model(image)
if absolute:
pred[pred > 0.5] = 1.0
pred[pred <= 0.5] = 0.0
else:
pred[pred < 0.4] = 0
        rows[i] = [image[0], mask[0], pred[0]]  # get batch

    create_multi_figure(rows, dye=True)
    plt.savefig("result.jpg")
test_settings.py | DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
ROOT_URLCONF = 'django_autoconfig.autourlconf'
INSTALLED_APPS = [
'django.contrib.auth',
'nuit',
]
STATIC_URL = '/static/'
STATIC_ROOT = '.static'

from django_autoconfig.autoconfig import configure_settings
configure_settings(globals())
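# django-autoconfig derives any remaining settings from the installed apps at import time.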
test_api.py |
import skytools
def test_version():
a = skytools.natsort_key(skytools.__version__)
b = skytools.natsort_key('3.3')
assert a >= b
create-lead.component.ts | import { Popup } from '@syncfusion/ej2-popups';
import { Component, OnInit, Inject, ElementRef, Optional, ViewChild, OnDestroy } from '@angular/core';
import { MatDialog, MatDialogRef, MAT_DIALOG_DATA, MatSnackBar } from '@angular/material';
import { FormGroup, FormBuilder, Validators, FormControl, FormArray, AbstractControl, ValidatorFn } from '@angular/forms';
import { Router } from '@angular/router';
import { DataCommunicationService } from '@app/core/services/global.service';
import { ContactleadService } from '@app/core/services/contactlead.service';
import { ConversationService } from '@app/core';
import { MasterApiService, OfflineService, routes, ErrorMessage } from '@app/core/services';
import { newConversationService } from '@app/core/services/new-conversation.service';
import { DatePipe } from '@angular/common';
import { RoutingState } from '@app/core/services/navigation.service';
import { Store } from '@ngrx/store';
import { AppState } from '@app/core/state';
import { ClearMyopenlead, ClearOpenLeadState, } from '@app/core/state/actions/leads.action';
import { removeSpaces, checkLimit, specialCharacter } from '@app/shared/pipes/white-space.validator';
import { EncrDecrService } from '@app/core/services/encr-decr.service';
import { MeetingService } from '@app/core/services/meeting.service';
import { MyOpenLeadsService, LeadCustomErrorMessages, leadAdvnHeaders, leadAdvnNames, DnBAccountHeader } from '@app/core/services/myopenlead.service';
import { ClearMeetingList, ClearActivity, ClearActivityDetails } from '@app/core/state/actions/activities.actions';
import { environment as env } from '@env/environment';
import { AdvancelookuptabsComponent } from '@app/shared/modals/advancelookuptabs/advancelookuptabs.component';
import { ClearContactList, ClearRelationshipCount } from '@app/core/state/actions/contact.action';
import { FileUploadService } from '@app/core/services/file-upload.service';
import { ActivityService } from '@app/core/services/activity.service';
import { S3MasterApiService } from '@app/core/services/master-api-s3.service';
import { CustomerpopupComponent } from '@app/shared/components/customerpopup/customerpopup.component';
import { CacheDataService } from '@app/core/services/look-up-cache-data/cache-data.serive';
import { EnvService } from '@app/core/services/env.service';
declare let FileTransfer: any;
export interface selectedCustomer {
FullName: string,
Designation: string,
isKeyContact: boolean,
SysGuid: string,
MapGuid: string,
Email:string,
LinkActionType: number
}
export interface selectedContact {
FullName: string,
Designation: string,
MapGuid: string,
SysGuid: string
}
export interface selectedOppertunity {
Type: string,
MapGuid: string,
Guid: string,
Title: string,
LinkActionType: number
}
export const ServiceLineTable: any[] = [
{ id: 1, name: 'serviceLines', title: 'Service line', place: 'Search service line', controltype: 'autocomplete', closePopUp: "@serviceLines", serviceData: [], reqFildId: 'Guid', isRequired: true, IsRelation: ["practice", "slbdm"], ErrMsg: "Please search service line name" },
{ id: 1, name: 'practice', title: 'Practice', place: 'Search practice', controltype: 'autocomplete', closePopUp: "@practice", serviceData: [], reqFildId: 'practiceGuid', isRequired: false },
{ id: 1, name: 'slbdm', title: 'SL BDM', place: 'Search service line BDM', controltype: 'autocomplete', closePopUp: "@slbdm", serviceData: [], reqFildId: 'bdmidGuid', isRequired: true, submit: true, ErrMsg: "Please search SL BDM name" },
]
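// Each ServiceLineTable entry configures one autocomplete column of the dynamic
// service-line grid: its lookup name, placeholder text, the response field used as
// the id (reqFildId), and whether the column is mandatory on submit.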
@Component({
selector: 'app-create-lead',
templateUrl: './create-lead.component.html',
styleUrls: ['./create-lead.component.scss']
})
export class CreateLeadComponent implements OnInit, OnDestroy {
addcommmentpopover: boolean = false;
viewcommentpopover: boolean
autooverlay = false
leadDetailsForm: FormGroup;
leadDealOwnerForm: FormGroup;
leadinfo = true;
dealinfo = false;
twoactive = false;
items: any;
lists: any;
check: any;
checking: any;
listing: any;
term;
term1;
term2;
term3;
term4;
consent = false;
agendaview = true;
emailview = false;
suggestion = false;
suggestion1 = false;
suggestion2 = false;
suggestion3 = false;
suggestion4 = false;
enquiry: any;
enquiryType: any[]
leadType: any[];
leadData;
ServiceTable = [];
LeadSource: Array<any>;
companyNameSearch: any = [];
Vertical: Array<any>;
Conversationsagp: Array<any>;
Conversationscamp: Array<any>;
Conversationsoppo: Array<any>;
Conversationssbu: Array<any>;
ConversationsserviceLine: Array<any>;
Conversationslead: Array<any>
accntCompany: any;
enquiryId: any;
verticalId: string = "";
serviceline: any;
leadSource: any
linkGuid = '';
campaignId: any;
leadSourceId: string = "";
sbuId: string = "";
servicelineSysGuid: any;
selectedCustomer: selectedCustomer[] = [];
customerContactdetails: Array<any>;
wiproContactdetails: Array<any>;
wiproContactowner: Array<any>;
currency: Array<any>;
currencyId: string = "";
currencyRateValue: any;
leadOwnerId: string = '';
leadguId: any;
cantactName: any;
selectedOppertunity: selectedOppertunity[] = [];
selectedContact: selectedContact[] = [];
selectedContactowner: {}[] = [];
initialVal: any;
isLoading: boolean = false;
conversationId: any;
CreateLeadDetails: any = [];
selectedConversation: any = []
IsChildLead: boolean = false;
conversationArr: any;
ActivitiesLeadArr: any;
tableName: any;
Isprospect: any;
Account: any;
allcampaigndetails: any = [];
ActiveCampaignDetails: any = [];
isvalidation: boolean = false;
disableOpportunity: boolean = true;
userSysGuid: any;
LeadGuid: any;
isCreateLead: boolean = false;
autofilledId: string = "";
isFormValid: boolean = true
currencyres: any;
showConversationsbu: boolean = false;
Conversationsbu: string = "";
ConversationNameSwitchsbu: boolean = false;
showConversationleadsrc: boolean = false;
Conversationleadsrc: string = "";
ConversationNameSwitchleadSource: boolean = false;
showConversationserviceline: boolean = false;
Conversationserviceline: string = "";
ConversationNameSwitchserviceline: boolean = true;
selectedServiceLine: any = [];
servicelisList: any = [];
showConversationvertical: boolean = false;
Conversationvrtcl: string = "";
ConversationNameSwitchvertical: boolean = false;
showConversation: boolean = false;
Conversation: string = "";
ConversationNameSwitch: boolean = false;
wiproProspectAccount: any = '';
customerAccount: any = '';
nameAutoFill = '';
showConversationlead: boolean = false;
Conversationlead: string = "";
ConversationNameSwitchlead: boolean = false;
conversationList: any = [];
showConversationcamp: boolean = false;
Conversationcamp: string = "";
ConversationNameSwitchcamp: boolean = false;
selectedCamp: any = [];
campList: any = [];
showConversationoppo: boolean = false;
Conversationoppo: string = "";
ConversationNameSwitchoppo: boolean = false;
oppArr: any = [];
showCustomer: boolean = false;
customerName: string = "";
customerNameSwitch: boolean = false;
showConversationagp: boolean = false;
Conversationagp: string = "";
ConversationNameSwitchagp: boolean = false;
selectedAgp: any = [];
showContact: boolean = false;
contactName: string = "";
contactNameSwitch: boolean = true;
showContactorigin: boolean = false;
contactNameorigin: string = "";
contactNameSwitchorigin: boolean = true;
selectedContactorigin: {}[] = [];
showContactowner: boolean = false;
contactNameowner: string = "";
contactNameSwitchowner: boolean = false;
addComments = true;
IsOwnerChildLead: boolean = false;
commentId: any;
verticalName: any;
leadSourceName: any;
sbuName: any;
selectedservicelineauto: {}[] = [];
servicelineautoArr: any = [];
servicelineTable_Data: any[];
servicelinesitems: FormArray;
id: any;
serviceTabledata: any = [];
serviceTableName: any = [];
Orinator: any;
isdelete: boolean = false;
descriptionLength: boolean = false;
createClicked: boolean = false;
sixMonthDate: any;
StartDate: any;
userId: any;
submit: any;
disableSbu: boolean = true;
disableCurrency: boolean = true;
disableVertical: boolean = true;
AllianceAccountArr: any = [];
AdvisorAccountArr: any = [];
addLeadContactFlag: any;
TempFomData: any;
AccountSelected: any;
VerticalSelected: any;
SbuSelected: any;
leadSourceSelected: any;
enqiuryTypedata: any;
tempSevicelineData: [];
AllianceData: any;
AdvisorData: any;
leadSrcName: string = "";
createleadId: any;
tableInValid: boolean = false;
dontGoNextStep: boolean = false;
selectedAll: any;
servicelineautoSwitch: boolean = true;
ServiceMapGuid: any;
createId: any;
servicelinesguid: any;
practiceGuid: any;
bdmGuid: any;
SGuid: any;
searchitem = ""
searchPractice = ""
searchSlbdm = ""
AdvisorsInflunce: boolean = false;
Alliance_Partner: boolean = false;
isCampaign: boolean = false;
isAnalyst: boolean = false;
isAdvertisementCampaign: Boolean = false;
isMCICampaign: boolean = false;
AllianceAccountguid: string = "";
AllianceName: string = "";
allianceAccountSwitch: boolean = false;
Verticaldetails: any;
equiryName: any;
AccName: string = '';
ConversationName: any;
activicityMapguid: any;
linkName: any;
AdvisorAccountguid: string = "";
AdvisorName: string = "";
CurrencySwitch: boolean = false
AdvisorAccountSwitch: boolean = false;
disableAccount: boolean
disableActivityGroup: boolean = true;
disableCampaign: boolean = true;
// disableLeadOwner: boolean
disableContacts: boolean
AdvisorsAccount: boolean;
AllianceAccount: boolean;
requestCamp: boolean;
countrySwitch: boolean = false;
leadOwnerName: string = '';
ExistingActivityData = []
finalActivityGroup = []
ExistingCampaignData = []
finalCampaignGroup = [];
ExistingOpportunityData = []
finalOppotunityGroup = []
ExistingContactData = []
finalContactGroup = []
ExistingAgpData = []
finalAgpGroup = []
ExistingWiproSolutionData = []
finalWiproSolutionGroup = []
TempLeadDetails
IsModuleSwitch
ModuleSwitch
countrySearch: any;
iswiprosolution: boolean;
isServicelines: boolean;
wiproSolustionSwitch: boolean = false;
selectedWiproSolution = [];
wiproSolutionsearch: any;
selectedCountry: any;
countryId: string = "";
countryName: string = "";
currencySelected
currencyName: string = "";
isCurrencySearchLoading: boolean = false
isLeadSourceNameSearchLoading: boolean = false
isAccountSearchLoader: boolean = false
isSbuLoder: boolean = false
isVerticalLoader: boolean = false
isAllianceLoader: boolean = false
isAdvisorLoader: boolean = false
isCountryLoading: boolean = false
iswiproSolutionLoader: boolean = false
isServicelineLoader: boolean = false
isAcivityGroupLoader: boolean = false
isCampaignLoading: boolean = false
isOpportunityLoader: boolean = false
isAgpLoader: boolean = false
isLeadOwnerLoader: boolean = false
isCustometContactLoader: boolean = false
arrowkeyLocation = 0;
AccId: string = "";
isProspect: boolean;
testingarray = ["1", "2"]
disabledOriginator: boolean = true
showFormMode: number = 0
showFirstForm: boolean;
moduleTypeStateData
dealValueError: boolean = false;
isCurrencyMandatory: boolean = false
OriginatorDetails: any = [];
CurrencyArrayList: any = []
RequestAlliance: boolean;
accountdetails = [];
headerdb = [];
enquiryTypeAria: any = ''
sendCampaignToAdvance = []
sendAccountToAdvance: any = []
sendAllianceToAdvance: any = []
sendOwnerToAdvance: any = []
sendWiproSolutionToAdvance: any = []
sendActivityToAdvance: any = []
sendOppToAdvance: any = []
sendCustomerToAdvance: any = []
sendAdvisorToAdvance: any = []
isFromMeeting: boolean = false;
dalaValue: any;
// accountModuleRouting : any;
//advance look up data
ActivityAdvanceData: any=[];
CampaignAdvanceData: any=[];
OppAdvanceData: any=[];
WiproSolAdvanceData: any=[];
customerAdvanceData: any=[];
@ViewChild('searchAccountNameList')
acc: ElementRef
//------------------------------------advance lookup ts file starts--------------------------------//
lookupdata = {
tabledata: [],
recordCount: 10,
headerdata: [],
Isadvancesearchtabs: false,
controlName: '',
lookupName: '',
isCheckboxRequired: false,
IsProspectAccount : true,
inputValue: '',
pageNo: 1,
nextLink: '',
TotalRecordCount: 0,
selectedRecord: [],
isLoader: false,
errorMsg: {
isError: false,
message: ""
},
otherDbData: {
countryvalue: [],
isLoader: false,
}
};
isMobileDevice : boolean = false;
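// Maps each lookup control name to the append handler invoked when a record is
// picked from the advanced-lookup dialog.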
IdentifyAppendFunc = {
'accountSearch': (data) => { this.appendAccountName(data.Name, data, true, true, true, 0) },
'allianceSearch': (data) => { this.appendalliance(data.Name, data, true, 0) },
'advisorSearch': (data) => { this.appendadvisor(data.Name, data, true, 0) },
'ownerSearch': (data) => { this.appendOwner(data.FullName, data, true, 0) },
'wiproSoluSearch': (data) => { this.appendWiproSolutions(data.Name, data, true, true, 0) },
'activitySearch': (data) => { this.appendActivityGroup(data.Name, data, true, true, 0) },
'campaignSearch': (data) => { this.appendCampaignGroup(data.Name, data, true, true, 0) },
'oppoSearch': (data) => { this.appendOpportunityGroup(data.Name, data, true, true, 0) },
'agpSearch': (data) => { this.appendAgpGroup(data.Name, data, true, 0) },
'contactSearch': (data) => { this.appendCustomerGroup(data.FullName, data, true, true, 0) },
'serviceLines': (data) => { this.appendservicelineauto(data, data.rowIndex, data.rowData, data.rowLine, true) },
'practice': (data) => { this.appendservicelineauto(data, data.rowIndex, data.rowData, data.rowLine, true) },
'slbdm': (data) => { this.appendservicelineauto(data, data.rowIndex, data.rowData, data.rowLine, true) }
}
selectedLookupData(controlName) {
switch (controlName) {
case 'accountSearch': { return (this.sendAccountToAdvance.length > 0) ? this.sendAccountToAdvance : [] }
case 'allianceSearch': { return (this.sendAllianceToAdvance.length > 0) ? this.sendAllianceToAdvance : [] }
case 'advisorSearch': { return (this.sendAdvisorToAdvance.length > 0) ? this.sendAdvisorToAdvance : [] }
case 'ownerSearch': { return (this.sendOwnerToAdvance.length > 0) ? this.sendOwnerToAdvance : [] }
case 'wiproSoluSearch': { return (this.sendWiproSolutionToAdvance.length > 0) ? this.sendWiproSolutionToAdvance : [] }
case 'activitySearch': { return (this.sendActivityToAdvance.length > 0) ? this.sendActivityToAdvance : [] }
case 'campaignSearch': { return (this.selectedCamp.length > 0) ? this.selectedCamp : [] }
case 'oppoSearch': { return (this.sendOppToAdvance.length > 0) ? this.sendOppToAdvance : [] }
case 'contactSearch': { return (this.sendCustomerToAdvance.length > 0) ? this.sendCustomerToAdvance : [] }
default: { return [] }
}
}
ngOnDestroy(): void {
}
// Cache the form details entered so far and push them to Redis:
// 1) if the user clicks save, clear the cache.
// 2) if the user clicks cancel, clear the cache.
// 3) if the user creates a lead from another module, don't bind the cached data, but update the cache.
// 4) if the user tries to create a contact or activity, update the cache.
CreateRedisCache() {
if (!this.createClicked) {
this.showFirstForm = this.checkShowFirstformOrSecond()
this.AppendRedisCache()
}
}
AppendRedisCache(data?) {
this.service.SetRedisCacheData((data) ? (data) : (this.createTempData()), 'createLead').subscribe(res => {
if (!res.IsError) {
console.log("SUCESS FULL AUTO SAVE")
}
}, error => {
console.log(error)
})
}
checkShowFirstformOrSecond(): boolean {
if (this.leadDealOwnerForm.value.dealName != '') {
return false
} else {
return true
}
}
// duplicates removed from advance lookup
emptyArray(controlName) {
switch (controlName) {
case 'accountSearch': {
return this.AccountSelected = [], this.sendAccountToAdvance = []
}
case 'allianceSearch': {
return this.AllianceData = [], this.sendAllianceToAdvance = []
}
case 'advisorSearch': {
return this.AdvisorData = [], this.sendAdvisorToAdvance = []
}
case 'ownerSearch': {
return this.selectedContactowner = [], this.sendOwnerToAdvance = []
}
case 'wiproSoluSearch': {
return this.selectedWiproSolution = [], this.sendWiproSolutionToAdvance = []
}
case 'activitySearch': {
return this.selectedConversation = [], this.sendActivityToAdvance = []
}
case 'campaignSearch': {
return this.selectedCamp = []
}
case 'oppoSearch': {
return this.selectedOppertunity = [], this.sendOppToAdvance = []
}
case 'contactSearch': {
return this.selectedCustomer = [], this.sendCustomerToAdvance = []
}
}
}
//---------------------------------advance lookup ts file ends-------------------------------//
constructor(
public el: ElementRef,
public router: Router,
public dialog: MatDialog,
public service: DataCommunicationService,
public contactLeadService: ContactleadService,
public conversationService: ConversationService,
public leadFormBuilder: FormBuilder,
private masterApi: MasterApiService,
private newconversationService: newConversationService,
private routingState: RoutingState,
public matSnackBar: MatSnackBar,
private datepipe: DatePipe,
private offlineService: OfflineService,
private EncrDecr: EncrDecrService,
private store: Store<AppState>,
private myOpenLeadService: MyOpenLeadsService,
private errPopup: ErrorMessage,
private meetingApi: MeetingService,
private activityService: ActivityService,
private S3MasterApiService: S3MasterApiService,
private fileService: FileUploadService,
private cacheDataService: CacheDataService,public envr : EnvService) {
this.contactLeadService.attachmentList = []
this.items = [{ name: "archie" }, { name: "jake" }, { name: "richard" }];
this.lists = [{ name: "archie" }, { name: "jake" }, { name: "richard" }];
this.listing = [{ name: "archie" }, { name: "jake" }, { name: "richard" }];
this.check = [{ name: "archie" }, { name: "jake" }, { name: "richard" }];
this.checking = [{ name: "archie" }, { name: "jake" }, { name: "richard" }];
this.StartDate = new Date();
var month = (this.StartDate.getMonth() + 6);
var date = this.StartDate.getDate();
var year = this.StartDate.getFullYear();
this.sixMonthDate = new Date(year, month, date)
}
servicelineautoclick() {
this.servicelineautoSwitch = true;
this.leadDetailsForm.value.serviceLine = true;
this.autooverlay = true;
}
practiceswitchauto() {
this.leadDetailsForm.value.practice = true;
this.autooverlay = true;
}
solutionswitchauto() {
this.leadDetailsForm.value.slbdm = true;
this.autooverlay = true;
}
closeoverlayauto() {
this.autooverlay = false;
this.leadDetailsForm.value.serviceLine = false;
this.leadDetailsForm.value.practice = false;
this.leadDetailsForm.value.slbdm = false;
this.servicelineautoSwitch = false;
}
wiproSolutionData(data: string) {
return data.replace(/ *\([^)]*\) */g, "");
}
ngOnInit() {
this.isMobileDevice = window.innerWidth < 800;
this.newconversationService.attachmentList = [];
this.createClicked = false;
this.S3MasterApiService.getdnbtoken('code').subscribe(res => {
localStorage.setItem('dNBToken', res.ResponseObject.access_token)
})
this.Orinator = localStorage.getItem('upn')
let userID = localStorage.getItem('userID')
this.userId = this.EncrDecr.get('EncryptionEncryptionEncryptionEn', userID, 'DecryptionDecrip')
this.OriginatorDetails = [{ FullName: this.Orinator, ownerId: this.userId }]
this.servicelineTable_Data = ServiceLineTable;
this.InitializeCreateForm()
this.setValidatorsForCustomer()
this.OnChange()
this.appendOwner(this.OriginatorDetails[0].FullName, this.OriginatorDetails[0], 0, false)
if (!sessionStorage.getItem('TempLeadDetails')) {
// this.accountModuleRouting = undefined;
this.servicelinesevent(false)
// Auto save ,conditions - it shd not have any sesssion data to populate.
this.service.GetRedisCacheData('createLead').subscribe(res => {
this.isLoading = false
if (!res.IsError) {
if (res.ResponseObject) {
if (res.ResponseObject != '') {
this.TempLeadDetails = JSON.parse(res.ResponseObject)
this.AppendTheFormData(this.TempLeadDetails)
if (this.TempLeadDetails.attachments.length > 0) {
this.contactLeadService.attachmentList = this.TempLeadDetails.attachments
}
} else {
this.getenquiryType()
// this.createTableNewRow();
}
} else {
this.getenquiryType()
// this.createTableNewRow();
}
}
})
this.getenquiryType()
this.servicelinesevent(false)
// this.createTableNewRow();
} else if (sessionStorage.getItem('TempLeadDetails')) { // module switch flow
console.log("APPENDING THE LOCAL TEMP MODULE SWITCH!!!")
this.isLoading = false
this.TempLeadDetails = JSON.parse(sessionStorage.getItem('TempLeadDetails'))
// if create module is switched, we need to update cache
this.AppendTheFormData(this.TempLeadDetails)
this.AppendRedisCache(this.TempLeadDetails)
// this.accountModuleRouting = (this.TempLeadDetails.moduletype.Moduleroute) ? this.TempLeadDetails.moduletype.Moduleroute : undefined;
} else {
this.getenquiryType()
// this.createTableNewRow();
}
this.leadDetailsForm.controls.leadSource.valueChanges.subscribe(val => {
if (this.ConversationNameSwitchleadSource) {
this.isLeadSourceNameSearchLoading = true
this.LeadSource = []
this.contactLeadService.getsearchLeadSource(val).subscribe(res => {
this.isLeadSourceNameSearchLoading = false
if (res.IsError === false) {
this.LeadSource = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.LeadSource = []
}
}, error => {
this.isLeadSourceNameSearchLoading = false;
this.LeadSource = []
});
}
})
this.leadDetailsForm.controls.accountOrCompanyName.valueChanges.subscribe(val => {
if (this.ConversationNameSwitch) {
this.isAccountSearchLoader = true
this.companyNameSearch = []
this.contactLeadService.getsearchAccountCompanyNew(val).subscribe(res => {
this.isAccountSearchLoader = false
this.isLoading = false;
if (res.IsError === false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.companyNameSearch = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.companyNameSearch = []
}
}, error => {
this.isAccountSearchLoader = false;
this.companyNameSearch = []
});
}
})
this.leadDetailsForm.controls.sbu.valueChanges.subscribe(val => {
if (val != "" && val != null && this.ConversationNameSwitchsbu) {
this.isSbuLoder = true
this.Conversationssbu = []
this.contactLeadService.getsearchSBUbyName(val, this.AccId, this.isProspect).subscribe(res => {
this.isSbuLoder = false
if (res.IsError === false) {
this.Conversationssbu = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.Conversationssbu = []
}
}, error => {
this.isSbuLoder = false;
this.Conversationssbu = []
});
}
else {
this.ResetValidatorsVerticalInput()
}
})
this.leadDetailsForm.controls.vertical.valueChanges.subscribe(val => {
if (val != "" && val != null && this.ConversationNameSwitchvertical) {
this.isVerticalLoader = true
this.Vertical = []
let verticalSearchreqBody = {
SearchText: "",
Guid: this.AccId,
SBUGuid: this.sbuId,
isProspect: this.isProspect,
PageSize: 10,
OdatanextLink: "",
RequestedPageNumber: 1
}
this.contactLeadService.getsearchVerticalBySbu(verticalSearchreqBody).subscribe(res => {
this.isVerticalLoader = false
if (res.IsError === false) {
this.Vertical = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.Vertical = []
}
}, error => {
this.isVerticalLoader = false;
this.Vertical = []
});
}
})
this.leadDetailsForm.controls.allianceAccount.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.allianceAccountSwitch) {
this.isAllianceLoader = true
this.AllianceAccountArr = []
this.contactLeadService.GetAllianceAccount(val).subscribe(
data => {
this.isAllianceLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.AllianceAccountArr = data.ResponseObject;
} else {
this.errPopup.throwError(data.Message);
this.isAllianceLoader = false
}
}, error => {
this.isAllianceLoader = false;
this.AllianceAccountArr = []
});
}
})
this.leadDetailsForm.controls.advisorAccount.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.AdvisorAccountSwitch) {
this.isAdvisorLoader = true
this.AdvisorAccountArr = []
this.contactLeadService.GetAdvisorAccount(val).subscribe(
data => {
this.isAdvisorLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.AdvisorAccountArr = data.ResponseObject;
} else {
this.errPopup.throwError(data.Message);
this.AdvisorAccountArr = []
}
}, error => {
this.isAdvisorLoader = false;
this.AdvisorAccountArr = []
});
}
})
this.leadDetailsForm.controls.country.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.countrySwitch) {
this.isCountryLoading = true
this.countrySearch = []
this.contactLeadService.getCoutry(val).subscribe(data => {
this.isCountryLoading = false
if (data.IsError === false) {
this.countrySearch = data.ResponseObject;
} else {
this.errPopup.throwError(data.Message);
this.countrySearch = []
}
}, error => {
this.isCountryLoading = false;
this.countrySearch = []
});
}
})
this.leadDetailsForm.controls.WiproSolutions.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.wiproSolustionSwitch) {
this.iswiproSolutionLoader = true
this.wiproSolutionsearch = []
this.contactLeadService.getWiproSolutions(val).subscribe(
data => {
this.iswiproSolutionLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.WiproSolAdvanceData = data.ResponseObject;
if (this.selectedWiproSolution.length > 0 && data.ResponseObject.length > 0) {
this.wiproSolutionsearch = this.CompareRemoveSelected(data.ResponseObject, this.selectedWiproSolution, "SysGuid")
} else {
this.wiproSolutionsearch = data.ResponseObject;
}
} else {
this.errPopup.throwError(data.Message);
this.wiproSolutionsearch = []
}
}, error => {
this.iswiproSolutionLoader = false;
this.wiproSolutionsearch = []
});
}
})
this.leadDetailsForm.controls.activityGroup.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.ConversationNameSwitchlead) {
this.isAcivityGroupLoader = true
this.Conversationslead = []
this.contactLeadService.getSearchActivityGroup(val, this.AccId, this.isProspect).subscribe(res => {
this.isAcivityGroupLoader = false
if (res.IsError === false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.ActivityAdvanceData = res.ResponseObject;
if (this.selectedConversation.length > 0 && res.ResponseObject.length > 0) {
this.Conversationslead = this.CompareRemoveSelected(res.ResponseObject, this.selectedConversation, "Guid")
} else {
this.Conversationslead = res.ResponseObject;
}
} else {
this.errPopup.throwError(res.Message)
this.Conversationslead = []
}
}, error => {
this.isAcivityGroupLoader = false;
this.Conversationslead = []
});
}
})
this.leadDetailsForm.controls.campaign.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.ConversationNameSwitchcamp) {
this.isCampaignLoading = true
this.Conversationscamp = []
this.contactLeadService.getsearchCampaign(val, this.AccId, this.isProspect).subscribe(res => {
this.isCampaignLoading = false
if (res.IsError === false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.CampaignAdvanceData = res.ResponseObject
if (this.selectedCamp.length > 0 && res.ResponseObject.length > 0) {
this.Conversationscamp = this.CompareRemoveSelected(res.ResponseObject, this.selectedCamp, "Id")
} else {
this.Conversationscamp = res.ResponseObject;
}
} else {
this.errPopup.throwError(res.Message);
this.Conversationscamp = []
}
}, error => {
this.isCampaignLoading = false;
this.Conversationscamp = []
});
}
})
this.leadDetailsForm.controls.opportunity.valueChanges.subscribe(val => {
if (val !== "" && this.ConversationNameSwitchoppo) {
this.isOpportunityLoader = true
this.Conversationsoppo = []
this.contactLeadService.searchOpportunityOrder(val, this.AccId, this.isProspect).subscribe(res => {
this.isOpportunityLoader = false
if (res.IsError === false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.OppAdvanceData = res.ResponseObject;
if (this.selectedOppertunity.length > 0 && res.ResponseObject.length > 0) {
this.Conversationsoppo = this.CompareRemoveSelected(res.ResponseObject, this.selectedOppertunity, "Guid")
} else {
this.Conversationsoppo = res.ResponseObject;
}
} else {
this.errPopup.throwError(res.Message);
this.Conversationsoppo = []
}
}, error => {
this.isOpportunityLoader = false;
this.Conversationsoppo = []
});
}
})
this.leadDetailsForm.controls.link.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.ConversationNameSwitchagp) {
this.isAgpLoader = true
this.Conversationsagp = []
this.contactLeadService.getsearchLinkAGP(val).subscribe(res => {
this.isAgpLoader = false
if (res.IsError === false) {
this.Conversationsagp = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.Conversationsagp = []
}
}, error => {
this.isAgpLoader = false;
this.Conversationsagp = []
});
}
})
this.leadDetailsForm.controls.leadOwner.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.contactNameSwitchowner) {
this.isLeadOwnerLoader = true
this.wiproContactowner = []
this.contactLeadService.getsearchLeadOwner(val).subscribe(data => {
this.isLeadOwnerLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.wiproContactowner = data.ResponseObject;
} else {
this.errPopup.throwError(data.Message);
this.wiproContactowner = []
}
}, error => {
this.isLeadOwnerLoader = false;
this.wiproContactowner = []
});
}
})
this.leadDealOwnerForm.controls.currency.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.CurrencySwitch) {
this.isCurrencySearchLoading = true
this.CurrencyArrayList = []
this.contactLeadService.getsearchCurrency(val).subscribe(res => {
this.isCurrencySearchLoading = false
if (res.IsError === false) {
this.CurrencyArrayList = res.ResponseObject.map(x => x = { ...x, Desc: this.getSymbol(x.Desc) });
} else {
this.errPopup.throwError(res.Message);
this.CurrencyArrayList = []
}
}, error => {
this.isCurrencySearchLoading = false;
this.CurrencyArrayList = []
});
}
})
this.leadDealOwnerForm.controls.customerContact.valueChanges.subscribe(val => {
if (val !== "" && val != null && this.customerNameSwitch) {
this.isCustometContactLoader = true
this.customerContactdetails = []
this.contactLeadService.searchCustomerparticipants(val, this.AccId, this.isProspect).subscribe(data => {
this.isCustometContactLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.customerAdvanceData = data.ResponseObject;
if (this.selectedCustomer.length > 0 && data.ResponseObject.length > 0) {
this.customerContactdetails = this.CompareRemoveSelected(data.ResponseObject, this.selectedCustomer, "Guid")
} else {
this.customerContactdetails = data.ResponseObject;
}
} else {
this.errPopup.throwError(data.Message);
this.customerContactdetails = []
}
}, error => {
this.isCustometContactLoader = false;
this.customerContactdetails = []
});
}
})
}
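// Restores a previously cached form state (e.g. after a module switch or a
// Redis-cache reload) into the create-lead form and clears the per-column
// popup flags on the service-line table.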
AppendTheFormData(data) {
this.ModuleSwitch = data.moduleSwitch
this.showFirstForm = data.showFirstForm
this.moduleTypeStateData = (data.moduletype) ? data.moduletype : null
this.populateCreateLeadForm(data)
if (!this.ModuleSwitch) {
this.LeadNameValidation();
if (!this.showFirstForm) {
this.steptwo()
}
}
this.ServiceTable.forEach(res => {
ServiceLineTable.forEach(item => {
res[item.closePopUp] = false
})
})
}
closepopover() {
this.addcommmentpopover = false;
}
LeadNameChange() {
this.CreateRedisCache()
}
savepopover(index) {
this.viewcommentpopover = true;
this.addcommmentpopover = false;
this.contactLeadService.attachmentList[index].Comments[0].Description = this.leadDetailsForm.value.comments
}
commentindex = -1
onAddCommentPopup(index) {
this.commentindex = index
this.leadDetailsForm.patchValue({
comments: ''
})
this.addcommmentpopover = true
}
onViewCommentPopUp(index) {
this.commentindex = index
this.leadDetailsForm.patchValue({
comments: this.contactLeadService.attachmentList[index].Comments[0].Description
})
this.addcommmentpopover = true
}
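/**
 * Returns the entries of array1 whose `key` value does not occur in array2 —
 * used to drop already-selected records from fresh lookup results, e.g.
 * CompareRemoveSelected([{Guid:'a'},{Guid:'b'}], [{Guid:'b'}], 'Guid') -> [{Guid:'a'}].
 */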
CompareRemoveSelected(array1, array2, key) {
return array1.filter(item1 =>
!array2.some(item2 => (item2[key] === item1[key])))
}
SbuVerticalreset(val) {
if (val == '') {
this.AccId = undefined;
this.disableSbu = true;
this.disableVertical = true;
this.sbuId = undefined;
this.verticalId = undefined;
this.leadDetailsForm.controls.sbu.patchValue('')
this.leadDetailsForm.controls.vertical.patchValue('')
}
}
get f() {
return this.leadDetailsForm.controls
}
get F() {
return this.leadDealOwnerForm.controls
}
get lines() {
return this.leadDetailsForm.get("lines") as FormArray;
}
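// Builds the two reactive forms: leadDetailsForm backs step one (lead details)
// and leadDealOwnerForm backs step two (deal value and owner details).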
InitializeCreateForm() {
this.leadDetailsForm = this.leadFormBuilder.group({
leadName: ['', Validators.compose([Validators.required, removeSpaces, checkLimit(101)])],
leadSource: ['', Validators.required],
allianceAccount: [''],
advisorAccount: [''],
// Note: FormBuilder's options object only recognises validators/
// asyncValidators/updateOn, so `disabled` keys there have no effect; the
// disableXxx flags appear to gate these inputs from the template instead.
accountOrCompanyName: ['', Validators.required],
accountOrProspect: new FormControl({ value: '', disabled: true }, Validators.required),
vertical: [''],
sbu: [''],
activityGroup: [''],
campaign: [''],
opportunity: [''],
link: [''],
enquiryType: [''],
description: [''],
serviceLineToggle: [false],
WiproSolutionToggle: [false],
country: [''],
WiproSolutions: [''],
leadOriginator: new FormControl({ value: this.Orinator, disabled: true }),
leadOwner: ['', Validators.required ],
comments: ['']
});
this.leadDealOwnerForm = this.leadFormBuilder.group({
dealName: ['', Validators.required],
currency: ['', Validators.required],
estimatedRateValue: [''],
timeline: ['', Validators.required],
customerContact: [''], // disabled state handled via the disableContacts flag in the template
wiproContact: ['']
});
}
leadNameChanges: string = "";
leadInputChange(event) {
this.leadNameChanges = event.target.value;
}
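// Formats the deal value with US thousands separators and a two-digit decimal
// part, rejecting more than 10 integer digits, e.g.
//   "1234567"   -> "1,234,567.00"
//   "1234567.5" -> "1,234,567.5"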
dealValuewithComma(val, isRedisCache) {
if (val != '') {
var x = val.split('.')
var diffentiateComma = x[0].replace(/\D/g, "")
if (diffentiateComma.length > 10) {
this.leadDealOwnerForm.get('dealName').setErrors({ "dealValueInvalid": true })
} else {
var b = Number(diffentiateComma).toLocaleString('en-US')
if (x[1]) {
this.leadDealOwnerForm.patchValue({
dealName: `${b}.${x[1]}`
})
} else {
this.leadDealOwnerForm.patchValue({
dealName: `${b}.00`
})
}
}
} else {
this.leadDealOwnerForm.patchValue({
dealName: ''
})
}
if (isRedisCache) {
this.CreateRedisCache()
}
}
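// Inverse of dealValuewithComma for editing: strips separators and the two
// decimal digits it appends, e.g. "1,234,567.00" -> "1234567". Assumes the
// value carries the two-digit decimal part.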
removeCommas(number){
number = number.replace(/[.,\s]/g, '');
number = number.substring(0, number.length - 2)
this.leadDealOwnerForm.patchValue({
dealName: number
})
}
appendleadSource(value, item, isRedisCache, i) {
this.leadSourceSelected = item
this.leadSourceId = item.SysGuid;
this.leadSrcName = item.Name;
this.leadDetailsForm.patchValue({
leadSource: value
})
this.ConversationNameSwitchleadSource = false;
this.leadSourceDependencyFunctions(value)
this.AdvisorName=''
if (isRedisCache) {
this.CreateRedisCache()
}
}
leadSourceDependencyFunctions(value) {
if (value === "Advisors ( Influencers )" || value === "Analysts") {
this.requestCamp = false
this.showAdvisorAccount()
} else if (value === "Alliance/Partner") {
this.requestCamp = false
this.showAllianceAccount()
} else if (value == "Advertisement" || value == "M&CI - iProfile" || value == "Events" || value == "Conference" || value == "Campaign") {
this.clearAllianceAdvisorValidators()
this.makeRequestCampaignValidators()
} else {
this.requestCamp = false
this.clearAllianceAdvisorValidators()
}
}
leadSourceClearDependencyFunctions(value) {
if (value === "Advisors ( Influencers )" || value === "Analysts") {
this.AdvisorsAccount = false;
this.AdvisorAccountguid = "";
this.AdvisorName = "";
this.leadDetailsForm.controls.advisorAccount.clearValidators()
this.leadDetailsForm.controls.advisorAccount.reset()
} else if (value === "Alliance/Partner") {
this.leadDetailsForm.controls.allianceAccount.clearValidators()
this.RequestAlliance = false
this.leadDetailsForm.controls.allianceAccount.reset(this.leadDetailsForm.controls.allianceAccount.value)
} else if (value == "Advertisement" || value == "M&CI - iProfile" || value == "Events" || value == "Conference" || value == "Campaign") {
this.leadDetailsForm.controls.campaign.reset()
this.leadDetailsForm.controls.campaign.clearValidators()
this.requestCamp = false
}
}
appendCurrency(value, item, isRedisCache, i) {
this.currencySelected = item
this.currencyId = item.Id;
this.currencyName = item.Desc;
this.leadDealOwnerForm.patchValue({
currency: value
})
this.CurrencySwitch = false;
if (isRedisCache) {
this.CreateRedisCache()
}
}
makeRequestCampaignValidators() {
if (this.selectedCamp.length > 0) {
this.requestCamp = false
this.removeValidatorsForCampaign()
} else {
this.setValidatorsForCampaign()
this.requestCamp = true
}
}
removeValidatorsForCampaign() {
this.leadDetailsForm.controls.campaign.clearValidators()
this.leadDetailsForm.controls.campaign.reset()
this.leadDetailsForm.updateValueAndValidity()
}
setValidatorsForCampaign() {
this.leadDetailsForm.controls.campaign.setValidators([Validators.required])
this.leadDetailsForm.patchValue({
campaign: ""
})
this.leadDetailsForm.updateValueAndValidity()
}
/**
 * @param value - the display name that was selected
 * @param item - the full item that was clicked
 * @param sbuverticalFlag - auto-bind the SBU and vertical values from the account id
 * @param AccLinkedFlag - clear the linked data (activity, campaign, opportunity, contacts) when the account changes
 * @param isRedisCache - snapshot the form to the Redis cache after appending
 * @param i - index of the clicked suggestion in the dropdown
 */
appendAccountName(value: string, item, sbuverticalFlag, AccLinkedFlag, isRedisCache, i) {
// if (i > this.companyNameSearch.length) {
// this.openadvancetabs('accountSearch', this.companyNameSearch, this.leadDetailsForm.get('accountOrCompanyName').value);
// this.AccountNameclose();
// } else {
this.accntCompany = '';
this.AccName = item.Name;
this.AccountSelected = item
this.sendAccountToAdvance.push({ ...item, Id: item.SysGuid })
this.AccId = item.SysGuid
this.isProspect = item.isProspect
// this.contactLeadService.GetValidAccount(item.SysGuid, item.isProspect, 1).subscribe(res => {
// if (res.IsError == false) {
if (this.AccId) {
this.disableCampaign = false
this.disableActivityGroup = false
this.disableOpportunity = false
}
// linked leads are enabled when account is present
if (AccLinkedFlag) {
this.delinkAccountChanges()
}
if (item.isProspect) {
this.leadDetailsForm.patchValue({ accountOrProspect: "Prospect" })
this.wiproProspectAccount = item.SysGuid;
this.customerAccount = ''
this.isProspect = true
} else {
this.leadDetailsForm.patchValue({ accountOrProspect: "Account" })
this.customerAccount = item.SysGuid;
this.wiproProspectAccount = ''
this.isProspect = false
}
this.leadDetailsForm.patchValue({
accountOrCompanyName: value
})
if (isRedisCache) {
this.CreateRedisCache()
}
// 1) Auto-populate SBU & vertical from the account id when the flag is true.
// 2) SBU & vertical stay disabled while they are auto-populated from the account.
let SbuReqParam = {
Guid: this.AccId,
isProspect: item.isProspect
}
this.myOpenLeadService.GetSbuAccountdata(SbuReqParam).subscribe(res => {
if (!res.IsError) {
if (res.ResponseObject.length > 0) {
this.appendConversationsbu(res.ResponseObject[0].Name, res.ResponseObject[0], true, 0)
this.disableSbu = true
this.disableVertical = true
} else {
this.ConversationNameSwitchsbu = false
this.resetSbuVerAccount()
}
} else {
this.ConversationNameSwitchsbu = false
this.errPopup.throwError(res.Message)
this.resetSbuVerAccount()
}
}, error => {
this.ConversationNameSwitchsbu = false
this.resetSbuVerAccount()
})
// } else if (res.IsError == true) {
// this.errPopup.throwError(res.Message)
// this.leadDetailsForm.patchValue({
// accountOrCompanyName: ""
// })
// this.AccId = ""
// this.acc.nativeElement.value = '';
// this.resetSbuVerAccount()
// }
// }, error => {
// this.companyNameSearch = [];
// this.leadDetailsForm.patchValue({
// accountOrCompanyName: ""
// })
// this.acc.nativeElement.value = '';
// });
this.ConversationNameSwitch = false
// }
}
resetSbuVerAccount() {
this.sbuId = ""
this.leadDetailsForm.controls.sbu.reset()
this.ResetValidatorsVerticalInput()
this.isServicelines = false
this.leadDetailsForm.patchValue({
serviceLineToggle: false
})
this.ServiceTable = [];
this.createTableNewRow()
}
appendConversationsbu(value: string, item, isRedisCache, i) {
this.SbuSelected = item
this.sbuId = item.Id;
this.sbuName = value;
this.leadDetailsForm.patchValue({
sbu: value
})
let verticalSearchreqBody = {
SearchText: "",
Guid: this.AccId,
SBUGuid: this.sbuId,
isProspect: this.isProspect,
PageSize: 10,
OdatanextLink: "",
RequestedPageNumber: 1
}
this.contactLeadService.getsearchVerticalBySbu(verticalSearchreqBody).subscribe(res => {
if (!res.IsError) {
this.appendVertical(res.ResponseObject[0].Name, res.ResponseObject[0], true)
this.disableVertical = true
}
})
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendVertical(value: string, item, isRedisCache) {
if (this.leadDetailsForm.value.sbu != '') {
this.VerticalSelected = item
this.verticalId = item.Id;
this.verticalName = value;
this.leadDetailsForm.patchValue({
vertical: value
})
this.ConversationNameSwitchvertical = false;
} else {
this.ResetValidatorsVerticalInput()
}
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendalliance(value: string, item, isRedisCache, i) {
this.AllianceData = item
this.AllianceAccountguid = item.Guid;
this.AllianceName = item.Name;
this.sendAllianceToAdvance.push({ ...item, Id: item.Guid })
this.leadDetailsForm.patchValue({
allianceAccount: value
})
this.allianceAccountSwitch = false;
this.AdvisorAccountguid = ""
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendadvisor(value: string, item, isRedisCache, i) {
if (i > this.AdvisorAccountArr.length) {
this.openadvancetabs('advisorSearch', this.AdvisorAccountArr, this.leadDetailsForm.get('advisorAccount').value);
this.Leadadvisorclose();
} else {
this.AdvisorData = item
this.AdvisorAccountguid = item.Guid;
this.AdvisorName = item.Name;
this.sendAdvisorToAdvance.push({ ...item, Id: item.Guid })
this.leadDetailsForm.patchValue({
advisorAccount: value
})
this.AdvisorAccountSwitch = false
this.AllianceAccountguid = ""
if (isRedisCache) {
this.CreateRedisCache()
}
}
}
appendEnquiryTypeMOb(event) {
if (!this.isEmpty(event)) {
this.enquiryId = event.target.value;
this.enquiryType.forEach(element => {
if (element.Id == this.enquiryId) {
this.equiryName = element.Value
this.enquiryTypeAria = this.equiryName
}
})
this.enqiuryTypedata = { Id: this.enquiryId, Name: this.equiryName }
}
this.CreateRedisCache()
}
appendEnquiryType(event) {
if (!this.isEmpty(event)) {
this.enquiryId = event.value;
this.enquiryType.forEach(element => {
if (element.Id == this.enquiryId) {
this.equiryName = element.Value
this.enquiryTypeAria = this.equiryName
}
})
this.enqiuryTypedata = { Id: this.enquiryId, Name: this.equiryName }
}
this.CreateRedisCache()
}
appendCountry(value: string, item, isRedisCache, i) {
this.selectedCountry = item
this.countryId = item.SysGuid
this.countryName = item.Name
this.leadDetailsForm.patchValue({
country: value
})
if (isRedisCache) {
this.CreateRedisCache()
}
}
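// The append*Group helpers below tag each linked record with a LinkActionType
// via GenerateLinkActionType: 1 marks a newly added link, while 3 marks an
// existing link for removal on save (see ChangeExistingToDelete).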
appendWiproSolutions(value: string, item, checkDuplicateItem, isRedisCache, i) {
this.leadDetailsForm.patchValue({
WiproSolutions: ""
})
const result = this.myOpenLeadService.GenerateLinkActionType(item, this.ExistingWiproSolutionData, this.finalWiproSolutionGroup, "SysGuid")
item = result.item,
this.finalWiproSolutionGroup = result.data
if (checkDuplicateItem && this.selectedWiproSolution.some(x => x.SysGuid == item.SysGuid)) {
this.errPopup.throwError(LeadCustomErrorMessages.WiproSolutionDuplicateError)
}
this.selectedWiproSolution.push(item)
this.sendWiproSolutionToAdvance.push({ ...item, Id: item.SysGuid })
this.selectedWiproSolution = this.service.removeDuplicates(this.selectedWiproSolution, "SysGuid");
if (this.selectedWiproSolution.length > 0) {
this.removeValidatorsWiproSolution()
}
this.wiproSolustionSwitch = false
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendActivityGroup(value: string, item, checkDuplicateItem, isRedisCache, i) {
this.leadDetailsForm.patchValue({
activityGroup: ""
})
const result = this.myOpenLeadService.GenerateLinkActionType(item, this.ExistingActivityData, this.finalActivityGroup, "Guid")
item = result.item,
this.finalActivityGroup = result.data
this.sendActivityToAdvance.push({ ...item, Id: item.Guid })
if (checkDuplicateItem && this.selectedConversation.some(x => x.Guid == item.Guid)) {
this.errPopup.throwError(LeadCustomErrorMessages.ActivityDuplicateError)
}
this.selectedConversation.push(item)
this.selectedConversation = this.service.removeDuplicates(this.selectedConversation, "Guid");
this.ConversationNameSwitchlead = false;
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendCampaignGroup(value: string, item, checkDuplicateItem, isRedisCache, i) {
this.campaignId = item.Id;
const result = this.myOpenLeadService.GenerateLinkActionType(item, this.ExistingCampaignData, this.finalCampaignGroup, "Id")
item = result.item,
this.finalCampaignGroup = result.data
if (checkDuplicateItem && this.selectedCamp.some(x => x.Id == item.Id)) {
this.errPopup.throwError(LeadCustomErrorMessages.CampaignDuplicateError)
}
this.selectedCamp.push(item)
this.selectedCamp = this.service.removeDuplicates(this.selectedCamp, "Id");
this.leadDetailsForm.patchValue({
campaign: ""
})
this.ConversationNameSwitchcamp = false;
if (this.selectedCamp.length > 0) {
this.leadDetailsForm.controls['campaign'].clearValidators()
this.leadDetailsForm.controls['campaign'].reset()
this.leadDetailsForm.updateValueAndValidity()
}
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendOpportunityGroup(value: string, item, checkDuplicateItem, isRedisCache, i) {
this.leadDetailsForm.patchValue({ opportunity: value })
let json = { Guid: item.Guid, MapGuid: (item.MapGuid) ? item.MapGuid : "", Title: item.Title, Type: item.Type, LinkActionType: 1 }
let json1 = { Guid: item.Guid, MapGuid: (item.MapGuid) ? item.MapGuid : "", Title: item.Title, Type: item.Type, LinkActionType: 1, Id: item.Guid }
const result = this.myOpenLeadService.GenerateLinkActionType(json, this.ExistingOpportunityData, this.finalOppotunityGroup, "Guid")
item = result.item,
this.finalOppotunityGroup = result.data
if (checkDuplicateItem && this.selectedOppertunity.some(x => x.Guid == json.Guid)) {
this.errPopup.throwError(LeadCustomErrorMessages.OpportunityDuplicateError)
}
this.selectedOppertunity.push(json);
this.sendOppToAdvance.push(json1)
this.ConversationNameSwitchoppo = false
this.selectedOppertunity = this.service.removeDuplicates(this.selectedOppertunity, "Guid");
this.leadDetailsForm.controls.opportunity.reset();
this.leadDetailsForm.updateValueAndValidity()
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendAgpGroup(value: string, item, isRedisCache, i) {
this.linkGuid = item.SysGuid;
this.linkName = item.Name
const result = this.myOpenLeadService.GenerateLinkActionType(item, this.ExistingAgpData, this.finalAgpGroup, "SysGuid")
item = result.item,
this.finalAgpGroup = result.data
this.selectedAgp = []
this.selectedAgp.push(item)
this.selectedAgp = this.service.removeDuplicates(this.selectedAgp, "SysGuid");
this.leadDetailsForm.controls.link.reset();
this.leadDetailsForm.updateValueAndValidity()
this.ConversationNameSwitchagp = false;
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendOwner(value: string, item, i, isRedisCache) {
this.leadOwnerId = item.ownerId;
this.leadOwnerName = value
this.selectedContactowner = [{ FullName: item.FullName, ownerId: item.ownerId }];
this.sendOwnerToAdvance.push({ ...item, Id: item.ownerId })
// this.selectedContactowner = this.service.removeDuplicates(this.selectedContactowner, "ownerId");
this.leadDetailsForm.patchValue({
leadOwner: item.FullName
});
this.contactNameSwitchowner = false
this.leadDetailsForm.controls.leadOwner.clearValidators();
this.leadDetailsForm.controls.leadOwner.updateValueAndValidity();
if (isRedisCache) {
this.CreateRedisCache()
}
}
appendCustomerGroup(value: string, item, checkDuplicateItem, isRedisCache, i) {
let json = { FullName: item.FullName, LinkActionType: 1, Designation: (item.Designation) ? item.Designation : "", isKeyContact: (item.isKeyContact) ? (item.isKeyContact) : false, MapGuid: (item.MapGuid) ? item.MapGuid : "", Guid: item.Guid, SysGuid: item.Guid, Email:item.Email };
let json1 = { FullName: item.FullName, LinkActionType: 1, Designation: (item.Designation) ? item.Designation : "", isKeyContact: (item.isKeyContact) ? (item.isKeyContact) : false, MapGuid: (item.MapGuid) ? item.MapGuid : "", Guid: item.Guid, SysGuid: item.Guid, Email:item.Email, Id: item.Guid };
const result = this.myOpenLeadService.GenerateLinkActionType(json, this.ExistingContactData, this.finalContactGroup, "SysGuid")
item = result.item,
this.finalContactGroup = result.data
if (checkDuplicateItem && this.selectedCustomer.some(x => x.SysGuid == json.SysGuid)) {
this.errPopup.throwError(LeadCustomErrorMessages.ContactDuplicateError)
}
this.selectedCustomer.push(json);
this.sendCustomerToAdvance.push(json1)
this.selectedCustomer = this.service.removeDuplicates(this.selectedCustomer, "SysGuid");
this.leadDealOwnerForm.patchValue({
customerContact: ""
})
this.customerNameSwitch = false
if (this.selectedCustomer.length > 0) {
this.removeValidatorsForCustomer()
}
if (isRedisCache) {
this.CreateRedisCache()
}
}
removeValidatorsForCustomer() {
this.leadDealOwnerForm.controls.customerContact.clearValidators();
this.leadDealOwnerForm.controls.customerContact.reset();
this.leadDealOwnerForm.controls.customerContact.updateValueAndValidity()
}
setValidatorsForCustomer() {
this.leadDealOwnerForm.controls.customerContact.setValidators([Validators.required])
this.leadDealOwnerForm.patchValue({
customerContact: null
})
this.leadDealOwnerForm.updateValueAndValidity()
}
wiprosolutionevent(isRedisCache) {
if (this.leadDetailsForm.controls.WiproSolutionToggle.value == true) {
this.iswiprosolution = true;
(this.selectedWiproSolution.length > 0) ? this.removeValidatorsWiproSolution() : this.setValidatorsWiproSolution()
} else if (this.leadDetailsForm.controls.WiproSolutionToggle.value == false) {
this.iswiprosolution = false
this.removeValidatorsWiproSolution()
}
if (isRedisCache) {
this.CreateRedisCache()
}
}
servicelinesevent(isRedisCache) {
if (this.leadDetailsForm.controls.serviceLineToggle.value == true) {
this.MakeServiceLineLablesMandatory()
this.isServicelines = true
} else if (this.leadDetailsForm.controls.serviceLineToggle.value == false) {
this.MakeServiceLineLablesNonMandatory()
this.isServicelines = false
// Keep only rows already persisted (apiId != "0"); drop unsaved rows.
this.ServiceTable = this.ServiceTable.filter(x => x.apiId != "0")
this.createTableNewRow();
}
if (isRedisCache) {
this.CreateRedisCache()
}
}
showAllianceAccount() {
this.RequestAlliance = true
this.AdvisorsAccount = false;
// this.AdvisorAccountguid = null;
this.AdvisorName = "";
this.leadDetailsForm.controls.allianceAccount.setValidators([Validators.required])
this.leadDetailsForm.controls.campaign.clearValidators()
this.leadDetailsForm.controls.campaign.reset()
this.leadDetailsForm.controls.advisorAccount.clearValidators()
this.leadDetailsForm.controls.advisorAccount.reset()
this.leadDetailsForm.updateValueAndValidity()
}
showAdvisorAccount() {
this.RequestAlliance = false
this.AdvisorsAccount = true;
this.leadDetailsForm.controls.advisorAccount.setValidators([Validators.required])
this.leadDetailsForm.controls.campaign.clearValidators()
this.leadDetailsForm.controls.allianceAccount.clearValidators()
// this.leadDetailsForm.controls.allianceAccount.reset(this.leadDetailsForm.controls.allianceAccount.value)
this.leadDetailsForm.controls.campaign.reset()
this.leadDetailsForm.updateValueAndValidity()
}
clearAllianceAdvisorValidators() {
this.RequestAlliance = false
this.requestCamp = false
this.AdvisorsAccount = false;
// this.AdvisorAccountguid = "";
this.AdvisorName = "";
this.leadDetailsForm.controls.campaign.clearValidators()
this.leadDetailsForm.controls.allianceAccount.clearValidators()
this.leadDetailsForm.controls.advisorAccount.clearValidators()
this.leadDetailsForm.controls.advisorAccount.reset()
// this.leadDetailsForm.controls.allianceAccount.reset(this.leadDetailsForm.controls.allianceAccount.value)
this.leadDetailsForm.controls.campaign.reset()
this.leadDetailsForm.updateValueAndValidity()
}
SetValidatorsVerticalInput() {
this.leadDetailsForm.controls.vertical.setValidators([Validators.required])
this.leadDetailsForm.patchValue({
vertical: null
})
this.leadDetailsForm.updateValueAndValidity()
}
ResetValidatorsVerticalInput() {
this.disableVertical = true
this.VerticalSelected = ""
this.verticalId = null
this.verticalName = null
this.leadDetailsForm.controls.vertical.clearValidators()
this.leadDetailsForm.controls.vertical.reset()
this.leadDetailsForm.updateValueAndValidity()
}
setValidatorsWiproSolution() {
this.leadDetailsForm.controls.WiproSolutions.setValidators([Validators.required])
this.leadDetailsForm.patchValue({
WiproSolutions: null
})
this.leadDetailsForm.updateValueAndValidity()
}
removeValidatorsWiproSolution() {
this.leadDetailsForm.controls['WiproSolutions'].clearValidators()
this.leadDetailsForm.controls['WiproSolutions'].reset()
this.leadDetailsForm.updateValueAndValidity()
}
MakeServiceLineLablesNonMandatory() {
ServiceLineTable[0].title = 'Service line'
ServiceLineTable[0].isRequired = false
ServiceLineTable[0].IsRelation = []
ServiceLineTable[2].title = 'SL BDM'
ServiceLineTable[2].isRequired = false
}
MakeServiceLineLablesMandatory() {
ServiceLineTable[0].title = 'Service line*'
ServiceLineTable[0].isRequired = true
ServiceLineTable[0].IsRelation = ["practice", "slbdm"]
ServiceLineTable[2].title = 'SL BDM*'
ServiceLineTable[2].isRequired = true
}
/**
* Delinking all the links according to the account selection
*/
delinkAccountChanges() {
if (this.AccId) {
if (this.selectedConversation.length > 0) {
this.delinkActivityGroupbyAccountChanges()
}
if (this.selectedCamp.length > 0) {
this.delinkCampaignbyAccountChanges()
}
if (this.selectedOppertunity.length > 0) {
this.delinkOpportunitybyAccountChanges()
}
if (this.selectedCustomer.length > 0) {
this.delinkCustomerContactbyAccountChanges()
}
}
}
delinkActivityGroupbyAccountChanges() {
this.finalActivityGroup = this.finalActivityGroup.filter(res => res.LinkActionType !== 1)
this.finalActivityGroup = this.ChangeExistingToDelete(this.finalActivityGroup)
this.selectedConversation = []
}
delinkOpportunitybyAccountChanges() {
this.finalOppotunityGroup = this.finalOppotunityGroup.filter(res => res.LinkActionType !== 1)
this.finalOppotunityGroup = this.ChangeExistingToDelete(this.finalOppotunityGroup)
this.selectedOppertunity = []
}
delinkCampaignbyAccountChanges() {
this.finalCampaignGroup = this.finalCampaignGroup.filter(res => res.LinkActionType !== 1)
this.finalCampaignGroup = this.ChangeExistingToDelete(this.finalCampaignGroup)
this.selectedCamp = []
}
delinkCustomerContactbyAccountChanges() {
this.finalContactGroup = this.finalContactGroup.filter(res => res.LinkActionType !== 1)
this.finalContactGroup = this.ChangeExistingToDelete(this.finalContactGroup)
this.selectedCustomer = []
}
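// Flags every surviving existing link with LinkActionType 3 (delete) so the
// backend delinks records that were attached before the account changed.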
ChangeExistingToDelete(filterExistingdata: any[]) {
if (filterExistingdata) {
if (filterExistingdata.length > 0) {
filterExistingdata = filterExistingdata.map(res => {
return { ...res, LinkActionType: 3 }
})
return filterExistingdata
}
else {
return []
}
} else {
return []
}
}
delinkWiproSolutions(item) {
if (this.selectedWiproSolution.length > 0) {
const result = this.myOpenLeadService.generateDelinkLinkActionType(item, this.ExistingWiproSolutionData, this.finalWiproSolutionGroup, "SysGuid")
item = result.item,
this.finalWiproSolutionGroup = result.data
this.selectedWiproSolution = this.selectedWiproSolution.filter(x => x.SysGuid != item.SysGuid)
this.sendWiproSolutionToAdvance = this.sendWiproSolutionToAdvance.filter(x => x.SysGuid != item.SysGuid)
}
if (this.selectedWiproSolution.length == 0 && this.iswiprosolution) {
this.setValidatorsWiproSolution()
}
this.CreateRedisCache()
}
delinkOpp(item, i) {
if (this.selectedOppertunity.length > 0) {
const result = this.myOpenLeadService.generateDelinkLinkActionType(item, this.ExistingOpportunityData, this.finalOppotunityGroup, "Guid")
item = result.item,
this.finalOppotunityGroup = result.data
this.selectedOppertunity = this.selectedOppertunity.filter(x => x.Guid != item.Guid)
this.sendOppToAdvance = this.sendOppToAdvance.filter(x => x.Guid != item.Guid)
}
this.CreateRedisCache()
}
delinkConv(item, i) {
if (this.selectedConversation.length > 0) {
const result = this.myOpenLeadService.generateDelinkLinkActionType(item, this.ExistingActivityData, this.finalActivityGroup, "Guid")
item = result.item,
this.finalActivityGroup = result.data
this.selectedConversation = this.selectedConversation.filter(x => x.Guid != item.Guid)
this.sendActivityToAdvance = this.sendActivityToAdvance.filter(x => x.Guid != item.Guid)
}
this.CreateRedisCache()
}
delinkCampaign(item, i) {
this.campList.splice(i, 1)
if (this.selectedCamp.length > 0) {
const result = this.myOpenLeadService.generateDelinkLinkActionType(item, this.ExistingCampaignData, this.finalCampaignGroup, "Id")
item = result.item,
this.finalCampaignGroup = result.data
this.selectedCamp = this.selectedCamp.filter(x => x.Id != item.Id)
}
if (this.selectedCamp.length == 0 && this.setCampaignForLeadSource()) {
this.setValidatorsForCampaign()
}
this.CreateRedisCache()
}
setCampaignForLeadSource() {
const src = this.leadDetailsForm.controls.leadSource.value
return src == "Advertisement" || src == "M&CI - iProfile" || src == "Events" || src == "Conference"
}
delinkAgp(item, i) {
if (this.selectedAgp.length > 0) {
this.selectedAgp = this.selectedAgp.filter(x => x.SysGuid != item.SysGuid)
}
this.CreateRedisCache()
}
// delinkLeadOwner(item, i) {
// if (this.selectedContactowner.length > 0) {
// this.selectedContactowner = this.selectedContactowner.filter(x => x.ownerId != item.ownerId)
// }
// this.CreateRedisCache()
// }
delinkCustomerContacts(item) {
if (this.selectedCustomer.length > 0) {
const result = this.myOpenLeadService.generateDelinkLinkActionType(item, this.ExistingContactData, this.finalContactGroup, "SysGuid")
item = result.item,
this.finalContactGroup = result.data
this.selectedCustomer = this.selectedCustomer.filter(x => x.SysGuid != item.SysGuid)
this.sendCustomerToAdvance = this.sendCustomerToAdvance.filter(x => x.SysGuid != item.SysGuid)
}
if (this.selectedCustomer.length == 0) {
this.setValidatorsForCustomer()
}
this.CreateRedisCache()
}
opencancelpop(): void {
const dialogRef = this.dialog.open(cancelpopComponent, {
width: '400px',
data: this.moduleTypeStateData
});
dialogRef.afterClosed().subscribe(res => {
if (res) {
this.createClicked = true
this.ClearRedisCache()
} else {
this.createClicked = false
}
})
}
//------------------------------------------------CLOSE FUNCTIONS START---------------------------------------------------------------------//
LeadSourceclose() {
this.ConversationNameSwitchleadSource = false;
// leadSrcName is "" when nothing was picked, which clears the input.
this.leadDetailsForm.patchValue({
leadSource: this.leadSrcName
})
}
Currencyclose() {
this.CurrencySwitch = false;
if (this.currencyId == "") {
this.leadDealOwnerForm.patchValue({
currency: ""
})
}
}
ConversationSBUclose() {
this.ConversationNameSwitchsbu = false;
if (this.sbuId === "") {
this.leadDetailsForm.patchValue({
sbu: ""
})
}
}
Leadverticalclose() {
this.ConversationNameSwitchvertical = false;
if (this.verticalId === "") {
this.leadDetailsForm.patchValue({
vertical: ""
})
}
}
AccountNameclose() {
this.ConversationNameSwitch = false
this.leadDetailsForm.patchValue({
accountOrCompanyName: this.AccName
})
}
Leadadvisorclose() {
this.AdvisorAccountSwitch = false;
this.leadDetailsForm.patchValue({
advisorAccount: this.AdvisorName
})
}
Leadallianceclose() {
this.allianceAccountSwitch = false;
this.leadDetailsForm.patchValue({
allianceAccount: this.AllianceName
})
}
CampaignClose() {
this.ConversationNameSwitchcamp = false;
this.leadDetailsForm.patchValue({
campaign: ""
})
}
ActivityGoupclose() {
this.ConversationNameSwitchlead = false;
this.leadDetailsForm.patchValue({
activityGroup: ""
})
}
Oppotunityclose() {
this.ConversationNameSwitchoppo = false;
this.leadDetailsForm.patchValue({
opportunity: ""
})
}
Agpclose() {
this.ConversationNameSwitchagp = false;
this.leadDetailsForm.patchValue({
link: ""
})
}
countryclose() {
this.countrySwitch = false;
if (this.countryName === "") {
this.leadDetailsForm.patchValue({
country: ""
})
}
}
wiproSoltionclose() {
this.wiproSolustionSwitch = false;
this.leadDetailsForm.patchValue({
WiproSolutions: ""
})
}
//------------------------------------------------CLOSE FUNCTIONS END---------------------------------------------------------------------//
//------------------------------------------------CLEAR FUNCTIONS START---------------------------------------------------------------------//
clearCuurencyName() {
this.leadDealOwnerForm.patchValue({
currency: ""
});
this.currencyId = '';
this.currencyName = '';
}
clearCountry() {
this.leadDetailsForm.patchValue({
country: ""
});
this.countryName = "";
this.countryId = "";
}
clearLeadSource(value) {
this.leadDetailsForm.patchValue({
leadSource: ""
});
this.leadSrcName = "";
this.leadSourceId = "";
this.leadSourceClearDependencyFunctions(value)
}
clearAccount() {
this.leadDetailsForm.patchValue({
accountOrCompanyName: "",
sbu: "",
vertical: ""
});
this.AccName = '';
this.AccId = '';
this.verticalId = '';
this.sbuId = '';
this.isServicelines = false
this.leadDetailsForm.patchValue({
serviceLineToggle: false
})
this.ServiceTable = [];
this.createTableNewRow()
this.disableActivityGroup = true
this.disableCampaign = true
this.disableOpportunity = true;
this.AccountSelected.Name = ''
this.AccountSelected.SysGuid = ''
this.SbuSelected = undefined
this.VerticalSelected = undefined
}
clearAdvisor() {
this.leadDetailsForm.patchValue({
advisorAccount: ""
});
this.AdvisorName = '';
// this.AdvisorAccountguid = "";
}
clearAlliance() {
this.leadDetailsForm.patchValue({
allianceAccount: ""
});
this.AllianceName = '';
// this.AllianceAccountguid = "";
}
clearLeadOwner() {
this.leadDetailsForm.patchValue({
leadOwner: ""
});
this.leadOwnerName = "";
this.leadOwnerId = "";
this.leadDetailsForm.controls['leadOwner'].setValidators(Validators.required);
this.leadDetailsForm.controls['leadOwner'].updateValueAndValidity();
}
//------------------------------------------------CLEAR FUNCTIONS ENDS---------------------------------------------------------------------//
CreateLeadNext() {
this.tableInValid = false;
this.ServiceTable.forEach((x) => {
this.servicelineTable_Data.forEach(y => {
if (this.leadDetailsForm.controls['serviceLineToggle'].value) {
if (y.isRequired) {
if (x[y.name].Name.trim().length == 0) {
this.tableInValid = true;
x[y.name].IsError = true;
setTimeout(() => {
// Guard: the error element may not be rendered yet.
let t = document.getElementsByClassName('error')[0];
if (t) {
document.getElementById(t.id).focus()
document.getElementById(t.id).blur()
}
x[y.name].IsError = true;
}, 500)
}
}
}
})
})
this.LeadNameValidation();
if (!this.tableInValid && this.leadDetailsForm.valid) {
this.steptwo()
} else {
this.service.windowScroll();
this.service.validateAllFormFields(this.leadDetailsForm);
let invalidElements = this.el.nativeElement.querySelectorAll('#validatescroll .ng-invalid');
if (invalidElements.length) {
this.scrollTo(invalidElements[0]);
this.service.validationErrorMessage();
}
return;
}
}
/****************Advance search popup starts**********************/
lookUpColumn(controlName, value) {
this.lookupdata.controlName = controlName
this.lookupdata.headerdata = leadAdvnHeaders[controlName]
this.lookupdata.lookupName = leadAdvnNames[controlName]['name']
this.lookupdata.isCheckboxRequired = leadAdvnNames[controlName]['isCheckbox']
this.lookupdata.Isadvancesearchtabs = leadAdvnNames[controlName]['isAccount']
this.lookupdata.inputValue = value
}
openadvancetabs(controlName, initalLookupData, value, index?, headerdata?, line?): void {
this.lookUpColumn(controlName, value)
this.lookupdata.selectedRecord = this.selectedLookupData(this.lookupdata.controlName);
this.myOpenLeadService.getLookUpFilterData({ data: initalLookupData, controlName: controlName, isService: false, useFullData: null, rowData: headerdata, rowIndex: index, rowLine: line }).subscribe(res => {
this.lookupdata.isLoader = false;
this.lookupdata.tabledata = res
})
const dialogRef = this.dialog.open(AdvancelookuptabsComponent, {
width: this.service.setHeaderPixes(this.lookupdata.headerdata.length, this.lookupdata.Isadvancesearchtabs),
data: this.lookupdata
});
dialogRef.componentInstance.modelEmiter.subscribe((x) => {
if (x['objectRowData'].searchKey != '' && x.currentPage == 1) {
this.lookupdata.nextLink = ''
}
let dialogData = {
searchVal: (x['objectRowData'].searchKey != '') ? x['objectRowData'].searchKey : '',
recordCount: this.lookupdata.recordCount,
OdatanextLink: this.lookupdata.nextLink,// need to handel the pagination and search!
pageNo: x.currentPage//need to handel from pagination
}
if (x.objectRowData.wiprodb) {
this.lookUpColumn(controlName, value)
this.myOpenLeadService.getLookUpFilterData({ data: null, controlName: controlName, isService: true, useFullData: { ...this.getCommonData(), ...dialogData } }).subscribe(res => {
this.lookupdata.isLoader = false;
this.lookupdata.errorMsg.isError = false;
if (res.IsError == false) {
this.lookupdata.errorMsg.message = ''
if (x.action == "loadMore") {
this.lookupdata.TotalRecordCount = res.TotalRecordCount;
this.lookupdata.tabledata = this.lookupdata.tabledata.concat(res.ResponseObject);
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
} else if (x.action == "search") {
this.lookupdata.TotalRecordCount = res.TotalRecordCount;
this.lookupdata.tabledata = res.ResponseObject;
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
}
else if (x.action == "tabSwich") {
if (x.objectRowData.wiprodb) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount;
this.lookupdata.tabledata = res.ResponseObject;
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
}
}
} else {
this.lookupdata.errorMsg.message = JSON.stringify(res.Message)
}
}, error => {
this.lookupdata.isLoader = false;
this.lookupdata.errorMsg.isError = false;
})
} else {
this.lookupdata.controlName = controlName
this.lookupdata.headerdata = DnBAccountHeader
this.lookupdata.lookupName = leadAdvnNames[controlName]['name']
this.lookupdata.isCheckboxRequired = leadAdvnNames[controlName]['isCheckbox']
this.lookupdata.Isadvancesearchtabs = leadAdvnNames[controlName]['isAccount']
this.lookupdata.inputValue = value;
this.dnBDataBase(x);
}
});
dialogRef.afterClosed().subscribe(result => {
if (result) {
if (result.wiprodb == false) {
this.service.sendProspectAccount = false;
this.IsModuleSwitch = false
this.showFirstForm = false;
this.groupData(result);
sessionStorage.setItem("TempLeadDetails", JSON.stringify(this.createTempData()))
this.router.navigateByUrl('/leads/prospectAccount')
} else {
this.emptyArray(result.controlName);
this.AppendParticularInputFun(result.selectedData, result.controlName)
}
}
});
}
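// Handles the D&B tab of the advance-search dialog: "dbAutoSearch" fetches
// country suggestions for the typed text, "dbSearch" queries D&B by account
// name plus country code and feeds the results into the lookup table.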
dnBDataBase(action) {
if (action.action == "dbAutoSearch") {
this.lookupdata.otherDbData.isLoader = true;
this.activityService.getCountryData({ isService: true, searchKey: action.objectRowData.searchKey }).subscribe(res => {
this.lookupdata.otherDbData.isLoader = false;
this.lookupdata.isLoader = false;
if (res.IsError == false) {
this.lookupdata.errorMsg.isError = false;
this.lookupdata.errorMsg.message = ''
this.lookupdata.otherDbData.countryvalue = res.ResponseObject;
}
}, error => {
this.lookupdata.isLoader = false;
this.lookupdata.errorMsg.isError = false;
})
}
if (action.action == "dbSearch") {
let body = {
"CustomerAccount": {
"Name": action.objectRowData.dbSerachData.accountname.value,
"Address": { "CountryCode": action.objectRowData.dbSerachData.countryvalue.id }
}
}
this.lookupdata.otherDbData.isLoader = true;
this.activityService.getSearchAccountInDNB({ isService: true, body: body }).subscribe(res => {
this.lookupdata.otherDbData.isLoader = false;
this.lookupdata.isLoader = false;
if (res.IsError == false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount;
this.lookupdata.tabledata = res.ResponseObject;
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.lookupdata.errorMsg.isError = false;
this.lookupdata.errorMsg.message = ''
}
if (res.IsError == true) {
this.lookupdata.tabledata = [];
this.lookupdata.TotalRecordCount = 0;
this.lookupdata.nextLink = ''
}
}, error => {
this.lookupdata.isLoader = false;
this.lookupdata.errorMsg.isError = false;
})
}
}
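// Stashes the selected account context in sessionStorage so the
// "Create activity group" flow can navigate back to create-lead (see the
// model/route fields).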
groupData(result) {
var object = {
activityGroupName: "",
account: {
Name: (result.selectedData.length != 0) ? (result.selectedData[0].Name) ? result.selectedData[0].Name : "" : "",
Id: (result.selectedData.length != 0) ? (result.selectedData[0].Id) ? result.selectedData[0].Id : "" : "",
Industry: (result.selectedData.length != 0) ? (result.selectedData[0].Industry) ? result.selectedData[0].Industry : "" : "",
Region: (result.selectedData.length != 0) ? (result.selectedData[0].Region) ? result.selectedData[0].Region : "" : ""
},
model: 'Create lead',
route: 'leads/createlead'
}
sessionStorage.setItem('CreateActivityGroup', JSON.stringify(object))
}
/*****************Advance search popup ends*********************/
AppendParticularInputFun(selectedData, controlName) {
if (selectedData) {
if (selectedData.length > 0) {
selectedData.forEach(data => {
this.IdentifyAppendFunc[controlName](data)
});
}
}
}
getCommonData() {
return {
guid: this.LeadGuid,
isProspect: this.isProspect,
AccId: this.AccId,
SbuId: this.sbuId,
VerticalId: this.verticalId
}
}
LeadNameValidation(){
if (this.leadNameChanges.trim() == "") {
this.leadDetailsForm.patchValue({leadName : ""})
this.leadDetailsForm.controls['leadName'].setValidators(Validators.required);
this.leadDetailsForm.controls['leadName'].markAsTouched();
this.leadDetailsForm.controls['leadName'].updateValueAndValidity();
}
}
stepone() {
this.leadinfo = true;
this.dealinfo = false;
this.twoactive = false;
}
steptwo() {
this.LeadNameValidation();
if (this.leadDetailsForm.valid && this.leadNameChanges.trim() !== "") {
this.isLoading = false;
this.leadinfo = false;
this.dealinfo = true;
} else {
this.service.validateAllFormFields(this.leadDetailsForm);
}
this.service.windowScroll();
}
populateCreateLeadForm(data) {
if (data.enquiryData) {
this.enquiryType = data.enquiryData
this.enqiuryTypedata = data.enquirytype
this.enquiryId = (data.enquirytype) ? data.enquirytype.Id : ""
this.enquiryTypeAria = (data.enquirytype) ? data.enquirytype.Name : ""
} else {
this.getenquiryType()
}
this.leadNameChanges = (data.leadName == null || data.leadName == undefined)? "" : data.leadName
this.leadDetailsForm.patchValue({
leadName: data.leadName,
leadSource: (data.leadSource) ? data.leadSource.Name : "",
accountOrCompanyName: (data.accountName) ? data.accountName.Name : "",
// accountOrProspect:data.leadName ,
allianceAccount: (data.alliance) ? data.alliance.Name : "",
advisorAccount: (data.advisor) ? data.advisor.Name : "",
sbu: (data.sbu) ? data.sbu.Name : "",
vertical: (data.vertical) ? data.vertical.Name : "",
serviceLineToggle: data.serviceLineToggle,
WiproSolutionToggle: data.WiproSolutionToggle,
country: (data.country) ? data.country.Name : '',
WiproSolutions: "",
activityGroup: "",
campaign: "",
opportunity: "",
link: "",
enquiryType: (data.enquirytype) ? JSON.parse(data.enquirytype.Id) : '',
description: (data.desc) ? data.desc : "",
leadOriginator: (data.ownerDetails) ? data.ownerDetails.originator : "",
leadOwner: "",
})
this.leadDealOwnerForm.patchValue({
dealName: (data.leadInfo) ? data.leadInfo.dealValue : "",
currency: (data.leadInfo) ? (data.leadInfo.currency) ? data.leadInfo.currency.Desc : "" : "",
estimatedRateValue: "",
timeline: (data.leadInfo) ? data.leadInfo.timeline : "",
customerContact: "",
wiproContact: ""
})
this.isFromMeeting = (data.moduleSwitch) ? true : false
this.Orinator = data.ownerDetails.originator
this.showFirstForm = data.showFirstForm
if (data.leadSource) {
this.appendleadSource(data.leadSource.Name, data.leadSource, false, 0)
}
// Why is the delink flag (4th param) of appendAccountName always false here?
// 1) On a module switch none of the linked data should be cleared; see delinkAccountChanges().
// 2) When creating a contact or activity from inside create-lead, the linked data is likewise kept; see delinkAccountChanges().
if (data.accountName) {
this.appendAccountName(data.accountName.Name, data.accountName, true, false, false, 0)
}
if (data.sbu) {
this.appendConversationsbu(data.sbu.Name, data.sbu, false, 0)
}
if (data.vertical) {
this.appendVertical(data.vertical.Name, data.vertical, false)
}
if (data.alliance) {
this.appendalliance(data.alliance.Name, data.alliance, false, 0)
}
if (data.advisor) {
this.appendadvisor(data.advisor.Name, data.advisor, false, 0)
}
if (data.country) {
this.appendCountry(data.country.Name, data.country, false, 0)
}
if (!data.moduleSwitch) {
if (data.links.campaign) {
data.links.campaign.forEach(x => {
this.appendCampaignGroup(x.Name, x, false, false, 0)
})
}
}
this.wiprosolutionevent(false)
this.servicelinesevent(false)
this.sbuId = (data.sbu && data.sbu.sbuId) ? data.sbu.sbuId : ''
if (!data.moduleSwitch) {
if (data.links.wiprosolution && this.iswiprosolution) {
if (data.links.wiprosolution.length > 0) {
data.links.wiprosolution.forEach(x => {
this.appendWiproSolutions(x.Name, x, false, false, 0)
})
}
}
}
if (data.ownerDetails) {
if (data.ownerDetails.owner) {
if (data.ownerDetails.owner.length > 0) {
this.appendOwner(data.ownerDetails.owner[0].FullName, data.ownerDetails.owner[0], 0, false)
}
}
}
if (!data.moduleSwitch) {
if (data.leadInfo.currency) {
this.appendCurrency(data.leadInfo.currency.Desc, data.leadInfo.currency, false, 0)
}
}
if (!data.moduleSwitch) {
console.log("yes modulae switch is false!!!1")
// trying to append data (i.e, if you create a contact from "+" )
this.selectedWiproSolution = (data.links.wiprosolution) ? data.links.wiprosolution : []
this.ServiceTable = (data.serviceline) ? data.serviceline : []
this.selectedConversation = (data.links) ? (data.links.activitygroup) ? data.links.activitygroup : [] : []
this.selectedCamp = (data.links) ? (data.links.campaign) ? data.links.campaign : [] : []
this.selectedOppertunity = (data.links) ? (data.links.opportunity) ? data.links.opportunity : [] : []
this.selectedAgp = (data.links) ? (data.links.agp) ? data.links.agp : [] : []
this.selectedCustomer = (data.ownerDetails) ? (data.ownerDetails.customers) ? data.ownerDetails.customers : [] : []
this.finalActivityGroup = (data.finalActivityGroup) ? data.finalActivityGroup : []
this.finalCampaignGroup = (data.finalCampaignGroup) ? data.finalCampaignGroup : []
this.finalOppotunityGroup = (data.finalOpportunityGroup) ? data.finalOpportunityGroup : []
this.finalContactGroup = (data.finalCustomerGroup) ? data.finalCustomerGroup : []
this.finalAgpGroup = (data.finalAgpGroup) ? data.finalAgpGroup : []
this.selectedContactowner = (data.ownerDetails) ? (data.ownerDetails.owner) ? data.ownerDetails.owner : [] : []
// to remove the validators for the customer contact.
if (data.ownerDetails.customers) {
data.ownerDetails.customers.forEach(x => {
this.appendCustomerGroup(x.FullName, x, false, false, 0)
})
}
// When the user module-switches into create-lead, moduleSwitch is TRUE; if a
// contact is then created from here it flips to FALSE. The check below handles
// that transition.
if (!this.isEmpty(this.moduleTypeStateData)) {
if (this.moduleTypeStateData.name == "Meeting") {
this.disableAccount = true // need to disable the account input
this.createExistingLinks(data)
}
}
} else {
// Creating a LEAD from another module.
this.disableAccount = (data.accountName) ? true : false
// Per requirement these inputs stay enabled when switching from another module.
this.disableActivityGroup = false
this.disableCampaign = false
this.disableOpportunity = false
this.disableContacts = false
this.selectedConversation = (data.links) ? (data.links.activitygroup) ? data.links.activitygroup : [] : []
this.selectedCamp = (data.links) ? (data.links.campaign) ? data.links.campaign : [] : []
this.selectedOppertunity = (data.links) ? (data.links.opportunity) ? data.links.opportunity : [] : []
this.selectedAgp = (data.links) ? (data.links.agp) ? data.links.agp : [] : []
this.selectedCustomer = (data.ownerDetails) ? (data.ownerDetails.customers) ? data.ownerDetails.customers : [] : []
// this.createTableNewRow()
this.CreateFinalGroups(data)
this.linkedLeaddisabled(data)
}
}
// linked leads enable when account is present
linkedLeaddisabled(data) {
this.disableActivityGroup = !data.accountName
this.disableCampaign = !data.accountName
this.disableOpportunity = !data.accountName
}
CreateFinalGroups(data) {
this.ExistingActivityData = (data.links) ? (data.links.activitygroup) ? data.links.activitygroup : [] : []
this.ExistingCampaignData = (data.links) ? (data.links.campaign) ? data.links.campaign : [] : []
this.ExistingOpportunityData = (data.links) ? (data.links.opportunity) ? data.links.opportunity : [] : []
this.ExistingAgpData = (data.links) ? (data.links.agp) ? data.links.agp : [] : []
this.ExistingContactData = (data.ownerDetails) ? (data.ownerDetails.customers) ? data.ownerDetails.customers : [] : []
if (data.links.activitygroup) {
data.links.activitygroup.forEach(x => {
this.appendActivityGroup(x.Name, x, false, false, 0)
})
}
if (data.links.campaign) {
data.links.campaign.forEach(x => {
this.appendCampaignGroup(x.Name, x, false, false, 0)
})
}
if (data.links.opportunity) {
data.links.opportunity.forEach(x => {
this.appendOpportunityGroup(x.Name, x, false, false, 0)
})
}
if (data.links.agp) {
data.links.agp.forEach(x => {
this.appendAgpGroup(x.Name, x, false, 0)
})
}
if (data.ownerDetails.customers) {
data.ownerDetails.customers.forEach(x => {
this.appendCustomerGroup(x.FullName, x, false, false, 0)
})
}
}
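// Snapshot of the whole two-step form (selections, links, service lines,
// attachments) used for the session/Redis cache and for restoring state when
// navigating away, e.g. to create a prospect account.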
createTempData() {
return {
leadName: this.leadNameChanges.trim(),
leadSource: this.leadSourceSelected,
accountName: this.AccountSelected,
sbu: this.SbuSelected,
vertical: this.VerticalSelected,
alliance: this.AllianceData,
advisor: this.AdvisorData,
enquirytype: this.enqiuryTypedata,
country: this.selectedCountry,
serviceLineToggle: this.leadDetailsForm.value.serviceLineToggle,
WiproSolutionToggle: this.leadDetailsForm.value.WiproSolutionToggle,
desc: this.leadDetailsForm.value.description,
id: "",
links: {
wiprosolution: this.selectedWiproSolution,
activitygroup: this.selectedConversation,
campaign: this.selectedCamp,
opportunity: this.selectedOppertunity,
agp: this.selectedAgp
},
leadInfo: {
dealValue: this.leadDealOwnerForm.value.dealName,
currency: this.currencySelected,
timeline: this.leadDealOwnerForm.value.timeline
},
ownerDetails: {
originator: this.Orinator,
oiginatorlist: this.Orinator,
owner: this.selectedContactowner,
customers: this.selectedCustomer
},
Existinglinks: {
wiprosolution: this.ExistingWiproSolutionData,
activitygroup: this.ExistingActivityData,
campaign: this.ExistingCampaignData,
opportunity: this.ExistingOpportunityData,
agp: this.ExistingAgpData,
customers: this.ExistingContactData
},
serviceline: this.ServiceTable,
attachments: this.contactLeadService.attachmentList,
finalActivityGroup: this.finalActivityGroup,
finalCampaignGroup: this.finalCampaignGroup,
finalOpportunityGroup: this.finalOppotunityGroup,
finalCustomerGroup: this.finalContactGroup,
finalAgpGroup: this.finalAgpGroup,
enquiryData: this.enquiryType,
moduleSwitch: this.IsModuleSwitch,
showFirstForm: this.showFirstForm,
moduletype: this.moduleTypeStateData,
model: 'Create lead',
route: 'leads/createlead'
}
}
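// Removes an unsaved row (apiId "0") outright; persisted rows are only
// soft-deleted by flagging LinkActionType 3. The last remaining row is kept.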
deleterow(rowData) {
if (this.ServiceTable.length > 1) {
if (rowData.apiId == "0") {
this.ServiceTable = this.ServiceTable.filter(x => x.id != rowData.id);
}
else {
rowData.isDeleted = true;
rowData.LinkActionType = 3;
}
}
}
createExistingLinks(data: any) {
this.ExistingActivityData = (data.Existinglinks) ? (data.Existinglinks.activitygroup) ? data.Existinglinks.activitygroup : [] : []
this.ExistingCampaignData = (data.Existinglinks) ? (data.Existinglinks.campaign) ? data.Existinglinks.campaign : [] : []
this.ExistingOpportunityData = (data.Existinglinks) ? (data.Existinglinks.opportunity) ? data.Existinglinks.opportunity : [] : []
this.ExistingAgpData = (data.Existinglinks) ? (data.Existinglinks.agp) ? data.Existinglinks.agp : [] : []
this.ExistingContactData = (data.Existinglinks) ? (data.Existinglinks.customers) ? data.Existinglinks.customers : [] : []
}
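// Decodes a unicode-escaped currency symbol from the API, e.g. "\u20B9" -> "₹":
// JSON.parse resolves the \uXXXX escape, the (deprecated) unescape handles
// legacy %XX sequences, and '+' placeholders become spaces.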
getSymbol(data) {
return unescape(JSON.parse('"' + data + '"')).replace(/\+/g, ' ');
}
/***************** autocomplete code start ****************** */
servicelineautoleadclose() {
this.servicelineautoSwitch = false;
}
clearData(inputValue, headerName, line) {
if (inputValue == '') {
line[headerName].Name = '';
}
this.searchitem = '';
this.searchPractice = '';
this.searchSlbdm = ''
}
appendservicelineauto(item, i, hederData, rowData, isRedisCache) {
if (this.ServiceTable[i][hederData.name].Name != item.Name) {
if (this.ServiceTable.filter(x => x.slbdm["bdmidGuid"] == item.bdmidGuid && x.practice["practiceGuid"]==rowData.practice["practiceGuid"]&& x.serviceLines["Guid"]== rowData.serviceLines["Guid"]).length != 0) {
let Message = "The identified combination is already available";
this.errPopup.throwError(Message)
this.ServiceTable[i][hederData.name] = {
Name: "",
bdmidGuid: "0",
serviceLineBDMid: "",
seletedValue: '',
IsError: false
}
} else {
this.ServiceTable[i][hederData.name] = item;
if (hederData.IsRelation) {
//Unique Service line Selector
this.ServiceTable[i][hederData.IsRelation[0]] = {
Name: "",
practiceGuid: "",
seletedValue: '',
IsError: false
},
this.ServiceTable[i][hederData.IsRelation[1]] = {
Name: "",
bdmidGuid: "0",
serviceLineBDMid: "",
seletedValue: '',
IsError: false
}
}
}
}
if (isRedisCache) {
this.CreateRedisCache()
}
}
textAreaaChange() {
this.CreateRedisCache()
}
OnChange() {
//create for leadDetailsForm
this.leadDetailsForm.get('description').valueChanges.subscribe(val => {
if (val.length >= 2001) {
this.descriptionLength = true
} else {
this.descriptionLength = false
}
})
this.masterApi.getCurrency().subscribe(res => {
if (!res.IsError) {
this.offlineService.addMasterApiCache(routes.getCurrency, res)
this.currency = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message)
}
}, error => {
this.isLoading = false;
});
// this.leadDealOwnerForm.get('dealName').valueChanges.subscribe(val => {
// })
}
addrow(event) {
event.preventDefault()
this.searchitem = ""
this.searchPractice = ""
this.searchSlbdm = ""
this.createTableNewRow();
}
createTableNewRow() {
var timestamp = "serviceline" + Math.floor(Date.now());
this.ServiceTable.push({
id: timestamp,
apiId: "0",
'@serviceLines': false,
'@practice': false,
'@slbdm': false,
serviceLines: {
Name: "",
Guid: "",
seletedValue: "",
IsError: false
}, practice: {
Name: "",
practiceGuid: "",
seletedValue: "",
IsError: false
},
slbdm: {
Name: "",
bdmidGuid: "0",//Validation
serviceLineBDMid: "",
seletedValue: "",
IsError: false
}
})
}
ResetSlbdm(i, hederData) {
if (hederData.IsRelation) {
//Unique Service line Selector
this.ServiceTable[i][hederData.IsRelation[0]] = {
Name: "",
practiceGuid: "",
seletedValue: '',
IsError: false
},
this.ServiceTable[i][hederData.IsRelation[1]] = {
Name: "",
bdmidGuid: "0",
serviceLineBDMid: "",
seletedValue: '',
IsError: true
},
this.ServiceTable[i][hederData.name] = {
Code: "",
Guid: "0",
Name: "",
IsError: true
}
} else {
this.ClearParticularServiceLineInput(i, hederData)
}
}
onKeyUp(event, formData, colData, searchitem, byPassFlag) {
colData.serviceData = []
this.isServicelineLoader = true
switch (colData.name) {
case 'serviceLines':
if (event.key !== "Backspace") {
this.searchitem = this.searchitem + event.key
}
else {
this.searchitem = this.searchitem.slice(0, -1)
}
if (this.searchitem.length > 0 || byPassFlag) {
var ServicelineReqBody = {
"SearchText": searchitem,
"Account": {
"SysGuid": this.AccId,
"isProspect": this.isProspect
},
"SBU": {
"Id": this.sbuId
}
}
this.contactLeadService.getsearchServiceLine(ServicelineReqBody).subscribe(
data => {
this.isServicelineLoader = false
if (data.IsError === false) {
colData.serviceData = data.ResponseObject;
} else {
colData.serviceData = []
this.errPopup.throwError(data.Message)
}
}, error => {
colData.serviceData = []
this.isServicelineLoader = false;
})
}
return
case 'practice':
if (event.key !== "Backspace") {
this.searchPractice = this.searchPractice + event.key
}
else {
this.searchPractice = this.searchPractice.slice(0, -1)
}
if (formData.serviceLines.Guid != '') {
this.contactLeadService.getPractice(searchitem, formData.serviceLines.Guid).subscribe(
data => {
this.isServicelineLoader = false
if (data.IsError === false) {
colData.serviceData = data.ResponseObject;
} else {
colData.serviceData = []
this.errPopup.throwError(data.Message)
}
}, error => {
colData.serviceData = []
this.isServicelineLoader = false;
})
}
else {
formData.serviceLines.IsError = true;
}
return
case 'slbdm':
if (event.key !== "Backspace") {
this.searchSlbdm = this.searchSlbdm + event.key
}
else {
this.searchSlbdm = this.searchSlbdm.slice(0, -1)
}
if (formData.serviceLines.Guid != '') {
this.contactLeadService.getSLBDM(searchitem, formData.serviceLines.Guid, formData.practice.practiceGuid, this.sbuId, this.verticalId).subscribe(
data => {
this.isServicelineLoader = false
if (data.IsError === false) {
colData.serviceData = data.ResponseObject;
} else {
colData.serviceData = []
this.errPopup.throwError(data.Message)
}
}, error => {
colData.serviceData = []
this.isServicelineLoader = false;
})
}
else {
formData.serviceLines.IsError = true;
}
return
}
}
ClearParticularServiceLineInput(i, hederData) {
switch (hederData.name) {
case 'serviceLines':
this.ServiceTable[i][hederData.name] = {
Code: "",
Guid: "0",
Name: "",
IsError: true
}
return
case 'practice':
this.ServiceTable[i][hederData.name] = {
Name: "",
practiceGuid: "",
seletedValue: '',
IsError: true
}
return
case 'slbdm':
this.ServiceTable[i][hederData.name] = {
Name: "",
bdmidGuid: "0",
serviceLineBDMid: "",
seletedValue: '',
IsError: true
}
}
}
AddCustomerContact() {
if (this.AccName == '') {
this.errPopup.throwError('Select account')
} else {
const dialogRef = this.dialog.open(CustomerpopupComponent, {
width: '800px',
data: (this.AccountSelected) ? ({ Name: this.AccountSelected['Name'], SysGuid: this.AccountSelected['SysGuid'], isProspect: this.AccountSelected['isProspect'] }) : ''
});
dialogRef.afterClosed().subscribe(res => {
if (res != '') {
let json = {
FullName: (res['FName'] + ' ' + res['LName']),
LinkActionType: 1,
Designation: res['Designation'] ? res['Designation'] : "",
isKeyContact: res['isKeyContact'] ? res['isKeyContact'] : false,
MapGuid: "",
Guid: res['Guid'],
SysGuid: res['Guid'],
Email: res['Email']
};
this.appendCustomerGroup(json.FullName, json, true, true, 0)
this.removeValidatorsForCustomer()
}
})
}
}
setCurrentFormDetails() {
sessionStorage.setItem("TempLeadDetails", JSON.stringify(this.createTempData()))
}
getenquiryType() {
this.masterApi.getEnquiryType().subscribe(res => {
if (res.IsError === false) {
this.isLoading = false;
this.offlineService.addMasterApiCache(routes.enquirytype, res)
this.enquiryType = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message)
}
},
error => {
this.isLoading = false;
})
}
filesToDownloadDocument64(body) {
console.log("body",body);
this.isLoading = true;
this.fileService.filesToDownloadDocument64(body).subscribe((res) =>{
this.isLoading = false;
if(!res.IsError) {
res.ResponseObject.forEach(res => {
this.service.Base64Download(res);
})
} else {
this.errPopup.throwError(res.Message);
}
console.log(res);
},() =>{this.isLoading = false;})
}
downloadAll() {
let downloadUrls = []
if (this.envr.envName === 'MOBILEQA') {
this.contactLeadService.attachmentList.forEach(
item => {
downloadUrls.push({ Url: item.Url, Name: item.Name })
})
this.downloadAllInMobile(downloadUrls)
return;
} else {
let body = this.contactLeadService.attachmentList.map(x=>{return {Name : x.downloadFileName}});
this.filesToDownloadDocument64(body);
// this.contactLeadService.attachmentList.forEach(res => {
// this.service.Base64Download(res);
// // downloadUrls.push(res.Url);
// })
}
// downloadUrls.forEach(function (value, idx) {
// const response = {
// file: value,
// };
// setTimeout(() => {
// var a = document.createElement('a');
// a.href = response.file;
// a.download = response.file;
// document.body.appendChild(a);
// a.click();
// }, idx * 2500)
// });
}
downloadAllInMobile(fileInfo) {
fileInfo.forEach(function (value, idx) {
const response = value;
setTimeout(() => {
var fileTransfer = new FileTransfer();
var uri = encodeURI(response.Url);
var fileURL = "///storage/emulated/0/DCIM/" + response.Name;
fileTransfer.download(
uri, fileURL, function (entry) {
console.log("download complete: " + entry.toURL());
},
function (error) {
console.log("download error source " + error.source);
console.log("download error target " + error.target);
console.log("download error code" + error.code);
},
null, {
// headers: {
//     "Authorization": "Basic dGVzdHVzZXJuYW1lOnRlc3RwYXNzd29yZA=="
// }
}
);
}, idx * 2500)
});
}
downloadFile(i) {
let body = [this.contactLeadService.attachmentList[i]].map(x=>{return {Name : x.downloadFileName}});
this.filesToDownloadDocument64(body);
// this.service.Base64Download(res);
// const response = {
// file: this.contactLeadService.attachmentList[i].Url,
// };
// var a = document.createElement('a');
// a.href = response.file;
// a.download = response.file;
// document.body.appendChild(a);
// a.click();
}
delinkAttach(Name) {
const dialogRef = this.dialog.open(deleteAttachPopUp, {
width: '400px',
});
dialogRef.afterClosed().subscribe(result => {
if (result.OK === true) {
this.contactLeadService.attachmentList = this.contactLeadService.attachmentList.filter(res => res.downloadFileName !== Name)
this.CreateRedisCache()
}
})
}
custominputclose() {
this.suggestion = false;
}
custominputclose1() {
this.suggestion1 = false;
}
custominputclose2() {
this.suggestion2 = false;
}
custominputclose3() {
this.suggestion3 = false;
}
custominputclose4() {
this.suggestion4 = false;
}
append(text) {
this.term = text;
this.suggestion = false;
}
append1(text) {
this.term1 = text;
this.suggestion1 = false;
}
append2(text) {
this.term2 = text;
this.suggestion2 = false;
}
append3(text) {
this.term3 = text;
this.suggestion3 = false;
}
append4(text) {
this.term4 = text;
this.suggestion4 = false;
}
consentStatus(e) {
this.consent = e.checked;
}
scrollTo(element: Element) {
if (element) {
window.scroll({
behavior: 'smooth',
left: 0,
top: element.getBoundingClientRect().top + window.scrollY - 150
});
}
}
/****************** Conversation Name autocomplete code start ****************** */
customerNameclose() {
this.customerNameSwitch = false;
this.leadDealOwnerForm.patchValue({
customerContact: ''
})
}
contactNamecloseorigin() {
this.contactNameSwitchorigin = false;
}
appendcontactorigin(value: string, item, i) {
this.leadDetailsForm.patchValue({
leadOriginator: value
});
this.contactNameorigin = value;
this.selectedContactorigin.push(item)
}
contactNamecloseowner() {
this.contactNameSwitchowner = false;
this.leadDetailsForm.patchValue({
leadOwner: this.leadOwnerName
})
}
onSubmit() {
var contactLeadServiceAttachments;
if (this.leadDealOwnerForm.valid && this.selectedContactowner.length > 0 && this.selectedCustomer.length > 0) {
contactLeadServiceAttachments = this.contactLeadService.attachmentList;
let body = this.generateCreateReqParam();
this.isLoading = true;
this.createClicked = true;
this.contactLeadService.CreateLead(body).subscribe(
res => {
if (res.IsError === false) {
// Clear the cache reference so that the destroy hook does not update the cache.
this.ClearRedisCache()
this.isvalidation = false;
this.isLoading = false;
this.createClicked = true;
this.ModuleSwitchStateLogic(this.moduleTypeStateData)
this.myOpenLeadService.clearLeadAddContactSessionStore()
this.store.dispatch(new ClearMyopenlead())
this.store.dispatch(new ClearOpenLeadState())
this.store.dispatch(new ClearContactList())
this.store.dispatch(new ClearRelationshipCount());
let val
this.matSnackBar.open(res.Message, val, {
duration: 2000
}).afterDismissed().subscribe(() => {
this.isLoading = false;
this.oppArr = [];
if (sessionStorage.getItem('selAccountObj')) {
this.router.navigate(['/accounts/accountleads/unqalified']);
} else {
if (this.userId === this.leadOwnerId) {
this.router.navigate(['/leads/unqalified']);
} else {
this.router.navigate(['/leads/qualified']);
}
}
});
} else {
this.contactLeadService.attachmentList = contactLeadServiceAttachments
this.errPopup.throwError(res.Message)
this.isLoading = false;
this.createClicked = false;
}
}, error => {
this.isLoading = false;
this.createClicked = false;
this.contactLeadService.attachmentList = contactLeadServiceAttachments
this.errPopup.throwError("User doesn't have sufficient permissions to complete the task")
})
} else {
this.service.validateAllFormFields(this.leadDealOwnerForm);
let invalidElements = this.el.nativeElement.querySelectorAll('#validatescroll .ng-invalid');
if (invalidElements.length) {
this.scrollTo(invalidElements[0]);
this.service.validationErrorMessage();
}
return;
}
}
ClearRedisCache() {
this.service.SetRedisCacheData("empty", 'createLead').subscribe(res => console.log(res))
this.service.deleteRedisCacheData('createlead').subscribe(res => console.log(res))
}
generateCreateReqParam() {
this.contactLeadService.attachmentList = this.contactLeadService.attachmentList.map(x =>{
return { 'Name' : x.Name, 'Url': x.Url,"MapGuid": x.MapGuid,"LinkActionType": x.LinkActionType,"Comments" : x.Comments }
});
return {
"Title": this.leadNameChanges.trim(),
"wipro_LeadSource": {"sysGuid": this.leadSourceId},
"AllianceAccountGuid": this.AllianceAccountguid ? this.AllianceAccountguid : "",
"AdvisoryAccountGuid": this.AdvisorAccountguid ? this.AdvisorAccountguid : "",
"Prospect": {"Guid": this.wiproProspectAccount ? this.wiproProspectAccount : ""},
"Account": {"SysGuid": this.customerAccount ? this.customerAccount : ""},
"Vertical": {"Id": this.verticalId ? this.verticalId : ""},
"SBU": {"Id": this.sbuId ? this.sbuId : ""},
"EnquiryType": {"Id": this.enquiryId ? (this.enquiryId) : ""},
"EnquiryDesc": this.leadDetailsForm.value.description === "" ? "" : encodeURIComponent(this.leadDetailsForm.value.description),
"Agp": this.filterAgpData(this.finalAgpGroup),
"Country": { "SysGuid": this.countryId },
"isSolutionInvolved": this.leadDetailsForm.value.WiproSolutionToggle,
"isServiceLineInvolved": this.leadDetailsForm.value.serviceLineToggle,
"Solutions": this.filterWiproSolution(this.finalWiproSolutionGroup),
"wipro_remarks": "",
"DealValue": Number(this.leadDealOwnerForm.value.dealName.replace(/\,/g, "")),
"DealValueInUSD": (this.leadDealOwnerForm.value.estimatedRateValue) ? this.leadDealOwnerForm.value.estimatedRateValue : "",
"Currency": {"Id": (this.currencyId) ? this.currencyId : ""},
"EstimatedCloseDate": this.datepipe.transform(this.leadDealOwnerForm.value.timeline, 'yyyy-MM-dd'),
"Owner": {"ownerId": this.leadOwnerId},
"ServiceLine": this.filterServiceBdm(this.ServiceTable),
"ActivityGroups": this.filterActivityGroup(this.finalActivityGroup),
"OpportunitiesOrOrders": this.filterOppotunityGroup(this.finalOppotunityGroup),
"CustomerContacts": this.filterContactGroup(this.finalContactGroup),
"Campaign": this.filterCampaignGroup(this.finalCampaignGroup),
"Attachments": this.contactLeadService.attachmentList,
"isFromMeeting" : this.isFromMeeting
}
}
ModuleSwitchStateLogic(data) {
if (data) {
if (data.name == "Meeting") {
this.store.dispatch(new ClearMeetingList({ cleardetails: data.data.Activityid }))
this.store.dispatch(new ClearActivity())
this.store.dispatch(new ClearActivityDetails())
}
}
}
filterAgpData(data) {
if (data.length > 0) {
return {
Guid: data[0].SysGuid,
MapGuid: (data[0].MapGid) ? data[0].MapGid : "",
LinkActionType: data[0].LinkActionType
}
} else {
return {
Guid: ""
}
}
}
filterWiproSolution(data) {
if (data.length > 0) {
if (!this.iswiprosolution) {
return []
}
return data.map(x => {
return {
SysGuid: x.SysGuid,
MapGuid: (x.MapGuid) ? x.MapGuid : "",
LinkActionType: x.LinkActionType
}
})
} else {
return []
}
}
filterContactGroup(data) {
return data.map(x => {
return {
Guid: (x.SysGuid) ? x.SysGuid : x.Guid,
LinkActionType: x.LinkActionType
}
})
}
filterOppotunityGroup(data) {
return data.map(x => {
return {
SysGuid: x.Guid,
Type: x.Type,
MapGuid: (x.MapGuid) ? x.MapGuid : "",
LinkActionType: x.LinkActionType
}
})
}
filterServiceBdm(data) {
if (this.isServicelines) {
if (data) {
if (data.length > 0) {
data = data.filter(x => x.serviceLines.Guid != '')
if (data.some(x => x.serviceLines.Guid != '')) {
return data.map(x => {
return {
Guid: x.serviceLines.Guid,
practice: {
practiceGuid: x.practice.practiceGuid
},
SLBDM: {
bdmidGuid: x.slbdm.bdmidGuid
},
LinkActionType: 1
}
})
} else {
return []
}
} else {
return []
}
} else {
return []
}
} else {
return []
}
}
filterActivityGroup(data) {
return data.map(x => {
return {
Guid: x.Guid,
MapGuid: (x.MapGuid) ? x.MapGuid : "",
LinkActionType: x.LinkActionType
}
})
}
filterCampaignGroup(data) {
return data.map(x => {
return {
Id: x.Id,
LinkActionType: x.LinkActionType
}
})
}
callTempCurrency() {
this.isCurrencySearchLoading = true
this.CurrencyArrayList = []
this.contactLeadService.getsearchCurrency("").subscribe(res => {
this.isCurrencySearchLoading = false
if (res.IsError === false) {
this.CurrencyArrayList = res.ResponseObject.map(x => x = { ...x, Desc: this.getSymbol(x.Desc) });
} else {
this.errPopup.throwError(res.Message);
this.CurrencyArrayList = []
}
}, error => {
this.isCurrencySearchLoading = false;
this.CurrencyArrayList = []
});
}
callTempSource() {
this.isLeadSourceNameSearchLoading = true
this.LeadSource = []
this.contactLeadService.getsearchLeadSource("").subscribe(res => {
this.isLeadSourceNameSearchLoading = false
if (res.IsError === false) {
this.LeadSource = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.LeadSource = []
}
}, error => {
this.isLeadSourceNameSearchLoading = false;
this.LeadSource = []
});
}
callTempAdvisor() {
this.leadDetailsForm.patchValue({
advisorAccount: ''
})
this.isAdvisorLoader = true;
this.AdvisorAccountArr = [];
if (this.cacheDataService.cacheDataGet('advisorAcc').length > 0) {
this.AdvisorAccountArr = this.cacheDataService.cacheDataGet('advisorAcc');
this.isAdvisorLoader = false;
} else {
this.isAdvisorLoader = true
this.AdvisorAccountArr = []
this.contactLeadService.GetAdvisorAccount("").subscribe(
data => {
this.isAdvisorLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.AdvisorAccountArr = data.ResponseObject;
} else {
this.errPopup.throwError(data.Message);
this.AdvisorAccountArr = []
}
}, error => {
this.isAdvisorLoader = false;
this.AdvisorAccountArr = []
});
}
}
callTempAccount() {
this.leadDetailsForm.patchValue({
accountOrCompanyName: ''
})
this.isAccountSearchLoader = true;
this.companyNameSearch = [];
if (this.cacheDataService.cacheDataGet('accountCompany').length > 0) {
this.companyNameSearch = this.cacheDataService.cacheDataGet('accountCompany');
this.isAccountSearchLoader = false;
} else {
this.isAccountSearchLoader = true
this.companyNameSearch = []
this.contactLeadService.getsearchAccountCompanyNew("").subscribe(res => {
this.isAccountSearchLoader = false
this.isLoading = false;
if (res.IsError === false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.companyNameSearch = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.companyNameSearch = []
}
}, error => {
this.isAccountSearchLoader = false;
this.companyNameSearch = []
});
}
}
callTempSbu() {
this.isSbuLoder = true
this.Conversationssbu = []
this.contactLeadService.getsearchSBUbyName("", this.AccId, this.isProspect).subscribe(res => {
this.isSbuLoder = false
if (res.IsError === false) {
this.Conversationssbu = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.Conversationssbu = []
}
}, error => {
this.isSbuLoder = false;
this.Conversationssbu = []
});
}
callTempVertical() {
this.isVerticalLoader = true
this.Vertical = []
let verticalSearchreqBody = {
SearchText: "",
Guid: this.AccId,
SBUGuid: this.sbuId,
isProspect: this.isProspect,
PageSize: 10,
OdatanextLink: "",
RequestedPageNumber: 1
}
this.contactLeadService.getsearchVerticalBySbu(verticalSearchreqBody).subscribe(res => {
this.isVerticalLoader = false
if (res.IsError === false) {
this.Vertical = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.Vertical = []
}
}, error => {
this.isVerticalLoader = false;
this.Vertical = []
});
}
callTempCountry() {
this.isCountryLoading = true
this.countrySearch = []
this.contactLeadService.getCoutry("").subscribe(data => {
this.isCountryLoading = false
if (data.IsError === false) {
this.countrySearch = data.ResponseObject;
} else {
this.errPopup.throwError(data.Message);
this.countrySearch = []
}
}, error => {
this.isCountryLoading = false;
this.countrySearch = []
});
}
callTempAlliance() {
this.isAllianceLoader = true
this.AllianceAccountArr = []
this.contactLeadService.GetAllianceAccount("").subscribe(
data => {
this.isAllianceLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.AllianceAccountArr = data.ResponseObject;
} else {
this.errPopup.throwError(data.Message);
this.isAllianceLoader = false
}
}, error => {
this.isAllianceLoader = false;
this.AllianceAccountArr = []
});
}
getServiceLineData(event, formData, colData, searchitem, flag) {
this.onKeyUp(event, formData, colData, searchitem, searchitem == '')
}
callTempWipro() {
this.iswiproSolutionLoader = true
this.wiproSolutionsearch = []
this.contactLeadService.getWiproSolutions("").subscribe(
data => {
this.iswiproSolutionLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.WiproSolAdvanceData = data.ResponseObject;
if (this.selectedWiproSolution.length > 0 && data.ResponseObject.length > 0) {
this.wiproSolutionsearch = this.CompareRemoveSelected(data.ResponseObject, this.selectedWiproSolution, "SysGuid")
} else {
this.wiproSolutionsearch = data.ResponseObject;
}
} else {
this.errPopup.throwError(data.Message);
this.wiproSolutionsearch = []
}
}, error => {
this.iswiproSolutionLoader = false;
this.wiproSolutionsearch = []
});
}
callTempActivity() {
this.isAcivityGroupLoader = true
this.Conversationslead = []
this.contactLeadService.getSearchActivityGroup("", this.AccId, this.isProspect).subscribe(res => {
this.isAcivityGroupLoader = false
if (res.IsError === false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.ActivityAdvanceData = res.ResponseObject;
if (this.selectedConversation.length > 0 && res.ResponseObject.length > 0) {
this.Conversationslead = this.CompareRemoveSelected(res.ResponseObject, this.selectedConversation, "Guid")
} else {
this.Conversationslead = res.ResponseObject;
}
} else {
this.errPopup.throwError(res.Message)
this.Conversationslead = []
}
}, error => {
this.isAcivityGroupLoader = false;
this.Conversationslead = []
});
}
callTempCampaign() {
this.isCampaignLoading = true
this.Conversationscamp = []
this.contactLeadService.getsearchCampaign("", this.AccId, this.isProspect).subscribe(res => {
this.isCampaignLoading = false
if (res.IsError === false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.CampaignAdvanceData = res.ResponseObject;
if (this.selectedCamp.length > 0 && res.ResponseObject.length > 0) {
this.Conversationscamp = this.CompareRemoveSelected(res.ResponseObject, this.selectedCamp, "Id")
} else {
this.Conversationscamp = res.ResponseObject;
}
} else {
this.errPopup.throwError(res.Message);
this.Conversationscamp = []
}
}, error => {
this.isCampaignLoading = false;
this.Conversationscamp = []
});
}
callTempOpportunity() {
this.isOpportunityLoader = true
this.Conversationsoppo = []
this.contactLeadService.searchOpportunityOrder("", this.AccId, this.isProspect).subscribe(res => {
this.isOpportunityLoader = false
if (res.IsError === false) {
this.lookupdata.TotalRecordCount = res.TotalRecordCount
this.lookupdata.nextLink = (res.OdatanextLink) ? res.OdatanextLink : '';
this.OppAdvanceData = res.ResponseObject;
if (this.selectedOppertunity.length > 0 && res.ResponseObject.length > 0) {
this.Conversationsoppo = this.CompareRemoveSelected(res.ResponseObject, this.selectedOppertunity, "Guid")
} else {
this.Conversationsoppo = res.ResponseObject;
}
} else {
this.errPopup.throwError(res.Message);
this.Conversationsoppo = []
}
}, error => {
this.isOpportunityLoader = false;
this.Conversationsoppo = []
});
}
callTempAgp() {
this.isAgpLoader = true
this.Conversationsagp = []
this.contactLeadService.getsearchLinkAGP("").subscribe(res => {
this.isAgpLoader = false
if (res.IsError === false) {
this.Conversationsagp = res.ResponseObject;
} else {
this.errPopup.throwError(res.Message);
this.Conversationsagp = []
}
}, error => {
this.isAgpLoader = false;
this.Conversationsagp = []
});
}
callTempContact() {
this.isCustometContactLoader = true
this.customerContactdetails = []
this.contactLeadService.searchCustomerparticipants("", this.AccId, this.isProspect).subscribe(data => {
this.isCustometContactLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.customerAdvanceData = data.ResponseObject;
if (this.selectedCustomer.length > 0 && data.ResponseObject.length > 0) {
this.customerContactdetails = this.CompareRemoveSelected(data.ResponseObject, this.selectedCustomer, "Guid")
} else {
this.customerContactdetails = data.ResponseObject;
}
} else {
this.errPopup.throwError(data.Message);
this.customerContactdetails = []
}
}, error => {
this.isCustometContactLoader = false;
this.customerContactdetails = []
});
}
callTempOwner() {
this.isLeadOwnerLoader = true
this.wiproContactowner = []
this.contactLeadService.getsearchLeadOwner("").subscribe(data => {
this.isLeadOwnerLoader = false
if (data.IsError === false) {
this.lookupdata.TotalRecordCount = data.TotalRecordCount
this.lookupdata.nextLink = (data.OdatanextLink) ? data.OdatanextLink : '';
this.wiproContactowner = data.ResponseObject;
} else {
this.errPopup.throwError(data.Message);
this.wiproContactowner = []
}
}, error => {
this.isLeadOwnerLoader = false;
this.wiproContactowner = []
});
}
accept = ['application/pdf', 'text/xml', 'application/jpg', 'application/xml', 'application/zip', 'application/octet-stream', 'audio/mp3', 'audio/mp4', 'image/jpeg', 'image/png', 'text/plain', 'image/gif', 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', 'application/msword', 'application/msexcel', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', 'application/doc', 'application/docx', 'video/mp4', 'audio/mpeg', 'application/x-zip-compressed', 'application/mspowerpoint', 'application/vnd.openxmlformats-officedocument.presentationml.presentation'];
fileChangeEvent(e) {
let files = [].slice.call(e.target.files);
let uploadingFileList = [];
let fileNames = [];
if (files.length > 0) {
files.forEach(res => {
let file: File = res;
let conditionAction = this.fileValidation(file)
switch (conditionAction) {
case 'FileSize': {
this.errPopup.throwError("Not able to upload the file because filesize is greater than 5mb.");
break;
}
case 'InvalidFormat': {
this.errPopup.throwError("File format not supported!")
break;
}
case 'FileExist': {
this.errPopup.throwError("File is already exist!")
break;
}
case 'Upload': {
const fd: FormData = new FormData();
fd.append('file', file);
fileNames.push(file.name)
uploadingFileList.push(fd)
break;
}
}
})
this.fileUpload(uploadingFileList, fileNames)
}
}
fileValidation(file) {
if (file.size > 5242880) {
return 'FileSize'
}
if (!this.accept.includes(file.type)) {
return 'InvalidFormat'
}
if (this.contactLeadService.attachmentList.length == 0) {
if (this.accept.includes(file.type)) {
return 'Upload'
}
if (!this.accept.includes(file.type)) {
return 'InvalidFormat'
}
}
if (this.contactLeadService.attachmentList.length > 0) {
let index = this.contactLeadService.attachmentList.findIndex(k => k.Name == file.name);
if (index === -1) {
if (this.accept.includes(file.type)) {
return 'Upload'
}
} else {
return 'FileExist'
}
}
}
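// Outcome sketch for fileValidation above (illustrative cases, not exhaustive):
//   - a 6 MB PDF                                  => 'FileSize'
//   - an unsupported type such as an .exe         => 'InvalidFormat'
//   - a file whose name already sits in the list  => 'FileExist'
//   - any other accepted file                     => 'Upload'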
fileUpload(fileList, fileNames) {
if (fileList.length > 0) {
this.isLoading = true
// this.fileService.filesToUpload(fileList).subscribe((res) => {
this.fileService.filesToUploadDocument64(fileList).subscribe((res) => {
this.isLoading = false;
res.forEach((file, i) => {
if (file !== '') {
this.contactLeadService.attachmentList.push({
"Name": fileNames[i],
"Url": file.ResponseObject.Url,
"MapGuid": "",
"LinkActionType": 1,
"Comments": [{ "Description": "" }],
downloadFileName: file.ResponseObject.Name
})
}
this.CreateRedisCache();
})
},
() => this.isLoading = false
)}
}
previousPage() {
if (this.routingState.getPreviousUrl().includes('/activities/list')) {
this.router.navigateByUrl('/activities/list');
}
else {
this.routingState.backClicked();
}
this.myOpenLeadService.clearLeadAddContactSessionStore()
}
isEmpty(obj) {
for (var key in obj) {
if (obj.hasOwnProperty(key))
return false;
}
return true;
}
}
@Component({
selector: 'app-cancel-pop',
templateUrl: './cancel-pop.html',
styleUrls: ['./create-lead.component.scss'],
})
export class cancelpopComponent {
leadIdentityFrom: any;
constructor(@Optional() @Inject(MAT_DIALOG_DATA) public data: any, public dialogRef: MatDialogRef<cancelpopComponent>, public router: Router, private myOpenLeadService: MyOpenLeadsService, private routingState: RoutingState) { }
noneditdetails() {
this.dialogRef.close(true)
this.leadIdentityFrom = JSON.parse(sessionStorage.getItem('navigationfromlist'))
if (this.data != null) {
// If the user came here from another module while creating the lead, navigate back to that module's route.
this.router.navigate([this.data.Moduleroute])
} else if (this.leadIdentityFrom == 2) {
this.router.navigate(['leads/unqalified'])
} else if (this.leadIdentityFrom == 1) {
this.router.navigate(['leads/qualified'])
} else if (this.leadIdentityFrom == 3) {
this.router.navigate(['leads/archived'])
} else if (this.leadIdentityFrom == 4) {
this.router.navigate(['leads/diqualified'])
}
this.myOpenLeadService.clearLeadAddContactSessionStore()
}
}
@Component({
selector: 'delete-attach-pop',
templateUrl: './delete-attach-pop.html',
styleUrls: ['./create-lead.component.scss'],
})
export class deleteAttachPopUp {
constructor(private dialogRef: MatDialog, public dialog: MatDialogRef<deleteAttachPopUp>) { }
okClicked() {
this.dialog.close({ 'OK': true });
}
closeallpop() {
this.dialog.close({ 'OK': false });
}
} | checkboxcounter: number = 0; selectedCount: any = [];
showservicelineauto: boolean = false;
servicelineauto: string; |
cmpl_test.go | package otto
import (
"testing"
"github.com/BigtigerGG/otto/parser"
)
func Test_cmpl(t *testing.T) {
tt(t, func() {
vm := New()
test := func(src string, expect ...interface{}) {
program, err := parser.ParseFile(nil, "", src, 0)
is(err, nil)
{
program := cmpl_parse(program)
value := vm.runtime.cmpl_evaluate_nodeProgram(program, false)
if len(expect) > 0 {
is(value, expect[0])
}
}
}
test(``, Value{})
test(`var abc = 1; abc;`, 1)
test(`var abc = 1 + 1; abc;`, 2)
test(`1 + 2;`, 3)
})
}
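// A sketch of further cases for the compile-and-evaluate helper above
// (hypothetical additions that mirror the existing calls):
//
//	test(`2 * 3;`, 6)
//	test(`var abc = "a" + "b"; abc;`, "ab")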
func TestParse_cmpl(t *testing.T) {
tt(t, func() { | program, err := parser.ParseFile(nil, "", src, 0)
is(err, nil)
is(cmpl_parse(program), "!=", nil)
}
test(``)
test(`var abc = 1; abc;`)
test(`
function abc() {
return;
}
`)
})
} |
test := func(src string) { |
vue.d.ts | // 1. Make sure to import 'vue' before declaring augmented types
import Vue from 'vue';
import EventBus from "@/shell/EventBus";
// 2. Specify a file with the types you want to augment | eventBus: EventBus
}
} | // Vue has the constructor type in types/vue.d.ts
declare module 'vue/types/vue' {
// 3. Declare augmentation for Vue
interface Vue { |
test_parent.py | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import petstore_api
class TestParent(unittest.TestCase):
|
if __name__ == '__main__':
unittest.main()
| """Parent unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testParent(self):
"""Test Parent"""
# FIXME: construct object with mandatory attributes with example values
# model = petstore_api.Parent() # noqa: E501
pass |
ed25519.rs | // Copyright 2019-2021 @axia-js/wasm-crypto authors & contributors
// SPDX-License-Identifier: Apache-2.0
use std::convert::TryFrom;
use ed25519_dalek::{Keypair, PublicKey, SecretKey, Signature, Signer as _, Verifier as _};
use wasm_bindgen::prelude::*;
/// Keypair helper function.
fn create_from_pair(pair: &[u8]) -> Keypair {
match Keypair::from_bytes(pair) {
Ok(pair) => return pair,
Err(_) => panic!("Provided pair is invalid.")
}
}
/// Keypair helper function
fn create_from_parts(pubkey: &[u8], seckey: &[u8]) -> Keypair {
let mut pair = vec![];
pair.extend_from_slice(seckey);
pair.extend_from_slice(pubkey);
create_from_pair(&pair)
}
/// Keypair helper function.
fn create_from_seed(seed: &[u8]) -> Keypair {
let seckey = SecretKey::from_bytes(seed).unwrap();
let pubkey: PublicKey = (&seckey).into();
create_from_parts(pubkey.as_bytes(), seed)
}
/// PublicKey helper
fn create_public(pubkey: &[u8]) -> PublicKey {
match PublicKey::from_bytes(pubkey) {
Ok(pubkey) => return pubkey,
Err(_) => panic!("Provided public key is invalid.")
}
}
/// Generate a key pair.
///
/// * seed: UIntArray with 32 elements
///
/// returned vector is the concatenation of first the private key (32 bytes)
/// followed by the public key (32 bytes).
#[wasm_bindgen]
pub fn ext_ed_from_seed(seed: &[u8]) -> Vec<u8> {
create_from_seed(seed)
.to_bytes()
.to_vec()
}
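// A minimal round-trip sketch (usage illustration only, assuming the byte
// layout documented above: secret key in bytes 0..32, public key in 32..64):
//
//     let keypair = ext_ed_from_seed(&[0u8; 32]);
//     let (seckey, pubkey) = keypair.split_at(32);
//     let signature = ext_ed_sign(pubkey, seckey, b"message");
//     assert!(ext_ed_verify(&signature, b"message", pubkey));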
/// Sign a message
///
/// The combination of both public and private key must be provided.
/// This is effectively equivalent to a keypair.
///
/// * pubkey: UIntArray with 32 elements
/// * private: UIntArray with 32 elements | pub fn ext_ed_sign(pubkey: &[u8], seckey: &[u8], message: &[u8]) -> Vec<u8> {
create_from_parts(pubkey, seckey)
.sign(message)
.to_bytes()
.to_vec()
}
/// Verify a message and its corresponding signature against a public key.
///
/// * signature: UIntArray with 64 element
/// * message: Arbitrary length UIntArray
/// * pubkey: UIntArray with 32 element
#[wasm_bindgen]
pub fn ext_ed_verify(signature: &[u8], message: &[u8], pubkey: &[u8]) -> bool {
let signature = match Signature::try_from(signature) {
Ok(signature) => signature,
Err(_) => return false
};
create_public(pubkey)
.verify(message, &signature)
.is_ok()
}
#[cfg(test)]
pub mod tests {
extern crate rand;
use hex_literal::hex;
use super::*;
use ed25519_dalek::{SIGNATURE_LENGTH, KEYPAIR_LENGTH, SECRET_KEY_LENGTH};
fn generate_random_seed() -> Vec<u8> {
(0..32).map(|_| rand::random::<u8>() ).collect()
}
#[test]
fn can_create_keypair() {
let seed = generate_random_seed();
let keypair = ext_ed_from_seed(seed.as_slice());
assert!(keypair.len() == KEYPAIR_LENGTH);
}
#[test]
fn creates_pair_from_known() {
let seed = b"12345678901234567890123456789012";
let expected = hex!("2f8c6129d816cf51c374bc7f08c3e63ed156cf78aefb4a6550d97b87997977ee");
let keypair = ext_ed_from_seed(seed);
let public = &keypair[SECRET_KEY_LENGTH..KEYPAIR_LENGTH];
assert_eq!(public, expected);
}
#[test]
fn can_sign_message() {
let seed = generate_random_seed();
let keypair = ext_ed_from_seed(seed.as_slice());
let private = &keypair[0..SECRET_KEY_LENGTH];
let public = &keypair[SECRET_KEY_LENGTH..KEYPAIR_LENGTH];
let message = b"this is a message";
let signature = ext_ed_sign(public, private, message);
assert!(signature.len() == SIGNATURE_LENGTH);
}
#[test]
fn can_verify_message() {
let seed = generate_random_seed();
let keypair = ext_ed_from_seed(seed.as_slice());
let private = &keypair[0..SECRET_KEY_LENGTH];
let public = &keypair[SECRET_KEY_LENGTH..KEYPAIR_LENGTH];
let message = b"this is a message";
let signature = ext_ed_sign(public, private, message);
let is_valid = ext_ed_verify(&signature[..], message, public);
assert!(is_valid);
}
#[test]
fn can_verify_known() {
let public = hex!("2f8c6129d816cf51c374bc7f08c3e63ed156cf78aefb4a6550d97b87997977ee");
let message = b"this is a message";
let signature = hex!("90588f3f512496f2dd40571d162e8182860081c74e2085316e7c4396918f07da412ee029978e4dd714057fe973bd9e7d645148bf7b66680d67c93227cde95202");
let is_valid = ext_ed_verify(&signature, message, &public);
assert!(is_valid);
}
#[test]
fn can_verify_known_wrong() {
let public = hex!("2f8c6129d816cf51c374bc7f08c3e63ed156cf78aefb4a6550d97b87997977ee");
let message = b"this is a message";
let signature = &[0u8; 64];
let is_valid = ext_ed_verify(signature, message, &public);
assert_eq!(is_valid, false);
}
} | /// * message: Arbitrary length UIntArray
///
/// * returned vector is the signature consisting of 64 bytes.
#[wasm_bindgen] |
resnet.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains definitions for the preactivation form of Residual Networks
(also known as ResNet v2).
Residual networks (ResNets) were originally proposed in:
[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
Deep Residual Learning for Image Recognition. arXiv:1512.03385
The full preactivation 'v2' ResNet variant implemented in this module was
introduced by:
[2] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
Identity Mappings in Deep Residual Networks. arXiv: 1603.05027
The key difference of the full preactivation 'v2' variant compared to the
'v1' variant in [1] is the use of batch normalization before every weight layer
rather than after.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import tensorflow as tf
_BATCH_NORM_DECAY = 0.997
_BATCH_NORM_EPSILON = 1e-5
################################################################################
# Functions for input processing.
################################################################################
def process_record_dataset(dataset, is_training, batch_size, shuffle_buffer,
parse_record_fn, num_epochs=1, num_parallel_calls=1):
"""Given a Dataset with raw records, parse each record into images and labels,
and return an iterator over the records.
Args:
dataset: A Dataset representing raw records
is_training: A boolean denoting whether the input is for training.
batch_size: The number of samples per batch.
shuffle_buffer: The buffer size to use when shuffling records. A larger
value results in better randomness, but smaller values reduce startup
time and use less memory.
parse_record_fn: A function that takes a raw record and returns the
corresponding (image, label) pair.
num_epochs: The number of epochs to repeat the dataset.
num_parallel_calls: The number of records that are processed in parallel.
This can be optimized per data set but for generally homogeneous data
sets, should be approximately the number of available CPU cores.
Returns:
Dataset of (image, label) pairs ready for iteration.
"""
# We prefetch a batch at a time. This can help smooth out the time taken to
# load input files as we go through shuffling and processing.
dataset = dataset.prefetch(buffer_size=batch_size)
if is_training:
# Shuffle the records. Note that we shuffle before repeating to ensure
# that the shuffling respects epoch boundaries.
dataset = dataset.shuffle(buffer_size=shuffle_buffer)
# If we are training over multiple epochs before evaluating, repeat the
# dataset for the appropriate number of epochs.
dataset = dataset.repeat(num_epochs)
# Parse the raw records into images and labels
dataset = dataset.map(lambda value: parse_record_fn(value, is_training),
num_parallel_calls=num_parallel_calls)
dataset = dataset.batch(batch_size)
# Operations between the final prefetch and the get_next call to the iterator
# will happen synchronously during run time. We prefetch here again to
# background all of the above processing work and keep it out of the
# critical training path.
dataset = dataset.prefetch(1)
return dataset
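# A usage sketch for the pipeline above; the file pattern and parser here are
# hypothetical placeholders, not part of this module:
#
#   raw = tf.data.TFRecordDataset(['train-00000-of-01024'])
#   dataset = process_record_dataset(
#       raw, is_training=True, batch_size=128, shuffle_buffer=10000,
#       parse_record_fn=my_parse_record, num_epochs=90, num_parallel_calls=8)
#   images, labels = dataset.make_one_shot_iterator().get_next()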
################################################################################
# Functions building the ResNet model.
################################################################################
def batch_norm_relu(inputs, training, data_format):
"""Performs a batch normalization followed by a ReLU."""
# We set fused=True for a significant performance boost. See
# https://www.tensorflow.org/performance/performance_guide#common_fused_ops
inputs = tf.layers.batch_normalization(
inputs=inputs, axis=1 if data_format == 'channels_first' else 3,
momentum=_BATCH_NORM_DECAY, epsilon=_BATCH_NORM_EPSILON, center=True,
scale=True, training=training, fused=True)
inputs = tf.nn.relu(inputs)
return inputs
def fixed_padding(inputs, kernel_size, data_format):
"""Pads the input along the spatial dimensions independently of input size.
Args:
inputs: A tensor of size [batch, channels, height_in, width_in] or
[batch, height_in, width_in, channels] depending on data_format.
kernel_size: The kernel to be used in the conv2d or max_pool2d operation.
Should be a positive integer.
data_format: The input format ('channels_last' or 'channels_first').
Returns:
A tensor with the same format as the input with the data either intact
(if kernel_size == 1) or padded (if kernel_size > 1).
"""
pad_total = kernel_size - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
if data_format == 'channels_first':
padded_inputs = tf.pad(inputs, [[0, 0], [0, 0],
[pad_beg, pad_end], [pad_beg, pad_end]])
else:
padded_inputs = tf.pad(inputs, [[0, 0], [pad_beg, pad_end],
[pad_beg, pad_end], [0, 0]])
return padded_inputs
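# Worked example: kernel_size=7 gives pad_total=6, pad_beg=3, pad_end=3, so a
# channels_last input of shape [N, 224, 224, C] is padded to [N, 230, 230, C];
# the amount of padding depends only on the kernel, never on the input size.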
def conv2d_fixed_padding(inputs, filters, kernel_size, strides, data_format):
"""Strided 2-D convolution with explicit padding."""
# The padding is consistent and is based only on `kernel_size`, not on the
# dimensions of `inputs` (as opposed to using `tf.layers.conv2d` alone).
if strides > 1:
inputs = fixed_padding(inputs, kernel_size, data_format)
return tf.layers.conv2d(
inputs=inputs, filters=filters, kernel_size=kernel_size, strides=strides,
padding=('SAME' if strides == 1 else 'VALID'), use_bias=False,
kernel_initializer=tf.variance_scaling_initializer(),
data_format=data_format)
def building_block(inputs, filters, training, projection_shortcut, strides,
data_format):
"""Standard building block for residual networks with BN before convolutions.
Args:
inputs: A tensor of size [batch, channels, height_in, width_in] or
[batch, height_in, width_in, channels] depending on data_format.
filters: The number of filters for the convolutions.
training: A Boolean for whether the model is in training or inference
mode. Needed for batch normalization.
projection_shortcut: The function to use for projection shortcuts
(typically a 1x1 convolution when downsampling the input).
strides: The block's stride. If greater than 1, this block will ultimately
downsample the input.
data_format: The input format ('channels_last' or 'channels_first').
Returns:
The output tensor of the block.
"""
shortcut = inputs
inputs = batch_norm_relu(inputs, training, data_format)
# The projection shortcut should come after the first batch norm and ReLU
# since it performs a 1x1 convolution.
if projection_shortcut is not None:
shortcut = projection_shortcut(inputs)
inputs = conv2d_fixed_padding(
inputs=inputs, filters=filters, kernel_size=3, strides=strides,
data_format=data_format)
inputs = batch_norm_relu(inputs, training, data_format)
inputs = conv2d_fixed_padding(
inputs=inputs, filters=filters, kernel_size=3, strides=1,
data_format=data_format)
return inputs + shortcut
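# Shape sketch (illustrative): with strides=1 and projection_shortcut=None, a
# channels_last input of [N, 56, 56, filters] passes through BN+ReLU, a 3x3
# conv, BN+ReLU, and a second 3x3 conv, all shape-preserving, and is then
# added elementwise to the identity shortcut.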
def bottleneck_block(inputs, filters, training, projection_shortcut,
strides, data_format):
"""Bottleneck block variant for residual networks with BN before convolutions.
Args:
inputs: A tensor of size [batch, channels, height_in, width_in] or
[batch, height_in, width_in, channels] depending on data_format.
filters: The number of filters for the first two convolutions. Note
that the third and final convolution will use 4 times as many filters.
training: A Boolean for whether the model is in training or inference
mode. Needed for batch normalization.
projection_shortcut: The function to use for projection shortcuts
(typically a 1x1 convolution when downsampling the input).
strides: The block's stride. If greater than 1, this block will ultimately
downsample the input.
data_format: The input format ('channels_last' or 'channels_first').
Returns:
The output tensor of the block.
"""
shortcut = inputs
inputs = batch_norm_relu(inputs, training, data_format)
# The projection shortcut should come after the first batch norm and ReLU
# since it performs a 1x1 convolution.
if projection_shortcut is not None:
shortcut = projection_shortcut(inputs)
inputs = conv2d_fixed_padding(
inputs=inputs, filters=filters, kernel_size=1, strides=1,
data_format=data_format)
inputs = batch_norm_relu(inputs, training, data_format)
inputs = conv2d_fixed_padding(
inputs=inputs, filters=filters, kernel_size=3, strides=strides,
data_format=data_format)
inputs = batch_norm_relu(inputs, training, data_format)
inputs = conv2d_fixed_padding(
inputs=inputs, filters=4 * filters, kernel_size=1, strides=1,
data_format=data_format)
return inputs + shortcut
def block_layer(inputs, filters, block_fn, blocks, strides, training, name,
data_format):
"""Creates one layer of blocks for the ResNet model.
Args:
inputs: A tensor of size [batch, channels, height_in, width_in] or
[batch, height_in, width_in, channels] depending on data_format.
filters: The number of filters for the first convolution of the layer.
block_fn: The block to use within the model, either `building_block` or
`bottleneck_block`.
blocks: The number of blocks contained in the layer.
strides: The stride to use for the first convolution of the layer. If
greater than 1, this layer will ultimately downsample the input.
training: Either True or False, whether we are currently training the
model. Needed for batch norm.
name: A string name for the tensor output of the block layer.
data_format: The input format ('channels_last' or 'channels_first').
Returns:
The output tensor of the block layer.
"""
# Bottleneck blocks end with 4x the number of filters as they start with
filters_out = 4 * filters if block_fn is bottleneck_block else filters
def projection_shortcut(inputs):
return conv2d_fixed_padding(
inputs=inputs, filters=filters_out, kernel_size=1, strides=strides,
data_format=data_format)
# Only the first block per block_layer uses projection_shortcut and strides
inputs = block_fn(inputs, filters, training, projection_shortcut, strides,
data_format)
for _ in range(1, blocks):
inputs = block_fn(inputs, filters, training, None, 1, data_format)
return tf.identity(inputs, name)
class Model(object):
"""Base class for building the Resnet v2 Model.
"""
def __init__(self, resnet_size, num_classes, num_filters, kernel_size,
conv_stride, first_pool_size, first_pool_stride, probe_pool_size,
second_pool_size, second_pool_stride, probe_pool_stride,
block_fn, block_sizes, pool_type, num_probes,
block_strides, final_size, data_format=None):
"""Creates a model for classifying an image.
Args:
resnet_size: A single integer for the size of the ResNet model.
probe_pool_size: Number to pool the probes by.
probe_pool_stride: stride size for the probe pooling layer
num_classes: The number of classes used as labels.
num_filters: The number of filters to use for the first block layer
of the model. This number is then doubled for each subsequent block
layer.
kernel_size: The kernel size to use for convolution.
conv_stride: stride size for the initial convolutional layer
first_pool_size: Pool size to be used for the first pooling layer.
If none, the first pooling layer is skipped.
first_pool_stride: stride size for the first pooling layer. Not used
if first_pool_size is None.
second_pool_size: Pool size to be used for the second pooling layer.
second_pool_stride: stride size for the final pooling layer
block_fn: Which block layer function should be used? Pass in one of
the two functions defined above: building_block or bottleneck_block
block_sizes: A list containing n values, where n is the number of sets of
block layers desired. Each value should be the number of blocks in the
i-th set.
pool_type: 'max' or 'mean'.
block_strides: List of integers representing the desired stride size for
each of the sets of block layers. Should be same length as block_sizes.
final_size: The expected size of the model after the second pooling.
data_format: Input format ('channels_last', 'channels_first', or None).
If set to None, the format is dependent on whether a GPU is available.
"""
self.resnet_size = resnet_size
if not data_format:
data_format = (
'channels_first' if tf.test.is_built_with_cuda() else 'channels_last')
self.data_format = data_format
self.num_classes = num_classes
self.num_filters = num_filters
self.kernel_size = kernel_size
self.conv_stride = conv_stride
self.first_pool_size = first_pool_size
self.first_pool_stride = first_pool_stride
self.second_pool_size = second_pool_size
self.second_pool_stride = second_pool_stride
self.probe_pool_size = probe_pool_size
self.probe_pool_stride = probe_pool_stride
self.block_fn = block_fn
self.block_sizes = block_sizes
self.block_strides = block_strides
self.final_size = final_size
self.pool_type = pool_type
self.num_probes = num_probes
def __call__(self, inputs, training):
"""Add operations to classify a batch of input images.
Args:
inputs: A Tensor representing a batch of input images.
training: A boolean. Set to True to add operations required only when
training the classifier.
Returns:
A logits Tensor with shape [<batch_size>, self.num_classes].
"""
with tf.variable_scope('input_transforms'):
if self.data_format == 'channels_first':
# Convert the inputs from channels_last (NHWC) to channels_first (NCHW).
# This provides a large performance boost on GPU. See
# https://www.tensorflow.org/performance/performance_guide#data_formats
inputs = tf.transpose(inputs, [0, 3, 1, 2])
with tf.variable_scope('mentor') as scope:
# mentor
mentor = conv2d_fixed_padding(
inputs=inputs, filters=self.num_filters, kernel_size=self.kernel_size,
strides=self.conv_stride, data_format=self.data_format)
mentor = tf.identity(mentor, 'mentor_' + 'initial_conv')
if self.first_pool_size:
mentor = tf.layers.max_pooling2d(
inputs=mentor, pool_size=self.first_pool_size,
strides=self.first_pool_stride, padding='SAME',
data_format=self.data_format)
mentor = tf.identity(mentor, 'mentor_' + 'initial_max_pool')
mentor_probes = []
probe_count = 0
for i, num_blocks in enumerate(self.block_sizes[0]):
num_filters = self.num_filters * (2**i)
mentor = block_layer(
inputs=mentor, filters=num_filters, block_fn=self.block_fn,
blocks=num_blocks, strides=self.block_strides[i],
training=training, name='mentor_' + 'block_layer{}'.format(i + 1),
data_format=self.data_format)
if probe_count < self.num_probes:
if self.probe_pool_size > 0:
if self.pool_type == 'max':
mentor_probe = tf.layers.max_pooling2d(
inputs=mentor, pool_size=self.probe_pool_size,
strides=self.probe_pool_stride, padding='SAME',
data_format=self.data_format)
mentor_probe = tf.identity(mentor_probe, 'mentor_'+'probe_max_pool_' \
+ str(i))
elif self.pool_type == 'mean':
mentor_probe = tf.layers.average_pooling2d(
inputs=mentor, pool_size=self.probe_pool_size,
strides=self.probe_pool_stride, padding='SAME',
data_format=self.data_format)
mentor_probe = tf.identity(mentor_probe, 'mentor_'+'probe_mean_pool_' \
+ str(i))
else:
mentor_probe = mentor
mentor_probes.append(mentor_probe)
probe_count+=1
mentor = batch_norm_relu(mentor, training, self.data_format)
mentor = tf.layers.average_pooling2d(
inputs=mentor, pool_size=self.second_pool_size,
strides=self.second_pool_stride, padding='VALID',
data_format=self.data_format)
mentor = tf.identity(mentor, 'mentor_' + 'final_avg_pool')
mentor = tf.reshape(mentor, [-1, self.final_size])
mentor = tf.layers.dense(inputs=mentor, units=self.num_classes)
mentor = tf.identity(mentor, 'mentor_' + 'final_dense')
mentor_probes.append(mentor)
with tf.variable_scope('mentee') as scope:
# mentee
mentee = conv2d_fixed_padding(
inputs=inputs, filters=self.num_filters, kernel_size=self.kernel_size,
strides=self.conv_stride, data_format=self.data_format)
mentee = tf.identity(mentee, 'mentee_' + 'initial_conv')
if self.first_pool_size:
mentee = tf.layers.max_pooling2d(
inputs=mentee, pool_size=self.first_pool_size,
strides=self.first_pool_stride, padding='SAME',
data_format=self.data_format)
mentee = tf.identity(mentee, 'mentee_' + 'initial_max_pool')
probe_count = 0
mentee_probes = []
for i, num_blocks in enumerate(self.block_sizes[1]):
num_filters = self.num_filters * (2**i)
mentee = block_layer(
inputs=mentee, filters=num_filters, block_fn=self.block_fn,
blocks=num_blocks, strides=self.block_strides[i],
training=training, name='mentee_' + 'block_layer{}'.format(i + 1),
data_format=self.data_format)
if probe_count < self.num_probes:
if self.probe_pool_size > 0:
if self.pool_type == 'max':
mentee_probe = tf.layers.max_pooling2d(
inputs=mentee, pool_size=self.probe_pool_size,
strides=self.probe_pool_stride, padding='SAME',
data_format=self.data_format)
mentee_probe = tf.identity(mentee_probe, 'mentee_'+'probe_max_pool_' \
+ str(i))
elif self.pool_type == 'mean':
mentee_probe = tf.layers.average_pooling2d(
inputs=mentee, pool_size=self.probe_pool_size,
strides=self.probe_pool_stride, padding='SAME',
data_format=self.data_format)
mentee_probe = tf.identity(mentee_probe, 'mentee_'+'probe_mean_pool_' \
+ str(i))
else:
mentee_probe=mentee
mentee_probes.append(mentee_probe)
probe_count+=1
mentee = batch_norm_relu(mentee, training, self.data_format)
mentee = tf.layers.average_pooling2d(
inputs=mentee, pool_size=self.second_pool_size,
strides=self.second_pool_stride, padding='VALID',
data_format=self.data_format)
mentee = tf.identity(mentee, 'mentee_' + 'final_avg_pool')
mentee = tf.reshape(mentee, [-1, self.final_size])
mentee = tf.layers.dense(inputs=mentee, units=self.num_classes)
mentee = tf.identity(mentee, 'mentee_' + 'final_dense')
mentee_probes.append(mentee)
probe_cost = tf.constant(0.)
for mentor_feat, mentee_feat in zip(mentor_probes, mentee_probes):
probe_cost = probe_cost + tf.losses.mean_squared_error (
mentor_feat, mentee_feat)
return (mentor, mentee, probe_cost)
################################################################################
# Functions for running training/eval/validation loops for the model.
################################################################################
def learning_rate_with_decay(
batch_size, batch_denom, num_images, boundary_epochs, decay_rates):
"""Get a learning rate that decays step-wise as training progresses.
Args:
batch_size: the number of examples processed in each training batch.
batch_denom: this value will be used to scale the base learning rate.
`0.01 * batch size` is divided by this number, such that when
batch_denom == batch_size, the initial learning rate will be 0.01.
num_images: total number of images that will be used for training.
boundary_epochs: list of ints representing the epochs at which we
decay the learning rate.
decay_rates: list of floats representing the decay rates to be used
for scaling the learning rate. Should be the same length as
boundary_epochs.
Returns:
Returns a function that takes a single argument - the number of batches
trained so far (global_step)- and returns the learning rate to be used
for training the next batch.
"""
with tf.variable_scope('learning_rate'):
initial_learning_rate = 0.01 * batch_size / batch_denom
batches_per_epoch = num_images / batch_size
# Scale the learning rate by the supplied decay rates at the boundary epochs.
boundaries = [int(batches_per_epoch * epoch) for epoch in boundary_epochs]
vals = [initial_learning_rate * decay for decay in decay_rates]
def learning_rate_fn(global_step):
global_step = tf.cast(global_step, tf.int32)
rval = tf.train.piecewise_constant(global_step, boundaries, vals)
return rval
return learning_rate_fn
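# Worked example (a sketch with CIFAR-10-style numbers): batch_size=128,
# batch_denom=128, num_images=50000, boundary_epochs=[100, 150, 200], and
# decay_rates=[1, 0.1, 0.01, 0.001] give an initial rate of
# 0.01 * 128 / 128 = 0.01, batches_per_epoch of 390.625, step boundaries
# [39062, 58593, 78125], and piecewise values [0.01, 0.001, 1e-4, 1e-5].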
def learning_rate_with_decay_2( initial_learning_rate,
batch_size, batch_denom, num_images, boundary_epochs, decay_rates):
"""Get a learning rate that decays step-wise as training progresses.
Args:
initial_learning_rate: the learning rate to use before the first decay
boundary.
batch_size: the number of examples processed in each training batch.
batch_denom: unused here; retained for signature compatibility with
learning_rate_with_decay.
num_images: total number of images that will be used for training.
boundary_epochs: list of ints representing the epochs at which we
decay the learning rate.
decay_rates: list of floats representing the decay rates to be used
for scaling the learning rate. Should be the same length as
boundary_epochs.
Returns:
Returns a function that takes a single argument - the number of batches
trained so far (global_step)- and returns the learning rate to be used
for training the next batch.
"""
with tf.variable_scope('learning_rate'):
batches_per_epoch = num_images / batch_size
boundaries = [int(batches_per_epoch * epoch) for epoch in boundary_epochs]
vals = [initial_learning_rate * decay for decay in decay_rates]
def learning_rate_fn(global_step):
global_step = tf.cast(global_step, tf.int32)
rval = tf.train.piecewise_constant(global_step, boundaries, vals)
return rval
return learning_rate_fn
def distillation_coeff_fn(initial_distillation, global_step):
global_step = tf.cast(global_step, tf.int32)
rval = tf.train.exponential_decay(
initial_distillation,
global_step,
100000,
0.55,
staircase = False)
return rval
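# Schedule sketch: with decay_steps=100000, decay_rate=0.55, and
# staircase=False, the coefficient equals
# initial_distillation * 0.55 ** (global_step / 100000), so a weight starting
# at 0.5 falls to about 0.275 after 100k steps and about 0.151 after 200k
# steps (illustrative numbers, not taken from a training run).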
def resnet_model_fn(features, labels, mode, model_class, trainee,
distillation_coeff, probes_coeff, resnet_size, num_probes,
weight_decay_coeff, learning_rate_fn_mentor,
learning_rate_fn_mentee, learning_rate_fn_finetune,
momentum, data_format, pool_probes, pool_type,
temperature=1, optimizer='momentum',
loss_filter_fn=None):
"""Shared functionality for different resnet model_fns.
Initializes the ResnetModel representing the model layers
and uses that model to build the necessary EstimatorSpecs for
the `mode` in question. For training, this means building losses,
the optimizer, and the train op that get passed into the EstimatorSpec.
For evaluation and prediction, the EstimatorSpec is returned without
a train op, but with the necessary parameters for the given mode.
Args:
features: tensor representing input images
labels: tensor representing class labels for all input images
mode: current estimator mode; should be one of
`tf.estimator.ModeKeys.TRAIN`, `EVALUATE`, `PREDICT`
model_class: a class representing a TensorFlow model that has a __call__
function. We assume here that this is a subclass of ResnetModel.
    trainee: A string, one of `'mentor'`, `'mentee'` or `'finetune'`.
resnet_size: A list of two integers for the size of the ResNet model for
mentor followed by mentee.
weight_decay_coeff: weight decay rate used to regularize learned variables.
distillation_coeff: Weight for distillation.
probes_coeff: weight for probes.
learning_rate_fn_mentor: function that returns the current learning rate given
the current global_step
learning_rate_fn_mentee: function that returns the current learning rate given
the current global_step
learning_rate_fn_finetune: function that returns the current learning rate given
the current global_step
    num_probes: number of equally spaced probes to use.
momentum: momentum term used for optimization.
data_format: Input format ('channels_last', 'channels_first', or None).
If set to None, the format is dependent on whether a GPU is available.
temperature: A value of temperature to use for distillation. Defaults to 1
so that it will remain backward compatible.
loss_filter_fn: function that takes a string variable name and returns
True if the var should be included in loss calculation, and False
otherwise. If None, batch_normalization variables will be excluded
from the loss.
pool_probes: Downsampling for probes.
pool_type: 'max' or 'mean'.
optimizer: 'adam', 'adadelta' and 'momentum' are options.
Returns:
EstimatorSpec parameterized according to the input params and the
current mode.
"""
with tf.variable_scope('inputs'):
# Generate a summary node for the images
tf.summary.image('images', features, max_outputs=6)
model = model_class(resnet_size = resnet_size,
pool_probes = pool_probes,
pool_type = pool_type,
num_probes = num_probes,
data_format = data_format)
logits_mentor, logits_mentee, probe_cost = model(features,
mode == tf.estimator.ModeKeys.TRAIN)
predictions_mentor = {
'classes': tf.argmax(logits_mentor, axis=1),
'probabilities': tf.nn.softmax(logits_mentor,
name='softmax_tensor_mentor'),
}
predictions_mentee = {
'classes': tf.argmax(logits_mentee, axis=1),
'probabilities': tf.nn.softmax(logits_mentee,
name='softmax_tensor_mentee'),
}
if mode == tf.estimator.ModeKeys.PREDICT:
if trainee == 'mentor':
return tf.estimator.EstimatorSpec(mode=mode,
predictions=predictions_mentor)
elif trainee == 'mentee' or trainee == 'finetune':
return tf.estimator.EstimatorSpec(mode=mode,
predictions=predictions_mentee)
with tf.variable_scope('distillery'):
temperature_softmax_mentor = tf.nn.softmax((tf.div(logits_mentor,
temperature)), name ='softmax_temperature_tensor_mentor')
distillation_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
logits = tf.div(logits_mentee,temperature),
labels = temperature_softmax_mentor))
tf.identity(distillation_loss, name='distillation_loss')
tf.summary.scalar('distillation_loss', distillation_loss)
tf.summary.scalar('scaled_distillation_loss', distillation_coeff *
distillation_loss)
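  # Note: this is standard temperature-scaled distillation -- both mentor and
  # mentee logits are divided by the same temperature T before the softmax
  # cross-entropy, so a higher T exposes more of the mentor's probability mass
  # on non-target classes. With this formulation the gradients shrink roughly
  # as 1/T^2, which is one reason a separate distillation coefficient is kept.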
with tf.variable_scope('cross_entropy'):
# Calculate loss, which includes softmax cross entropy and L2 regularization.
cross_entropy_mentor = tf.losses.softmax_cross_entropy(
logits=logits_mentor, onehot_labels=labels)
# Create a tensor named cross_entropy for logging purposes.
tf.identity(cross_entropy_mentor, name='cross_entropy_mentor')
tf.summary.scalar('cross_entropy_mentor', cross_entropy_mentor)
cross_entropy_mentee = tf.losses.softmax_cross_entropy(
logits=logits_mentee, onehot_labels=labels)
tf.identity(cross_entropy_mentee, name='cross_entropy_mentee')
tf.summary.scalar('cross_entropy_mentee', cross_entropy_mentee)
# If no loss_filter_fn is passed, assume we want the default behavior,
# which is that batch_normalization variables are excluded from loss.
if not loss_filter_fn:
def loss_filter_fn(name):
return 'batch_normalization' not in name
mentor_variables=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
scope='mentor')
mentee_variables=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
scope='mentee')
with tf.variable_scope('regularizers'):
if weight_decay_coeff > 0:
l2_mentor = weight_decay_coeff * tf.add_n(
[tf.nn.l2_loss(v) for v in mentor_variables
if loss_filter_fn(v.name)])
l2_mentee = weight_decay_coeff * tf.add_n(
[tf.nn.l2_loss(v) for v in mentee_variables
if loss_filter_fn(v.name)])
else:
l2_mentor = tf.constant(0.)
l2_mentee = tf.constant(0.)
if mode == tf.estimator.ModeKeys.TRAIN:
with tf.variable_scope('learning_rates'):
global_step = tf.train.get_or_create_global_step()
learning_rate_mentor = learning_rate_fn_mentor(global_step)
learning_rate_mentee = learning_rate_fn_mentee(global_step)
learning_rate_finetune = learning_rate_fn_finetune(global_step)
tf.identity(learning_rate_mentor, name='learning_rate_mentor' )
tf.summary.scalar('learning_rate_mentor', learning_rate_mentor)
tf.identity(learning_rate_mentee, name='learning_rate_mentee' )
tf.summary.scalar('learning_rate_mentee', learning_rate_mentee)
tf.identity(learning_rate_finetune, name='learning_rate_finetune' )
tf.summary.scalar('learning_rate_finetune', learning_rate_finetune)
with tf.variable_scope('mentor_cumulative_loss'):
# Add weight decay and distillation to the loss.
loss_mentor = cross_entropy_mentor + l2_mentor
tf.summary.scalar('objective', loss_mentor)
with tf.variable_scope('mentee_cumulative_loss'):
distillation_coeff_decayed = distillation_coeff_fn(distillation_coeff,
global_step)
probe_scale = probes_coeff * distillation_coeff_decayed
tf.identity(probe_cost, name='probe_cost')
tf.summary.scalar('probe_loss', probe_cost)
tf.summary.scalar('scaled_probe_loss', probe_scale *
probe_cost)
      tf.identity(distillation_coeff_decayed, name='distillation_coeff_decayed')
tf.summary.scalar('coeff',distillation_coeff_decayed)
loss_mentee = cross_entropy_mentee + l2_mentee + \
distillation_coeff_decayed * distillation_loss + \
probe_scale * probe_cost
tf.summary.scalar('objective', loss_mentee)
with tf.variable_scope('mentee_finetune'):
loss_finetune = cross_entropy_mentee + l2_mentee
tf.summary.scalar('objective', loss_finetune)
if optimizer[0] == 'momentum':
with tf.variable_scope('mentor_momentum_optimizer'):
optimizer_mentor = tf.train.MomentumOptimizer(
learning_rate=learning_rate_mentor,
momentum=momentum)
elif optimizer[0] == 'adam':
with tf.variable_scope('mentor_adam_optimizer'):
optimizer_mentor = tf.train.AdamOptimizer(
learning_rate=learning_rate_mentor)
elif optimizer[0] == 'adadelta':
with tf.variable_scope('mentor_adadelta_optimizer'):
optimizer_mentor = tf.train.AdadeltaOptimizer(
learning_rate=learning_rate_mentor)
if optimizer[1] == 'momentum':
with tf.variable_scope('mentee_momentum_optimizer'):
optimizer_mentee = tf.train.MomentumOptimizer(
learning_rate=learning_rate_mentee,
momentum=momentum)
elif optimizer[1] == 'adam':
with tf.variable_scope('mentee_adam_optimizer'):
optimizer_mentee = tf.train.AdamOptimizer(
learning_rate=learning_rate_mentee)
elif optimizer[1] == 'adadelta':
with tf.variable_scope('mentee_adadelta_optimizer'):
optimizer_mentee = tf.train.AdadeltaOptimizer(
learning_rate=learning_rate_mentee)
if optimizer[2] == 'momentum':
with tf.variable_scope('finetune_momentum_optimizer'):
optimizer_finetune = tf.train.MomentumOptimizer(
learning_rate=learning_rate_finetune,
momentum=momentum)
elif optimizer[2] == 'adam':
with tf.variable_scope('finetune_adam_optimizer'):
optimizer_finetune = tf.train.AdamOptimizer(
learning_rate=learning_rate_finetune)
elif optimizer[2] == 'adadelta':
with tf.variable_scope('finetune_adadelta_optimizer'):
optimizer_finetune = tf.train.AdadeltaOptimizer(
learning_rate=learning_rate_finetune)
# Batch norm requires update ops to be added as a dependency to train_op
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
with tf.variable_scope('optimizers'):
train_op_mentor = optimizer_mentor.minimize(loss_mentor,
global_step,
var_list = mentor_variables)
train_op_mentee = optimizer_mentee.minimize(loss_mentee,
global_step,
var_list = mentee_variables)
train_op_finetune = optimizer_finetune.minimize(loss_finetune,
global_step,
var_list = mentee_variables)
else:
    with tf.variable_scope('mentor_cumulative_loss'):
      # l2_* already include weight_decay_coeff, so it must not be applied twice.
      loss_mentor = cross_entropy_mentor + l2_mentor
    with tf.variable_scope('mentee_cumulative_loss'):
      loss_mentee = cross_entropy_mentee + l2_mentee
    with tf.variable_scope('mentee_finetune'):
      loss_finetune = cross_entropy_mentee + l2_mentee
train_op_mentor = None
train_op_mentee = None
train_op_finetune = None
with tf.variable_scope('metrics'):
accuracy_mentor = tf.metrics.accuracy(
tf.argmax(labels, axis=1), predictions_mentor['classes'])
accuracy_mentee = tf.metrics.accuracy(
tf.argmax(labels, axis=1), predictions_mentee['classes'])
metrics = {'accuracy_mentor': accuracy_mentor,
'accuracy_mentee': accuracy_mentee}
# Create a tensor named train_accuracy for logging purposes
tf.identity(accuracy_mentor[1], name='train_accuracy_mentor')
tf.summary.scalar('train_accuracy_mentor', accuracy_mentor[1])
tf.identity(accuracy_mentee[1], name='train_accuracy_mentee')
tf.summary.scalar('train_accuracy_mentee', accuracy_mentee[1])
saver=tf.train.Saver(var_list = tf.global_variables())
if trainee == 'mentor':
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions_mentor,
loss=loss_mentor,
train_op=train_op_mentor,
scaffold=tf.train.Scaffold(saver=saver),
eval_metric_ops=metrics)
elif trainee == 'mentee':
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions_mentee,
loss=loss_mentee,
train_op=train_op_mentee,
scaffold=tf.train.Scaffold(saver=saver),
eval_metric_ops=metrics)
elif trainee == 'finetune':
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions_mentee,
loss=loss_finetune,
train_op=train_op_finetune,
scaffold=tf.train.Scaffold(saver=saver),
eval_metric_ops=metrics)
def resnet_main(flags, model_function, input_function):
# Using the Winograd non-fused algorithms provides a small performance boost.
os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1'
# Set up a RunConfig to only save checkpoints once per training cycle.
run_config = tf.estimator.RunConfig().replace(save_checkpoints_secs=1e9)
mentor = tf.estimator.Estimator(
model_fn=model_function, model_dir=flags.model_dir,
config=run_config,
params={
'resnet_size': [flags.resnet_size_mentor, flags.resnet_size_mentee],
'data_format': flags.data_format,
'batch_size': flags.batch_size,
'distillation_coeff': flags.distillation_coeff,
'probes_coeff': flags.probes_coeff,
'weight_decay_coeff': flags.weight_decay_coeff,
'optimizer': [flags.mentor_optimizer,
flags.mentee_optimizer,
flags.finetune_optimizer],
'temperature': flags.temperature,
'num_probes': flags.num_probes,
'pool_probes': flags.pool_probes,
'train_epochs_mentor': flags.train_epochs_mentor,
'train_epochs_mentee': flags.train_epochs_mentee,
'train_epochs_finetune': flags.train_epochs_finetune,
'initial_learning_rate_mentor': flags.initial_learning_rate_mentor,
'initial_learning_rate_mentee': flags.initial_learning_rate_mentee,
'initial_learning_rate_finetune': flags.initial_learning_rate_finetune,
'pool_type': flags.pool_type,
'trainee': 'mentor'
})
for i in range(flags.train_epochs_mentor // flags.epochs_per_eval):
tensors_to_log = {
'learning_rate': 'learning_rates/learning_rate_mentor',
'cross_entropy': 'cross_entropy/cross_entropy_mentor' ,
'train_accuracy': 'metrics/train_accuracy_mentor'
}
logging_hook = tf.train.LoggingTensorHook(
tensors=tensors_to_log, every_n_iter=100)
def input_fn_train():
return input_function(True, flags.data_dir, flags.batch_size,
flags.epochs_per_eval, flags.num_parallel_calls)
print(' *********************** ' )
print(' Starting a mentor training cycle. [' + str(i) + '/'
+ str(flags.train_epochs_mentor // flags.epochs_per_eval) + ']')
print(' *********************** ' )
mentor.train(input_fn=input_fn_train, hooks=[logging_hook])
print('Starting to evaluate.')
# Evaluate the model and print results
def input_fn_eval():
return input_function(False, flags.data_dir, flags.batch_size,
1, flags.num_parallel_calls)
eval_results = mentor.evaluate(input_fn=input_fn_eval)
print(eval_results)
mentee = tf.estimator.Estimator(
model_fn=model_function, model_dir=flags.model_dir,
config=run_config,
params={
'resnet_size': [flags.resnet_size_mentor, flags.resnet_size_mentee],
'data_format': flags.data_format,
'batch_size': flags.batch_size,
'distillation_coeff': flags.distillation_coeff,
'probes_coeff': flags.probes_coeff,
'optimizer': [flags.mentor_optimizer,
flags.mentee_optimizer,
flags.finetune_optimizer],
'weight_decay_coeff': flags.weight_decay_coeff,
'temperature': flags.temperature,
'num_probes': flags.num_probes,
'pool_probes': flags.pool_probes,
'train_epochs_mentor': flags.train_epochs_mentor,
'train_epochs_mentee': flags.train_epochs_mentee,
'train_epochs_finetune': flags.train_epochs_finetune,
'initial_learning_rate_mentor': flags.initial_learning_rate_mentor,
'initial_learning_rate_mentee': flags.initial_learning_rate_mentee,
'initial_learning_rate_finetune': flags.initial_learning_rate_finetune,
'pool_type': flags.pool_type,
'trainee': 'mentee'
})
for i in range(flags.train_epochs_mentee // flags.epochs_per_eval):
tensors_to_log = {
'learning_rate': 'learning_rates/learning_rate_mentee',
'cross_entropy': 'cross_entropy/cross_entropy_mentee',
'train_accuracy': 'metrics/train_accuracy_mentee',
'distillation_loss': 'distillery/distillation_loss',
'distillation_coeff':'mentee_cumulative_loss/distillation_coeff_decayed'
}
logging_hook = tf.train.LoggingTensorHook(
tensors=tensors_to_log, every_n_iter=100)
def input_fn_train():
return input_function(True, flags.data_dir, flags.batch_size,
flags.epochs_per_eval, flags.num_parallel_calls)
print(' *********************** ' )
print(' Starting a mentee training cycle. [' + str(i) + '/'
+ str(flags.train_epochs_mentee // flags.epochs_per_eval) + ']')
print(' *********************** ' )
mentee.train(input_fn=input_fn_train, hooks=[logging_hook])
print('Starting to evaluate.')
# Evaluate the model and print results
def input_fn_eval():
return input_function(False, flags.data_dir, flags.batch_size,
1, flags.num_parallel_calls)
eval_results = mentee.evaluate(input_fn=input_fn_eval)
print(eval_results)
finetune = tf.estimator.Estimator(
model_fn=model_function, model_dir=flags.model_dir,
config=run_config,
params={
'resnet_size': [flags.resnet_size_mentor, flags.resnet_size_mentee],
'data_format': flags.data_format,
'batch_size': flags.batch_size,
'distillation_coeff': flags.distillation_coeff,
'probes_coeff': flags.probes_coeff,
'optimizer': [flags.mentor_optimizer,
flags.mentee_optimizer,
flags.finetune_optimizer],
'weight_decay_coeff': flags.weight_decay_coeff,
'temperature': flags.temperature,
'num_probes': flags.num_probes,
'pool_probes': flags.pool_probes,
'train_epochs_mentor': flags.train_epochs_mentor,
'train_epochs_mentee': flags.train_epochs_mentee,
'train_epochs_finetune': flags.train_epochs_finetune,
'initial_learning_rate_mentor': flags.initial_learning_rate_mentor,
'initial_learning_rate_mentee': flags.initial_learning_rate_mentee,
'initial_learning_rate_finetune': flags.initial_learning_rate_finetune,
'pool_type': flags.pool_type,
'trainee': 'finetune'
})
for i in range(flags.train_epochs_finetune // flags.epochs_per_eval):
tensors_to_log = {
'learning_rate': 'learning_rates/learning_rate_mentee',
'cross_entropy': 'cross_entropy/cross_entropy_mentee',
'train_accuracy': 'metrics/train_accuracy_mentee',
}
logging_hook = tf.train.LoggingTensorHook(
tensors=tensors_to_log, every_n_iter=100)
def input_fn_train():
return input_function(True, flags.data_dir, flags.batch_size,
flags.epochs_per_eval, flags.num_parallel_calls)
print(' *********************** ' )
print(' Starting a mentee finetune cycle. [' + str(i) + '/'
+ str(flags.train_epochs_finetune // flags.epochs_per_eval) + ']')
print(' *********************** ' )
finetune.train(input_fn=input_fn_train, hooks=[logging_hook])
print('Starting to evaluate.')
# Evaluate the model and print results
def input_fn_eval():
return input_function(False, flags.data_dir, flags.batch_size,
1, flags.num_parallel_calls)
eval_results = finetune.evaluate(input_fn=input_fn_eval)
print(eval_results)
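# Note on the three training phases above: all three Estimators share the same
# model_dir, so each phase restores the previous phase's checkpoint. Phase 1
# trains only the mentor variables, phase 2 trains only the mentee variables
# (with distillation and probe losses against the fixed mentor), and phase 3
# finetunes the mentee on plain cross entropy -- the var_list arguments passed
# to minimize() in resnet_model_fn are what keep the other network frozen in
# each phase.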
class ResnetArgParser(argparse.ArgumentParser):
"""Arguments for configuring and running a Resnet Model.
"""
def __init__(self, resnet_size_choices=None):
super(ResnetArgParser, self).__init__()
self.add_argument(
'--data_dir', type=str, default='./resnet_data',
help='The directory where the input data is stored.')
self.add_argument(
'--num_parallel_calls', type=int, default=5,
help='The number of records that are processed in parallel '
'during input processing. This can be optimized per data set but '
'for generally homogeneous data sets, should be approximately the '
'number of available CPU cores.')
self.add_argument(
'--model_dir', type=str, default='./resnet_model',
help='The directory where the model will be stored.')
self.add_argument(
'--resnet_size_mentor', type=int, default=50,
choices=resnet_size_choices,
help='The size of the ResNet Mentor model to use.')
self.add_argument(
'--resnet_size_mentee', type=int, default=10,
choices=resnet_size_choices,
help='The size of the ResNet Mentee model to use.')
self.add_argument(
'--train_epochs_mentor', type=int, default=100,
help='The number of epochs to use for training.')
self.add_argument(
'--train_epochs_mentee', type=int, default=100,
help='The number of epochs to use for training.')
self.add_argument(
'--train_epochs_finetune', type=int, default=100,
help='The number of epochs to use for training.')
self.add_argument(
'--epochs_per_eval', type=int, default=1,
help='The number of training epochs to run between evaluations.')
self.add_argument(
'--batch_size', type=int, default=32,
help='Batch size for training and evaluation.')
self.add_argument(
'--mentor_optimizer', type=str, default='momentum',
help='Optimizer for training and evaluation.')
self.add_argument(
'--mentee_optimizer', type=str, default='momentum',
help='Optimizer for training and evaluation.')
self.add_argument(
'--finetune_optimizer', type=str, default='momentum',
help='Optimizer for training and evaluation.')
self.add_argument(
'--data_format', type=str, default=None,
choices=['channels_first', 'channels_last'],
help='A flag to override the data format used in the model. '
'channels_first provides a performance boost on GPU but '
'is not always compatible with CPU. If left unspecified, '
'the data format will be chosen automatically based on '
'whether TensorFlow was built for CPU or GPU.')
self.add_argument(
'--distillation_coeff', type=float, default=0.01,
        help='Coefficient of the distillation loss applied from mentor to'
             ' mentee. This is only useful when performing distillation.')
self.add_argument(
'--probes_coeff', type=float, default=0.0001,
        help='Coefficient of the probe loss applied from mentor to'
             ' mentee. This is only useful when performing mentoring.')
self.add_argument(
'--weight_decay_coeff', type=float, default=0.0002,
        help='Coefficient applied to the weight decay regularizer.')
self.add_argument(
'--temperature', type=float, default=3,
help='Temperature to be used for the softmax layer')
self.add_argument(
'--num_probes', type=int, default=0,
help='Number of probes to be used')
self.add_argument(
'--pool_probes', type=int, default=2,
        help='Pooling factor applied to the probe feature maps.')
self.add_argument(
'--initial_learning_rate_mentor', type=float, default=0.001,
help='Set initial learning rate for mentor')
self.add_argument(
'--initial_learning_rate_mentee', type=float, default=0.001,
help='Set initial learning rate for mentee')
self.add_argument(
'--initial_learning_rate_finetune', type=float, default=0.001,
        help='Set initial learning rate for finetuning')
self.add_argument(
'--pool_type', type=str, default='max',
help='Pool type for probes.') | mentor_probe = tf.layers.max_pooling2d(
inputs=mentor, pool_size=self.probe_pool_size,
strides=self.probe_pool_stride, padding='SAME',
data_format=self.data_format)
mentor_probe = tf.identity(mentor, 'mentor_'+'probe_max_pool_' \
+ str(i)) |
Transformer_training_V2.py | #!/usr/bin/python
import sys
import os
import subprocess
from os.path import join, isdir
import torch
#*************************************************************************************************************************
####### Loading the Parser and default arguments
#import pdb;pdb.set_trace()
#sys.path.insert(0,'/mnt/matylda3/vydana/HOW2_EXP/Gen_V1/ATTNCODE/Trans_V1')
sys.path.insert(0,'/mnt/matylda3/vydana/HOW2_EXP/ASR_Transformer/ASR_TransV1')
import Transformer_arg
from Transformer_arg import parser
args = parser.parse_args()
#************************
import Set_gpus
from Set_gpus import Set_gpu
if args.gpu:
Set_gpu()
#import safe_gpu
#from safe_gpu import safe_gpu
#gpu_owner = safe_gpu.GPUOwner()
#***********************
import numpy as np
import fileinput
import json
import random
from itertools import chain
from numpy.random import permutation
##------------------------------------------------------------------
#import torch
from torch.autograd import Variable
#----------------------------------------
import torch.nn as nn
from torch import autograd, nn, optim
os.environ['PYTHONUNBUFFERED'] = '0'
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
from random import shuffle
from statistics import mean
import matplotlib
import matplotlib.pyplot as plt
plt.switch_backend('agg')
matplotlib.pyplot.viridis()
import glob
###save architecture for decoding
model_path_name=join(args.model_dir,'model_architecture_')
with open(model_path_name, 'w') as f:
json.dump(args.__dict__, f, indent=2)
print(args)
sys.path.insert(0,'/mnt/matylda3/vydana/HOW2_EXP/ASR_Transformer/ASR_TransV1')
# #####setting the gpus in the gpu cluster
# #**********************************
#import Set_gpus
#from Set_gpus import Set_gpu
#if args.gpu:
# Set_gpu()
###----------------------------------------
from Dataloader_for_AM_v2 import DataLoader
from utils__ import weights_init,reduce_learning_rate,read_as_list,gaussian_noise,plotting
#==============================================================
sys.path.insert(0,'/mnt/matylda3/vydana/HOW2_EXP/ASR_Transformer/ASR_TransV1')
from TRANSFORMER_ASR_V1 import Transformer
from Initializing_Transformer_ASR import Initialize_Att_model
from Transformer_Training_loop import train_val_model
from Load_sp_model import Load_sp_models
##==================================
#==============================================================
if not isdir(args.model_dir):
os.makedirs(args.model_dir)
png_dir=args.model_dir+'_png'
if not isdir(png_dir):
os.makedirs(png_dir)
############################################
#=============================================================
def main():
    ## Load sentencepiece models for the DataLoaders
Word_model=Load_sp_models(args.Word_model_path)
Char_model=Load_sp_models(args.Char_model_path)
    ### initialize the model
model,optimizer=Initialize_Att_model(args)
#============================================================
#------------------------------------------------------------
#
train_gen = DataLoader(files=glob.glob(args.data_dir + "train_splits/*"),
max_batch_label_len=args.max_batch_label_len,
max_batch_len=args.max_batch_len,
max_feat_len=args.max_feat_len,
max_label_len=args.max_label_len,
Word_model=Word_model,
Char_model=Char_model,
apply_cmvn=int(args.apply_cmvn))
dev_gen = DataLoader(files=glob.glob(args.data_dir + "dev_splits/*"),
max_batch_label_len=2000,
max_batch_len=args.max_batch_len,
max_feat_len=5000,
max_label_len=1000,
Word_model=Word_model,
Char_model=Char_model,
apply_cmvn=int(args.apply_cmvn))
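    # NOTE: the dev loader uses fixed, generous limits (max_feat_len=5000,
    # max_label_len=1000, max_batch_label_len=2000), presumably so validation
    # also covers long utterances that the training-side limits would filter out.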
#Flags that may change while training
if args.spec_aug_flag==2:
weight_noise_flag=False
spec_aug_flag=True
else:
weight_noise_flag=False
spec_aug_flag=False
val_history=np.zeros(args.nepochs)
#======================================
for epoch in range(args.nepochs):
##start of the epoch
tr_CER=[]; tr_BPE_CER=[]; L_train_cost=[]
model.train();
validate_interval = int(args.validate_interval * args.accm_grad) if args.accm_grad>0 else args.validate_interval
for trs_no in range(validate_interval):
B1 = train_gen.next()
assert B1 is not None, "None should never come out of the DataLoader"
Output_trainval_dict=train_val_model(smp_no=trs_no,
args = args,
model = model,
optimizer = optimizer,
data_dict = B1,
weight_noise_flag=weight_noise_flag,
spec_aug_flag=spec_aug_flag,
trainflag = True)
#
#
#get the losses form the dict
L_train_cost.append(Output_trainval_dict.get('cost_cpu'))
tr_CER.append(Output_trainval_dict.get('Char_cer'))
tr_BPE_CER.append(Output_trainval_dict.get('Word_cer'))
#attention_map=Output_trainval_dict.get('attention_record').data.cpu().numpy()
#==========================================
if (trs_no%args.tr_disp==0):
print("tr ep:==:>",epoch,"sampl no:==:>",trs_no,"train_cost==:>",mean(L_train_cost),"CER:",mean(tr_CER),'BPE_CER',mean(tr_BPE_CER),flush=True)
#------------------------
if args.plot_fig_training:
plot_name=join(png_dir,'train_epoch'+str(epoch)+'_attention_single_file_'+str(trs_no)+'.png')
plotting(plot_name,attention_map)
###validate the model
model.eval()
#=======================================================
Vl_CER=[]; Vl_BPE_CER=[];L_val_cost=[]
val_examples=0
for vl_smp in range(args.max_val_examples):
        B1 = dev_gen.next()
        assert B1 is not None, "None should never come out of the DataLoader"
        smp_feat = B1.get('smp_feat')
        val_examples += smp_feat.shape[0]
        ## break once we have seen enough validation examples
        if (val_examples >= args.max_val_examples):
            break
#--------------------------------------
Val_Output_trainval_dict=train_val_model(smp_no=trs_no,
args=args,
model = model,
optimizer = optimizer,
data_dict = B1,
weight_noise_flag=False,
spec_aug_flag=False,
trainflag = False)
L_val_cost.append(Val_Output_trainval_dict.get('cost_cpu'))
Vl_CER.append(Val_Output_trainval_dict.get('Char_cer'))
Vl_BPE_CER.append(Val_Output_trainval_dict.get('Word_cer'))
#attention_map=Val_Output_trainval_dict.get('attention_record').data.cpu().numpy()
#======================================================
#======================================================
if (vl_smp%args.vl_disp==0) or (val_examples==args.max_val_examples-1):
print("val epoch:==:>",epoch,"val smp no:==:>",vl_smp,"val_cost:==:>",mean(L_val_cost),"CER:",mean(Vl_CER),'BPE_CER',mean(Vl_BPE_CER),flush=True)
if args.plot_fig_validation:
plot_name=join(png_dir,'val_epoch'+str(epoch)+'_attention_single_file_'+str(vl_smp)+'.png')
plotting(plot_name,attention_map)
#----------------------------------------------------
#==================================================================
val_history[epoch]=(mean(Vl_CER)*100)
print("val_history:",val_history[:epoch+1])
#==================================================================
####saving_weights
ct="model_epoch_"+str(epoch)+"_sample_"+str(trs_no)+"_"+str(mean(L_train_cost))+"___"+str(mean(L_val_cost))+"__"+str(mean(Vl_CER))
print(ct) | #######################################################
#######################################################
###open the file write and close it to avoid delays
with open(args.weight_text_file,'a+') as weight_saving_file:
print(join(args.model_dir,str(ct)), file=weight_saving_file)
with open(args.Res_text_file,'a+') as Res_saving_file:
print(float(mean(Vl_CER)), file=Res_saving_file)
#=================================
# early_stopping and checkpoint averaging:
if args.early_stopping:
A=val_history
Non_zero_loss=A[A>0]
min_cpts=np.argmin(Non_zero_loss)
Non_zero_len=len(Non_zero_loss)
if ((Non_zero_len-min_cpts)>1):
weight_noise_flag=True
spec_aug_flag=True
if (Non_zero_len-min_cpts) > args.early_stopping_patience:
print("The model is early stopping........","minimum value of model is:",min_cpts)
exit(0)
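        # Early-stopping logic in brief: val_history holds the per-epoch
        # validation CER (in %). Once at least one epoch passes without
        # improving on the best CER so far, weight noise and SpecAugment are
        # enabled as extra regularization; once the gap exceeds
        # early_stopping_patience epochs, training exits with the best epoch
        # index printed above.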
#=======================================================
#=============================================================================================
if __name__ == '__main__':
main() | torch.save(model.state_dict(),join(args.model_dir,str(ct)))
####saving otpimizer helped Transformer
#torch.save(optimizer.state_dict(),join(args.model_dir,str(ct)+'_opt'))
|
wiki.component.ts | // #docregion
import { Component } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import { WikipediaService } from './wikipedia.service';
@Component({
moduleId: module.id,
selector: 'my-wiki',
templateUrl: 'wiki.component.html',
providers: [ WikipediaService ]
})
export class | {
title = 'Wikipedia Demo';
fetches = 'Fetches after each keystroke';
items: Observable<string[]>;
search (term: string) {
this.items = this.wikipediaService.search(term);
}
constructor (private wikipediaService: WikipediaService) { }
}
| WikiComponent |
rustdoc.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
//!
//! See comments in `src/bootstrap/rustc.rs` for more information.
#![deny(warnings)]
extern crate bootstrap;
use std::env;
use std::process::Command;
use std::path::PathBuf;
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set");
let libdir = env::var_os("RUSTDOC_LIBDIR").expect("RUSTDOC_LIBDIR was not set");
let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
use std::str::FromStr;
let verbose = match env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
};
let mut dylib_path = bootstrap::util::dylib_path();
dylib_path.insert(0, PathBuf::from(libdir));
let mut cmd = Command::new(rustdoc);
cmd.args(&args)
.arg("--cfg")
.arg(format!("stage{}", stage))
.arg("--cfg")
.arg("dox")
.arg("--sysroot")
.arg(sysroot)
.env(bootstrap::util::dylib_path_var(),
env::join_paths(&dylib_path).unwrap());
// Force all crates compiled by this compiler to (a) be unstable and (b)
// allow the `rustc_private` feature to link to other unstable crates
// also in the sysroot.
if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() {
cmd.arg("-Z").arg("force-unstable-if-unmarked");
}
if let Some(linker) = env::var_os("RUSTC_TARGET_LINKER") {
cmd.arg("--linker").arg(linker).arg("-Z").arg("unstable-options");
}
// Bootstrap's Cargo-command builder sets this variable to the current Rust version; let's pick
// it up so we can make rustdoc print this into the docs
if let Some(version) = env::var_os("RUSTDOC_CRATE_VERSION") |
if verbose > 1 {
eprintln!("rustdoc command: {:?}", cmd);
}
std::process::exit(match cmd.status() {
Ok(s) => s.code().unwrap_or(1),
Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
})
}
| {
// This "unstable-options" can be removed when `--crate-version` is stabilized
cmd.arg("-Z")
.arg("unstable-options")
.arg("--crate-version").arg(version);
} |
bookmark.go | package gui
import (
"database/sql"
"log"
"os"
"strings"
"github.com/gdamore/tcell/v2"
_ "github.com/mattn/go-sqlite3"
"github.com/rivo/tview"
"github.com/skanehira/ff/system"
)
type DBLogger struct{}
func (d DBLogger) Print(v ...interface{}) {
log.Print(v...)
}
type Bookmark struct {
ID int
Name string
}
type BookmarkStore struct {
db *sql.DB
}
func NewBookmarkStore(file string) (*BookmarkStore, error) {
file = os.ExpandEnv(file)
	// if the db file does not exist, fall back to an in-memory db
if !system.IsExist(file) {
file = ":memory:"
}
db, err := sql.Open("sqlite3", file)
if err != nil {
log.Println(err)
return nil, err
}
createSql := `
CREATE TABLE IF NOT EXISTS "bookmarks" ("id" integer, "name" varchar(255) , PRIMARY KEY ("id"));`
_, err = db.Exec(createSql)
if err != nil {
log.Println(err)
return nil, err
}
return &BookmarkStore{db: db}, nil
}
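// A minimal usage sketch (illustrative; the path is an assumption):
//
//	store, err := NewBookmarkStore("$HOME/.config/ff/bookmark.db")
//	if err != nil {
//		log.Fatal(err)
//	}
//	_ = store.Save(Bookmark{Name: "/tmp"})
//	bookmarks, _ := store.Load()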
func (b *BookmarkStore) HasBookmark(name string) bool {
var count int
row := b.db.QueryRow("select count(*) from bookmarks where name = ?", name)
if err := row.Scan(&count); err != nil {
log.Println(err)
return false
}
	return count > 0
}
func (b *BookmarkStore) Save(bookmark Bookmark) error {
if !b.HasBookmark(bookmark.Name) {
_, err := b.db.Exec("insert into bookmarks (name) values (?)", bookmark.Name)
if err != nil {
log.Println(err)
return err
}
}
return nil
}
func (b *BookmarkStore) Load() ([]Bookmark, error) {
var bookmarks []Bookmark
rows, err := b.db.Query("select * from bookmarks")
if err != nil {
log.Println(err)
return nil, err
}
defer rows.Close()
for rows.Next() {
var id int
var name string
if err := rows.Scan(&id, &name); err != nil {
log.Println(err)
return nil, err
}
bookmarks = append(bookmarks, Bookmark{
ID: id,
Name: name,
})
}
	if err := rows.Err(); err != nil {
		log.Println(err)
		return nil, err
	}
	return bookmarks, nil
}
func (b *BookmarkStore) Delete(id int) error {
_, err := b.db.Exec("delete from bookmarks where id = ?", id)
if err != nil {
log.Println(err)
return err
}
return nil
}
type Bookmarks struct {
store *BookmarkStore
entries []*Bookmark
searchWord string
enableIgnorecase bool
*tview.Table
}
func NewBookmark(config Config) (*Bookmarks, error) |
func (b *Bookmarks) SetSearchWord(word string) {
b.searchWord = word
}
func (b *Bookmarks) GetSearchWord() string {
return b.searchWord
}
func (b *Bookmarks) Add(name string) error {
bookmarks := Bookmark{
Name: name,
}
return b.store.Save(bookmarks)
}
func (b *Bookmarks) Delete(id int) error {
return b.store.Delete(id)
}
func (b *Bookmarks) Update() error {
entries, err := b.store.Load()
if err != nil {
return err
}
b.entries = []*Bookmark{}
for _, e := range entries {
e := e
b.entries = append(b.entries, &e)
}
return b.UpdateView()
}
func (b *Bookmarks) UpdateView() error {
table := b.Clear()
headers := []string{
"Name",
}
for k, v := range headers {
table.SetCell(0, k, &tview.TableCell{
Text: v,
NotSelectable: true,
Align: tview.AlignLeft,
Color: tcell.ColorYellow,
BackgroundColor: tcell.ColorDefault,
})
}
var entries []*Bookmark
for _, e := range b.entries {
var name, word string
if b.enableIgnorecase {
name = strings.ToLower(e.Name)
word = strings.ToLower(b.searchWord)
} else {
name = e.Name
word = b.searchWord
}
		if !strings.Contains(name, word) {
			continue
		}
		// keep the original entry so the displayed name retains its case and ID
		entries = append(entries, e)
}
i := 1
for _, e := range entries {
table.SetCell(i, 0, tview.NewTableCell(e.Name))
i++
}
return nil
}
func (b *Bookmarks) GetSelectEntry() *Bookmark {
row, _ := b.GetSelection()
if len(b.entries) == 0 {
return nil
}
if row < 1 {
return nil
}
if row > len(b.entries) {
return nil
}
return b.entries[row-1]
}
func (e *Bookmarks) SearchBookmark(gui *Gui) {
pageName := "search_bookmark"
if gui.Pages.HasPage(pageName) {
searchBookmarks.SetText(gui.Bookmark.GetSearchWord())
gui.Pages.SendToFront(pageName).ShowPage(pageName)
} else {
searchBookmarks = tview.NewInputField()
searchBookmarks.SetBorder(true).SetTitle("search bookmark").SetTitleAlign(tview.AlignLeft)
searchBookmarks.SetChangedFunc(func(text string) {
gui.Bookmark.SetSearchWord(text)
gui.Bookmark.UpdateView()
})
searchBookmarks.SetLabel("word").SetLabelWidth(5).SetDoneFunc(func(key tcell.Key) {
if key == tcell.KeyEnter {
gui.Pages.HidePage(pageName)
gui.FocusPanel(BookmarkPanel)
}
})
gui.Pages.AddAndSwitchToPage(pageName, gui.Modal(searchBookmarks, 0, 3), true).ShowPage("bookmark").ShowPage("main")
}
}
func (b *Bookmarks) CloseBookmark(gui *Gui) {
gui.Pages.RemovePage("bookmark").ShowPage("main")
gui.FocusPanel(FileTablePanel)
}
func (b *Bookmarks) BookmarkKeybinding(gui *Gui) {
gui.Bookmark.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
switch event.Rune() {
case 'q':
b.CloseBookmark(gui)
case 'd':
entry := gui.Bookmark.GetSelectEntry()
if entry == nil {
return event
}
b.Delete(entry.ID)
b.Update()
case 'f', '/':
b.SearchBookmark(gui)
case 'a':
b.AddBookmark(gui)
case '?':
gui.Help.UpdateView(BookmarkPanel)
gui.Pages.AddAndSwitchToPage("help", gui.Modal(gui.Help, 0, 0), true).ShowPage("bookmark")
}
switch event.Key() {
case tcell.KeyF1:
gui.Help.UpdateView(BookmarkPanel)
gui.Pages.AddAndSwitchToPage("help", gui.Modal(gui.Help, 0, 0), true).ShowPage("bookmark")
case tcell.KeyCtrlG:
entry := gui.Bookmark.GetSelectEntry()
if entry == nil {
return event
}
if err := gui.FileBrowser.ChangeDir(gui, gui.InputPath.GetText(), entry.Name); err != nil {
gui.Message(err.Error(), BookmarkPanel)
return event
}
b.CloseBookmark(gui)
}
return event
})
}
func (b *Bookmarks) AddBookmark(gui *Gui) {
gui.Form(map[string]string{"path": ""}, "add", "new bookmark", "new_bookmark", BookmarkPanel,
7, func(values map[string]string) error {
name := values["path"]
if name == "" {
return ErrNoPathName
}
name = os.ExpandEnv(name)
if !system.IsExist(name) {
return ErrNotExistPath
}
if err := b.Add(name); err != nil {
return err
}
if err := b.Update(); err != nil {
return err
}
return nil
})
gui.Pages.ShowPage("bookmark")
}
| {
if !system.IsExist(config.Bookmark.File) {
file, _ := os.OpenFile(os.ExpandEnv(config.Bookmark.File),
os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0666)
defer file.Close()
}
table := tview.NewTable().Select(0, 0).SetFixed(1, 1).SetSelectable(true, false)
table.SetTitleAlign(tview.AlignLeft).SetTitle("bookmarks").SetBorder(true)
store, err := NewBookmarkStore(config.Bookmark.File)
if err != nil {
log.Println(err)
return nil, err
}
return &Bookmarks{
store: store,
enableIgnorecase: config.IgnoreCase,
Table: table,
}, nil
} |
server.js | var webpack = require('webpack')
var WebpackDevServer = require('webpack-dev-server')
var config = require('./webpack.dev.config')
new WebpackDevServer(webpack(config), {
publicPath: config.output.publicPath,
hot: true,
inline: true,
historyApiFallback: true,
watchOptions: { | poll: 1000,
},
}).listen(3000, '0.0.0.0', function (err, result) {
if (err) {
console.log(err)
}
console.log('Listening at 0.0.0.0:3000')
}) | aggregateTimeout: 300, |
get.go | /*
Copyright 2021 The Fluid Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package volume
import (
"github.com/fluid-cloudnative/fluid/pkg/utils/kubeclient"
"github.com/pkg/errors"
"sigs.k8s.io/controller-runtime/pkg/client"
)
func | (client client.Client, volumeId string) (namespace, name string, err error) {
pv, err := kubeclient.GetPersistentVolume(client, volumeId)
if err != nil {
return "", "", err
}
if pv.Spec.ClaimRef == nil {
return "", "", errors.Errorf("pv %s has unexpected nil claimRef", volumeId)
}
namespace = pv.Spec.ClaimRef.Namespace
name = pv.Spec.ClaimRef.Name
ok, err := kubeclient.IsDatasetPVC(client, name, namespace)
if err != nil {
return "", "", err
}
if !ok {
return "", "", errors.Errorf("pv %s is not bounded with a fluid pvc", volumeId)
}
return
}
| GetNamespacedNameByVolumeId |
serializers.py | from custom_user.serializers import RelatedFieldAlternative
from rest_framework import serializers
from .models import Appointment
from visiting_schedule.models import VisitingSchedule
from patient.models import Patient
from visiting_schedule.serializers import VisitingScheduleSerializer
from patient.serializers import PatientSerializer
class AppointmentSerializer(serializers.ModelSerializer):
appointmentVisitingSchedule = RelatedFieldAlternative(
queryset=VisitingSchedule.objects.all(),
serializer = VisitingScheduleSerializer,
source = 'appointmentVisitingScheduleId',
)
appointmentPatient = RelatedFieldAlternative(
queryset=Patient.objects.all(),
serializer = PatientSerializer,
source = 'appointmentPatientId'
)
    patientSymptomNote = serializers.CharField(allow_blank=True, source='appointmentPatientSymptomNote')
class Meta:
| model = Appointment
fields = [
'appointmentId',
'appointmentPaymentCredential',
'patientSymptomNote',
'appointmentTime',
'appointmentIsConfirmed',
'appointmentIsCanceled',
'appointmentIsVisited',
'appointmentDate',
'appointmentSerialNumber',
'appointmentVisitingSchedule',
'appointmentPatient',
'appointmentType'
] |
|
bing_speech.go | package voices
import (
"bytes"
"context"
"encoding/json"
"fmt"
"html"
"io"
"os"
"path/filepath"
"strings"
"github.com/wzshiming/requests"
"golang.org/x/net/websocket"
)
const (
ua = `Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 Edg/87.0.664.66`
trustedClientToken = `6A5AA1D4EAFF4E9FB37E23D68491D6F4`
listUrl = `https://speech.platform.bing.com/consumer/speech/synthesize/readaloud/voices/list?trustedclienttoken=` + trustedClientToken
speechUrl = `wss://speech.platform.bing.com/consumer/speech/synthesize/readaloud/edge/v1?TrustedClientToken=` + trustedClientToken
)
var bing *bingSayVoices
func BingSayVoices() Voices {
if bing == nil {
bing = &bingSayVoices{
req: requests.NewClient().
SetLogLevel(requests.LogIgnore).
SetCache(requests.FileCacheDir(cacheDir)).
NewRequest().
SetUserAgent(ua),
}
}
return bing
}
type bingSayVoices struct {
req *requests.Request
voices []Voice
}
func (m *bingSayVoices) Voices(opts ...Opt) ([]Voice, error) {
vs, err := m.getVoices()
if err != nil {
return nil, err
}
opt := voicesOpt{}
opt.parse(opts)
if opt.Name != "" {
for _, voice := range vs {
if voice.Name() == opt.Name {
return []Voice{voice}, nil
}
}
return nil, fmt.Errorf("not found voice %q", opt.Name)
} else if opt.Language != "" {
voices := []Voice{}
for _, voice := range vs {
if voice.Language() == opt.Language {
voices = append(voices, voice)
}
}
return voices, nil
} else {
return vs, nil
}
}
func (m *bingSayVoices) getVoices() ([]Voice, error) {
if m.voices != nil {
return m.voices, nil
}
resp, err := m.req.Get(listUrl)
if err != nil {
return nil, err
}
list := []bingSayItem{}
err = json.Unmarshal(resp.Body(), &list)
if err != nil {
return nil, err
}
var voices []Voice
for _, item := range list {
t := strings.Split(item.ShortName, "-")
name := t[len(t)-1]
name = strings.TrimSuffix(name, "Neural")
voice := bingSay{
name: name,
language: strings.ReplaceAll(item.Locale, "_", "-"),
bingSayItem: item,
}
voices = append(voices, &voice)
}
m.voices = voices
return voices, nil
}
type bingSay struct {
name string
language string
bingSayItem
}
func (m bingSay) Name() string {
return m.name
}
func (m bingSay) Language() string {
return m.language
}
func (m bingSay) Detail() string {
return m.bingSayItem.FriendlyName
}
func (m *bingSay) sayReader(ctx context.Context, word string) (io.ReadCloser, error) {
conn, err := websocket.Dial(speechUrl, "", "https://www.bing.com/")
if err != nil {
return nil, err
}
if deadline, ok := ctx.Deadline(); ok |
const head = "Content-Type:application/json; charset=utf-8\r\n\r\nPath:speech.config\r\n\r\n{\"context\":{\"synthesis\":{\"audio\":{\"metadataoptions\":{\"sentenceBoundaryEnabled\":\"false\",\"wordBoundaryEnabled\":\"true\"},\"outputFormat\":\"audio-24khz-160kbitrate-mono-mp3\"}}}}\r\n"
var body = "X-RequestId:fe83fbefb15c7739fe674d9f3e81d38f\r\nContent-Type:application/ssml+xml\r\nPath:ssml\r\n\r\n<speak version='1.0' xmlns='http://www.w3.org/2001/10/synthesis' xml:lang='en-US'><voice name='" + m.bingSayItem.Name + "'><prosody pitch='+0Hz' rate ='+0%' volume='+0%'>" + html.EscapeString(word) + "</prosody></voice></speak>\r\n"
_, err = conn.Write([]byte(head))
if err != nil {
return nil, err
}
_, err = conn.Write([]byte(body))
if err != nil {
return nil, err
}
return struct {
io.Reader
io.Closer
}{
Reader: newBindStream(ctx, conn),
Closer: conn,
}, nil
}
func (m bingSay) cache(ctx context.Context, word string) (string, error) {
word = clean(word)
file := filepath.Join(cacheDir, "bing", m.Name(), hashName(word)+".mp3")
os.MkdirAll(filepath.Dir(file), 0755)
info, err := os.Stat(file)
if err == nil && info.Size() != 0 {
return file, nil
}
r, err := m.sayReader(ctx, word)
if err != nil {
return "", err
}
defer r.Close()
tmp := file + ".tmp"
f, err := os.OpenFile(tmp, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
if err != nil {
return "", err
}
defer func() {
f.Close()
os.Remove(tmp)
}()
n, err := io.Copy(f, r)
if err != nil {
return "", err
}
if n == 0 {
return "", fmt.Errorf("can't read the respone body")
}
err = ToMp3(ctx, tmp, file)
if err != nil {
return "", err
}
return file, nil
}
func (m bingSay) Cache(ctx context.Context, word string) (string, error) {
return m.cache(ctx, word)
}
func (m bingSay) Say(ctx context.Context, word string) error {
f, err := m.cache(ctx, word)
if err != nil {
return err
}
return PlayMp3(ctx, f)
}
func (m bingSay) String() string {
return m.bingSayItem.FriendlyName
}
type bingSayItem struct {
Name string
ShortName string
Gender string
Locale string
SuggestedCodec string
FriendlyName string
Status string
}
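// A minimal usage sketch (illustrative; assumes the package-level Voice
// interface exposes Say as implemented by bingSay above):
//
//	voices, err := BingSayVoices().Voices()
//	if err == nil && len(voices) > 0 {
//		_ = voices[0].Say(context.Background(), "hello world")
//	}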
type bingStream struct {
ctx context.Context
r io.Reader
}
func newBindStream(ctx context.Context, r io.Reader) io.Reader {
return &bingStream{
ctx: ctx,
r: r,
}
}
func (b *bingStream) Read(p []byte) (n int, err error) {
err = b.ctx.Err()
if err != nil {
return 0, err
}
n, err = b.r.Read(p)
if err != nil {
return 0, err
}
if n > 2 && p[0] == 0 {
o := p[:n]
if o[1] == 128 {
if bytes.Contains(o, []byte("Content-Type:audio/mpeg\r\n")) {
sl := []byte("Path:audio\r\n")
i := bytes.Index(o, sl)
tmp := o[i+len(sl) : n]
n = copy(o, tmp)
return n, nil
}
} else if o[1] == 103 {
return 0, io.EOF
}
}
return b.Read(p)
}
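// Note on the framing handled above: the service interleaves text frames with
// binary audio frames. Judging from the byte patterns checked here, a binary
// frame starts with a two-byte header-length prefix; the MP3 payload follows
// the "Path:audio\r\n" header of frames tagged Content-Type: audio/mpeg, and
// a frame whose second byte is 103 marks end-of-stream. (Interpretation
// inferred from this reader, not from official protocol docs.)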
| {
conn.SetDeadline(deadline)
} |
LobbyFacade.ts | import { GameModeProperty } from './../enums/GameProperties';
import { Socket } from 'socket.io';
import Application from '../classes/Application';
import Lobby from '../classes/Lobby/Lobby';
import { YesOrNo } from '../classes/Votes/YesNoVote';
import PlayerFactory from '../factories/PlayerFactory';
import LobbyService from '../services/LobbyService';
import SocketIdentifierService from '../services/SocketIdentifierService';
import { Player } from '../classes';
import dayjs from 'dayjs';
export default class | {
public static create(socket: Socket): Lobby {
const lobby: Lobby = LobbyService.create(SocketIdentifierService.getIdentifiersOf(socket))
LobbyFacade.join(socket, lobby.id);
return lobby;
}
public static join(socket: Socket, lobbyId: Lobby['id']): Lobby {
const sessionOfSocket = SocketIdentifierService.getSessionOf(socket);
const lobby = Application.getLobbyStorage().get(lobbyId);
if (!lobby) return;
const player = PlayerFactory.create(sessionOfSocket);
lobby.add(player);
socket.join(lobby.getSocketRoomName());
LobbyService.linkPlayerToLobby(sessionOfSocket.sessionId, lobby.id);
socket.to(lobby.getSocketRoomName()).emit('update-lobby', lobby.toSocketJson());
return lobby;
}
public static rejoin(socket: Socket): Lobby {
const joinedLobbyId = Application.getPlayerLobbyStorage().get(SocketIdentifierService.getSessionIdentifier(socket))
if (joinedLobbyId) {
socket.join(Lobby.getLobbyName(joinedLobbyId));
}
return this.reassignHost(socket);
}
public static reassignHost(socket: Socket): Lobby {
const updatedLobby = LobbyService.reassignHost(SocketIdentifierService.getIdentifiersOf(socket));
if (updatedLobby) {
this.updateLobby(updatedLobby);
}
return updatedLobby;
}
private static emitToLobby(ev: string, lobby: Lobby, ...params: unknown[]): void {
if (!lobby) return;
params = params || [];
Application.getSocketIoInstance()
.of('/game')
.to(lobby.getSocketRoomName())
.emit(ev, ...params);
}
private static updateLobby(lobby: Lobby): void {
LobbyFacade.emitToLobby('update-lobby', lobby, lobby.toSocketJson());
}
public static startGame(socket: Socket, gameModeProperty: GameModeProperty): void {
const player = PlayerFactory.create(SocketIdentifierService.getSessionOf(socket));
const lobbyId = Application.getPlayerLobbyStorage().get(SocketIdentifierService.getSessionIdentifier(socket));
const lobby = Application.getLobbyStorage().get(lobbyId);
if (LobbyService.start(lobby, player, gameModeProperty)) {
socket.in(Lobby.getLobbyName(lobbyId)).emit('game-started', lobby.toSocketJson());
}
}
public static nextDrawing(socket: Socket): void {
const player = PlayerFactory.create(SocketIdentifierService.getSessionOf(socket));
const lobby = Application.getPlayerLobbyStorage().getLobbyOf(SocketIdentifierService.getSessionIdentifier(socket));
if (lobby?.game?.isTurnOf(player)) {
LobbyService.nextDrawing(lobby);
}
}
public static kick(socket: Socket, kickedPlayerId: string): Lobby {
const updatedLobby = LobbyService.kick(SocketIdentifierService.getIdentifiersOf(socket), kickedPlayerId);
this.updateLobby(updatedLobby);
socket.in(Lobby.getLobbyName(updatedLobby.id)).emit('kicked-player', kickedPlayerId);
return updatedLobby;
}
public static quit(socket: Socket): Lobby {
const updatedLobby = LobbyService.quit(SocketIdentifierService.getIdentifiersOf(socket))
this.updateLobby(updatedLobby);
return updatedLobby;
}
public static startVote(socket: Socket, selectedPlayer: Player): Lobby {
const playerLobby = LobbyService.startVote(SocketIdentifierService.getIdentifiersOf(socket), selectedPlayer);
this.emitToLobby('vote-started', playerLobby, playerLobby);
setTimeout(() => {
if (!playerLobby) return;
playerLobby?.game.endVote();
Application.getSocketIoInstance().of('/game').to(playerLobby.getSocketRoomName()).emit('stop-vote', playerLobby);
}, playerLobby?.game?.playerErrorVoteManager.getVoteEndDate().diff(dayjs(), 'milliseconds'));
return playerLobby;
}
public static vote(socket: Socket, vote: YesOrNo): Lobby {
const playerLobby = LobbyService.vote(SocketIdentifierService.getIdentifiersOf(socket), vote);
this.updateLobby(playerLobby);
return playerLobby;
}
public static playAgain(socket: Socket): void {
const lobby = LobbyService.playAgain(SocketIdentifierService.getIdentifiersOf(socket));
this.updateLobby(lobby);
}
public static updateGameMode(socket: Socket, gameMode: GameModeProperty): void {
const player = PlayerFactory.create(SocketIdentifierService.getSessionOf(socket));
const lobbyId = Application.getPlayerLobbyStorage().get(SocketIdentifierService.getSessionIdentifier(socket));
const lobby = Application.getLobbyStorage().get(lobbyId);
if (lobby.isPlayerHost(player.id)) {
socket.in(Lobby.getLobbyName(lobbyId)).emit('update-game-mode', gameMode);
}
}
}
| LobbyFacade |
expressions.rs | // -*- indent-tabs-mode:nil; tab-width:2; -*-
extern crate random;
extern crate tensorflow;
use std::error::Error;
use std::result::Result;
use std::process::exit;
use tensorflow::Code;
use tensorflow::expr::Compiler;
use tensorflow::expr::Placeholder;
use tensorflow::Graph;
use tensorflow::Session;
use tensorflow::SessionOptions;
use tensorflow::Status;
use tensorflow::StepWithGraph;
use tensorflow::Tensor;
fn main() {
// Putting the main code in another function serves two purposes:
// 1. We can use the try! macro.
// 2. We can call exit safely, which does not run any destructors.
exit(match run() {
Ok(_) => 0,
Err(e) => {
println!("{}", e);
1
}
})
}
struct Checker {
success: bool,
epsilon: f32,
}
impl Checker {
fn new(epsilon: f32) -> Self {
Checker {
success: true,
epsilon: epsilon,
}
}
fn check(&mut self, name: &str, expected: f32, actual: f32) {
let success = (expected - actual).abs() < self.epsilon;
println!("Checking {}: expected {}, got {}. {}",
name, expected, actual, if success {"Success!"} else {"FAIL"});
self.success &= success;
}
fn result(&self) -> Result<(), Box<Error>> {
if self.success {
Ok(())
} else {
Err(Box::new(try!(Status::new_set(Code::Internal, "At least one check failed"))))
}
}
}
fn run() -> Result<(), Box<Error>> {
// Build the graph
let mut g = Graph::new();
let y_node = {
let mut compiler = Compiler::new(&mut g);
let x_expr = <Placeholder<f32>>::new_expr(&vec![2], "x");
try!(compiler.compile(x_expr * 2.0f32 + 1.0f32))
};
let x_node = try!(g.operation_by_name_required("x"));
// This is another valid way to get x_node and y_node:
// let (x_node, y_node) = {
// let mut compiler = Compiler::new(&mut g);
// let x_expr = <Placeholder<f32>>::new_expr(&vec![2], "x");
// let x_node = try!(compiler.compile(x_expr.clone()));
// let y_node = try!(compiler.compile(x_expr * 2.0f32 + 1.0f32));
// (x_node, y_node)
// };
let options = SessionOptions::new();
let mut session = try!(Session::new(&options, &g));
// Evaluate the graph.
let mut x = <Tensor<f32>>::new(&[2]);
x[0] = 2.0; | step.add_input(&x_node, 0, &x);
let output_token = step.request_output(&y_node, 0);
session.run(&mut step).unwrap();
// Check our results.
let output_tensor = try!(step.take_output::<f32>(output_token));
let data = output_tensor.data();
let mut checker = Checker::new(1e-3);
checker.check("data[0]", 5.0, data[0]);
checker.check("data[1]", 7.0, data[1]);
checker.result()
} | x[1] = 3.0;
let mut step = StepWithGraph::new(); |
database.py | import mysql
import pickle
import hashlib
import mysql.connector
from mysql.connector import pooling
import settings
import datetime
from time import sleep
def initDatabase():
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("SET sql_notes = 0; ")
cursor.execute("create database IF NOT EXISTS youtubebot")
cursor.execute("USE youtubebot;")
cursor.execute("SET sql_notes = 0; ")
cursor.execute("set global max_allowed_packet=67108864;")
cursor.execute("create table IF NOT EXISTS users (username varchar(70),password varchar(80), status varchar(80));")
cursor.execute("create table IF NOT EXISTS videogenerators (generatorname varchar(70),password varchar(80), status varchar(80));")
# youtube account, estimated length, actual length
cursor.execute("create table IF NOT EXISTS scripts (scriptno int NOT NULL AUTO_INCREMENT, PRIMARY KEY (scriptno), submission_id varchar(70), subredditid varchar(70), subreddit varchar(70), url varchar(2083), timecreated DATETIME,"
"status varchar(70), editedby varchar(70), scripttitle varchar(2083), scriptauthor varchar(70), ups int, downs int, num_comments int, timegathered DATETIME, timeuploaded DATETIME, sceduledupload DATETIME, esttime time, actualtime time, rawscript MEDIUMBLOB, "
"finalscript MEDIUMBLOB);")
cursor.execute("SET sql_notes = 1; ")
connection_pool = None
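# NOTE: connection_pool must be assigned before initDatabase() or any helper
# below is called; every helper checks a connection out of this pool and
# closes it when done. An illustrative setup (pool name/size and credentials
# are assumptions, not part of this module):
#   connection_pool = pooling.MySQLConnectionPool(
#       pool_name="youtubebot", pool_size=5,
#       host="localhost", user="root", password="...")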
def login(username, password):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT count(*) FROM users WHERE username = %s AND password = %s;"%(repr(username), repr(password))
cursor.execute(query)
result = cursor.fetchall()
cursor.close()
connection_object.close()
    return result[0][0] > 0
def getScriptEditInformation():
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT scriptno, status, editedby FROM scripts WHERE status = 'EDITING' AND editedby IS NOT NULL;"
cursor.execute(query)
result = cursor.fetchall()
results = []
for res in result:
results.append(res)
cursor.close()
connection_object.close()
return results
def completeUpload(scriptno, timeuploaded, scedualedrelease):
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "UPDATE scripts " \
"SET status = 'SUCCESSUPLOAD', timeuploaded = %s, sceduledupload = %s WHERE scriptno = %s;"
args = (timeuploaded, scedualedrelease, scriptno)
cursor.execute(query, args)
connection_object.commit()
cursor.close()
connection_object.close()
def getLastUploadedScripts():
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
now = datetime.datetime.now()
cursor.execute("USE youtubebot;")
query = "SELECT timeuploaded "\
"from scripts "\
"WHERE timeuploaded <= '%s' "\
"ORDER BY timeuploaded DESC "\
"LIMIT 6;" % (now.strftime('%Y-%m-%d %H:%M:%S'))
cursor.execute(query)
result = cursor.fetchall()
results = []
for res in result:
results.append(res)
cursor.close()
connection_object.close()
return results
def getCompletedScripts():
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT scriptno, status, editedby FROM scripts WHERE status = 'COMPLETE' AND editedby IS NOT NULL;"
cursor.execute(query)
result = cursor.fetchall()
results = []
for res in result:
results.append(res)
cursor.close()
connection_object.close()
return results
def getOnlineUsers():
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT username FROM users WHERE status = 'ONLINE';"
cursor.execute(query)
result = cursor.fetchall()
results = []
for res in result:
results.append(res[0])
cursor.close()
connection_object.close()
return results
def updateScriptStatus(status, user, scriptid):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
# Leave `user` as None: the parameterized query below then stores a real SQL
# NULL (assigning the string "NULL" would break the IS NOT NULL filters above).
cursor.execute("USE youtubebot;")
query = "UPDATE scripts " \
"SET status = %s, editedby = %s WHERE scriptno = %s;"
args = (status, user, scriptid)
cursor.execute(query, args)
connection_object.commit()
cursor.close()
connection_object.close()
def updateScriptStatusById(status, user, scriptid):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
# As above, leave `user` as None so a real SQL NULL is stored.
cursor.execute("USE youtubebot;")
query = "UPDATE scripts " \
"SET status = %s, editedby = %s WHERE submission_id = %s;"
args = (status, user, scriptid)
cursor.execute(query, args)
connection_object.commit()
cursor.close()
connection_object.close()
def updateUserStatus(user, status):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
# Pass `status` through unchanged: None becomes SQL NULL via the parameterized
# query, and repr() would store the value with spurious quotes around it.
cursor.execute("USE youtubebot;")
query = "UPDATE users " \
"SET status = %s WHERE username = %s;"
args = (status, user)
cursor.execute(query, args)
connection_object.commit()
cursor.close()
connection_object.close()
def getScriptStatus(scriptno):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT status " \
"FROM scripts WHERE scriptno = %s;"%(scriptno)
cursor.execute(query)
result = cursor.fetchall()
cursor.close()
connection_object.close()
return result[0][0]
def | ():
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT scriptno, submission_id, status " \
"FROM scripts;"
cursor.execute(query)
result = cursor.fetchall()
cursor.close()
connection_object.close()
return result
# NOTE: this shadows the zero-argument getCompletedScripts defined above,
# which becomes unreachable once the module is loaded.
def getCompletedScripts(back):
global connection_pool
try:
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT scriptno, scripttitle, scriptauthor, ups, finalscript " \
"FROM scripts WHERE status = 'COMPLETE' AND finalscript IS NOT NULL ORDER BY ups DESC " \
"LIMIT %s;"%back
cursor.execute(query)
result = cursor.fetchall()
results = []
for res in result:
scriptno = res[0]
scripttitle = res[1]
author = res[2]
ups = res[3]
scriptpayload = pickle.loads(res[4])
load = (scriptno, scripttitle, author, ups, scriptpayload)
results.append(load)
cursor.close()
connection_object.close()
return results
except Exception as e:
print("Mysql Error with downloading completed scripts")
print(e)
pass
def getScripts(back, filter):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT scriptno, subreddit, scripttitle, scriptauthor, ups, downs, rawscript, submission_id, status, editedby, num_comments " \
"FROM scripts WHERE status = 'RAW' or status = 'EDITING' ORDER BY %s DESC " \
"LIMIT %s;"%(filter, back)
cursor.execute(query)
result = cursor.fetchall()
results = []
for res in result:
scriptno = res[0]
subreddit = res[1]
title = res[2]
author = res[3]
ups = res[4]
downs = res[5]
rawscript = pickle.loads(res[6])
sub_id = res[7]
status = res[8]
editedby = res[9]
num_comments = res[10]
load = (scriptno, subreddit, title, author, ups, downs, rawscript, sub_id, status, editedby, num_comments)
results.append(load)
cursor.close()
connection_object.close()
return results
def addUser(username, password):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "INSERT INTO users(username, password) " \
"VALUES(%s, %s)"
args = (username, hashlib.md5(password.encode()).hexdigest())
cursor.execute(query, args)
connection_object.commit()
cursor.close()
connection_object.close()
def addVideoGenerator(name, password):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "INSERT INTO videogenerators(generatorname, password) " \
"VALUES(%s, %s)"
args = (name, hashlib.md5(password.encode()).hexdigest())
cursor.execute(query, args)
connection_object.commit()
cursor.close()
connection_object.close()
def beginDataBaseConnection():
global connection_pool
connection_pool = pooling.MySQLConnectionPool(
pool_size=32,
pool_reset_session=True,
host=settings.database_host,
user=settings.database_user,
passwd=settings.database_password,
auth_plugin='mysql_native_password'
)
print("Started database connection")
def uploadVid(payload, scriptno):
global connection_pool
try:
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
cursor.execute("set global max_allowed_packet=67108864;")
connection_object.commit()
load = pickle.dumps(payload)
print("%s SERVER attempting to upload script no %s (%s) to database" % (datetime.datetime.now(), scriptno, str((len(load) / 1000000)) + "MB"))
query = "UPDATE scripts SET finalscript = %s WHERE scriptno = %s " \
""
args = (load, scriptno)
cursor.execute(query, args)
connection_object.commit()
except Exception as e:
print("Error while connecting to MySQL using Connection pool ", e)
return False
finally:
if (connection_object.is_connected()):
cursor.close()
connection_object.close()
return True
def updateSubmission(submission):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
rawscript = pickle.dumps(submission.comments)
query = "UPDATE scripts " \
"SET scripttitle = %s, rawscript = %s, ups = %s, downs = %s, num_comments = %s, timecreated = %s, timegathered = %s WHERE submission_id = %s"
args = (submission.title, (rawscript), submission.upvotes, submission.downvotes, submission.amountcomments,
submission.timecreated, submission.timegathered, submission.submission_id)
cursor.execute(query, args)
connection_object.commit()
cursor.close()
connection_object.close()
def addSubmission(submission):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
rawscript = pickle.dumps(submission.comments)
query = "INSERT INTO scripts(subredditid, submission_id, subreddit, url, timecreated, status, scripttitle, scriptauthor, timegathered, rawscript, ups, downs, num_comments) " \
"VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
args = ((submission.subredditid), (submission.submission_id),
(submission.subreddit), (submission.link), (submission.timecreated),
("RAW"), submission.title, (submission.author), (submission.timegathered), rawscript,
submission.upvotes, submission.downvotes, submission.amountcomments)
cursor.execute(query, args)
connection_object.commit()
cursor.close()
connection_object.close()
def checkValueExists(column, value):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
# The column name cannot be a bound parameter, but the value can be.
query = "SELECT count(*) FROM scripts WHERE %s = %%s;" % column
cursor.execute(query, (value,))
result = cursor.fetchall()
cursor.close()
connection_object.close()
return result[0][0] != 0
def getVideoCountFromStatus(status):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
query = "SELECT count(*) FROM scripts WHERE status = %s;"
cursor.execute(query, (status,))
result = cursor.fetchall()
cursor.close()
connection_object.close()
return (result[0][0])
def getRowCount(tablename):
global connection_pool
connection_object = connection_pool.get_connection()
cursor = connection_object.cursor()
cursor.execute("USE youtubebot;")
cursor.execute("select count(*) from %s"%tablename)
result = cursor.fetchall()
return (result[0][0]) | getScriptIds |
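A minimal usage sketch for the pool-based helpers above (hypothetical credentials; assumes a reachable MySQL server and that the settings module exposes database_host, database_user and database_password):

# Hypothetical driver script, e.g. run_db.py, importing the module above.
import database

database.beginDataBaseConnection()   # must run first: creates the global pool
database.initDatabase()              # creates the youtubebot schema and tables
database.addUser("alice", "s3cret")  # password stored as an md5 hex digest
print(database.login("alice", "s3cret"))  # True with the hashed comparison in login()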
backoff.rs | use std::prelude::v1::*;
use core::cell::Cell;
use core::fmt;
use core::sync::atomic;
const SPIN_LIMIT: u32 = 6;
const YIELD_LIMIT: u32 = 10;
/// Performs exponential backoff in spin loops.
///
/// Backing off in spin loops reduces contention and improves overall performance.
///
/// This primitive can execute *YIELD* and *PAUSE* instructions, yield the current thread to the OS
/// scheduler, and tell when is a good time to block the thread using a different synchronization
/// mechanism. Each step of the back off procedure takes roughly twice as long as the previous
/// step.
///
/// # Examples
///
/// Backing off in a lock-free loop:
///
/// ```
/// use crossbeam_utils::Backoff;
/// use std::sync::atomic::AtomicUsize;
/// use std::sync::atomic::Ordering::SeqCst;
///
/// fn fetch_mul(a: &AtomicUsize, b: usize) -> usize {
/// let backoff = Backoff::new();
/// loop {
/// let val = a.load(SeqCst);
/// if a.compare_and_swap(val, val.wrapping_mul(b), SeqCst) == val {
/// return val;
/// }
/// backoff.spin();
/// }
/// }
/// ```
///
/// Waiting for an [`AtomicBool`] to become `true`:
///
/// ```
/// use crossbeam_utils::Backoff;
/// use std::sync::atomic::AtomicBool;
/// use std::sync::atomic::Ordering::SeqCst;
///
/// fn spin_wait(ready: &AtomicBool) {
/// let backoff = Backoff::new();
/// while !ready.load(SeqCst) {
/// backoff.snooze();
/// }
/// }
/// ```
///
/// Waiting for an [`AtomicBool`] to become `true` and parking the thread after a long wait.
/// Note that whoever sets the atomic variable to `true` must notify the parked thread by calling
/// [`unpark()`]:
///
/// ```
/// use crossbeam_utils::Backoff;
/// use std::sync::atomic::AtomicBool;
/// use std::sync::atomic::Ordering::SeqCst;
/// use std::thread;
///
/// fn blocking_wait(ready: &AtomicBool) {
/// let backoff = Backoff::new();
/// while !ready.load(SeqCst) {
/// if backoff.is_completed() {
/// thread::park();
/// } else {
/// backoff.snooze();
/// }
/// }
/// }
/// ```
///
/// [`is_completed`]: struct.Backoff.html#method.is_completed
/// [`std::thread::park()`]: https://doc.rust-lang.org/std/thread/fn.park.html
/// [`Condvar`]: https://doc.rust-lang.org/std/sync/struct.Condvar.html
/// [`AtomicBool`]: https://doc.rust-lang.org/std/sync/atomic/struct.AtomicBool.html
/// [`unpark()`]: https://doc.rust-lang.org/std/thread/struct.Thread.html#method.unpark
pub struct Backoff {
step: Cell<u32>,
}
impl Backoff {
/// Creates a new `Backoff`.
///
/// # Examples
///
/// ```
/// use crossbeam_utils::Backoff;
///
/// let backoff = Backoff::new();
/// ```
#[inline]
pub fn new() -> Self {
Backoff { step: Cell::new(0) }
}
/// Resets the `Backoff`.
///
/// # Examples
///
/// ```
/// use crossbeam_utils::Backoff;
///
/// let backoff = Backoff::new();
/// backoff.reset();
/// ```
#[inline]
pub fn reset(&self) {
self.step.set(0);
}
/// Backs off in a lock-free loop.
///
/// This method should be used when we need to retry an operation because another thread made
/// progress.
///
/// The processor may yield using the *YIELD* or *PAUSE* instruction.
///
/// # Examples
///
/// Backing off in a lock-free loop:
///
/// ```
/// use crossbeam_utils::Backoff;
/// use std::sync::atomic::AtomicUsize;
/// use std::sync::atomic::Ordering::SeqCst;
///
/// fn fetch_mul(a: &AtomicUsize, b: usize) -> usize {
/// let backoff = Backoff::new();
/// loop {
/// let val = a.load(SeqCst);
/// if a.compare_and_swap(val, val.wrapping_mul(b), SeqCst) == val {
/// return val;
/// }
/// backoff.spin();
/// }
/// }
///
/// let a = AtomicUsize::new(7);
/// assert_eq!(fetch_mul(&a, 8), 7);
/// assert_eq!(a.load(SeqCst), 56);
/// ```
#[inline]
pub fn spin(&self) {
for _ in 0..1 << self.step.get().min(SPIN_LIMIT) {
atomic::spin_loop_hint();
}
if self.step.get() <= SPIN_LIMIT {
self.step.set(self.step.get() + 1);
}
}
/// Backs off in a blocking loop.
///
/// This method should be used when we need to wait for another thread to make progress.
///
/// The processor may yield using the *YIELD* or *PAUSE* instruction and the current thread
/// may yield by giving up a timeslice to the OS scheduler.
///
/// In `#[no_std]` environments, this method is equivalent to [`spin`].
///
/// If possible, use [`is_completed`] to check when it is advised to stop using backoff and
/// block the current thread using a different synchronization mechanism instead.
///
/// [`spin`]: struct.Backoff.html#method.spin
/// [`is_completed`]: struct.Backoff.html#method.is_completed
///
/// # Examples
///
/// Waiting for an [`AtomicBool`] to become `true`:
///
/// ```
/// use crossbeam_utils::Backoff;
/// use std::sync::Arc;
/// use std::sync::atomic::AtomicBool;
/// use std::sync::atomic::Ordering::SeqCst;
/// use std::thread;
/// use std::time::Duration;
///
/// fn spin_wait(ready: &AtomicBool) {
/// let backoff = Backoff::new();
/// while !ready.load(SeqCst) {
/// backoff.snooze();
/// }
/// }
///
/// let ready = Arc::new(AtomicBool::new(false));
/// let ready2 = ready.clone();
///
/// thread::spawn(move || {
/// thread::sleep(Duration::from_millis(100));
/// ready2.store(true, SeqCst);
/// });
///
/// assert_eq!(ready.load(SeqCst), false);
/// spin_wait(&ready);
/// assert_eq!(ready.load(SeqCst), true);
/// ```
///
/// [`AtomicBool`]: https://doc.rust-lang.org/std/sync/atomic/struct.AtomicBool.html
#[inline]
pub fn snooze(&self) {
if self.step.get() <= SPIN_LIMIT {
for _ in 0..1 << self.step.get() {
atomic::spin_loop_hint();
}
} else {
#[cfg(not(feature = "std"))]
for _ in 0..1 << self.step.get() {
atomic::spin_loop_hint();
}
#[cfg(feature = "std")]
::std::thread::yield_now();
}
if self.step.get() <= YIELD_LIMIT |
}
/// Returns `true` if exponential backoff has completed and blocking the thread is advised.
///
/// # Examples
///
/// Waiting for an [`AtomicBool`] to become `true` and parking the thread after a long wait:
///
/// ```
/// use crossbeam_utils::Backoff;
/// use std::sync::Arc;
/// use std::sync::atomic::AtomicBool;
/// use std::sync::atomic::Ordering::SeqCst;
/// use std::thread;
/// use std::time::Duration;
///
/// fn blocking_wait(ready: &AtomicBool) {
/// let backoff = Backoff::new();
/// while !ready.load(SeqCst) {
/// if backoff.is_completed() {
/// thread::park();
/// } else {
/// backoff.snooze();
/// }
/// }
/// }
///
/// let ready = Arc::new(AtomicBool::new(false));
/// let ready2 = ready.clone();
/// let waiter = thread::current();
///
/// thread::spawn(move || {
/// thread::sleep(Duration::from_millis(100));
/// ready2.store(true, SeqCst);
/// waiter.unpark();
/// });
///
/// assert_eq!(ready.load(SeqCst), false);
/// blocking_wait(&ready);
/// assert_eq!(ready.load(SeqCst), true);
/// ```
///
/// [`AtomicBool`]: https://doc.rust-lang.org/std/sync/atomic/struct.AtomicBool.html
#[inline]
pub fn is_completed(&self) -> bool {
self.step.get() > YIELD_LIMIT
}
}
impl fmt::Debug for Backoff {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Backoff")
.field("step", &self.step)
.field("is_completed", &self.is_completed())
.finish()
}
}
impl Default for Backoff {
fn default() -> Backoff {
Backoff::new()
}
}
| {
self.step.set(self.step.get() + 1);
} |
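For intuition, here is a small Python sketch (not part of the crate) of the snooze() schedule above: busy-wait iterations double each step up to SPIN_LIMIT, then the thread yields to the OS scheduler until YIELD_LIMIT, after which is_completed() turns true:

SPIN_LIMIT, YIELD_LIMIT = 6, 10  # mirror the constants at the top of backoff.rs

for step in range(YIELD_LIMIT + 2):
    if step <= SPIN_LIMIT:
        print(f"step {step}: spin {2 ** step} iterations")    # 1, 2, 4, ..., 64
    elif step <= YIELD_LIMIT:
        print(f"step {step}: yield the thread to the OS scheduler")
    else:
        print(f"step {step}: is_completed() -> park or block instead")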
renamer.rs | use std::collections::HashMap;
use std::collections::HashSet;
use std::hash::Hash;
use std::iter::FromIterator;
struct Scope<T> {
name_map: HashMap<T, String>,
used: HashSet<String>,
}
impl<T: Clone + Eq + Hash> Scope<T> {
pub fn new() -> Self {
Self::new_with_reserved(HashSet::new())
}
pub fn new_with_reserved(reserved: HashSet<String>) -> Self {
Scope {
name_map: HashMap::new(),
used: reserved,
}
}
pub fn insert(&mut self, key: T, val: String) {
self.name_map.insert(key, val);
}
pub fn contains_key(&self, key: &T) -> bool {
self.name_map.contains_key(key)
}
pub fn contains_value(&self, val: &str) -> bool {
self.used.contains(val)
}
pub fn reserve(&mut self, val: String) {
self.used.insert(val);
}
}
pub struct Renamer<T> {
scopes: Vec<Scope<T>>,
next_fresh: u64,
}
impl<T: Clone + Eq + Hash> Renamer<T> {
/// Creates a new renaming environment with a single, empty scope. The given set of
/// reserved names will exclude those names from being chosen as the mangled names from
/// the insert method.
pub fn new(reserved_names: &[&str]) -> Self {
let set: HashSet<String> = HashSet::from_iter(reserved_names.iter().map(|&x| x.to_owned()));
Renamer {
scopes: vec![Scope::new_with_reserved(set)],
next_fresh: 0,
}
}
/// Introduces a new name binding scope
pub fn add_scope(&mut self) {
self.scopes.push(Scope::new())
}
/// Drops the current name binding scope
pub fn drop_scope(&mut self) {
if self.scopes.len() == 1 {
panic!("Attempting to drop outermost scope")
}
self.scopes.pop();
}
fn current_scope(&self) -> &Scope<T> {
self.scopes.last().expect("Expected a scope")
}
fn current_scope_mut(&mut self) -> &mut Scope<T> {
self.scopes.last_mut().expect("Expected a scope")
}
/// Is the mangled name currently in use
fn is_target_used(&self, key: &str) -> bool {
let key = key.to_string();
self.scopes.iter().any(|x| x.contains_value(&key))
}
/// Assigns a name that doesn't collide with anything in the context of a particular
/// scope, defaulting to the current scope if None is provided
fn pick_name_in_scope(&mut self, basename: &str, scope: Option<usize>) -> String {
let mut target = basename.to_string();
for i in 0.. {
if self.is_target_used(&target) {
target = format!("{}_{}", basename, i);
} else {
break;
}
}
match scope {
Some(scope_index) => self.scopes[scope_index].reserve(target.clone()),
None => self.current_scope_mut().reserve(target.clone()),
}
target
}
pub fn pick_name(&mut self, basename: &str) -> String {
self.pick_name_in_scope(basename, None)
}
/// Permanently assign a name that doesn't collide with anything
/// currently in scope, and also never goes out of scope
pub fn pick_name_root(&mut self, basename: &str) -> String {
self.pick_name_in_scope(basename, Some(0))
}
/// Introduce a new name binding into a particular scope or the current one if None is provided.
/// If the key is unbound in the scope then Some of the resulting mangled name is returned,
/// otherwise None.
fn insert_in_scope(&mut self, key: T, basename: &str, scope: Option<usize>) -> Option<String> {
let contains_key = match scope {
Some(scope_index) => self.scopes[scope_index].contains_key(&key),
None => self.current_scope().contains_key(&key),
};
if contains_key {
return None;
}
let target = self.pick_name_in_scope(basename, scope);
match scope {
Some(scope_index) => self.scopes[scope_index].insert(key, target.clone()),
None => self.current_scope_mut().insert(key, target.clone()),
}
Some(target)
}
/// Introduce a new name binding into the current scope. If the key is unbound in
/// the current scope then Some of the resulting mangled name is returned, otherwise
/// None.
pub fn insert(&mut self, key: T, basename: &str) -> Option<String> {
self.insert_in_scope(key, basename, None)
}
/// Introduce a new name binding into the root scope. If the key is unbound in
/// the root scope then Some of the resulting mangled name is returned, otherwise
/// None.
pub fn insert_root(&mut self, key: T, basename: &str) -> Option<String> {
self.insert_in_scope(key, basename, Some(0))
}
/// Assign a name in the current scope without reservation or checking for overlap.
/// This is intended to be used when one key is going to be merged with another.
pub fn alias(&mut self, new_key: T, old_key: &T) {
match self.get(old_key) {
Some(name) => self.current_scope_mut().insert(new_key, name),
None => panic!("Failed to overlap name"),
}
}
/// Lookup the given key in all of the scopes returning Some of the matched mangled name
/// if one exists, otherwise None.
pub fn | (&self, key: &T) -> Option<String> {
for scope in self.scopes.iter().rev() {
if let Some(target) = scope.name_map.get(key) {
return Some(target.to_string());
}
}
None
}
pub fn fresh(&mut self) -> String {
let fresh = self.next_fresh;
self.next_fresh += 1;
self.pick_name(&format!("fresh{}", fresh))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn simple() {
let mut renamer = Renamer::new(&["reserved"]);
let one1 = renamer.insert(1, "one").unwrap();
let one2 = renamer.get(&1).unwrap();
assert_eq!(one1, one2);
let reserved1 = renamer.insert(2, "reserved").unwrap();
let reserved2 = renamer.get(&2).unwrap();
assert_eq!(reserved1, "reserved_0");
assert_eq!(reserved2, "reserved_0");
}
#[test]
fn scoped() {
let mut renamer = Renamer::new(&[]);
let one1 = renamer.insert(10, "one").unwrap();
renamer.add_scope();
let one2 = renamer.get(&10).unwrap();
assert_eq!(one1, one2);
let one3 = renamer.insert(20, "one").unwrap();
let one4 = renamer.get(&20).unwrap();
assert_eq!(one3, one4);
assert_ne!(one3, one2);
renamer.drop_scope();
let one5 = renamer.get(&10).unwrap();
assert_eq!(one5, one2);
}
#[test]
fn forgets() {
let mut renamer = Renamer::new(&[]);
assert_eq!(renamer.get(&1), None);
renamer.add_scope();
renamer.insert(1, "example");
renamer.drop_scope();
assert_eq!(renamer.get(&1), None);
}
}
| get |
setup.go | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package setup
import (
"context"
"fmt"
"time"
"github.com/google/exposure-notifications-server/internal/authorizedapp"
"github.com/google/exposure-notifications-server/internal/database"
"github.com/google/exposure-notifications-server/internal/envconfig"
"github.com/google/exposure-notifications-server/internal/logging"
"github.com/google/exposure-notifications-server/internal/metrics"
"github.com/google/exposure-notifications-server/internal/secrets"
"github.com/google/exposure-notifications-server/internal/serverenv"
"github.com/google/exposure-notifications-server/internal/signing"
"github.com/google/exposure-notifications-server/internal/storage"
)
// DBConfigProvider ensures that the environment config can provide a DB config.
// All binaries in this application connect to the database via the same method.
type DBConfigProvider interface {
DB() *database.Config
}
// AuthorizedAppConfigProvider signals that the config provided knows how to
// configure authorized apps.
type AuthorizedAppConfigProvider interface {
AuthorizedAppConfig() *authorizedapp.Config
}
// KeyManagerProvider is a marker interface indicating the KeyManagerProvider should be installed.
type KeyManagerProvider interface {
KeyManager() bool
}
// BlobStorageConfigProvider is a marker interface indicating the BlobStorage interface should be installed.
type BlobStorageConfigProvider interface {
BlobStorage() bool
}
// Defer is a function returned from Setup that callers should defer until exit.
type Defer func()
// Setup runs common initialization code for all servers.
func Setup(ctx context.Context, config DBConfigProvider) (*serverenv.ServerEnv, Defer, error) {
logger := logging.FromContext(ctx)
// Can be changed with a different secret manager interface.
// TODO(mikehelmick): Make this extensible to other providers.
// TODO(sethvargo): Make TTL configurable.
sm, err := secrets.NewCacher(ctx, secrets.NewGCPSecretManager, 5*time.Minute)
if err != nil {
return nil, nil, fmt.Errorf("unable to connect to secret manager: %v", err)
}
logger.Infof("Effective environment variables: %+v", config)
if err := envconfig.Process(ctx, config, sm); err != nil {
return nil, nil, fmt.Errorf("error loading environment variables: %v", err)
}
// Start building serverenv opts
opts := []serverenv.Option{
serverenv.WithSecretManager(sm),
serverenv.WithMetricsExporter(metrics.NewLogsBasedFromContext),
}
// TODO(mikehelmick): Make this extensible to other providers.
if _, ok := config.(KeyManagerProvider); ok {
km, err := signing.NewGCPKMS(ctx)
if err != nil {
return nil, nil, fmt.Errorf("unable to connect to key manager: %w", err)
}
opts = append(opts, serverenv.WithKeyManager(km))
}
// TODO(mikehelmick): Make this extensible to other providers.
if _, ok := config.(BlobStorageConfigProvider); ok {
storage, err := storage.NewGoogleCloudStorage(ctx)
if err != nil |
opts = append(opts, serverenv.WithBlobStorage(storage))
}
// Setup the database connection.
db, err := database.NewFromEnv(ctx, config.DB())
if err != nil {
return nil, nil, fmt.Errorf("unable to connect to database: %v", err)
}
{
// Log the database config, but omit the password field.
redactedDB := config.DB()
redactedDB.Password = "<hidden>"
logger.Infof("Effective DB config: %+v", redactedDB)
}
opts = append(opts, serverenv.WithDatabase(db))
// AuthorizedApp must come after database setup due to the dependency.
if typ, ok := config.(AuthorizedAppConfigProvider); ok {
logger.Infof("Effective AuthorizedApp config: %+v", typ.AuthorizedAppConfig())
provider, err := authorizedapp.NewDatabaseProvider(ctx, db, typ.AuthorizedAppConfig(), authorizedapp.WithSecretManager(sm))
if err != nil {
// Ensure the database is closed on an error.
defer db.Close(ctx)
return nil, nil, fmt.Errorf("unable to create AuthorizedApp provider: %v", err)
}
opts = append(opts, serverenv.WithAuthorizedAppProvider(provider))
}
return serverenv.New(ctx, opts...), func() { db.Close(ctx) }, nil
}
| {
return nil, nil, fmt.Errorf("unable to connect to storage system: %v", err)
} |
project-role-policies-edit.tsx | import * as React from 'react';
import * as ReactForm from 'react-form';
import {DataLoader} from '../../../shared/components';
import {Application} from '../../../shared/models';
import {services} from '../../../shared/services';
interface ProjectRolePoliciesProps {
projName: string;
roleName: string;
policies: string[];
formApi: ReactForm.FormApi;
newRole: boolean;
}
function generatePolicy(project: string, role: string, action?: string, object?: string, permission?: string): string {
return `p, proj:${project}:${role}, applications, ${action || ''}, ${object ? project + '/' + object : ''}, ${permission || ''}`;
}
const actions = ['get', 'create', 'update', 'delete', 'sync', 'override'];
export const ProjectRolePoliciesEdit = (props: ProjectRolePoliciesProps) => (
<DataLoader load={() => services.applications.list([props.projName], {fields: ['items.metadata.name']}).then(list => list.items)}>
{applications => (
<React.Fragment>
<h4>Policy Rules</h4> | <div className='row'>
<div className='columns small-4'>ACTION</div>
<div className='columns small-4'>APPLICATION</div>
<div className='columns small-4'>PERMISSION</div>
</div>
</div>
<div className='argo-table-list__row'>
{props.policies.map((policy, i) => (
<Policy
key={i}
field={['policies', i]}
formApi={props.formApi}
policy={policy}
projName={props.projName}
roleName={props.roleName}
deletePolicy={() => props.formApi.setValue('policies', removeEl(props.policies, i))}
availableApps={applications}
actions={actions}
/>
))}
<div className='row'>
<div className='columns small-4'>
<a
onClick={() => {
const newPolicy = generatePolicy(props.projName, props.roleName);
props.formApi.setValue('policies', (props.formApi.values.policies || []).concat(newPolicy));
}}>
Add policy
</a>
</div>
</div>
</div>
</div>
</React.Fragment>
)}
</DataLoader>
);
interface PolicyProps {
projName: string;
roleName: string;
policy: string;
fieldApi: ReactForm.FieldApi;
actions: string[];
availableApps: Application[];
deletePolicy: () => void;
}
function removeEl(items: any[], index: number) {
items.splice(index, 1);
return items;
}
class PolicyWrapper extends React.Component<PolicyProps, any> {
public render() {
return (
<div className='row'>
<div className='columns small-4'>
<datalist id='action'>
{this.props.actions !== undefined && this.props.actions.length > 0 && this.props.actions.map(action => <option key={action}>{action}</option>)}
<option key='wildcard'>*</option>
</datalist>
<input
className='argo-field'
list='action'
value={this.getAction()}
onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
this.setAction(e.target.value);
}}
/>
</div>
<div className='columns small-4'>
<datalist id='object'>
{this.props.availableApps !== undefined &&
this.props.availableApps.length > 0 &&
this.props.availableApps.map(app => (
<option key={app.metadata.name}>
{this.props.projName}/{app.metadata.name}
</option>
))}
<option key='wildcard'>{this.props.projName}</option>
</datalist>
<input
className='argo-field'
list='object'
value={this.getObject()}
onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
this.setObject(e.target.value);
}}
/>
</div>
<div className='columns small-3'>
<datalist id='permission'>
<option>allow</option>
<option>deny</option>
</datalist>
<input
className='argo-field'
list='permission'
value={this.getPermission()}
onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
this.setPermission(e.target.value);
}}
/>
</div>
<div className='columns small-1'>
<i className='fa fa-times' onClick={() => this.props.deletePolicy()} style={{cursor: 'pointer'}} />
</div>
</div>
);
}
private getAction(): string {
const fields = (this.props.fieldApi.getValue() as string).split(',');
if (fields.length !== 6) {
return '';
}
return fields[3].trim();
}
private setAction(action: string) {
const fields = (this.props.fieldApi.getValue() as string).split(',');
if (fields.length !== 6) {
this.props.fieldApi.setValue(generatePolicy(this.props.projName, this.props.roleName, action, '', ''));
return;
}
fields[3] = ` ${action}`;
this.props.fieldApi.setValue(fields.join());
}
private getObject(): string {
const fields = (this.props.fieldApi.getValue() as string).split(',');
if (fields.length !== 6) {
return '';
}
return fields[4].trim();
}
private setObject(object: string) {
const fields = (this.props.fieldApi.getValue() as string).split(',');
if (fields.length !== 6) {
this.props.fieldApi.setValue(generatePolicy(this.props.projName, this.props.roleName, '', object, ''));
return;
}
fields[4] = ` ${object}`;
this.props.fieldApi.setValue(fields.join());
}
private getPermission(): string {
const fields = (this.props.fieldApi.getValue() as string).split(',');
if (fields.length !== 6) {
return '';
}
return fields[5].trim();
}
private setPermission(permission: string) {
const fields = (this.props.fieldApi.getValue() as string).split(',');
if (fields.length !== 6) {
this.props.fieldApi.setValue(generatePolicy(this.props.projName, this.props.roleName, '', '', permission));
return;
}
fields[5] = ` ${permission}`;
this.props.fieldApi.setValue(fields.join());
}
}
const Policy = ReactForm.FormField(PolicyWrapper); | <div>Manage this role's permissions to applications</div>
<div className='argo-table-list'>
<div className='argo-table-list__head'> |
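For reference, a hedged example of what generatePolicy above produces (hypothetical project and role names): generatePolicy('demo', 'ci', 'sync', '*', 'allow') returns the Casbin-style rule `p, proj:demo:ci, applications, sync, demo/*, allow`.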
test-calls-example-2.6.x.py | # Download the Python helper library from twilio.com/docs/python/install
import os
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
# To set up environmental variables, see http://twil.io/secure | auth_token = os.environ['TWILIO_AUTH_TOKEN']
client = Client(account_sid, auth_token)
call = client.calls.create(
url="http://demo.twilio.com/docs/voice.xml",
to="+15005550003",
from_="+15005550006"
)
print(call.sid) | account_sid = os.environ['TWILIO_ACCOUNT_SID'] |
generate_thumbnail.py | import os
from PIL import Image
import argparse
parser = argparse.ArgumentParser(description='Generate thumbnails for all images in a folder.')
parser.add_argument('-f', '--folder_path', type=str, help='folder with images which need a thumbnail', required=True)
args = parser.parse_args()
folder_path = args.folder_path + "/"
print("Observed this: " + folder_path)
size = [512, 512]
def get_thumbnail(folder, filename, box, fit=True):
img = Image.open(folder + filename)
if img:
# preresize image with factor 2, 4, 8 and fast algorithm
factor = 1
while img.size[0] / factor > 2 * box[0] and img.size[1] * 2 / factor > 2 * box[1]:
factor *= 2
if factor > 1:
img.thumbnail((img.size[0] / factor, img.size[1] / factor), Image.NEAREST)
# calculate the cropping box and get the cropped part
if fit:
x1 = y1 = 0
x2, y2 = img.size
wRatio = 1.0 * x2 / box[0]
hRatio = 1.0 * y2 / box[1]
if hRatio > wRatio: | x2 = int(x2 / 2 + box[0] * hRatio / 2)
img = img.crop((x1, y1, x2, y2))
# Resize the image with best quality algorithm ANTI-ALIAS
img.thumbnail(box, Image.ANTIALIAS)
output = folder + filename + "_thumbnail.jpg"
img.save(output, "JPEG")
return output
for infile in os.listdir(folder_path):
print("Removing old thumbnail entries")
if "thumbnail" in infile:
print("Removing " + infile)
os.remove(folder_path + infile)
for infile in os.listdir(folder_path):
print(folder_path + infile)
try:
print(get_thumbnail(folder_path, infile, (512, 512)))
except Exception:
print("Could not open " + infile) | y1 = int(y2 / 2 - box[1] * wRatio / 2)
y2 = int(y2 / 2 + box[1] * wRatio / 2)
else:
x1 = int(x2 / 2 - box[0] * hRatio / 2) |
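A worked example of the crop arithmetic above (illustrative numbers, not from the script): fitting a 1024x768 image into a 512x512 box.

x2, y2, box = 1024, 768, (512, 512)
wRatio = x2 / box[0]   # 2.0
hRatio = y2 / box[1]   # 1.5
# hRatio <= wRatio, so the else branch trims the width to the box aspect:
x1 = int(x2 / 2 - box[0] * hRatio / 2)  # 512 - 384 = 128
x2 = int(x2 / 2 + box[0] * hRatio / 2)  # 512 + 384 = 896
# crop box (128, 0, 896, 768) is a 768x768 square, then thumbnail() scales it to 512x512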
last_tx.rs | //! State of the last transaction signed by a particular signer
use tendermint_rpc::endpoint::broadcast::tx_commit;
/// State of the last broadcasted transaction
#[derive(Clone, Debug)]
pub enum LastTx {
/// No previously broadcast transaction (i.e. starting up)
None,
/// Tendermint RPC response
Response(tx_commit::Response),
/// Error broadcasting the previous transaction
Error(tendermint_rpc::error::Error),
}
impl Default for LastTx {
fn default() -> LastTx {
LastTx::None
}
}
impl LastTx {
/// Get the RPC response, if there was one
pub fn response(&self) -> Option<&tx_commit::Response> {
match self {
LastTx::Response(ref resp) => Some(resp),
_ => None,
}
}
/// Get the RPC error, if there was one
pub fn error(&self) -> Option<&tendermint_rpc::error::Error> {
match self {
LastTx::Error(ref resp) => Some(resp),
_ => None,
}
}
/// Was there no last TX?
pub fn is_none(&self) -> bool {
match self {
LastTx::None => true,
_ => false,
}
}
/// Was there a response from the last transaction broadcast?
pub fn is_response(&self) -> bool {
self.response().is_some()
}
/// Was there an error broadcasting the last transaction?
pub fn is_error(&self) -> bool {
self.error().is_some()
} | fn from(state: &LastTx) -> Option<tx_commit::Response> {
state.response().cloned()
}
} | }
impl From<&LastTx> for Option<tx_commit::Response> { |
0002_post_subtitle.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-01 03:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
| dependencies = [
('blog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='post',
name='subtitle',
field=models.CharField(default='test', max_length=255),
preserve_default=False,
),
] |
|
forms.py | """Membership forms module
"""
from django.forms import ModelForm
from .models import Note, Term, Organization, Contact, Membership
class NoteForm(ModelForm):
"""Note Form
"""
class Meta:
model = Note
fields = ['title', 'content', 'date_time']
class TermForm(ModelForm):
"""Term Form
"""
class Meta:
model = Term
fields = ['mem_type', 'n_workshops',
'n_instructors', 'reserve',
'inh_trainer', 'local_train',
'publicize', 'recruit',
'coordinate']
class OrganizationForm(ModelForm):
"""Organization Form
"""
class Meta:
model = Organization
fields = []
class ContactForm(ModelForm):
"""Contact Form
"""
class Meta:
model = Contact
fields = []
class MembershipForm(ModelForm):
| """Membership Form
"""
class Meta:
model = Membership
fields = [] |
|
norms.py | """Predict lexical norms, either to evaluate word vectors, or to get norms for unnormed words."""
import numpy as np
import pandas as pd
import sklearn.linear_model
import sklearn.model_selection
import sklearn.preprocessing
import sklearn.utils
import argparse
import os
from .vecs import Vectors
from .utensils import log_timer
import logging
logging.basicConfig(format='[{levelname}] {message}', style='{', level=logging.INFO)
path = os.path.dirname(__file__)
@log_timer
def evaluate_norms(lang, vecs_fname, alpha=1.0):
"""Predict lexical norms to evaluate a set of word vectors in a given language.
Writes scores to tab-separated text file but also returns them.
:param lang: language to evaluate word vectors in (uses two-letter ISO codes)
:param vecs_fname: word vectors to evaluate
:param alpha: regularization strength, default 1.0, set higher for small datasets
:return: pandas DataFrame containing the norms results
"""
norms_path = os.path.join(path, 'datasets', 'norms')
if not os.path.exists('results'):
os.mkdir('results')
results_path = os.path.join('results', 'norms')
if not os.path.exists(results_path):
os.mkdir(results_path)
logging.info(f'evaluating lexical norm prediction with {vecs_fname}')
vectors = Vectors(vecs_fname, normalize=True, n=1e6, d=300)
scores = []
for norms_fname in os.listdir(norms_path):
if norms_fname.startswith(lang):
logging.info(f'predicting norms from {norms_fname}')
norms = pd.read_csv(os.path.join(norms_path, norms_fname), sep='\t', comment='#')
norms = norms.set_index('word')
score = predict_norms(vectors, norms, alpha)['scores']
score['source'] = norms_fname
scores.append(score)
scores_fname = os.path.split(vecs_fname)[1].replace('.vec', '.tsv')
if len(scores) > 0:
scores = pd.concat(scores)
scores.to_csv(os.path.join(results_path, scores_fname), sep='\t', index=False)
return scores
@log_timer
def | (vectors, norms, alpha=1.0):
"""Predict lexical norms and return score.
:param vectors: Vectors object containing word vectors
:param norms: pandas DataFrame of lexical norms
:param alpha: regularization strength, default 1.0, set higher for small datasets
:return: dict containing scores and predictions in separate pandas DataFrames
"""
vecs_df = vectors.as_df()
cols = norms.columns.values
df = norms.join(vecs_df, how='inner')
# penalize the final score for the share of normed words whose vectors are missing
total = len(norms)
missing = len(norms) - len(df)
penalty = (total - missing) / total
logging.info(f'missing vectors for {missing} out of {total} words')
df = sklearn.utils.shuffle(df) # shuffle is important for unbiased results on ordered datasets!
model = sklearn.linear_model.Ridge(alpha=alpha) # use ridge regression models
cv = sklearn.model_selection.RepeatedKFold(n_splits=5, n_repeats=10)
# compute crossvalidated prediction scores
scores = []
for col in cols:
# set dependent variable and calculate 10-fold mean fit/predict scores
df_subset = df.loc[:, vecs_df.columns.values] # use .loc[] so copy is created and no setting with copy warning is issued
df_subset[col] = df[col]
df_subset = df_subset.dropna() # drop NaNs for this specific y
x = df_subset[vecs_df.columns.values]
y = df_subset[col]
cv_scores = sklearn.model_selection.cross_val_score(model, x, y, cv=cv)
median_score = np.median(cv_scores)
penalized_score = median_score * penalty
scores.append({
'norm': col,
'adjusted r': np.sqrt(penalized_score), # take square root of explained variance to get Pearson r
'adjusted r-squared': penalized_score,
'r-squared': median_score,
'r': np.sqrt(median_score),
})
# predict (extend norms)
x_full = df[vecs_df.columns.values]
predictions = df.loc[:, cols] # use .loc[] so copy is created and no setting with copy warning is raised by pandas
for col in cols:
# set dependent variable and fit, but predict for whole x (so including unobserved y)
df_subset = df.loc[:, vecs_df.columns.values] # use .loc[] so copy is created and no setting with copy warning is raised
df_subset[col] = df[col]
df_subset = df_subset.dropna() # drop NaNs for this specific y
x = df_subset[vecs_df.columns.values]
y = df_subset[col]
model.fit(x, y)
predictions[f'{col} predicted'] = model.predict(x_full)
return {'scores': pd.DataFrame(scores), 'predictions': predictions}
def extend_norms(vecs_fname, norms_fname, alpha=1.0):
"""Extend lexical norms to unobserved words, using word vectors.
Writes predictions to tab-separated text file.
:param vecs_fname: file containing word vectors to use for prediction.
:param norms_fname: file containing norms in tab-separated columns, first column should contain words,
first line should contain column names, unobserved cells should be left empty
:param alpha: regularization strength, default 1.0, set higher for small datasets
"""
logging.info(f'extending lexical norms with {vecs_fname}')
vectors = Vectors(vecs_fname, normalize=True, n=1e6, d=300)
norms = pd.read_csv(norms_fname, sep='\t', comment='#')
norms = norms.set_index('word')
results = predict_norms(vectors, norms, alpha)
base_fname = '.'.join(norms_fname.split('.')[:-1])
results['scores'].to_csv(f'{base_fname}.scores.tsv', sep='\t', index=False)
results['predictions'].to_csv(f'{base_fname}.predictions.tsv', sep='\t', index=True)
if __name__ == '__main__':
argparser = argparse.ArgumentParser(description='perform crossvalidated penalized regression of lexical norms using word vectors as predictors')
argparser.add_argument('lang', help='language to predict norms for (uses two-letter ISO language codes)')
argparser.add_argument('vecs_fname', help='vectors to evaluate (or use for lexical norm extension')
argparser.add_argument('--extend_norms', help='file containing lexical norms to extend')
argparser.add_argument('--alpha', type=float, default=1.0, help='regularization strength, default 1.0, set higher for small datasets')
args = argparser.parse_args()
if args.extend_norms:
extend_norms(vecs_fname=args.vecs_fname, norms_fname=args.extend_norms, alpha=args.alpha)
else:
print(evaluate_norms(lang=args.lang, vecs_fname=args.vecs_fname, alpha=args.alpha))
| predict_norms |
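To make the missing-word penalty inside predict_norms concrete, a hedged numeric sketch (made-up counts):

total, missing = 1000, 200           # words in the norms vs. words without vectors
penalty = (total - missing) / total  # 0.8
median_score = 0.50                  # median cross-validated r-squared
penalized = median_score * penalty   # 0.40 -> reported as 'adjusted r-squared'
adjusted_r = penalized ** 0.5        # ~0.63 -> reported as 'adjusted r'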
lib.rs | //! A high-level StatsD metric client built on cadence.
//!
//! ## Defining Metrics
//!
//! In order to use metrics, one needs to first define one of the metric traits on a custom enum.
//! The following types of metrics are available: `counter`, `timer`, `gauge`, `histogram`, and
//! `set`. For explanations on what that means see [Metric Types].
//!
//! The metric traits serve only to provide a type safe metric name. All metric types have exactly
//! the same form, they are different only to ensure that a metric can only be used for the type for
//! which it was defined, (e.g. a counter metric cannot be used as a timer metric). See the traits
//! for more detailed examples.
//!
//! ## Initializing the Client
//!
//! Metrics can be used without initializing a statsd client. In that case, invoking `with_client`
//! or the [`metric!`] macro will become a noop. Only when configured, metrics will actually be
//! collected.
//!
//! To initialize the client, either use [`set_client`] to pass a custom client, or use
//! [`init`] to create a default client with known arguments:
//!
//! ```no_run
//! # use std::collections::BTreeMap;
//!
//! relay_statsd::init("myprefix", "localhost:8125", BTreeMap::new(), true, 1.0);
//! ```
//!
//! ## Macro Usage
//!
//! The recommended way to record metrics is by using the [`metric!`] macro. See the trait docs
//! for more information on how to record each type of metric.
//!
//! ```
//! use relay_statsd::{metric, CounterMetric};
//!
//! struct MyCounter;
//!
//! impl CounterMetric for MyCounter {
//! fn name(&self) -> &'static str {
//! "counter"
//! }
//! }
//!
//! metric!(counter(MyCounter) += 1);
//! ```
//!
//! ## Manual Usage
//!
//! ```
//! use relay_statsd::prelude::*;
//!
//! relay_statsd::with_client(|client| {
//! client.count("mymetric", 1).ok();
//! });
//! ```
//!
//! [Metric Types]: https://github.com/statsd/statsd/blob/master/docs/metric_types.md
use std::collections::BTreeMap;
use std::net::{ToSocketAddrs, UdpSocket};
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use cadence::{
BufferedUdpMetricSink, Metric, MetricBuilder, QueuingMetricSink, StatsdClient, UdpMetricSink,
};
use lazy_static::lazy_static;
use parking_lot::RwLock;
use rand::distributions::{Distribution, Uniform};
use relay_log::LogError;
/// Maximum number of metric events that can be queued before we start dropping them
const METRICS_MAX_QUEUE_SIZE: usize = 100_000;
/// Client configuration object to store globally.
#[derive(Debug)]
pub struct MetricsClient {
/// The raw statsd client.
pub statsd_client: StatsdClient,
/// Default tags to apply to every metric.
pub default_tags: BTreeMap<String, String>,
/// Global sample rate.
pub sample_rate: f32,
}
impl Deref for MetricsClient {
type Target = StatsdClient;
fn deref(&self) -> &StatsdClient {
&self.statsd_client
}
}
impl DerefMut for MetricsClient {
fn deref_mut(&mut self) -> &mut StatsdClient {
&mut self.statsd_client
}
}
impl MetricsClient {
/// Send a metric with the default tags defined on this `MetricsClient`.
#[inline(always)]
pub fn send_metric<'a, T>(&'a self, mut metric: MetricBuilder<'a, '_, T>)
where
T: Metric + From<String>,
{
if !self._should_send() {
return;
}
for (k, v) in &self.default_tags {
metric = metric.with_tag(k, v);
}
if let Err(error) = metric.try_send() {
relay_log::error!(
"Error sending a metric: {}, maximum capacity: {}",
LogError(&error),
METRICS_MAX_QUEUE_SIZE
);
};
}
fn _should_send(&self) -> bool {
if self.sample_rate <= 0.0 {
false
} else if self.sample_rate >= 1.0 {
true
} else {
// Using thread local RNG and uniform distribution here because Rng::gen_range is
// "optimized for the case that only a single sample is made from the given range".
// See https://docs.rs/rand/0.7.3/rand/distributions/uniform/struct.Uniform.html for more
// details.
let mut rng = rand::thread_rng();
RNG_UNIFORM_DISTRIBUTION
.with(|uniform_dist| uniform_dist.sample(&mut rng) <= self.sample_rate)
}
}
}
lazy_static! {
static ref METRICS_CLIENT: RwLock<Option<Arc<MetricsClient>>> = RwLock::new(None);
}
thread_local! {
static CURRENT_CLIENT: Option<Arc<MetricsClient>> = METRICS_CLIENT.read().clone();
static RNG_UNIFORM_DISTRIBUTION: Uniform<f32> = Uniform::new(0.0, 1.0);
}
/// Internal prelude for the macro
#[doc(hidden)]
pub mod _pred {
pub use cadence::prelude::*;
}
/// The metrics prelude that is necessary to use the client.
pub mod prelude {
pub use cadence::prelude::*;
}
/// Set a new statsd client.
pub fn set_client(client: MetricsClient) {
*METRICS_CLIENT.write() = Some(Arc::new(client));
}
/// Disable the client again.
pub fn | () {
*METRICS_CLIENT.write() = None;
}
/// Tell the metrics system to report to statsd.
pub fn init<A: ToSocketAddrs>(
prefix: &str,
host: A,
default_tags: BTreeMap<String, String>,
buffering: bool,
sample_rate: f32,
) {
let addrs: Vec<_> = host.to_socket_addrs().unwrap().collect();
if !addrs.is_empty() {
relay_log::info!("reporting metrics to statsd at {}", addrs[0]);
}
// Normalize sample_rate
let sample_rate = sample_rate.max(0.).min(1.);
relay_log::debug!(
"metrics sample rate is set to {}{}",
sample_rate,
if sample_rate == 0.0 {
", no metrics will be reported"
} else {
""
}
);
let socket = UdpSocket::bind("0.0.0.0:0").unwrap();
socket.set_nonblocking(true).unwrap();
let statsd_client = if buffering {
let udp_sink = BufferedUdpMetricSink::from(host, socket).unwrap();
let queuing_sink = QueuingMetricSink::with_capacity(udp_sink, METRICS_MAX_QUEUE_SIZE);
StatsdClient::from_sink(prefix, queuing_sink)
} else {
let simple_sink = UdpMetricSink::from(host, socket).unwrap();
StatsdClient::from_sink(prefix, simple_sink)
};
relay_log::debug!(
"metrics buffering is {}",
if buffering { "enabled" } else { "disabled" }
);
set_client(MetricsClient {
statsd_client,
default_tags,
sample_rate,
});
}
/// Invoke a callback with the current statsd client.
///
/// If statsd is not configured the callback is not invoked. For the most part
/// the [`metric!`] macro should be used instead.
#[inline(always)]
pub fn with_client<F, R>(f: F) -> R
where
F: FnOnce(&MetricsClient) -> R,
R: Default,
{
CURRENT_CLIENT.with(|client| {
if let Some(client) = client {
f(&*client)
} else {
R::default()
}
})
}
/// A metric for capturing timings.
///
/// Timings are a positive number of milliseconds between a start and end time. Examples include
/// time taken to render a web page or time taken for a database call to return.
///
/// ## Example
///
/// ```
/// use relay_statsd::{metric, TimerMetric};
///
/// enum MyTimer {
/// ProcessA,
/// ProcessB,
/// }
///
/// impl TimerMetric for MyTimer {
/// fn name(&self) -> &'static str {
/// match self {
/// Self::ProcessA => "process_a",
/// Self::ProcessB => "process_b",
/// }
/// }
/// }
///
/// # fn process_a() {}
///
/// // measure time by explicitly setting a std::timer::Duration
/// # use std::time::Instant;
/// let start_time = Instant::now();
/// process_a();
/// metric!(timer(MyTimer::ProcessA) = start_time.elapsed());
///
/// // provide tags to a timer
/// metric!(
/// timer(MyTimer::ProcessA) = start_time.elapsed(),
/// server = "server1",
/// host = "host1",
/// );
///
/// // measure time implicitly by enclosing a code block in a metric
/// metric!(timer(MyTimer::ProcessA), {
/// process_a();
/// });
///
/// // measure block and also provide tags
/// metric!(
/// timer(MyTimer::ProcessB),
/// server = "server1",
/// host = "host1",
/// {
/// process_a();
/// }
/// );
///
/// ```
pub trait TimerMetric {
/// Returns the timer metric name that will be sent to statsd.
fn name(&self) -> &'static str;
}
/// A metric for capturing counters.
///
/// Counters are simple values incremented or decremented by a client. The rates at which these
/// events occur or average values will be determined by the server receiving them. Examples of
/// counter uses include number of logins to a system or requests received.
///
/// ## Example
///
/// ```
/// use relay_statsd::{metric, CounterMetric};
///
/// enum MyCounter {
/// TotalRequests,
/// TotalBytes,
/// }
///
/// impl CounterMetric for MyCounter {
/// fn name(&self) -> &'static str {
/// match self {
/// Self::TotalRequests => "total_requests",
/// Self::TotalBytes => "total_bytes",
/// }
/// }
/// }
///
/// # let buffer = &[(), ()];
///
/// // add to the counter
/// metric!(counter(MyCounter::TotalRequests) += 1);
/// metric!(counter(MyCounter::TotalBytes) += buffer.len() as i64);
///
/// // add to the counter and provide tags
/// metric!(
/// counter(MyCounter::TotalRequests) += 1,
/// server = "s1",
/// host = "h1"
/// );
///
/// // subtract from the counter
/// metric!(counter(MyCounter::TotalRequests) -= 1);
///
/// // subtract from the counter and provide tags
/// metric!(
/// counter(MyCounter::TotalRequests) -= 1,
/// server = "s1",
/// host = "h1"
/// );
/// ```
pub trait CounterMetric {
/// Returns the counter metric name that will be sent to statsd.
fn name(&self) -> &'static str;
}
/// A metric for capturing histograms.
///
/// Histograms are values whose distribution is calculated by the server. The distribution
/// calculated for histograms is often similar to that of timers. Histograms can be thought of as a
/// more general (not limited to timing things) form of timers.
///
/// ## Example
///
/// ```
/// use relay_statsd::{metric, HistogramMetric};
///
/// struct QueueSize;
///
/// impl HistogramMetric for QueueSize {
/// fn name(&self) -> &'static str {
/// "queue_size"
/// }
/// }
///
/// # use std::collections::VecDeque;
/// let queue = VecDeque::new();
/// # let _hint: &VecDeque<()> = &queue;
///
/// // record a histogram value
/// metric!(histogram(QueueSize) = queue.len() as u64);
///
/// // record with tags
/// metric!(
/// histogram(QueueSize) = queue.len() as u64,
/// server = "server1",
/// host = "host1",
/// );
/// ```
pub trait HistogramMetric {
/// Returns the histogram metric name that will be sent to statsd.
fn name(&self) -> &'static str;
}
/// A metric for capturing sets.
///
/// Sets count the number of unique elements in a group. You can use them to, for example, count the
/// unique visitors to your site.
///
/// ## Example
///
/// ```
/// use relay_statsd::{metric, SetMetric};
///
/// enum MySet {
/// UniqueProjects,
/// UniqueUsers,
/// }
///
/// impl SetMetric for MySet {
/// fn name(&self) -> &'static str {
/// match self {
/// MySet::UniqueProjects => "unique_projects",
/// MySet::UniqueUsers => "unique_users",
/// }
/// }
/// }
///
/// # use std::collections::HashSet;
/// let users = HashSet::new();
/// # let _hint: &HashSet<()> = &users;
///
/// // use a set metric
/// metric!(set(MySet::UniqueUsers) = users.len() as i64);
///
/// // use a set metric with tags
/// metric!(
/// set(MySet::UniqueUsers) = users.len() as i64,
/// server = "server1",
/// host = "host1",
/// );
/// ```
pub trait SetMetric {
/// Returns the set metric name that will be sent to statsd.
fn name(&self) -> &'static str;
}
/// A metric for capturing gauges.
///
/// Gauge values are an instantaneous measurement of a value determined by the client. They do not
/// change unless changed by the client. Examples include things like load average or how many
/// connections are active.
///
/// ## Example
///
/// ```
/// use relay_statsd::{metric, GaugeMetric};
///
/// struct QueueSize;
///
/// impl GaugeMetric for QueueSize {
/// fn name(&self) -> &'static str {
/// "queue_size"
/// }
/// }
///
/// # use std::collections::VecDeque;
/// let queue = VecDeque::new();
/// # let _hint: &VecDeque<()> = &queue;
///
/// // a simple gauge value
/// metric!(gauge(QueueSize) = queue.len() as u64);
///
/// // a gauge with tags
/// metric!(
/// gauge(QueueSize) = queue.len() as u64,
/// server = "server1",
/// host = "host1"
/// );
/// ```
pub trait GaugeMetric {
/// Returns the gauge metric name that will be sent to statsd.
fn name(&self) -> &'static str;
}
/// Emits a metric.
///
/// See [crate-level documentation](self) for examples.
#[macro_export]
macro_rules! metric {
// counter increment
(counter($id:expr) += $value:expr $(, $k:ident = $v:expr)* $(,)?) => {
$crate::with_client(|client| {
use $crate::_pred::*;
client.send_metric(
client.count_with_tags(&$crate::CounterMetric::name(&$id), $value)
$(.with_tag(stringify!($k), $v))*
)
})
};
// counter decrement
(counter($id:expr) -= $value:expr $(, $k:ident = $v:expr)* $(,)?) => {
$crate::with_client(|client| {
use $crate::_pred::*;
client.send_metric(
client.count_with_tags(&$crate::CounterMetric::name(&$id), -$value)
$(.with_tag(stringify!($k), $v))*
)
})
};
// gauge set
(gauge($id:expr) = $value:expr $(, $k:ident = $v:expr)* $(,)?) => {
$crate::with_client(|client| {
use $crate::_pred::*;
client.send_metric(
client.gauge_with_tags(&$crate::GaugeMetric::name(&$id), $value)
$(.with_tag(stringify!($k), $v))*
)
})
};
// histogram
(histogram($id:expr) = $value:expr $(, $k:ident = $v:expr)* $(,)?) => {
$crate::with_client(|client| {
use $crate::_pred::*;
client.send_metric(
client.histogram_with_tags(&$crate::HistogramMetric::name(&$id), $value)
$(.with_tag(stringify!($k), $v))*
)
})
};
// sets (count unique occurrences of a value per time interval)
(set($id:expr) = $value:expr $(, $k:ident = $v:expr)* $(,)?) => {
$crate::with_client(|client| {
use $crate::_pred::*;
client.send_metric(
client.set_with_tags(&$crate::SetMetric::name(&$id), $value)
$(.with_tag(stringify!($k), $v))*
)
})
};
// timer value (duration)
(timer($id:expr) = $value:expr $(, $k:ident = $v:expr)* $(,)?) => {
$crate::with_client(|client| {
use $crate::_pred::*;
client.send_metric(
client.time_with_tags(&$crate::TimerMetric::name(&$id), $value)
$(.with_tag(stringify!($k), $v))*
)
})
};
// timed block
(timer($id:expr), $($k:ident = $v:expr,)* $block:block) => {{
let now = std::time::Instant::now();
let rv = {$block};
$crate::with_client(|client| {
use $crate::_pred::*;
client.send_metric(
client.time_with_tags(&$crate::TimerMetric::name(&$id), now.elapsed())
$(.with_tag(stringify!($k), $v))*
)
});
rv
}};
}
| disable |
__init__.py | # ==================================================================================
# Copyright (c) 2019 Nokia
# Copyright (c) 2018-2019 AT&T Intellectual Property.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
"""
contains the app; broken out here for ease of unit testing
"""
import connexion
from prometheus_client import CollectorRegistry, generate_latest, multiprocess
app = connexion.App(__name__, specification_dir=".")
app.add_api("openapi.yaml", arguments={"title": "My Title"})
# python decorators feel like black magic to me
@app.app.route('/a1-p/metrics', methods=['GET'])
def | (): # pylint: disable=unused-variable
# /metrics API shouldn't be visible in the API documentation,
# hence it's added here in the create_app step
# requires environment variable prometheus_multiproc_dir
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)
return generate_latest(registry)
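# Illustrative check (added for clarity, not from the source): once the app is
# served, the endpoint can be scraped directly. The port and multiprocess dir
# below are assumptions, not values defined in this file:
#   export prometheus_multiproc_dir=/tmp/a1_metrics
#   curl http://localhost:10000/a1-p/metrics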
| metrics |
Furthest Building You Can Reach.py | from heapq import heappush, heappop
class Solution:
|
a = Solution()
print(a.furthestBuilding([4,12,2,7,3,18,20,3,19], 10, 2))
| def furthestBuilding(self, H, bricks, ladders):
        # Greedy: keep the `ladders` largest climbs in a min-heap (use ladders
        # for those) and pay for every smaller climb with bricks.
        jumps_pq = []
        for i in range(len(H) - 1):
            jump_height = H[i + 1] - H[i]
            if jump_height <= 0:
                continue  # level or downhill moves are free
            heappush(jumps_pq, jump_height)
            if len(jumps_pq) > ladders:
                # The smallest tracked climb is cheapest to cover with bricks.
                bricks -= heappop(jumps_pq)
                if bricks < 0:
                    return i
        return len(H) - 1 |
header_ident_osabi.rs | use crate::display;
use deku::bitvec::{BitSlice, Msb0};
use deku::prelude::*;
use log::debug; // needed by the debug! calls in `reader` (assumes the `log` crate)
use std::fmt;
// list copied from https://en.wikipedia.org/wiki/Executable_and_Linkable_Format
#[derive(Debug, PartialEq, DekuRead, DekuWrite)]
#[deku(type = "u8")]
pub enum HeaderIdentOsAbi {
#[deku(id = "0x00")]
SystemV,
#[deku(id = "0x01")]
HpUx,
#[deku(id = "0x02")]
NetBSD,
#[deku(id = "0x03")]
Linux,
#[deku(id = "0x04")]
GnuHurd,
#[deku(id = "0x06")]
Solaris,
#[deku(id = "0x07")]
Aix,
#[deku(id = "0x08")]
Irix,
#[deku(id = "0x09")]
FreeBSD,
#[deku(id = "0x0A")]
Tru64,
#[deku(id = "0x0B")]
NovelModesto,
#[deku(id = "0x0C")]
OpenBSD,
#[deku(id = "0x0D")]
OpenVMS,
#[deku(id = "0x0E")]
HPNonStop,
#[deku(id = "0x0F")]
Aros,
#[deku(id = "0x10")]
FenixOS,
#[deku(id = "0x11")]
CloudABI,
#[deku(id = "0x12")]
StratusVOS,
#[deku(id = "0xFF")]
Standalone,
}
impl fmt::Display for HeaderIdentOsAbi {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let (data_str, byte): (&str, u8) = match *self {
HeaderIdentOsAbi::SystemV => ("SystemV", 0x00),
HeaderIdentOsAbi::HpUx => ("HP-UX", 0x01),
HeaderIdentOsAbi::NetBSD => ("NetBSD", 0x02),
HeaderIdentOsAbi::Linux => ("Linux", 0x03),
HeaderIdentOsAbi::GnuHurd => ("GNU Hurd", 0x04),
HeaderIdentOsAbi::Solaris => ("Sun Solaris", 0x06),
HeaderIdentOsAbi::Aix => ("IBM AIX", 0x07),
HeaderIdentOsAbi::Irix => ("IRIX", 0x08),
HeaderIdentOsAbi::FreeBSD => ("FreeBSD", 0x09),
HeaderIdentOsAbi::Tru64 => ("Tru64 UNIX", 0x0A),
            HeaderIdentOsAbi::NovelModesto => ("Novell Modesto", 0x0B),
HeaderIdentOsAbi::OpenBSD => ("OpenBSD", 0x0C),
HeaderIdentOsAbi::OpenVMS => ("OpenVMS", 0x0D),
HeaderIdentOsAbi::HPNonStop => ("HP NonStop", 0x0E),
HeaderIdentOsAbi::Aros => ("AROS Research Operating System", 0x0F),
            HeaderIdentOsAbi::FenixOS => ("Fenix OS", 0x10),
HeaderIdentOsAbi::CloudABI => ("CloudABI", 0x11),
HeaderIdentOsAbi::StratusVOS => ("Stratus VOS", 0x12),
HeaderIdentOsAbi::Standalone => ("Standalone application", 0xFF),
};
let data_vec: Vec<u8> = byte.to_ne_bytes().to_vec();
display::print_field(f, "OS ABI", data_str, &data_vec)
}
}
impl Default for HeaderIdentOsAbi {
fn default() -> Self {
HeaderIdentOsAbi::SystemV
}
}
impl HeaderIdentOsAbi {
pub fn new() -> Self {
HeaderIdentOsAbi::SystemV
}
fn reader(rest: &BitSlice<Msb0, u8>) -> Result<(&BitSlice<Msb0, u8>, Vec<u8>), DekuError> {
debug!("rest: {:?}", rest);
debug!("len rest: {}", rest.len());
        // number of bits to consume: a fixed 4-byte (4 * 8 bit) span
let index = 4 * 8;
// Check split_at precondition
if index > rest.len() {
return Err(DekuError::Parse(format!(
"Not enough data to read HeaderIdentOsAbi. Bits expected: {}, Bits given: {}",
index,
rest.len()
)));
}
// read data
let mut ret = vec![];
let (mut u8_rest, rest) = rest.split_at(index);
while !u8_rest.is_empty() {
let (u8_rest_new, u8_val) = u8::read(u8_rest, deku::ctx::Endian::Little)?;
ret.push(u8_val);
u8_rest = u8_rest_new;
}
Ok((rest, ret))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_from_bytes() {
let data: Vec<u8> = vec![0x03];
let (rest, val) = HeaderIdentOsAbi::from_bytes((&data, 0)).unwrap();
println!("rest: {:?}", rest);
println!("value: {:?}", val);
let expected = HeaderIdentOsAbi::Linux;
assert_eq!(expected, val);
assert_eq!((vec![].as_ref(), 0 as usize), rest);
}
#[test]
fn test_from_bytes_empty() {
let data: Vec<u8> = vec![];
let err = HeaderIdentOsAbi::from_bytes((&data, 0));
assert!(matches!(err, Err(_)));
}
#[test]
fn test_from_bytes_incomplete() {
let data: Vec<u8> = vec![];
let err = HeaderIdentOsAbi::from_bytes((&data, 0));
assert!(matches!(err, Err(_)));
}
#[test]
fn test_from_bytes_invalid() |
#[test]
fn test_try_from() {
let data: Vec<u8> = vec![0x00];
let val = HeaderIdentOsAbi::try_from(data.as_ref()).unwrap();
println!("value: {:?}", val);
let expected = HeaderIdentOsAbi::new();
assert_eq!(expected, val);
let val: Vec<u8> = val.try_into().unwrap();
assert_eq!(data, val);
}
}
| {
let data: Vec<u8> = vec![0xF0];
let err = HeaderIdentOsAbi::from_bytes((&data, 0));
assert!(matches!(err, Err(_)));
} |
templates.go | package templates
import (
"bytes"
"encoding/base64"
"text/template"
)
var templateBody = `---
apiVersion: v1
kind: Secret
metadata:
name: {{ .Name }}-bmc-secret
type: Opaque
data:
username: {{ .EncodedUsername }}
password: {{ .EncodedPassword }}
---
apiVersion: metal3.io/v1alpha1
kind: BareMetalHost
metadata:
name: {{ .Name }}
spec:
online: true
{{- if .HardwareProfile }}
hardwareProfile: {{ .HardwareProfile }}
{{- end }}
{{- if .BootMacAddress }}
bootMACAddress: {{ .BootMacAddress }}
{{- end }}
bmc:
address: {{ .BMCAddress }}
credentialsName: {{ .Name }}-bmc-secret
{{- if .Machine }}
machineRef:
name: {{ .Machine }}
namespace: {{ .MachineNamespace }}
{{- end }}
`
// Template holds the arguments to pass to the template.
type Template struct {
Name string
BMCAddress string
Username string
Password string
HardwareProfile string
BootMacAddress string
Machine string
MachineNamespace string
}
// EncodedUsername returns the username in the format needed to store
// it in a Secret.
func (t Template) EncodedUsername() string {
return encodeToSecret(t.Username)
}
// EncodedPassword returns the password in the format needed to store
// it in a Secret.
func (t Template) EncodedPassword() string {
return encodeToSecret(t.Password)
}
func encodeToSecret(input string) string |
// Render returns the string from the template or an error if there
// was a problem rendering it.
func (t Template) Render() (string, error) {
buf := new(bytes.Buffer)
tmpl := template.Must(template.New("yaml_out").Parse(templateBody))
err := tmpl.Execute(buf, t)
return buf.String(), err
}
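// Illustrative usage sketch (added for clarity; all field values below are
// made-up examples, not values from the original source):
//
//	t := Template{
//		Name:       "node-0",
//		BMCAddress: "ipmi://192.168.111.1",
//		Username:   "admin",
//		Password:   "password",
//	}
//	yaml, err := t.Render()
//	// On success, yaml holds the Secret and BareMetalHost manifests, with
//	// credentials base64-encoded via EncodedUsername/EncodedPassword.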
| {
return base64.StdEncoding.EncodeToString([]byte(input))
} |
file_utils.py | """
Utilities for working with the local dataset cache.
This file is adapted from the AllenNLP library at https://github.com/allenai/allennlp
Copyright by the AllenNLP authors.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import fnmatch
import json
import logging
import os
import shutil
import sys
import tempfile
from functools import wraps
from hashlib import sha256
from io import open
import boto3
import numpy as np
import requests
from botocore.exceptions import ClientError
from dotmap import DotMap
from tqdm import tqdm
try:
from torch.hub import _get_torch_home
torch_cache_home = _get_torch_home()
except ImportError:
torch_cache_home = os.path.expanduser(
os.getenv(
"TORCH_HOME", os.path.join(os.getenv("XDG_CACHE_HOME", "~/.cache"), "torch")
)
)
default_cache_path = os.path.join(torch_cache_home, "farm")
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
try:
from pathlib import Path
FARM_CACHE = Path(os.getenv("FARM_CACHE", default_cache_path))
except (AttributeError, ImportError):
FARM_CACHE = os.getenv("FARM_CACHE", default_cache_path)
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
def url_to_filename(url, etag=None):
"""
Convert `url` into a hashed filename in a repeatable way.
If `etag` is specified, append its hash to the url's, delimited
by a period.
"""
url_bytes = url.encode("utf-8")
url_hash = sha256(url_bytes)
filename = url_hash.hexdigest()
if etag:
etag_bytes = etag.encode("utf-8")
etag_hash = sha256(etag_bytes)
filename += "." + etag_hash.hexdigest()
return filename
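# Illustrative example (URL and etag are made-up, shown only for the shape of
# the result):
#   url_to_filename("https://example.com/model.bin", etag='"abc123"')
#   -> "<sha256 of url>.<sha256 of etag>"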
def filename_to_url(filename, cache_dir=None):
"""
Return the url and etag (which may be ``None``) stored for `filename`.
Raise ``EnvironmentError`` if `filename` or its stored metadata do not exist.
"""
if cache_dir is None:
cache_dir = FARM_CACHE
if sys.version_info[0] == 3 and isinstance(cache_dir, Path):
cache_dir = str(cache_dir)
cache_path = os.path.join(cache_dir, filename)
if not os.path.exists(cache_path):
raise EnvironmentError("file {} not found".format(cache_path))
meta_path = cache_path + ".json"
if not os.path.exists(meta_path):
raise EnvironmentError("file {} not found".format(meta_path))
with open(meta_path, encoding="utf-8") as meta_file:
metadata = json.load(meta_file)
url = metadata["url"]
etag = metadata["etag"]
return url, etag
def cached_path(url_or_filename, cache_dir=None):
"""
Given something that might be a URL (or might be a local path),
determine which. If it's a URL, download the file and cache it, and
return the path to the cached file. If it's already a local path,
make sure the file exists and then return the path.
"""
if cache_dir is None:
cache_dir = FARM_CACHE
if sys.version_info[0] == 3 and isinstance(url_or_filename, Path):
url_or_filename = str(url_or_filename)
if sys.version_info[0] == 3 and isinstance(cache_dir, Path):
cache_dir = str(cache_dir)
parsed = urlparse(url_or_filename)
if parsed.scheme in ("http", "https", "s3"):
# URL, so get it from the cache (downloading if necessary)
return get_from_cache(url_or_filename, cache_dir)
elif os.path.exists(url_or_filename):
# File, and it exists.
return url_or_filename
elif parsed.scheme == "":
# File, but it doesn't exist.
raise EnvironmentError("file {} not found".format(url_or_filename))
else:
# Something unknown
raise ValueError(
"unable to parse {} as a URL or as a local path".format(url_or_filename)
)
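# Illustrative usage (the URL and path are made-up examples):
#   path = cached_path("https://example.com/vocab.txt")  # downloads and caches
#   path = cached_path("/tmp/vocab.txt")                 # returned as-is if it exists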
def split_s3_path(url):
"""Split a full s3 path into the bucket name and path."""
parsed = urlparse(url)
if not parsed.netloc or not parsed.path:
raise ValueError("bad s3 path {}".format(url))
bucket_name = parsed.netloc
s3_path = parsed.path
# Remove '/' at beginning of path.
if s3_path.startswith("/"):
s3_path = s3_path[1:]
return bucket_name, s3_path
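# Illustrative example (bucket and key are made-up):
#   split_s3_path("s3://my-bucket/models/v1/weights.bin")
#   -> ("my-bucket", "models/v1/weights.bin")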
def s3_request(func):
"""
Wrapper function for s3 requests in order to create more helpful error
messages.
"""
@wraps(func)
def wrapper(url, *args, **kwargs):
try:
return func(url, *args, **kwargs)
except ClientError as exc:
if int(exc.response["Error"]["Code"]) == 404:
raise EnvironmentError("file {} not found".format(url))
else:
raise
return wrapper
@s3_request
def s3_etag(url):
"""Check ETag on S3 object."""
s3_resource = boto3.resource("s3")
bucket_name, s3_path = split_s3_path(url)
s3_object = s3_resource.Object(bucket_name, s3_path)
return s3_object.e_tag
@s3_request
def s3_get(url, temp_file):
"""Pull a file directly from S3."""
s3_resource = boto3.resource("s3")
bucket_name, s3_path = split_s3_path(url)
s3_resource.Bucket(bucket_name).download_fileobj(s3_path, temp_file)
def http_get(url, temp_file, proxies=None):
req = requests.get(url, stream=True, proxies=proxies)
content_length = req.headers.get("Content-Length")
total = int(content_length) if content_length is not None else None
progress = tqdm(unit="B", total=total)
for chunk in req.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
progress.update(len(chunk))
temp_file.write(chunk)
progress.close()
def | (url, cache_dir=None):
"""
Given a URL, look for the corresponding dataset in the local cache.
If it's not there, download it. Then return the path to the cached file.
"""
if cache_dir is None:
cache_dir = FARM_CACHE
if sys.version_info[0] == 3 and isinstance(cache_dir, Path):
cache_dir = str(cache_dir)
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
# Get eTag to add to filename, if it exists.
if url.startswith("s3://"):
etag = s3_etag(url)
else:
try:
response = requests.head(url, allow_redirects=True)
if response.status_code != 200:
etag = None
else:
etag = response.headers.get("ETag")
except EnvironmentError:
etag = None
if sys.version_info[0] == 2 and etag is not None:
etag = etag.decode("utf-8")
filename = url_to_filename(url, etag)
# get cache path to put the file
cache_path = os.path.join(cache_dir, filename)
# If we don't have a connection (etag is None) and can't identify the file
# try to get the last downloaded one
if not os.path.exists(cache_path) and etag is None:
matching_files = fnmatch.filter(os.listdir(cache_dir), filename + ".*")
matching_files = list(filter(lambda s: not s.endswith(".json"), matching_files))
if matching_files:
cache_path = os.path.join(cache_dir, matching_files[-1])
if not os.path.exists(cache_path):
# Download to temporary file, then copy to cache dir once finished.
# Otherwise you get corrupt cache entries if the download gets interrupted.
with tempfile.NamedTemporaryFile() as temp_file:
logger.info("%s not found in cache, downloading to %s", url, temp_file.name)
# GET file object
if url.startswith("s3://"):
s3_get(url, temp_file)
else:
http_get(url, temp_file)
# we are copying the file before closing it, so flush to avoid truncation
temp_file.flush()
# shutil.copyfileobj() starts at the current position, so go to the start
temp_file.seek(0)
logger.info("copying %s to cache at %s", temp_file.name, cache_path)
with open(cache_path, "wb") as cache_file:
shutil.copyfileobj(temp_file, cache_file)
logger.info("creating metadata file for %s", cache_path)
meta = {"url": url, "etag": etag}
meta_path = cache_path + ".json"
with open(meta_path, "w") as meta_file:
output_string = json.dumps(meta)
if sys.version_info[0] == 2 and isinstance(output_string, str):
output_string = unicode(
output_string, "utf-8"
) # The beauty of python 2
meta_file.write(output_string)
logger.info("removing temp file %s", temp_file.name)
return cache_path
def read_set_from_file(filename):
"""
Extract a de-duped collection (set) of text from a file.
Expected file format is one item per line.
"""
collection = set()
with open(filename, "r", encoding="utf-8") as file_:
for line in file_:
collection.add(line.rstrip())
return collection
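# Illustrative input file (one item per line; duplicates collapse in the set):
#   stopword_a
#   stopword_b
#   stopword_a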
def get_file_extension(path, dot=True, lower=True):
ext = os.path.splitext(path)[1]
ext = ext if dot else ext[1:]
return ext.lower() if lower else ext
def read_config(path, flattend=False):
if path:
with open(path) as json_data_file:
conf_args = json.load(json_data_file)
else:
raise ValueError("No config provided for classifier")
def getArgValue(arg):
if "value" not in arg:
logger.error(
"Only depth 2 config files supported. Failed to convert: %s" % str(arg)
)
return arg["value"] if (arg["value"] is not None) else arg["default"]
# flatten last part of config, take either value or default as value
for gk, gv in conf_args.items():
for k, v in gv.items():
if isinstance(getArgValue(v), dict):
logger.error("Config is too deeply nested, at %s" % str(v))
conf_args[gk][k] = getArgValue(v)
# DotMap for making nested dictionary accessible through dot notation
flat_args = dict(
conf_args["general"],
**conf_args["task"],
**conf_args["parameter"],
**conf_args["logging"],
)
if flattend:
args = DotMap(flat_args, _dynamic=False)
else:
args = DotMap(conf_args, _dynamic=False)
return args
def unnestConfig(config, flattened=False):
"""
This function creates a list of config files for evaluating parameters with different values. If a config parameter
    is of type list, this list is iterated over and a config object without lists is returned. Can handle lists inside any
number of parameters.
Can handle shallow or nested (one level) configs
"""
nestedKeys = []
nestedVals = []
if flattened:
for k, v in config.items():
if isinstance(v, list):
if k != "layer_dims": # exclude layer dims, since it is already a list
nestedKeys.append(k)
nestedVals.append(v)
else:
for gk, gv in config.items():
            if gk != "task":
for k, v in gv.items():
                    if isinstance(v, list):
                        if k != "layer_dims":  # exclude layer dims, since it is already a list
                            nestedKeys.append([gk, k])
                            nestedVals.append(v)
elif isinstance(v, dict):
logger.error("Config too deep!")
if len(nestedKeys) == 0:
unnestedConfig = [config]
else:
if flattened:
logger.info("Nested config at parameters: %s" % (", ".join(nestedKeys)))
else:
logger.info(
"Nested config at parameters: %s"
% (", ".join(".".join(x) for x in nestedKeys))
)
unnestedConfig = []
mesh = np.meshgrid(
*nestedVals
) # get all combinations, each dimension corresponds to one parameter type
        # flatten mesh into shape: [num_parameters, num_combinations] so we can iterate in 2d over any parameter combinations
mesh = [x.flatten() for x in mesh]
# loop over all combinations
for i in range(len(mesh[0])):
tempconfig = config.copy()
for j, k in enumerate(nestedKeys):
if isinstance(k, str):
tempconfig[k] = mesh[j][
i
] # get ith val of correct param value and overwrite original config
elif len(k) == 2:
tempconfig[k[0]][k[1]] = mesh[j][i] # set nested dictionary keys
else:
logger.error("Config too deep!")
unnestedConfig.append(tempconfig)
return unnestedConfig
| get_from_cache |
operations.go | package containerinstance
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// OperationsClient is the client for the Operations methods of the Containerinstance service.
type OperationsClient struct {
BaseClient
}
// NewOperationsClient creates an instance of the OperationsClient client.
func NewOperationsClient(subscriptionID string) OperationsClient {
return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client using a custom endpoint. Use this
// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient |
// List list the operations for Azure Container Instance service.
func (client OperationsClient) List(ctx context.Context) (result OperationListResult, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.ListPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "containerinstance.OperationsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "containerinstance.OperationsClient", "List", resp, "Failure sending request")
return
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "containerinstance.OperationsClient", "List", resp, "Failure responding to request")
return
}
return
}
// ListPreparer prepares the List request.
func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) {
const APIVersion = "2018-04-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/providers/Microsoft.ContainerInstance/operations"),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) {
return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
| {
return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)}
} |
three-d-secure.js | 'use strict';
var wrapPromise = require('@braintree/wrap-promise');
var methods = require('../../lib/methods');
var convertMethodsToError = require('../../lib/convert-methods-to-error');
var EventEmitter = require('@braintree/event-emitter');
var FRAMEWORKS = require('./frameworks');
/**
* @deprecated
* @callback ThreeDSecure~addFrameCallback
* @param {?BraintreeError} [err] `null` or `undefined` if there was no error.
* @param {HTMLIFrameElement} iframe An iframe element containing the bank's authentication page that you must put on your page.
* @description **Deprecated** The callback used for options.addFrame in 3DS 1.0's {@link ThreeDSecure#verifyCard|verifyCard}.
* @returns {void}
*/
/**
* @deprecated
* @callback ThreeDSecure~removeFrameCallback
* @description **Deprecated** The callback used for options.removeFrame in 3DS 1.0's {@link ThreeDSecure#verifyCard|verifyCard}.
* @returns {void}
*/
/**
* @deprecated
* @typedef {object} ThreeDSecure~verifyCardCustomerObject
* @property {string} [customer.mobilePhoneNumber] The mobile phone number used for verification. Only numbers; remove dashes, parenthesis and other characters.
* @property {string} [customer.email] The email used for verification.
* @property {string} [customer.shippingMethod] The 2-digit string indicating the shipping method chosen for the transaction.
* @property {string} [customer.billingAddress.firstName] The first name associated with the address.
* @property {string} [customer.billingAddress.lastName] The last name associated with the address.
* @property {string} [customer.billingAddress.streetAddress] Line 1 of the Address (eg. number, street, etc).
* @property {string} [customer.billingAddress.extendedAddress] Line 2 of the Address (eg. suite, apt #, etc.).
* @property {string} [customer.billingAddress.locality] The locality (city) name associated with the address.
* @property {string} [customer.billingAddress.region] The 2 letter code for US states, and the equivalent for other countries.
* @property {string} [customer.billingAddress.postalCode] The zip code or equivalent for countries that have them.
* @property {string} [customer.billingAddress.countryCodeAlpha2] The 2 character country code.
* @property {string} [customer.billingAddress.phoneNumber] The phone number associated with the address. Only numbers; remove dashes, parenthesis and other characters.
* @description **Deprecated** Optional customer information to be passed to 3DS 1.0 for verification.
*/
/**
* @typedef {object} ThreeDSecure~verifyPayload
* @property {string} nonce The new payment method nonce produced by the 3D Secure lookup. The original nonce passed into {@link ThreeDSecure#verifyCard|verifyCard} was consumed. This new nonce should be used to transact on your server.
* @property {object} details Additional account details.
* @property {string} details.cardType Type of card, ex: Visa, MasterCard.
* @property {string} details.lastFour Last four digits of card number.
* @property {string} details.lastTwo Last two digits of card number.
* @property {string} description A human-readable description.
* @property {object} binData Information about the card based on the bin.
* @property {string} binData.commercial Possible values: 'Yes', 'No', 'Unknown'.
* @property {string} binData.countryOfIssuance The country of issuance.
* @property {string} binData.debit Possible values: 'Yes', 'No', 'Unknown'.
* @property {string} binData.durbinRegulated Possible values: 'Yes', 'No', 'Unknown'.
* @property {string} binData.healthcare Possible values: 'Yes', 'No', 'Unknown'.
* @property {string} binData.issuingBank The issuing bank.
* @property {string} binData.payroll Possible values: 'Yes', 'No', 'Unknown'.
* @property {string} binData.prepaid Possible values: 'Yes', 'No', 'Unknown'.
* @property {string} binData.productId The product id.
* @property {boolean} liabilityShiftPossible *Deprecated:* Use `threeDSecureInfo.liabilityShiftPossible` instead.
* @property {boolean} liabilityShifted *Deprecated:* Use `threeDSecureInfo.liabilityShifted` instead.
* @property {object} threeDSecureInfo 3DS information about the card.
* @property {boolean} threeDSecureInfo.liabilityShiftPossible Indicates whether the card was eligible for 3D Secure.
* @property {boolean} threeDSecureInfo.liabilityShifted Indicates whether the liability for fraud has been shifted away from the merchant.
*/
/**
* @typedef {string} ThreeDSecure~prepareLookupPayload The client data to pass on when doing a server side lookup call.
*/
/**
* @typedef {object} ThreeDSecure~verificationData
* @property {boolean} requiresUserAuthentication When `true`, the user will be presented with a 3D Secure challenge when calling `next` in the {@link ThreeDSecure#event:lookup-complete|`lookup-complete` event}.
* @property {object} threeDSecureInfo Contains liability shift details.
* @property {boolean} threeDSecureInfo.liabilityShiftPossible Indicates whether the card was eligible for 3D Secure.
* @property {boolean} threeDSecureInfo.liabilityShifted Indicates whether the liability for fraud has been shifted away from the merchant.
* @property {object} paymentMethod A {@link ThreeDSecure~verifyPayload|verifyPayload} object.
* @property {object} lookup Details about the 3D Secure lookup.
* @property {string} lookup.threeDSecureVersion The version of 3D Secure that will be used for the 3D Secure challenge.
*/
/**
* @typedef {object} ThreeDSecure~billingAddress
* @property {string} [givenName] The first name associated with the billing address.
* @property {string} [surname] The last name associated with the billing address.
* @property {string} [phoneNumber] The phone number associated with the billing address. Only numbers; remove dashes, parenthesis and other characters.
* @property {string} [streetAddress] Line 1 of the billing address (eg. number, street, etc).
* @property {string} [extendedAddress] Line 2 of the billing address (eg. suite, apt #, etc.).
* @property {string} [line3] Line 3 of the billing address if needed (eg. suite, apt #, etc).
* @property {string} [locality] The locality (city) name associated with the billing address.
* @property {string} [region] The 2 letter code for US states, and the equivalent for other countries.
* @property {string} [postalCode] The zip code or equivalent for countries that have them.
* @property {string} [countryCodeAlpha2] The 2 character country code.
*/
/**
* @typedef {object} ThreeDSecure~additionalInformation
* @property {string} [workPhoneNumber] The work phone number used for verification. Only numbers; remove dashes, parenthesis and other characters.
* @property {string} [shippingGivenName] The first name associated with the shipping address.
* @property {string} [shippingSurname] The last name associated with the shipping address.
* @property {object} [shippingAddress]
* @property {string} [shippingAddress.streetAddress] The first name associated with the shipping address.
* @property {string} [shippingAddress.extendedAddress] The last name associated with the shipping address.
* @property {string} [shippingAddress.line3] Line 3 of the shipping address if needed (eg. suite, apt #, etc).
* @property {string} [shippingAddress.locality] The locality (city) name associated with the shipping address.
* @property {string} [shippingAddress.region] The 2 letter code for US states, and the equivalent for other countries.
* @property {string} [shippingAddress.postalCode] The zip code or equivalent for countries that have them.
* @property {string} [shippingAddress.countryCodeAlpha2] The 2 character country code.
* @property {string} [shippingPhone] The phone number associated with the shipping address. Only numbers; remove dashes, parenthesis and other characters.
* @property {string} [shippingMethod] The 2-digit string indicating the name of the shipping method chosen for the transaction. Possible values:
* - `01` Same Day
* - `02` Overnight / Expedited
* - `03` Priority (2-3 Days)
* - `04` Ground
* - `05` Electronic Delivery
* - `06` Ship to Store
 * @property {string} [shippingMethodIndicator] The 2-digit string indicating the shipping method chosen for the transaction. Possible values:
* - `01` Ship to cardholder billing address
* - `02` Ship to another verified address on file with merchant
* - `03` Ship to address that is different from billing address
* - `04` Ship to store (store address should be populated on request)
* - `05` Digital goods
* - `06` Travel and event tickets, not shipped
* - `07` Other
* @property {string} [productCode] The 3-letter string representing the merchant product code. Possible values:
* - `AIR` Airline
* - `GEN` General Retail
* - `DIG` Digital Goods
* - `SVC` Services
* - `RES` Restaurant
* - `TRA` Travel
* - `DSP` Cash Dispensing
* - `REN` Car Rental
* - `GAS` Fuel
* - `LUX` Luxury Retail
* - `ACC` Accommodation Retail
* - `TBD` Other
* @property {string} [deliveryTimeframe] The 2-digit number indicating the delivery time frame. Possible values:
* - `01` Electronic delivery
* - `02` Same day shipping
* - `03` Overnight shipping
* - `04` Two or more day shipping
* @property {string} [deliveryEmail] For electronic delivery, email address to which the merchandise was delivered.
 * @property {string} [reorderindicator] The 2-digit number indicating whether the cardholder is reordering previously purchased merchandise. Possible values:
* - `01` First time ordered
* - `02` Reordered
 * @property {string} [preorderIndicator] The 2-digit number indicating whether the cardholder is placing an order with a future availability or release date. Possible values:
* - `01` Merchandise available
* - `02` Future availability
* @property {string} [preorderDate] The 8-digit number (format: YYYYMMDD) indicating expected date that a pre-ordered purchase will be available.
* @property {string} [giftCardAmount] The purchase amount total for prepaid gift cards in major units.
* @property {string} [giftCardCurrencyCode] ISO 4217 currency code for the gift card purchased.
* @property {string} [giftCardCount] Total count of individual prepaid gift cards purchased.
* @property {string} [accountAgeIndicator] The 2-digit value representing the length of time cardholder has had account. Possible values:
* - `01` No Account
* - `02` Created during transaction
* - `03` Less than 30 days
* - `04` 30-60 days
* - `05` More than 60 days
* @property {string} [accountCreateDate] The 8-digit number (format: YYYYMMDD) indicating the date the cardholder opened the account.
* @property {string} [accountChangeIndicator] The 2-digit value representing the length of time since the last change to the cardholder account. This includes shipping address, new payment account or new user added. Possible values:
* - `01` Changed during transaction
* - `02` Less than 30 days
* - `03` 30-60 days
* - `04` More than 60 days
* @property {string} [accountChangeDate] The 8-digit number (format: YYYYMMDD) indicating the date the cardholder's account was last changed. This includes changes to the billing or shipping address, new payment accounts or new users added.
* @property {string} [accountPwdChangeIndicator] The 2-digit value representing the length of time since the cardholder changed or reset the password on the account. Possible values:
* - `01` No change
* - `02` Changed during transaction
* - `03` Less than 30 days
* - `04` 30-60 days
* - `05` More than 60 days
* @property {string} [accountPwdChangeDate] The 8-digit number (format: YYYYMMDD) indicating the date the cardholder last changed or reset password on account.
* @property {string} [shippingAddressUsageIndicator] The 2-digit value indicating when the shipping address used for transaction was first used. Possible values:
* - `01` This transaction
* - `02` Less than 30 days
* - `03` 30-60 days
* - `04` More than 60 days
* @property {string} [shippingAddressUsageDate] The 8-digit number (format: YYYYMMDD) indicating the date when the shipping address used for this transaction was first used.
* @property {string} [transactionCountDay] Number of transactions (successful or abandoned) for this cardholder account within the last 24 hours.
* @property {string} [transactionCountYear] Number of transactions (successful or abandoned) for this cardholder account within the last year.
* @property {string} [addCardAttempts] Number of add card attempts in the last 24 hours.
* @property {string} [accountPurchases] Number of purchases with this cardholder account during the previous six months.
* @property {string} [fraudActivity] The 2-digit value indicating whether the merchant experienced suspicious activity (including previous fraud) on the account. Possible values:
* - `01` No suspicious activity
* - `02` Suspicious activity observed
* @property {string} [shippingNameIndicator] The 2-digit value indicating if the cardholder name on the account is identical to the shipping name used for the transaction. Possible values:
* - `01` Account and shipping name identical
* - `02` Account and shipping name differ
* @property {string} [paymentAccountIndicator] The 2-digit value indicating the length of time that the payment account was enrolled in the merchant account. Possible values:
* - `01` No account (guest checkout)
* - `02` During the transaction
* - `03` Less than 30 days
* - `04` 30-60 days
* - `05` More than 60 days
* @property {string} [paymentAccountAge] The 8-digit number (format: YYYYMMDD) indicating the date the payment account was added to the cardholder account.
* @property {string} [acsWindowSize] The 2-digit number to set the challenge window size to display to the end cardholder. The ACS will reply with content that is formatted appropriately to this window size to allow for the best user experience. The sizes are width x height in pixels of the window displayed in the cardholder browser window. Possible values:
* - `01` 250x400
* - `02` 390x400
* - `03` 500x600
* - `04` 600x400
* - `05` Full page
* @property {string} [sdkMaxTimeout] The 2-digit number of minutes (minimum 05) to set the maximum amount of time for all 3DS 2.0 messages to be communicated between all components.
* @property {string} [addressMatch] The 1-character value (Y/N) indicating whether cardholder billing and shipping addresses match.
* @property {string} [accountId] Additional cardholder account information.
* @property {string} [ipAddress] The IP address of the consumer. IPv4 and IPv6 are supported.
* @property {string} [orderDescription] Brief description of items purchased.
* @property {string} [taxAmount] Unformatted tax amount without any decimalization (ie. $123.67 = 12367).
* @property {string} [userAgent] The exact content of the HTTP user agent header.
* @property {string} [authenticationIndicator] The 2-digit number indicating the type of authentication request. Possible values:
* - `02` Recurring
* - `03` Installment
* @property {string} [installment] An integer value greater than 1 indicating the maximum number of permitted authorizations for installment payments.
* @property {string} [purchaseDate] The 14-digit number (format: YYYYMMDDHHMMSS) indicating the date in UTC of original purchase.
* @property {string} [recurringEnd] The 8-digit number (format: YYYYMMDD) indicating the date after which no further recurring authorizations should be performed.
* @property {string} [recurringFrequency] Integer value indicating the minimum number of days between recurring authorizations. A frequency of monthly is indicated by the value 28. Multiple of 28 days will be used to indicate months (ex. 6 months = 168).
*/
/**
* @name ThreeDSecure#on
* @function
* @param {string} event The name of the event to which you are subscribing.
* @param {function} handler A callback to handle the event.
* @description Subscribes a handler function to a named event.
* @example
* <caption>Listening to a 3D Secure event</caption>
* braintree.threeDSecure.create({ ... }, function (createErr, threeDSecureInstance) {
* threeDSecureInstance.on('lookup-complete', function (data, next) {
 *     console.log(data);
* next();
* });
* });
* @returns {void}
*/
/**
* @name ThreeDSecure#off
* @function
* @param {string} event The name of the event to which you are unsubscribing.
* @param {function} handler The callback for the event you are unsubscribing from.
* @description Unsubscribes the handler function to a named event.
* @example
 * <caption>Subscribing and then unsubscribing from a 3D Secure event</caption>
* braintree.threeDSecure.create({ ... }, function (createErr, threeDSecureInstance) {
* var callback = function (data, next) {
* console.log(data);
* next();
* };
*
* threeDSecureInstance.on('lookup-complete', callback);
*
* // later on
* threeDSecureInstance.off('lookup-complete', callback);
* });
* @returns {void}
*/
/**
* This event is emitted when the `2-inline-iframe` version is specified when creating the 3D Secure instance and the authentication iframe becomes available.
* @event ThreeDSecure#authentication-iframe-available
* @type {object}
* @example
* <caption>Listening for the authentication iframe to be available</caption>
* threeDSecureInstance.on('authentication-iframe-available', function (event, next) {
* document.body.appendChild(event.element); // add iframe element to page
*
* next(); // let the SDK know the iframe is ready
 * });
*/
/**
 * This event is emitted when using the 3D Secure 2.0 flow and the initial lookup request completes. If this is not used, an `onLookupComplete` callback must be passed into the `verifyCard` method.
* @event ThreeDSecure#lookup-complete
* @type {object}
* @example
* <caption>Listening for when the lookup request is complete</caption>
* braintree.threeDSecure.create({
* client: clientInstance,
* version: '2-inline-iframe'
* }, function (createErr, threeDSecureInstance) {
* threeDSecureInstance.on('lookup-complete', function (data, next) {
* // inspect the data
*
* // call next when ready to proceed with the challenge
* next();
* });
* });
*/
/**
* @class
* @param {object} options 3D Secure {@link module:braintree-web/three-d-secure.create create} options
* @description <strong>Do not use this constructor directly. Use {@link module:braintree-web/three-d-secure.create|braintree.threeDSecure.create} instead.</strong>
* @classdesc This class represents a ThreeDSecure component produced by {@link module:braintree-web/three-d-secure.create|braintree.threeDSecure.create}. Instances of this class have a method for launching a 3D Secure authentication flow.
*
* **Note**: 3D Secure 2.0 is documented below and will become the default integration method in a future version of Braintree-web. Until then, version 1.0 will continue to be supported. To view 3D Secure 1.0 documentation, look at Braintree-web documentation from version [3.40.0](https://braintree.github.io/braintree-web/3.40.0/ThreeDSecure.html) and earlier, or upgrade your integration by referring to the [3D Secure 2.0 adoption guide](https://developers.braintreepayments.com/guides/3d-secure/migration/javascript/v3).
*/
function | (options) {
var self = this;
var Framework = FRAMEWORKS[options.framework];
EventEmitter.call(this);
this._framework = new Framework(options);
this._framework.setUpEventListeners(function () {
self._emit.apply(self, arguments);
});
}
EventEmitter.createChild(ThreeDSecure);
/**
* Launch the 3D Secure login flow, returning a nonce payload.
*
* @public
* @param {object} options Options for card verification.
* @param {string} options.nonce The nonce representing the card from a tokenization payload. For example, this can be a {@link HostedFields~tokenizePayload|tokenizePayload} returned by Hosted Fields under `payload.nonce`.
* @param {string} options.bin The numeric Bank Identification Number (bin) of the card from a tokenization payload. For example, this can be a {@link HostedFields~tokenizePayload|tokenizePayload} returned by Hosted Fields under `payload.details.bin`.
* @param {string} options.amount The amount of the transaction in the current merchant account's currency. This must be expressed in numbers with an optional decimal (using `.`) and precision up to the hundredths place. For example, if you're processing a transaction for 1.234,56 € then `amount` should be `1234.56`.
* @param {boolean} [options.challengeRequested] If set to true, an authentication challenge will be forced if possible.
* @param {boolean} [options.exemptionRequested] If set to true, an exemption to the authentication challenge will be requested.
* @param {function} [options.onLookupComplete] *Deprecated:* Use {@link ThreeDSecure#event:lookup-complete|`threeDSecureInstance.on('lookup-complete')`} instead. Function to execute when lookup completes. The first argument, `data`, is a {@link ThreeDSecure~verificationData|verificationData} object, and the second argument, `next`, is a callback. `next` must be called to continue.
* @param {string} [options.email] The email used for verification.
* @param {string} [options.mobilePhoneNumber] The mobile phone number used for verification. Only numbers; remove dashes, parenthesis and other characters.
* @param {object} [options.billingAddress] An {@link ThreeDSecure~billingAddress|billingAddress} object for verification.
* @param {object} [options.additionalInformation] An {@link ThreeDSecure~additionalInformation|additionalInformation} object for verification.
* @param {object} [options.customer] **Deprecated** Customer information for use in 3DS 1.0 verifications. Can contain any subset of a {@link ThreeDSecure~verifyCardCustomerObject|verifyCardCustomerObject}. Only to be used for 3DS 1.0 integrations.
* @param {callback} options.addFrame **Deprecated** This {@link ThreeDSecure~addFrameCallback|addFrameCallback} will be called when the bank frame needs to be added to your page. Only to be used for 3DS 1.0 integrations.
* @param {callback} options.removeFrame **Deprecated** For use in 3DS 1.0 Flows. This {@link ThreeDSecure~removeFrameCallback|removeFrameCallback} will be called when the bank frame needs to be removed from your page. Only to be used in 3DS 1.0 integrations.
* @param {callback} [callback] The second argument, <code>data</code>, is a {@link ThreeDSecure~verifyPayload|verifyPayload}. If no callback is provided, it will return a promise that resolves {@link ThreeDSecure~verifyPayload|verifyPayload}.
* @returns {(Promise|void)} Returns a promise if no callback is provided.
* @example
* <caption>Verifying a payment method nonce with 3DS 2.0</caption>
* var my3DSContainer;
*
* // set up listener after initialization
 * threeDSecure.on('lookup-complete', function (data, next) {
* // use `data` here, then call `next()`
* next();
* });
*
* // call verifyCard after tokenizing a card
* threeDSecure.verifyCard({
* amount: '123.45',
* nonce: hostedFieldsTokenizationPayload.nonce,
* bin: hostedFieldsTokenizationPayload.details.bin,
 *   email: '[email protected]',
* billingAddress: {
* givenName: 'Jill',
* surname: 'Doe',
* phoneNumber: '8101234567',
* streetAddress: '555 Smith St.',
* extendedAddress: '#5',
* locality: 'Oakland',
* region: 'CA',
* postalCode: '12345',
* countryCodeAlpha2: 'US'
* },
* additionalInformation: {
* workPhoneNumber: '5555555555',
* shippingGivenName: 'Jill',
* shippingSurname: 'Doe',
* shippingAddress: {
* streetAddress: '555 Smith st',
* extendedAddress: '#5',
* locality: 'Oakland',
* region: 'CA',
* postalCode: '12345',
* countryCodeAlpha2: 'US'
 *     },
* shippingPhone: '8101234567'
* }
* }, function (err, payload) {
* if (err) {
* console.error(err);
* return;
* }
*
* if (payload.liabilityShifted) {
* // Liability has shifted
* submitNonceToServer(payload.nonce);
* } else if (payload.liabilityShiftPossible) {
* // Liability may still be shifted
* // Decide if you want to submit the nonce
* } else {
* // Liability has not shifted and will not shift
* // Decide if you want to submit the nonce
* }
* });
 * @example
 * <caption>Verifying a payment method nonce with 3DS 2.0 with an onLookupComplete callback</caption>
* var my3DSContainer;
*
* threeDSecure.verifyCard({
* amount: '123.45',
* nonce: hostedFieldsTokenizationPayload.nonce,
* bin: hostedFieldsTokenizationPayload.details.bin,
 *   email: '[email protected]',
* billingAddress: {
* givenName: 'Jill',
* surname: 'Doe',
* phoneNumber: '8101234567',
* streetAddress: '555 Smith St.',
* extendedAddress: '#5',
* locality: 'Oakland',
* region: 'CA',
* postalCode: '12345',
* countryCodeAlpha2: 'US'
* },
* additionalInformation: {
* workPhoneNumber: '5555555555',
* shippingGivenName: 'Jill',
* shippingSurname: 'Doe',
* shippingAddress: {
* streetAddress: '555 Smith st',
* extendedAddress: '#5',
* locality: 'Oakland',
* region: 'CA',
* postalCode: '12345',
* countryCodeAlpha2: 'US'
 *     },
* shippingPhone: '8101234567'
* },
* onLookupComplete: function (data, next) {
* // use `data` here, then call `next()`
* next();
* }
* }, function (err, payload) {
* if (err) {
* console.error(err);
* return;
* }
*
* if (payload.liabilityShifted) {
* // Liability has shifted
* submitNonceToServer(payload.nonce);
* } else if (payload.liabilityShiftPossible) {
* // Liability may still be shifted
* // Decide if you want to submit the nonce
* } else {
* // Liability has not shifted and will not shift
* // Decide if you want to submit the nonce
* }
* });
* @example
* <caption>Handling 3DS lookup errors</caption>
* var my3DSContainer;
*
* // set up listener after initialization
 * threeDSecure.on('lookup-complete', function (data, next) {
* // use `data` here, then call `next()`
* next();
* });
*
* // call verifyCard after tokenizing a card
* threeDSecure.verifyCard({
* amount: '123.45',
* nonce: hostedFieldsTokenizationPayload.nonce,
* bin: hostedFieldsTokenizationPayload.details.bin,
* email: '[email protected]',
* billingAddress: billingAddressFromCustomer,
* additionalInformation: additionalInfoFromCustomer
* }, function (err, payload) {
* if (err) {
* if (err.code.indexOf('THREEDS_LOOKUP') === 0) {
* // an error occurred during the initial lookup request
*
* if (err.code === 'THREEDS_LOOKUP_TOKENIZED_CARD_NOT_FOUND_ERROR') {
* // either the passed payment method nonce does not exist
* // or it was already consumed before the lookup call was made
* } else if (err.code.indexOf('THREEDS_LOOKUP_VALIDATION') === 0) {
* // a validation error occurred
* // likely some non-ascii characters were included in the billing
* // address given name or surname fields, or the cardholdername field
*
* // Instruct your user to check their data and try again
* } else {
* // an unknown lookup error occurred
* }
* } else {
* // some other kind of error
* }
* return;
* }
*
* // handle success
* });
* @example
* <caption>Deprecated: Verifying an existing nonce with 3DS 1.0</caption>
* var my3DSContainer;
*
* threeDSecure.verifyCard({
* nonce: existingNonce,
* amount: 123.45,
* addFrame: function (err, iframe) {
* // Set up your UI and add the iframe.
* my3DSContainer = document.createElement('div');
* my3DSContainer.appendChild(iframe);
* document.body.appendChild(my3DSContainer);
* },
* removeFrame: function () {
* // Remove UI that you added in addFrame.
* document.body.removeChild(my3DSContainer);
* }
* }, function (err, payload) {
* if (err) {
* console.error(err);
* return;
* }
*
* if (payload.liabilityShifted) {
* // Liability has shifted
* submitNonceToServer(payload.nonce);
* } else if (payload.liabilityShiftPossible) {
* // Liability may still be shifted
* // Decide if you want to submit the nonce
* } else {
* // Liability has not shifted and will not shift
* // Decide if you want to submit the nonce
* }
* });
*/
ThreeDSecure.prototype.verifyCard = function (options) {
var privateOptions;
if (this.hasListener('lookup-complete')) {
privateOptions = {
ignoreOnLookupCompleteRequirement: true
};
}
return this._framework.verifyCard(options, privateOptions);
};
/* eslint-disable-next-line valid-jsdoc */
/**
* Launch the iframe challenge using a 3D Secure lookup response from a server side lookup.
*
* @public
* @param {(object|string)} lookupResponse The lookup response from the server side call to lookup the 3D Secure information. The raw string or a parsed object can be passed.
* @returns {Promise} Returns a promise.
* @example
* var my3DSContainer;
*
* threeDSecure.initializeChallengeWithLookupResponse(lookupResponseFromServer).then(function (payload) {
* if (payload.liabilityShifted) {
* // Liability has shifted
* submitNonceToServer(payload.nonce);
* } else if (payload.liabilityShiftPossible) {
* // Liability may still be shifted
* // Decide if you want to submit the nonce
* } else {
* // Liability has not shifted and will not shift
* // Decide if you want to submit the nonce
* }
* });
*/
ThreeDSecure.prototype.initializeChallengeWithLookupResponse = function (lookupResponse) {
if (typeof lookupResponse === 'string') {
lookupResponse = JSON.parse(lookupResponse);
}
return this._framework.initializeChallengeWithLookupResponse(lookupResponse);
};
/**
* Gather the data needed for a 3D Secure lookup call.
*
* @public
* @param {object} options Options for 3D Secure lookup.
* @param {string} options.nonce The nonce representing the card from a tokenization payload. For example, this can be a {@link HostedFields~tokenizePayload|tokenizePayload} returned by Hosted Fields under `payload.nonce`.
* @param {string} [options.bin] The numeric Bank Identification Number (bin) of the card from a tokenization payload. For example, this can be a {@link HostedFields~tokenizePayload|tokenizePayload} returned by Hosted Fields under `payload.details.bin`. Though not required to start the verification, it is required to receive a 3DS 2.0 lookup response.
* @param {callback} [callback] The second argument, <code>data</code>, is a {@link ThreeDSecure~prepareLookupPayload|prepareLookupPayload}. If no callback is provided, it will return a promise that resolves {@link ThreeDSecure~prepareLookupPayload|prepareLookupPayload}.
* @returns {(Promise|void)} Returns a promise if no callback is provided.
* @example
* <caption>Preparing data for a 3D Secure lookup</caption>
* threeDSecure.prepareLookup({
* nonce: hostedFieldsTokenizationPayload.nonce,
* bin: hostedFieldsTokenizationPayload.details.bin
* }, function (err, payload) {
* if (err) {
* console.error(err);
* return;
* }
*
* // send payload to server to do server side lookup
* });
*/
ThreeDSecure.prototype.prepareLookup = function (options) {
return this._framework.prepareLookup(options).then(function (data) {
return JSON.stringify(data);
});
};
/**
* Cancel the 3DS flow and return the verification payload if available. If using 3D Secure version 2, this will not close the UI of the authentication modal. It is recommended that this method only be used in the {@link ThreeDSecure#event:lookup-complete|`lookup-complete`} event or the `onLookupComplete` callback.
* @public
* @param {callback} [callback] The second argument is a {@link ThreeDSecure~verifyPayload|verifyPayload}. If there is no verifyPayload (the initial lookup did not complete), an error will be returned. If no callback is passed, `cancelVerifyCard` will return a promise.
* @returns {(Promise|void)} Returns a promise if no callback is provided.
* @example <caption>Cancel the verification in `lookup-complete` event</caption>
* // set up listener after instantiation
* threeDSecure.on('lookup-complete', function (data, next) {
* // determine if you want to call next to start the challenge,
* // if not, call cancelVerifyCard
* threeDSecure.cancelVerifyCard(function (err, verifyPayload) {
* if (err) {
* // Handle error
* console.log(err.message); // No verification payload available
* return;
* }
*
* verifyPayload.nonce; // The nonce returned from the 3ds lookup call
* verifyPayload.liabilityShifted; // boolean
* verifyPayload.liabilityShiftPossible; // boolean
* });
* });
*
* // after tokenizing a credit card
* threeDSecure.verifyCard({
* amount: '100.00',
* nonce: nonceFromTokenizationPayload,
* bin: binFromTokenizationPayload
* // other fields such as billing address
* }, function (verifyError, payload) {
* if (verifyError) {
 *     if (verifyError.code === 'THREEDS_VERIFY_CARD_CANCELED_BY_MERCHANT') {
* // flow was cancelled by merchant, 3ds info can be found in the payload
* // for cancelVerifyCard
* }
* }
* });
* @example <caption>Cancel the verification in onLookupComplete callback</caption>
* threeDSecure.verifyCard({
* amount: '100.00',
* nonce: nonceFromTokenizationPayload,
* bin: binFromTokenizationPayload,
* // other fields such as billing address
* onLookupComplete: function (data, next) {
* // determine if you want to call next to start the challenge,
* // if not, call cancelVerifyCard
* threeDSecure.cancelVerifyCard(function (err, verifyPayload) {
* if (err) {
* // Handle error
* console.log(err.message); // No verification payload available
* return;
* }
*
* verifyPayload.nonce; // The nonce returned from the 3ds lookup call
* verifyPayload.liabilityShifted; // boolean
* verifyPayload.liabilityShiftPossible; // boolean
* });
* }
* }, function (verifyError, payload) {
* if (verifyError) {
 *     if (verifyError.code === 'THREEDS_VERIFY_CARD_CANCELED_BY_MERCHANT') {
* // flow was cancelled by merchant, 3ds info can be found in the payload
* // for cancelVerifyCard
* }
* }
* });
* @example <caption>Cancel the verification in 3D Secure version 1</caption>
* // unlike with v2, this will not cause `verifyCard` to error, it will simply
* // never call the callback
* threeDSecure.cancelVerifyCard(function (err, verifyPayload) {
* if (err) {
* // Handle error
* console.log(err.message); // No verification payload available
* return;
* }
*
* verifyPayload.nonce; // The nonce returned from the 3ds lookup call
* verifyPayload.liabilityShifted; // boolean
* verifyPayload.liabilityShiftPossible; // boolean
* });
*/
ThreeDSecure.prototype.cancelVerifyCard = function () {
return this._framework.cancelVerifyCard();
};
/**
* Cleanly remove anything set up by {@link module:braintree-web/three-d-secure.create|create}.
* @public
* @param {callback} [callback] Called on completion. If no callback is passed, `teardown` will return a promise.
* @example
* threeDSecure.teardown();
* @example <caption>With callback</caption>
* threeDSecure.teardown(function () {
* // teardown is complete
* });
* @returns {(Promise|void)} Returns a promise if no callback is provided.
*/
ThreeDSecure.prototype.teardown = function () {
var methodNames = methods(ThreeDSecure.prototype).concat(methods(EventEmitter.prototype));
convertMethodsToError(this, methodNames);
return this._framework.teardown();
};
module.exports = wrapPromise.wrapPrototype(ThreeDSecure);
| ThreeDSecure |
get_score_string.test.ts | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { getScoreString } from './score_health';
describe('get_score_string', () => {
test('it rounds up to 1 from 0.3', () => {
const score = getScoreString(0.3);
expect(score).toEqual('1');
});
test('it rounds up to 1 from 0.000000001', () => {
const score = getScoreString(0.000000001);
expect(score).toEqual('1');
});
test('0 is 0', () => {
const score = getScoreString(0);
expect(score).toEqual('0');
});
test('99.1 is 100', () => {
const score = getScoreString(99.1);
expect(score).toEqual('100');
});
test('100 is 100', () => {
const score = getScoreString(100);
expect(score).toEqual('100');
}); | }); |
|
daemon.go | package daemon
import (
"fmt"
"io/ioutil"
"path/filepath"
"strings"
"github.com/coreos/go-systemd/login1"
ignv2_2types "github.com/coreos/ignition/config/v2_2/types"
"github.com/golang/glog"
mcfgv1 "github.com/openshift/machine-config-operator/pkg/apis/machineconfiguration.openshift.io/v1"
mcfgclientset "github.com/openshift/machine-config-operator/pkg/generated/clientset/versioned"
mcfgclientv1 "github.com/openshift/machine-config-operator/pkg/generated/clientset/versioned/typed/machineconfiguration.openshift.io/v1"
"github.com/vincent-petithory/dataurl"
drain "github.com/wking/kubernetes-drain"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
// Daemon is the dispatch point for the functions of the agent on the
// machine. It keeps track of connections and the current state of the update
// process.
type Daemon struct {
// name is the node name.
name string
// namespace is where the daemon looks for machineconfigs.
namespace string
// login client talks to the systemd-logind service for rebooting the
// machine
loginClient *login1.Conn
client mcfgclientset.Interface
// kubeClient allows interaction with Kubernetes, including the node we are running on.
kubeClient kubernetes.Interface
// prefix is where the root filesystem is mounted
prefix string
}
const (
pathSystemd = "/etc/systemd/system"
pathDevNull = "/dev/null"
)
// New sets up the systemd and kubernetes connections needed to update the
// machine.
func New(
rootPrefix string,
nodeName string,
targetNamespace string,
client mcfgclientset.Interface,
kubeClient kubernetes.Interface,
) (*Daemon, error) {
loginClient, err := login1.New()
if err != nil {
return nil, fmt.Errorf("Error establishing connection to logind dbus: %v", err)
}
if err := loadNodeAnnotations(kubeClient.CoreV1().Nodes(), nodeName); err != nil {
return nil, err
}
return &Daemon{
name: nodeName,
namespace: targetNamespace,
loginClient: loginClient,
client: client,
kubeClient: kubeClient,
prefix: rootPrefix,
}, nil
}
// Run watches the annotations on the machine until they indicate that we need
// an update. Then it triggers an update of the machine. Currently, the update
// function shouldn't return, and should just reboot the node, unless an error
// occurs, in which case it will return the error up the call stack.
func (dn *Daemon) Run(stop <-chan struct{}) error {
glog.Info("Starting MachineConfigDameon")
defer glog.Info("Shutting down MachineConfigDameon")
err := dn.syncOnce()
if err != nil {
return setUpdateDegraded(dn.kubeClient.CoreV1().Nodes(), dn.name)
}
return nil
}
// syncOnce only completes once.
func (dn *Daemon) syncOnce() error {
// validate that the machine correctly made it to the target state
status, err := dn.validate()
if err != nil {
return err
}
if !status {
return dn.triggerUpdate()
}
if err := setUpdateDone(dn.kubeClient.CoreV1().Nodes(), dn.name); err != nil {
return err
}
node, err := dn.kubeClient.CoreV1().Nodes().Get(dn.name, metav1.GetOptions{})
if err != nil {
return err
}
err = drain.Uncordon(dn.kubeClient.CoreV1().Nodes(), node, nil)
if err != nil {
return err
}
// watch the annotations.
err = waitUntilUpdate(dn.kubeClient.CoreV1().Nodes(), dn.name)
if err != nil {
return fmt.Errorf("Failed to wait until update request: %v", err)
}
return dn.triggerUpdate()
}
// triggerUpdate starts the update using the current and the target config.
func (dn *Daemon) triggerUpdate() error {
if err := setUpdateWorking(dn.kubeClient.CoreV1().Nodes(), dn.name); err != nil {
return err
}
ccAnnotation, err := getNodeAnnotation(dn.kubeClient.CoreV1().Nodes(), dn.name, CurrentMachineConfigAnnotationKey)
if err != nil {
return err
}
dcAnnotation, err := getNodeAnnotation(dn.kubeClient.CoreV1().Nodes(), dn.name, DesiredMachineConfigAnnotationKey)
if err != nil {
return err
}
currentConfig, err := getMachineConfig(dn.client.MachineconfigurationV1().MachineConfigs(dn.namespace), ccAnnotation)
if err != nil {
return err
}
desiredConfig, err := getMachineConfig(dn.client.MachineconfigurationV1().MachineConfigs(dn.namespace), dcAnnotation)
if err != nil {
return err
}
// run the update process. This function doesn't currently return.
return dn.update(currentConfig, desiredConfig)
}
// validate confirms that the node is actually in the state that it wants to be
// in. It does this by looking at the elements in the target config and checking
// that all are present on the node. If any file/unit is missing or there is a
// mismatch, it re-triggers the update.
func (dn *Daemon) validate() (bool, error) {
ccAnnotation, err := getNodeAnnotation(dn.kubeClient.CoreV1().Nodes(), dn.name, CurrentMachineConfigAnnotationKey)
if err != nil {
return false, err
}
dcAnnotation, err := getNodeAnnotation(dn.kubeClient.CoreV1().Nodes(), dn.name, DesiredMachineConfigAnnotationKey)
if err != nil {
return false, err
}
// If the current annotation is equal to the desired annotation,
// the system state is valid.
if strings.Compare(dcAnnotation, ccAnnotation) == 0 {
return true, nil
}
desiredConfig, err := getMachineConfig(dn.client.MachineconfigurationV1().MachineConfigs(dn.namespace), dcAnnotation)
if err != nil {
return false, err
}
if dn.checkFiles(desiredConfig.Spec.Config.Storage.Files) &&
dn.checkUnits(desiredConfig.Spec.Config.Systemd.Units) {
return true, nil
}
// error is nil, as we successfully decided that validate is false
return false, nil
}
// checkUnits validates the contents of all the units in the
// target config.
func (dn *Daemon) checkUnits(units []ignv2_2types.Unit) bool {
for _, u := range units {
for j := range u.Dropins {
path := filepath.Join(dn.prefix, pathSystemd, u.Name+".d", u.Dropins[j].Name)
if status := checkFileContents(path, u.Dropins[j].Contents); !status {
return false
}
}
if u.Contents == "" {
continue
}
path := filepath.Join(dn.prefix, pathSystemd, u.Name)
if u.Mask {
link, err := filepath.EvalSymlinks(path)
if err != nil {
glog.Errorf("state validation: error while evaluation symlink for path: %q, err: %v", path, err)
return false
}
if strings.Compare(pathDevNull, link) != 0 {
glog.Errorf("state validation: invalid unit masked setting. path: %q; expected: %v; received: %v", path, pathDevNull, link)
return false
}
}
if status := checkFileContents(path, u.Contents); !status {
return false
}
}
return true
}
// checkFiles validates the contents of all the files in the
// target config.
func (dn *Daemon) checkFiles(files []ignv2_2types.File) bool {
for _, f := range files {
path := filepath.Join(dn.prefix, f.Path)
contents, err := dataurl.DecodeString(f.Contents.Source)
if err != nil {
glog.Errorf("couldn't parse file: %v", err)
return false
}
if status := checkFileContents(path, string(contents.Data)); !status {
return false
}
}
return true
}
// checkFileContents reads the file from the filepath and compares its contents
// with the expectedContent. It logs an error in case of an error or mismatch
// and returns the status of the evaluation.
func checkFileContents(filePath, expectedContent string) bool {
contents, err := ioutil.ReadFile(filePath)
if err != nil {
glog.Errorf("could not read file: %q, error: %v", filePath, err)
return false
}
if strings.Compare(string(contents), expectedContent) != 0 {
glog.Errorf("content mismatch for file: %q; expected: %v; received: %v", filePath, expectedContent, string(contents))
return false
}
return true
}
// Close closes all the connections the node agent has open for its lifetime
func (dn *Daemon) Close() {
dn.loginClient.Close()
}
func getMachineConfig(client mcfgclientv1.MachineConfigInterface, name string) (*mcfgv1.MachineConfig, error) | {
return client.Get(name, metav1.GetOptions{})
} |
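// A hedged usage sketch (not part of the original file): wiring New, Run and
// Close together from an agent entry point. The namespace string and the
// pre-built clientsets are illustrative assumptions.
//
// dn, err := daemon.New("/rootfs", nodeName, "openshift-machine-config", mcfgClient, kubeClient)
// if err != nil {
//     glog.Fatalf("failed to create daemon: %v", err)
// }
// defer dn.Close()
// if err := dn.Run(make(chan struct{})); err != nil {
//     glog.Fatalf("daemon exited: %v", err)
// }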
|
xml2js.d.ts | // Type definitions for node-xml2js
// Project: https://github.com/Leonidas-from-XIV/node-xml2js
// Definitions by: Michel Salib <https://github.com/michelsalib>, Jason McNeil <https://github.com/jasonrm>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare module 'xml2js' {
export = xml2js;
module xml2js {
function parseString(xml: string, callback: (err: any, result: any) => void): void;
function parseString(xml: string, options: Options, callback: (err: any, result: any) => void): void;
class | {
constructor(options?: BuilderOptions);
buildObject(rootObj: any): string;
}
interface RenderOptions {
indent?: string;
newline?: string;
pretty?: boolean;
}
interface XMLDeclarationOptions {
encoding?: string;
standalone?: boolean;
version?: string;
}
interface BuilderOptions {
doctype?: any;
headless?: boolean;
indent?: string;
newline?: string;
pretty?: boolean;
renderOpts?: RenderOptions;
rootName?: string;
xmldec?: XMLDeclarationOptions;
}
interface Options {
async?: boolean;
attrkey?: string;
attrNameProcessors?: (name: string) => string;
charkey?: string;
charsAsChildren?: boolean;
childkey?: string;
emptyTag?: any;
explicitArray?: boolean;
explicitCharkey?: boolean;
explicitChildren?: boolean;
explicitRoot?: boolean;
ignoreAttrs?: boolean;
mergeAttrs?: boolean;
normalize?: boolean;
normalizeTags?: boolean;
strict?: boolean;
tagNameProcessors?: (name: string) => string;
trim?: boolean;
validator?: Function;
xmlns?: boolean;
}
}
}
| Builder |
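// A hedged usage sketch based only on the declarations above (the XML payload
// and option values are illustrative):
//
// import xml2js = require('xml2js');
//
// xml2js.parseString('<root><a>1</a></root>', { explicitArray: false }, (err, result) => {
//     if (err) throw err;
//     console.log(result.root.a); // '1'
// });
//
// const builder = new xml2js.Builder({ rootName: 'root', renderOpts: { pretty: true } });
// console.log(builder.buildObject({ a: 1 }));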
cell_options_test.go | // Copyright 2019 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package termbox
import (
"testing"
"github.com/mum4k/termdash/cell"
tbx "github.com/nsf/termbox-go"
)
func TestCellColor(t *testing.T) {
tests := []struct {
color cell.Color
want tbx.Attribute
}{
{cell.ColorDefault, tbx.ColorDefault},
{cell.ColorBlack, tbx.ColorBlack},
{cell.ColorRed, tbx.ColorRed},
{cell.ColorGreen, tbx.ColorGreen},
{cell.ColorYellow, tbx.ColorYellow},
{cell.ColorBlue, tbx.ColorBlue},
{cell.ColorMagenta, tbx.ColorMagenta},
{cell.ColorCyan, tbx.ColorCyan},
{cell.ColorWhite, tbx.ColorWhite},
{cell.Color(42), tbx.Attribute(42)},
}
for _, tc := range tests {
t.Run(tc.color.String(), func(t *testing.T) {
got := cellColor(tc.color)
if got != tc.want |
})
}
}
| {
t.Errorf("cellColor(%v) => got %v, want %v", tc.color, got, tc.want)
} |
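// A minimal sketch of a cellColor consistent with the table above, assuming
// cell.Color and tbx.Attribute share the same underlying numeric values (the
// cell.Color(42) => tbx.Attribute(42) case suggests a direct conversion); the
// real implementation may map each constant explicitly instead.
//
// func cellColor(c cell.Color) tbx.Attribute {
//     return tbx.Attribute(c)
// }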
apply_test.go | /*
Copyright 2021 The KubeVela Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package oam
import (
"strings"
"testing"
"github.com/pkg/errors"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/runtime"
"github.com/tele-cms/launchpad/apis/core.oam.dev/v1beta1"
"github.com/tele-cms/launchpad/pkg/appfile"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"github.com/tele-cms/launchpad/apis/core.oam.dev/common"
"github.com/tele-cms/launchpad/pkg/cue/model/value"
"github.com/tele-cms/launchpad/pkg/workflow/providers/mock"
)
func TestParser(t *testing.T) {
r := require.New(t)
p := &provider{
apply: simpleComponentApplyForTest,
}
act := &mock.Action{}
v, err := value.NewValue("", nil, "")
r.NoError(err)
err = p.ApplyComponent(nil, v, act)
r.Equal(err.Error(), "var(path=value) not exist")
v.FillObject(map[string]interface{}{}, "value")
err = p.ApplyComponent(nil, v, act)
r.NoError(err)
output, err := v.LookupValue("output")
r.NoError(err)
outStr, err := output.String()
r.NoError(err)
r.Equal(outStr, `apiVersion: "v1"
kind: "Pod"
metadata: {
name: "rss-site"
labels: {
app: "web"
}
}
`)
outputs, err := v.LookupValue("outputs")
r.NoError(err)
outsStr, err := outputs.String()
r.NoError(err)
r.Equal(outsStr, `service: {
apiVersion: "v1"
kind: "Service"
metadata: {
name: "service"
labels: {
"trait.oam.dev/resource": "service"
}
}
}
`)
r.Equal(act.Phase, "Wait")
testHealthy = true
act = &mock.Action{}
_, err = value.NewValue("", nil, "")
r.NoError(err)
r.Equal(act.Phase, "")
}
func | (t *testing.T) {
r := require.New(t)
p := &provider{
render: func(comp common.ApplicationComponent, patcher *value.Value, clusterName string, overrideNamespace string, _ string) (*unstructured.Unstructured, []*unstructured.Unstructured, error) {
return &unstructured.Unstructured{
Object: map[string]interface{}{
"apiVersion": "apps/v1",
"kind": "Deployment",
},
}, []*unstructured.Unstructured{
{
Object: map[string]interface{}{
"apiVersion": "core.oam.dev/v1alpha2",
"kind": "ManualScalerTrait",
"metadata": map[string]interface{}{
"labels": map[string]interface{}{
"trait.oam.dev/resource": "scaler",
},
},
"spec": map[string]interface{}{"replicaCount": int64(10)},
},
},
}, nil
},
}
v, err := value.NewValue(`value: {}`, nil, "")
r.NoError(err)
err = p.RenderComponent(nil, v, nil)
r.NoError(err)
s, err := v.String()
r.NoError(err)
r.Equal(s, `value: {}
output: {
apiVersion: "apps/v1"
kind: "Deployment"
}
outputs: {
scaler: {
apiVersion: "core.oam.dev/v1alpha2"
kind: "ManualScalerTrait"
metadata: {
labels: {
"trait.oam.dev/resource": "scaler"
}
}
spec: {
replicaCount: 10
}
}
}
`)
}
func TestApplyComponents(t *testing.T) {
r := require.New(t)
testcases := map[string]struct {
Input string
Error string
}{
"normal": {
Input: `{components:{first:{value:{name:"first"}},second:{value:{name:"second"}}},parallelism:5}`,
},
"no-components": {
Input: `{}`,
Error: "var(path=components) not exist",
},
"no-parallelism": {
Input: `{components:{first:{value:{name:"first"}},second:{value:{name:"second"}}}}`,
Error: "var(path=parallelism) not exist",
},
"invalid-parallelism": {
Input: `{components:{first:{value:{name:"first"}},second:{value:{name:"second"}}},parallelism:-1}`,
Error: "parallelism cannot be smaller than 1",
},
"bad-component": {
Input: `{components:{first:{value:{name:"error-first"}},second:{value:{name:"error-second"}},third:{value:{name:"third"}}},parallelism:5}`,
Error: "failed to apply component",
},
}
p := &provider{apply: simpleComponentApplyForTest}
for name, tt := range testcases {
t.Run(name, func(t *testing.T) {
act := &mock.Action{}
v, err := value.NewValue("", nil, "")
r.NoError(err)
r.NoError(v.FillRaw(tt.Input))
err = p.ApplyComponents(nil, v, act)
if tt.Error != "" {
r.NotNil(err)
r.Contains(err.Error(), tt.Error)
} else {
r.NoError(err)
}
})
}
}
func TestLoadComponent(t *testing.T) {
r := require.New(t)
p := &provider{
app: &v1beta1.Application{
Spec: v1beta1.ApplicationSpec{
Components: []common.ApplicationComponent{
{
Name: "c1",
Type: "web",
Properties: &runtime.RawExtension{Raw: []byte(`{"image": "busybox"}`)},
},
},
},
},
}
v, err := value.NewValue(``, nil, "")
r.NoError(err)
err = p.LoadComponent(nil, v, nil)
r.NoError(err)
s, err := v.String()
r.NoError(err)
r.Equal(s, `value: {
c1: {
name: *"c1" | _
type: *"web" | _
properties: {
image: *"busybox" | _
}
}
}
`)
overrideApp := `app: {
apiVersion: "core.oam.dev/v1beta1"
kind: "Application"
metadata: {
name: "test"
namespace: "default"
}
spec: {
components: [{
name: "c2"
type: "web"
properties: {
image: "busybox"
}
}]
}
}
`
overrideValue, err := value.NewValue(overrideApp, nil, "")
r.NoError(err)
err = p.LoadComponent(nil, overrideValue, nil)
r.NoError(err)
_, err = overrideValue.LookupValue("value", "c2")
r.NoError(err)
}
func TestLoadComponentInOrder(t *testing.T) {
r := require.New(t)
p := &provider{
app: &v1beta1.Application{
Spec: v1beta1.ApplicationSpec{
Components: []common.ApplicationComponent{
{
Name: "c1",
Type: "web",
Properties: &runtime.RawExtension{Raw: []byte(`{"image": "busybox"}`)},
},
{
Name: "c2",
Type: "web2",
Properties: &runtime.RawExtension{Raw: []byte(`{"image": "busybox"}`)},
},
},
},
},
}
v, err := value.NewValue(``, nil, "")
r.NoError(err)
err = p.LoadComponentInOrder(nil, v, nil)
r.NoError(err)
s, err := v.String()
r.NoError(err)
r.Equal(s, `value: [{
name: "c1"
type: "web"
properties: {
image: "busybox"
}
}, {
name: "c2"
type: "web2"
properties: {
image: "busybox"
}
}]
`)
}
func TestLoadPolicyInOrder(t *testing.T) {
r := require.New(t)
p := &provider{af: &appfile.Appfile{
Policies: []v1beta1.AppPolicy{{Name: "policy-1"}, {Name: "policy-2"}, {Name: "policy-3"}},
}, app: &v1beta1.Application{
Spec: v1beta1.ApplicationSpec{Policies: []v1beta1.AppPolicy{{Name: "policy-1"}, {Name: "policy-2"}}},
}}
testcases := map[string]struct {
Input string
Output []v1beta1.AppPolicy
Error string
}{
"normal": {
Input: `{input:["policy-3","policy-1"]}`,
Output: []v1beta1.AppPolicy{{Name: "policy-3"}, {Name: "policy-1"}},
},
"empty-input": {
Input: `{}`,
Output: []v1beta1.AppPolicy{{Name: "policy-1"}, {Name: "policy-2"}},
},
"invalid-input": {
Input: `{input:{"name":"policy"}}`,
Error: "failed to parse specified policy name",
},
"policy-not-found": {
Input: `{input:["policy-4","policy-1"]}`,
Error: "not found",
},
}
for name, tt := range testcases {
t.Run(name, func(t *testing.T) {
act := &mock.Action{}
v, err := value.NewValue("", nil, "")
r.NoError(err)
r.NoError(v.FillRaw(tt.Input))
err = p.LoadPoliciesInOrder(nil, v, act)
if tt.Error != "" {
r.NotNil(err)
r.Contains(err.Error(), tt.Error)
} else {
r.NoError(err)
v, err = v.LookupValue("output")
r.NoError(err)
var outputPolicies []v1beta1.AppPolicy
r.NoError(v.UnmarshalTo(&outputPolicies))
r.Equal(tt.Output, outputPolicies)
}
})
}
}
var testHealthy bool
func simpleComponentApplyForTest(comp common.ApplicationComponent, _ *value.Value, _ string, _ string, _ string) (*unstructured.Unstructured, []*unstructured.Unstructured, bool, error) {
workload := new(unstructured.Unstructured)
workload.UnmarshalJSON([]byte(`{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
"name": "rss-site",
"labels": {
"app": "web"
}
}
}`))
if comp.Name != "" {
workload.SetName(comp.Name)
if strings.Contains(comp.Name, "error") {
return nil, nil, false, errors.Errorf("bad component")
}
}
trait := new(unstructured.Unstructured)
trait.UnmarshalJSON([]byte(`{
"apiVersion": "v1",
"kind": "Service",
"metadata": {
"name": "service",
"labels": {
"trait.oam.dev/resource": "service"
}
}
}`))
if comp.Name != "" {
trait.SetName(comp.Name)
}
traits := []*unstructured.Unstructured{trait}
return workload, traits, testHealthy, nil
}
| TestRenderComponent |
sweetalert2.min.js | !function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.Sweetalert2=e()}(this,function(){"use strict";function t(e){return(t="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(e)}function e(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function n(t,e){for(var n=0;n<e.length;n++){var o=e[n];o.enumerable=o.enumerable||!1,o.configurable=!0,"value"in o&&(o.writable=!0),Object.defineProperty(t,o.key,o)}}function o(t,e,o){return e&&n(t.prototype,e),o&&n(t,o),t}function i(){return(i=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var o in n)Object.prototype.hasOwnProperty.call(n,o)&&(t[o]=n[o])}return t}).apply(this,arguments)}function r(t){return(r=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}function a(t,e){return(a=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function s(t,e,n){return(s=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],function(){})),!0}catch(t){return!1}}()?Reflect.construct:function(t,e,n){var o=[null];o.push.apply(o,e);var i=new(Function.bind.apply(t,o));return n&&a(i,n.prototype),i}).apply(null,arguments)}function u(t,e,n){return(u="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(t,e,n){var o=function(t,e){for(;!Object.prototype.hasOwnProperty.call(t,e)&&null!==(t=r(t)););return t}(t,e);if(o){var i=Object.getOwnPropertyDescriptor(o,e);return i.get?i.get.call(n):i.value}})(t,e,n||t)}function c(t){return Object.keys(t).map(function(e){return t[e]})}function l(t){return Array.prototype.slice.call(t)}function d(t){console.error("".concat(v," ").concat(t))}function p(t,e){!function(t){-1===y.indexOf(t)&&(y.push(t),b(t))}('"'.concat(t,'" is deprecated and will be removed in the next major release. Please use "').concat(e,'" instead.'))}function f(t){return t&&Promise.resolve(t)===t}function m(t){var e={};for(var n in t)e[t[n]]="swal2-"+t[n];return e}function g(t,e){return t.classList.contains(e)}function h(e,n,o){if(function(t){l(t.classList).forEach(function(e){-1===c(k).indexOf(e)&&-1===c(x).indexOf(e)&&t.classList.remove(e)})}(e),n&&n[o]){if("string"!=typeof n[o]&&!n[o].forEach)return b("Invalid type of customClass.".concat(o,'! 
Expected string or iterable object, got "').concat(t(n[o]),'"'));ot(e,n[o])}}var v="SweetAlert2:",b=function(t){console.warn("".concat(v," ").concat(t))},y=[],w=function(t){return"function"==typeof t?t():t},C=Object.freeze({cancel:"cancel",backdrop:"backdrop",close:"close",esc:"esc",timer:"timer"}),k=m(["container","shown","height-auto","iosfix","popup","modal","no-backdrop","toast","toast-shown","toast-column","show","hide","noanimation","close","title","header","content","actions","confirm","cancel","footer","icon","image","input","file","range","select","radio","checkbox","label","textarea","inputerror","validation-message","progress-steps","active-progress-step","progress-step","progress-step-line","loading","styled","top","top-start","top-end","top-left","top-right","center","center-start","center-end","center-left","center-right","bottom","bottom-start","bottom-end","bottom-left","bottom-right","grow-row","grow-column","grow-fullscreen","rtl"]),x=m(["success","warning","info","question","error"]),S={previousBodyPadding:null};function P(t,e){if(!e)return null;switch(e){case"select":case"textarea":case"file":return rt(t,k[e]);case"checkbox":return t.querySelector(".".concat(k.checkbox," input"));case"radio":return t.querySelector(".".concat(k.radio," input:checked"))||t.querySelector(".".concat(k.radio," input:first-child"));case"range":return t.querySelector(".".concat(k.range," input"));default:return rt(t,k.input)}}function B(t){if(t.focus(),"file"!==t.type){var e=t.value;t.value="",t.value=e}}function A(t,e,n){t&&e&&("string"==typeof e&&(e=e.split(/\s+/).filter(Boolean)),e.forEach(function(e){t.forEach?t.forEach(function(t){n?t.classList.add(e):t.classList.remove(e)}):n?t.classList.add(e):t.classList.remove(e)}))}function E(t,e,n){n||0===parseInt(n)?t.style[e]="number"==typeof n?n+"px":n:t.style.removeProperty(e)}function T(t,e){var n=1<arguments.length&&void 0!==e?e:"flex";t.style.opacity="",t.style.display=n}function L(t){t.style.opacity="",t.style.display="none"}function O(t,e,n){e?T(t,n):L(t)}function M(t){return!(!t||!(t.offsetWidth||t.offsetHeight||t.getClientRects().length))}function V(t){var e=window.getComputedStyle(t),n=parseFloat(e.getPropertyValue("animation-duration")||"0"),o=parseFloat(e.getPropertyValue("transition-duration")||"0");return 0<n||0<o}function j(){return document.body.querySelector("."+k.container)}function H(t){var e=j();return e?e.querySelector(t):null}function I(t){return H("."+t)}function q(){return I(k.popup)}function R(){return l(q().querySelectorAll("."+k.icon))}function D(){var t=R().filter(function(t){return M(t)});return t.length?t[0]:null}function N(){return I(k.title)}function U(){return I(k.content)}function F(){return I(k.image)}function _(){return I(k["progress-steps"])}function z(){return I(k["validation-message"])}function W(){return H("."+k.actions+" ."+k.confirm)}function K(){return H("."+k.actions+" ."+k.cancel)}function Y(){return I(k.actions)}function Z(){return I(k.header)}function Q(){return I(k.footer)}function $(){return I(k.close)}function J(){var t=l(q().querySelectorAll('[tabindex]:not([tabindex="-1"]):not([tabindex="0"])')).sort(function(t,e){return t=parseInt(t.getAttribute("tabindex")),(e=parseInt(e.getAttribute("tabindex")))<t?1:t<e?-1:0}),e=l(q().querySelectorAll('\n a[href],\n area[href],\n input:not([disabled]),\n select:not([disabled]),\n textarea:not([disabled]),\n button:not([disabled]),\n iframe,\n object,\n embed,\n [tabindex="0"],\n [contenteditable],\n audio[controls],\n video[controls],\n 
summary\n')).filter(function(t){return"-1"!==t.getAttribute("tabindex")});return function(t){for(var e=[],n=0;n<t.length;n++)-1===e.indexOf(t[n])&&e.push(t[n]);return e}(t.concat(e)).filter(function(t){return M(t)})}function X(){return!at()&&!document.body.classList.contains(k["no-backdrop"])}function G(){return"undefined"==typeof window||"undefined"==typeof document}function tt(t){ke.isVisible()&&nt!==t.target.value&&ke.resetValidationMessage(),nt=t.target.value}function et(e,n){e instanceof HTMLElement?n.appendChild(e):"object"===t(e)?ct(n,e):e&&(n.innerHTML=e)}var nt,ot=function(t,e){A(t,e,!0)},it=function(t,e){A(t,e,!1)},rt=function(t,e){for(var n=0;n<t.childNodes.length;n++)if(g(t.childNodes[n],e))return t.childNodes[n]},at=function(){return document.body.classList.contains(k["toast-shown"])},st='\n <div aria-labelledby="'.concat(k.title,'" aria-describedby="').concat(k.content,'" class="').concat(k.popup,'" tabindex="-1">\n <div class="').concat(k.header,'">\n <ul class="').concat(k["progress-steps"],'"></ul>\n <div class="').concat(k.icon," ").concat(x.error,'">\n <span class="swal2-x-mark"><span class="swal2-x-mark-line-left"></span><span class="swal2-x-mark-line-right"></span></span>\n </div>\n <div class="').concat(k.icon," ").concat(x.question,'"></div>\n <div class="').concat(k.icon," ").concat(x.warning,'"></div>\n <div class="').concat(k.icon," ").concat(x.info,'"></div>\n <div class="').concat(k.icon," ").concat(x.success,'">\n <div class="swal2-success-circular-line-left"></div>\n <span class="swal2-success-line-tip"></span> <span class="swal2-success-line-long"></span>\n <div class="swal2-success-ring"></div> <div class="swal2-success-fix"></div>\n <div class="swal2-success-circular-line-right"></div>\n </div>\n <img class="').concat(k.image,'" />\n <h2 class="').concat(k.title,'" id="').concat(k.title,'"></h2>\n <button type="button" class="').concat(k.close,'"></button>\n </div>\n <div class="').concat(k.content,'">\n <div id="').concat(k.content,'"></div>\n <input class="').concat(k.input,'" />\n <input type="file" class="').concat(k.file,'" />\n <div class="').concat(k.range,'">\n <input type="range" />\n <output></output>\n </div>\n <select class="').concat(k.select,'"></select>\n <div class="').concat(k.radio,'"></div>\n <label for="').concat(k.checkbox,'" class="').concat(k.checkbox,'">\n <input type="checkbox" />\n <span class="').concat(k.label,'"></span>\n </label>\n <textarea class="').concat(k.textarea,'"></textarea>\n <div class="').concat(k["validation-message"],'" id="').concat(k["validation-message"],'"></div>\n </div>\n <div class="').concat(k.actions,'">\n <button type="button" class="').concat(k.confirm,'">OK</button>\n <button type="button" class="').concat(k.cancel,'">Cancel</button>\n </div>\n <div class="').concat(k.footer,'">\n </div>\n </div>\n').replace(/(^|\n)\s*/g,""),ut=function(t){if(function(){var t=j();t&&(t.parentNode.removeChild(t),it([document.documentElement,document.body],[k["no-backdrop"],k["toast-shown"],k["has-column"]]))}(),G())d("SweetAlert2 requires document to initialize");else{var e=document.createElement("div");e.className=k.container,e.innerHTML=st;var n=function(t){return"string"==typeof t?document.querySelector(t):t}(t.target);n.appendChild(e),function(t){var e=q();e.setAttribute("role",t.toast?"alert":"dialog"),e.setAttribute("aria-live",t.toast?"polite":"assertive"),t.toast||e.setAttribute("aria-modal","true")}(t),function(t){"rtl"===window.getComputedStyle(t).direction&&ot(j(),k.rtl)}(n),function(){var 
t=U(),e=rt(t,k.input),n=rt(t,k.file),o=t.querySelector(".".concat(k.range," input")),i=t.querySelector(".".concat(k.range," output")),r=rt(t,k.select),a=t.querySelector(".".concat(k.checkbox," input")),s=rt(t,k.textarea);e.oninput=tt,n.onchange=tt,r.onchange=tt,a.onchange=tt,s.oninput=tt,o.oninput=function(t){tt(t),i.value=o.value},o.onchange=function(t){tt(t),o.nextSibling.value=o.value}}()}},ct=function(t,e){if(t.innerHTML="",0 in e)for(var n=0;n in e;n++)t.appendChild(e[n].cloneNode(!0));else t.appendChild(e.cloneNode(!0))},lt=function(){if(G())return!1;var t=document.createElement("div"),e={WebkitAnimation:"webkitAnimationEnd",OAnimation:"oAnimationEnd oanimationend",animation:"animationend"};for(var n in e)if(Object.prototype.hasOwnProperty.call(e,n)&&void 0!==t.style[n])return e[n];return!1}();function dt(t,e,n){O(t,n["showC"+e.substring(1)+"Button"],"inline-block"),t.innerHTML=n[e+"ButtonText"],t.setAttribute("aria-label",n[e+"ButtonAriaLabel"]),t.className=k[e],h(t,n.customClass,e+"Button"),ot(t,n[e+"ButtonClass"])}function pt(t,e){t.placeholder&&!e.inputPlaceholder||(t.placeholder=e.inputPlaceholder)}var ft={promise:new WeakMap,innerParams:new WeakMap,domCache:new WeakMap},mt=["input","file","range","select","radio","checkbox","textarea"],gt=function(t){if(!yt[t.input])return d('Unexpected type of input! Expected "text", "email", "password", "number", "tel", "select", "radio", "checkbox", "textarea", "file" or "url", got "'.concat(t.input,'"'));var e=bt(t.input),n=yt[t.input](e,t);T(n),setTimeout(function(){B(n)})},ht=function(t,e){var n=P(U(),t);if(n)for(var o in function(t){for(var e=0;e<t.attributes.length;e++){var n=t.attributes[e].name;-1===["type","value","style"].indexOf(n)&&t.removeAttribute(n)}}(n),e)"range"===t&&"placeholder"===o||n.setAttribute(o,e[o])},vt=function(t){var e=bt(t.input);t.inputClass&&ot(e,t.inputClass),t.customClass&&ot(e,t.customClass.input)},bt=function(t){var e=k[t]?k[t]:k.input;return rt(U(),e)},yt={};function wt(t,e){var n=_();if(!e.progressSteps||0===e.progressSteps.length)return L(n);T(n),n.innerHTML="";var o=parseInt(null===e.currentProgressStep?ke.getQueueStep():e.currentProgressStep);o>=e.progressSteps.length&&b("Invalid currentProgressStep parameter, it should be less than progressSteps.length (currentProgressStep like JS arrays starts from 0)"),e.progressSteps.forEach(function(t,i){var r=function(t){var e=document.createElement("li");return ot(e,k["progress-step"]),e.innerHTML=t,e}(t);if(n.appendChild(r),i===o&&ot(r,k["active-progress-step"]),i!==e.progressSteps.length-1){var a=function(t){var e=document.createElement("li");return ot(e,k["progress-step-line"]),t.progressStepsDistance&&(e.style.width=t.progressStepsDistance),e}(t);n.appendChild(a)}})}function Ct(t,e){!function(t,e){var n=q();E(n,"width",e.width),E(n,"padding",e.padding),e.background&&(n.style.background=e.background),n.className=k.popup,e.toast?(ot([document.documentElement,document.body],k["toast-shown"]),ot(n,k.toast)):ot(n,k.modal),h(n,e.customClass,"popup"),"string"==typeof e.customClass&&ot(n,e.customClass),A(n,k.noanimation,!e.animation)}(0,e),function(t,e){var n=j();n&&(function(t,e){"string"==typeof e?t.style.background=e:e||ot([document.documentElement,document.body],k["no-backdrop"])}(n,e.backdrop),!e.backdrop&&e.allowOutsideClick&&b('"allowOutsideClick" parameter requires `backdrop` parameter to be set to `true`'),function(t,e){e in k?ot(t,k[e]):(b('The "position" parameter is not valid, defaulting to 
"center"'),ot(t,k.center))}(n,e.position),function(t,e){if(e&&"string"==typeof e){var n="grow-"+e;n in k&&ot(t,k[n])}}(n,e.grow),h(n,e.customClass,"container"),e.customContainerClass&&ot(n,e.customContainerClass))}(0,e),function(t,e){h(Z(),e.customClass,"header"),wt(0,e),function(t,e){var n=ft.innerParams.get(t);if(n&&e.type===n.type&&D())h(D(),e.customClass,"icon");else if(xt(),e.type)if(St(),-1!==Object.keys(x).indexOf(e.type)){var o=H(".".concat(k.icon,".").concat(x[e.type]));T(o),h(o,e.customClass,"icon"),A(o,"swal2-animate-".concat(e.type,"-icon"),e.animation)}else d('Unknown type! Expected "success", "error", "warning", "info" or "question", got "'.concat(e.type,'"'))}(t,e),function(t,e){var n=F();if(!e.imageUrl)return L(n);T(n),n.setAttribute("src",e.imageUrl),n.setAttribute("alt",e.imageAlt),E(n,"width",e.imageWidth),E(n,"height",e.imageHeight),n.className=k.image,h(n,e.customClass,"image"),e.imageClass&&ot(n,e.imageClass)}(0,e),function(t,e){var n=N();O(n,e.title||e.titleText),e.title&&et(e.title,n),e.titleText&&(n.innerText=e.titleText),h(n,e.customClass,"title")}(0,e),function(t,e){var n=$();n.innerHTML=e.closeButtonHtml,h(n,e.customClass,"closeButton"),O(n,e.showCloseButton),n.setAttribute("aria-label",e.closeButtonAriaLabel)}(0,e)}(t,e),function(t,e){var n=U().querySelector("#"+k.content);e.html?(et(e.html,n),T(n,"block")):e.text?(n.textContent=e.text,T(n,"block")):L(n),function(t,e){var n=U(),o=ft.innerParams.get(t),i=!o||e.input!==o.input;mt.forEach(function(t){var o=k[t],r=rt(n,o);ht(t,e.inputAttributes),r.className=o,i&&L(r)}),e.input&&(i&>(e),vt(e))}(t,e),h(U(),e.customClass,"content")}(t,e),function(t,e){var n=Y(),o=W(),i=K();e.showConfirmButton||e.showCancelButton||L(n),h(n,e.customClass,"actions"),dt(o,"confirm",e),dt(i,"cancel",e),e.buttonsStyling?function(t,e,n){ot([t,e],k.styled),n.confirmButtonColor&&(t.style.backgroundColor=n.confirmButtonColor),n.cancelButtonColor&&(e.style.backgroundColor=n.cancelButtonColor);var o=window.getComputedStyle(t).getPropertyValue("background-color");t.style.borderLeftColor=o,t.style.borderRightColor=o}(o,i,e):(it([o,i],k.styled),o.style.backgroundColor=o.style.borderLeftColor=o.style.borderRightColor="",i.style.backgroundColor=i.style.borderLeftColor=i.style.borderRightColor=""),e.reverseButtons&&o.parentNode.insertBefore(i,o)}(0,e),function(t,e){var n=Q();O(n,e.footer),e.footer&&et(e.footer,n),h(n,e.customClass,"footer")}(0,e),"function"==typeof e.onRender&&e.onRender(q())}function kt(){return W()&&W().click()}yt.text=yt.email=yt.password=yt.number=yt.tel=yt.url=function(e,n){return"string"==typeof n.inputValue||"number"==typeof n.inputValue?e.value=n.inputValue:f(n.inputValue)||b('Unexpected type of inputValue! 
Expected "string", "number" or "Promise", got "'.concat(t(n.inputValue),'"')),pt(e,n),e.type=n.input,e},yt.file=function(t,e){return pt(t,e),t},yt.range=function(t,e){var n=t.querySelector("input"),o=t.querySelector("output");return n.value=e.inputValue,n.type=e.input,o.value=e.inputValue,t},yt.select=function(t,e){if(t.innerHTML="",e.inputPlaceholder){var n=document.createElement("option");n.innerHTML=e.inputPlaceholder,n.value="",n.disabled=!0,n.selected=!0,t.appendChild(n)}return t},yt.radio=function(t){return t.innerHTML="",t},yt.checkbox=function(t,e){var n=P(U(),"checkbox");return n.value=1,n.id=k.checkbox,n.checked=Boolean(e.inputValue),t.querySelector("span").innerHTML=e.inputPlaceholder,t},yt.textarea=function(t,e){if(t.value=e.inputValue,pt(t,e),"MutationObserver"in window){var n=parseInt(window.getComputedStyle(q()).width),o=parseInt(window.getComputedStyle(q()).paddingLeft)+parseInt(window.getComputedStyle(q()).paddingRight);new MutationObserver(function(){var e=t.offsetWidth+o;q().style.width=n<e?e+"px":null}).observe(t,{attributes:!0,attributeFilter:["style"]})}return t};var xt=function(){for(var t=R(),e=0;e<t.length;e++)L(t[e])},St=function(){for(var t=q(),e=window.getComputedStyle(t).getPropertyValue("background-color"),n=t.querySelectorAll("[class^=swal2-success-circular-line], .swal2-success-fix"),o=0;o<n.length;o++)n[o].style.backgroundColor=e};function Pt(){var t=q();t||ke.fire(""),t=q();var e=Y(),n=W(),o=K();T(e),T(n),ot([t,e],k.loading),n.disabled=!0,o.disabled=!0,t.setAttribute("data-loading",!0),t.setAttribute("aria-busy",!0),t.focus()}function Bt(t){return Object.prototype.hasOwnProperty.call(Lt,t)}function At(t){return Mt[t]}var Et=[],Tt={},Lt={title:"",titleText:"",text:"",html:"",footer:"",type:null,toast:!1,customClass:"",customContainerClass:"",target:"body",backdrop:!0,animation:!0,heightAuto:!0,allowOutsideClick:!0,allowEscapeKey:!0,allowEnterKey:!0,stopKeydownPropagation:!0,keydownListenerCapture:!1,showConfirmButton:!0,showCancelButton:!1,preConfirm:null,confirmButtonText:"OK",confirmButtonAriaLabel:"",confirmButtonColor:null,confirmButtonClass:"",cancelButtonText:"Cancel",cancelButtonAriaLabel:"",cancelButtonColor:null,cancelButtonClass:"",buttonsStyling:!0,reverseButtons:!1,focusConfirm:!0,focusCancel:!1,showCloseButton:!1,closeButtonHtml:"×",closeButtonAriaLabel:"Close this 
dialog",showLoaderOnConfirm:!1,imageUrl:null,imageWidth:null,imageHeight:null,imageAlt:"",imageClass:"",timer:null,width:null,padding:null,background:null,input:null,inputPlaceholder:"",inputValue:"",inputOptions:{},inputAutoTrim:!0,inputClass:"",inputAttributes:{},inputValidator:null,validationMessage:null,grow:!1,position:"center",progressSteps:[],currentProgressStep:null,progressStepsDistance:null,onBeforeOpen:null,onOpen:null,onRender:null,onClose:null,onAfterClose:null,scrollbarPadding:!0},Ot=["title","titleText","text","html","type","customClass","showConfirmButton","showCancelButton","confirmButtonText","confirmButtonAriaLabel","confirmButtonColor","confirmButtonClass","cancelButtonText","cancelButtonAriaLabel","cancelButtonColor","cancelButtonClass","buttonsStyling","reverseButtons","imageUrl","imageWidth","imageHeigth","imageAlt","imageClass","progressSteps","currentProgressStep"],Mt={customContainerClass:"customClass",confirmButtonClass:"customClass",cancelButtonClass:"customClass",imageClass:"customClass",inputClass:"customClass"},Vt=["allowOutsideClick","allowEnterKey","backdrop","focusConfirm","focusCancel","heightAuto","keydownListenerCapture"],jt=Object.freeze({isValidParameter:Bt,isUpdatableParameter:function(t){return-1!==Ot.indexOf(t)},isDeprecatedParameter:At,argsToParams:function(e){var n={};switch(t(e[0])){case"object":i(n,e[0]);break;default:["title","html","type"].forEach(function(o,i){switch(t(e[i])){case"string":n[o]=e[i];break;case"undefined":break;default:d("Unexpected type of ".concat(o,'! Expected "string", got ').concat(t(e[i])))}})}return n},isVisible:function(){return M(q())},clickConfirm:kt,clickCancel:function(){return K()&&K().click()},getContainer:j,getPopup:q,getTitle:N,getContent:U,getImage:F,getIcon:D,getIcons:R,getCloseButton:$,getActions:Y,getConfirmButton:W,getCancelButton:K,getHeader:Z,getFooter:Q,getFocusableElements:J,getValidationMessage:z,isLoading:function(){return q().hasAttribute("data-loading")},fire:function(){for(var t=arguments.length,e=new Array(t),n=0;n<t;n++)e[n]=arguments[n];return s(this,e)},mixin:function(t){return function(n){function s(){return e(this,s),function(t,e){return!e||"object"!=typeof e&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}(this,r(s).apply(this,arguments))}return function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&a(t,e)}(s,n),o(s,[{key:"_main",value:function(e){return u(r(s.prototype),"_main",this).call(this,i({},t,e))}}]),s}(this)},queue:function(t){var e=this;function n(t,e){Et=[],document.body.removeAttribute("data-swal2-queue-step"),t(e)}Et=t;var o=[];return new Promise(function(t){!function i(r,a){r<Et.length?(document.body.setAttribute("data-swal2-queue-step",r),e.fire(Et[r]).then(function(e){void 0!==e.value?(o.push(e.value),i(r+1,a)):n(t,{dismiss:e.dismiss})})):n(t,{value:o})}(0)})},getQueueStep:function(){return document.body.getAttribute("data-swal2-queue-step")},insertQueueStep:function(t,e){return e&&e<Et.length?Et.splice(e,0,t):Et.push(t)},deleteQueueStep:function(t){void 0!==Et[t]&&Et.splice(t,1)},showLoading:Pt,enableLoading:Pt,getTimerLeft:function(){return Tt.timeout&&Tt.timeout.getTimerLeft()},stopTimer:function(){return Tt.timeout&&Tt.timeout.stop()},resumeTimer:function(){return 
Tt.timeout&&Tt.timeout.start()},toggleTimer:function(){var t=Tt.timeout;return t&&(t.running?t.stop():t.start())},increaseTimer:function(t){return Tt.timeout&&Tt.timeout.increase(t)},isTimerRunning:function(){return Tt.timeout&&Tt.timeout.isRunning()}});function Ht(){var t=ft.innerParams.get(this),e=ft.domCache.get(this);t.showConfirmButton||(L(e.confirmButton),t.showCancelButton||L(e.actions)),it([e.popup,e.actions],k.loading),e.popup.removeAttribute("aria-busy"),e.popup.removeAttribute("data-loading"),e.confirmButton.disabled=!1,e.cancelButton.disabled=!1}function It(){return!!window.MSInputMethodContext&&!!document.documentMode}function qt(){var t=j(),e=q();t.style.removeProperty("align-items"),e.offsetTop<0&&(t.style.alignItems="flex-start")}var Rt={swalPromiseResolve:new WeakMap};function Dt(t,e,n,o){n?_t(t,o):(new Promise(function(t){var e=window.scrollX,n=window.scrollY;Tt.restoreFocusTimeout=setTimeout(function(){Tt.previousActiveElement&&Tt.previousActiveElement.focus?(Tt.previousActiveElement.focus(),Tt.previousActiveElement=null):document.body&&document.body.focus(),t()},100),void 0!==e&&void 0!==n&&window.scrollTo(e,n)}).then(function(){return _t(t,o)}),Tt.keydownTarget.removeEventListener("keydown",Tt.keydownHandler,{capture:Tt.keydownListenerCapture}),Tt.keydownHandlerAdded=!1),e.parentNode&&e.parentNode.removeChild(e),X()&&(null!==S.previousBodyPadding&&(document.body.style.paddingRight=S.previousBodyPadding+"px",S.previousBodyPadding=null),function(){if(g(document.body,k.iosfix)){var t=parseInt(document.body.style.top,10);it(document.body,k.iosfix),document.body.style.top="",document.body.scrollTop=-1*t}}(),"undefined"!=typeof window&&It()&&window.removeEventListener("resize",qt),l(document.body.children).forEach(function(t){t.hasAttribute("data-previous-aria-hidden")?(t.setAttribute("aria-hidden",t.getAttribute("data-previous-aria-hidden")),t.removeAttribute("data-previous-aria-hidden")):t.removeAttribute("aria-hidden")})),it([document.documentElement,document.body],[k.shown,k["height-auto"],k["no-backdrop"],k["toast-shown"],k["toast-column"]])}function Nt(t){var e=q();if(e&&!g(e,k.hide)){var n=ft.innerParams.get(this);if(n){var o=Rt.swalPromiseResolve.get(this);it(e,k.show),ot(e,k.hide),function(t,e,n){var o=j(),i=lt&&V(e),r=n.onClose,a=n.onAfterClose;null!==r&&"function"==typeof r&&r(e),i?Ft(t,e,o,a):Dt(t,o,at(),a)}(this,e,n),o(t||{})}}}function Ut(t){for(var e in t)t[e]=new WeakMap}var Ft=function(t,e,n,o){Tt.swalCloseEventFinishedCallback=Dt.bind(null,t,n,at(),o),e.addEventListener(lt,function(t){t.target===e&&(Tt.swalCloseEventFinishedCallback(),delete Tt.swalCloseEventFinishedCallback)})},_t=function(t,e){setTimeout(function(){null!==e&&"function"==typeof e&&e(),q()||function(t){delete t.params,delete Tt.keydownHandler,delete Tt.keydownTarget,Ut(ft),Ut(Rt)}(t)})};function zt(t,e,n){var o=ft.domCache.get(t);e.forEach(function(t){o[t].disabled=n})}function Wt(t,e){if(!t)return!1;if("radio"===t.type)for(var n=t.parentNode.parentNode.querySelectorAll("input"),o=0;o<n.length;o++)n[o].disabled=e;else t.disabled=e}var Kt=function(){function t(n,o){e(this,t),this.callback=n,this.remaining=o,this.running=!1,this.start()}return o(t,[{key:"start",value:function(){return this.running||(this.running=!0,this.started=new Date,this.id=setTimeout(this.callback,this.remaining)),this.remaining}},{key:"stop",value:function(){return this.running&&(this.running=!1,clearTimeout(this.id),this.remaining-=new Date-this.started),this.remaining}},{key:"increase",value:function(t){var 
e=this.running;return e&&this.stop(),this.remaining+=t,e&&this.start(),this.remaining}},{key:"getTimerLeft",value:function(){return this.running&&(this.stop(),this.start()),this.remaining}},{key:"isRunning",value:function(){return this.running}}]),t}(),Yt={email:function(t,e){return/^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9.-]+\.[a-zA-Z0-9-]{2,24}$/.test(t)?Promise.resolve():Promise.resolve(e||"Invalid email address")},url:function(t,e){return/^https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{2,256}\.[a-z]{2,63}\b([-a-zA-Z0-9@:%_+.~#?&/=]*)$/.test(t)?Promise.resolve():Promise.resolve(e||"Invalid URL")}};function Zt(t,e){t.removeEventListener(lt,Zt),e.style.overflowY="auto"}function Qt(t,e){t.closePopup({value:e})}function $t(t,e,n){for(var o=J(),i=0;i<o.length;i++)return(e+=n)===o.length?e=0:-1===e&&(e=o.length-1),o[e].focus();q().focus()}var Jt,Xt=function(t,e){lt&&V(e)?(t.style.overflowY="hidden",e.addEventListener(lt,Zt.bind(null,e,t))):t.style.overflowY="auto"},Gt=function(t,e){!function(){if((/iPad|iPhone|iPod/.test(navigator.userAgent)&&!window.MSStream||"MacIntel"===navigator.platform&&1<navigator.maxTouchPoints)&&!g(document.body,k.iosfix)){var t=document.body.scrollTop;document.body.style.top=-1*t+"px",ot(document.body,k.iosfix),(n=j()).ontouchstart=function(t){e=t.target===n||!function(t){return!!(t.scrollHeight>t.clientHeight)}(n)&&"INPUT"!==t.target.tagName},n.ontouchmove=function(t){e&&(t.preventDefault(),t.stopPropagation())}}var e,n}(),"undefined"!=typeof window&&It()&&(qt(),window.addEventListener("resize",qt)),l(document.body.children).forEach(function(t){t===j()||function(t,e){if("function"==typeof t.contains)return t.contains(e)}(t,j())||(t.hasAttribute("aria-hidden")&&t.setAttribute("data-previous-aria-hidden",t.getAttribute("aria-hidden")),t.setAttribute("aria-hidden","true"))}),e&&(null===S.previousBodyPadding&&document.body.scrollHeight>window.innerHeight&&(S.previousBodyPadding=parseInt(window.getComputedStyle(document.body).getPropertyValue("padding-right")),document.body.style.paddingRight=S.previousBodyPadding+function(){if("ontouchstart"in window||navigator.msMaxTouchPoints)return 0;var t=document.createElement("div");t.style.width="50px",t.style.height="50px",t.style.overflow="scroll",document.body.appendChild(t);var e=t.offsetWidth-t.clientWidth;return document.body.removeChild(t),e}()+"px")),setTimeout(function(){t.scrollTop=0})},te=function(t,e,n){n.animation&&ot(e,k.show),T(e),ot([document.documentElement,document.body,t],k.shown),n.heightAuto&&n.backdrop&&!n.toast&&ot([document.documentElement,document.body],k["height-auto"])},ee=function(e,n){function o(t){return oe[n.input](i,ie(t),n)}var i=U();f(n.inputOptions)?(Pt(),n.inputOptions.then(function(t){e.hideLoading(),o(t)})):"object"===t(n.inputOptions)?o(n.inputOptions):d("Unexpected type of inputOptions! 
Expected object, Map or Promise, got ".concat(t(n.inputOptions)))},ne=function(t,e){var n=t.getInput();L(n),e.inputValue.then(function(o){n.value="number"===e.input?parseFloat(o)||0:o+"",T(n),n.focus(),t.hideLoading()}).catch(function(e){d("Error in inputValue promise: "+e),n.value="",T(n),n.focus(),t.hideLoading()})},oe={select:function(t,e,n){var o=rt(t,k.select);e.forEach(function(t){var e=t[0],i=t[1],r=document.createElement("option");r.value=e,r.innerHTML=i,n.inputValue.toString()===e.toString()&&(r.selected=!0),o.appendChild(r)}),o.focus()},radio:function(t,e,n){var o=rt(t,k.radio);e.forEach(function(t){var e=t[0],i=t[1],r=document.createElement("input"),a=document.createElement("label");r.type="radio",r.name=k.radio,r.value=e,n.inputValue.toString()===e.toString()&&(r.checked=!0);var s=document.createElement("span");s.innerHTML=i,s.className=k.label,a.appendChild(r),a.appendChild(s),o.appendChild(a)});var i=o.querySelectorAll("input");i.length&&i[0].focus()}},ie=function(t){var e=[];return"undefined"!=typeof Map&&t instanceof Map?t.forEach(function(t,n){e.push([n,t])}):Object.keys(t).forEach(function(n){e.push([n,t[n]])}),e},re=function(t,e){var n=function(t,e){var n=t.getInput();if(!n)return null;switch(e.input){case"checkbox":return function(t){return t.checked?1:0}(n);case"radio":return function(t){return t.checked?t.value:null}(n);case"file":return function(t){return t.files.length?null!==t.getAttribute("multiple")?t.files:t.files[0]:null}(n);default:return e.inputAutoTrim?n.value.trim():n.value}}(t,e);e.inputValidator?(t.disableInput(),Promise.resolve().then(function(){return e.inputValidator(n,e.validationMessage)}).then(function(o){t.enableButtons(),t.enableInput(),o?t.showValidationMessage(o):ae(t,e,n)})):t.getInput().checkValidity()?ae(t,e,n):(t.enableButtons(),t.showValidationMessage(e.validationMessage))},ae=function(t,e,n){e.showLoaderOnConfirm&&Pt(),e.preConfirm?(t.resetValidationMessage(),Promise.resolve().then(function(){return e.preConfirm(n,e.validationMessage)}).then(function(e){M(z())||!1===e?t.hideLoading():Qt(t,void 0===e?n:e)})):Qt(t,n)},se=["ArrowLeft","ArrowRight","ArrowUp","ArrowDown","Left","Right","Up","Down"],ue=["Escape","Esc"],ce=function(t,e,n,o){n.stopKeydownPropagation&&e.stopPropagation(),"Enter"===e.key?le(t,e,n):"Tab"===e.key?de(e,n):-1!==se.indexOf(e.key)?pe():-1!==ue.indexOf(e.key)&&fe(e,n,o)},le=function(t,e,n){if(!e.isComposing&&e.target&&t.getInput()&&e.target.outerHTML===t.getInput().outerHTML){if(-1!==["textarea","file"].indexOf(n.input))return;kt(),e.preventDefault()}},de=function(t,e){for(var n=t.target,o=J(),i=-1,r=0;r<o.length;r++)if(n===o[r]){i=r;break}t.shiftKey?$t(0,i,-1):$t(0,i,1),t.stopPropagation(),t.preventDefault()},pe=function(){var t=W(),e=K();document.activeElement===t&&M(e)?e.focus():document.activeElement===e&&M(t)&&t.focus()},fe=function(t,e,n){w(e.allowEscapeKey)&&(t.preventDefault(),n(C.esc))},me=function(t,e,n){t.popup.onclick=function(){e.showConfirmButton||e.showCancelButton||e.showCloseButton||e.input||n(C.close)}},ge=!1,he=function(t){t.popup.onmousedown=function(){t.container.onmouseup=function(e){t.container.onmouseup=void 0,e.target===t.container&&(ge=!0)}}},ve=function(t){t.container.onmousedown=function(){t.popup.onmouseup=function(e){t.popup.onmouseup=void 0,e.target!==t.popup&&!t.popup.contains(e.target)||(ge=!0)}}},be=function(t,e,n){t.container.onclick=function(o){ge?ge=!1:o.target===t.container&&w(e.allowOutsideClick)&&n(C.backdrop)}},ye=function(){document.activeElement&&"function"==typeof 
document.activeElement.blur&&document.activeElement.blur()},we=Object.freeze({hideLoading:Ht,disableLoading:Ht,getInput:function(t){var e=ft.innerParams.get(t||this),n=ft.domCache.get(t||this);return n?P(n.content,e.input):null},close:Nt,closePopup:Nt,closeModal:Nt,closeToast:Nt,enableButtons:function(){zt(this,["confirmButton","cancelButton"],!1)},disableButtons:function(){zt(this,["confirmButton","cancelButton"],!0)},enableConfirmButton:function(){p("Swal.enableConfirmButton()","Swal.getConfirmButton().removeAttribute('disabled')"),zt(this,["confirmButton"],!1)},disableConfirmButton:function(){p("Swal.disableConfirmButton()","Swal.getConfirmButton().setAttribute('disabled', '')"),zt(this,["confirmButton"],!0)},enableInput:function(){return Wt(this.getInput(),!1)},disableInput:function(){return Wt(this.getInput(),!0)},showValidationMessage:function(t){var e=ft.domCache.get(this);e.validationMessage.innerHTML=t;var n=window.getComputedStyle(e.popup);e.validationMessage.style.marginLeft="-".concat(n.getPropertyValue("padding-left")),e.validationMessage.style.marginRight="-".concat(n.getPropertyValue("padding-right")),T(e.validationMessage);var o=this.getInput();o&&(o.setAttribute("aria-invalid",!0),o.setAttribute("aria-describedBy",k["validation-message"]),B(o),ot(o,k.inputerror))},resetValidationMessage:function(){var t=ft.domCache.get(this);t.validationMessage&&L(t.validationMessage);var e=this.getInput();e&&(e.removeAttribute("aria-invalid"),e.removeAttribute("aria-describedBy"),it(e,k.inputerror))},getProgressSteps:function(){return p("Swal.getProgressSteps()","const swalInstance = Swal.fire({progressSteps: ['1', '2', '3']}); const progressSteps = swalInstance.params.progressSteps"),ft.innerParams.get(this).progressSteps},setProgressSteps:function(t){p("Swal.setProgressSteps()","Swal.update()");var e=i({},ft.innerParams.get(this),{progressSteps:t});wt(0,e),ft.innerParams.set(this,e)},showProgressSteps:function(){T(ft.domCache.get(this).progressSteps)},hideProgressSteps:function(){L(ft.domCache.get(this).progressSteps)},_main:function(t){!function(t){for(var e in t)Bt(i=e)||b('Unknown parameter "'.concat(i,'"')),t.toast&&(o=e,-1!==Vt.indexOf(o)&&b('The parameter "'.concat(o,'" is incompatible with toasts'))),At(n=void 0)&&p(n,At(n));var n,o,i}(t),q()&&Tt.swalCloseEventFinishedCallback&&(Tt.swalCloseEventFinishedCallback(),delete Tt.swalCloseEventFinishedCallback),Tt.deferDisposalTimer&&(clearTimeout(Tt.deferDisposalTimer),delete Tt.deferDisposalTimer);var e=i({},Lt,t);(function(t){var e;(e=t).inputValidator||Object.keys(Yt).forEach(function(t){e.input===t&&(e.inputValidator=Yt[t])}),t.showLoaderOnConfirm&&!t.preConfirm&&b("showLoaderOnConfirm is set to true, but preConfirm is not defined.\nshowLoaderOnConfirm should be used together with preConfirm, see usage example:\nhttps://sweetalert2.github.io/#ajax-request"),t.animation=w(t.animation),function(t){t.target&&("string"!=typeof t.target||document.querySelector(t.target))&&("string"==typeof t.target||t.target.appendChild)||(b('Target parameter is not valid, defaulting to "body"'),t.target="body")}(t),"string"==typeof t.title&&(t.title=t.title.split("\n").join("<br />")),ut(t)})(e),Object.freeze(e),Tt.timeout&&(Tt.timeout.stop(),delete Tt.timeout),clearTimeout(Tt.restoreFocusTimeout);var n=function(t){var e={popup:q(),container:j(),content:U(),actions:Y(),confirmButton:W(),cancelButton:K(),closeButton:$(),validationMessage:z(),progressSteps:_()};return ft.domCache.set(t,e),e}(this);return 
Ct(this,e),ft.innerParams.set(this,e),function(t,e,n){return new Promise(function(o){var i=function(e){t.closePopup({dismiss:e})};Rt.swalPromiseResolve.set(t,o),function(t,e,n){e.timer&&(t.timeout=new Kt(function(){n("timer"),delete t.timeout},e.timer))}(Tt,n,i),e.confirmButton.onclick=function(){return o=n,(e=t).disableButtons(),void(o.input?re(e,o):ae(e,o,!0));var e,o},e.cancelButton.onclick=function(){return function(t,e){t.disableButtons(),e(C.cancel)}(t,i)},e.closeButton.onclick=function(){return i(C.close)},function(t,e,n){e.toast?me(t,e,n):(he(t),ve(t),be(t,e,n))}(e,n,i),function(t,e,n,o){e.keydownTarget&&e.keydownHandlerAdded&&(e.keydownTarget.removeEventListener("keydown",e.keydownHandler,{capture:e.keydownListenerCapture}),e.keydownHandlerAdded=!1),n.toast||(e.keydownHandler=function(e){return ce(t,e,n,o)},e.keydownTarget=n.keydownListenerCapture?window:q(),e.keydownListenerCapture=n.keydownListenerCapture,e.keydownTarget.addEventListener("keydown",e.keydownHandler,{capture:e.keydownListenerCapture}),e.keydownHandlerAdded=!0)}(t,Tt,n,i),n.toast&&(n.input||n.footer||n.showCloseButton)?ot(document.body,k["toast-column"]):it(document.body,k["toast-column"]),function(t,e){"select"===e.input||"radio"===e.input?ee(t,e):-1!==["text","email","number","tel","textarea"].indexOf(e.input)&&f(e.inputValue)&&ne(t,e)}(t,n),function(t){var e=j(),n=q();"function"==typeof t.onBeforeOpen&&t.onBeforeOpen(n),te(e,n,t),Xt(e,n),X()&&Gt(e,t.scrollbarPadding),at()||Tt.previousActiveElement||(Tt.previousActiveElement=document.activeElement),"function"==typeof t.onOpen&&setTimeout(function(){return t.onOpen(n)})}(n),function(t,e){if(!e.toast)w(e.allowEnterKey)?e.focusCancel&&M(t.cancelButton)?t.cancelButton.focus():e.focusConfirm&&M(t.confirmButton)?t.confirmButton.focus():$t(0,-1,1):ye()}(e,n),e.container.scrollTop=0})}(this,n,e)},update:function(t){var e=q();if(!e||g(e,k.hide))return b("You're trying to update the closed or closing popup, that won't work. Use the update() method in preConfirm parameter or show a new popup.");var n={};Object.keys(t).forEach(function(e){ke.isUpdatableParameter(e)?n[e]=t[e]:b('Invalid parameter to update: "'.concat(e,'". Updatable params are listed here: https://github.com/sweetalert2/sweetalert2/blob/master/src/utils/params.js'))});var o=i({},ft.innerParams.get(this),n);Ct(this,o),ft.innerParams.set(this,o),Object.defineProperties(this,{params:{value:i({},this.params,t),writable:!1,enumerable:!0}})}});function Ce(){if("undefined"!=typeof window){"undefined"==typeof Promise&&d("This package requires a Promise library, please include a shim to enable it in this browser (See: https://github.com/sweetalert2/sweetalert2/wiki/Migration-from-SweetAlert-to-SweetAlert2#1-ie-support)"),Jt=this;for(var t=arguments.length,e=new Array(t),n=0;n<t;n++)e[n]=arguments[n];var o=Object.freeze(this.constructor.argsToParams(e));Object.defineProperties(this,{params:{value:o,writable:!1,enumerable:!0,configurable:!0}});var i=this._main(this.params);ft.promise.set(this,i)}}Ce.prototype.then=function(t){return ft.promise.get(this).then(t)},Ce.prototype.finally=function(t){return ft.promise.get(this).finally(t)},i(Ce.prototype,we),i(Ce,jt),Object.keys(we).forEach(function(t){Ce[t]=function(){var e;if(Jt)return(e=Jt)[t].apply(e,arguments)}}),Ce.DismissReason=C,Ce.version="8.19.0";var ke=Ce;return ke.default=ke}),void 0!==this&&this.Sweetalert2&&(this.swal=this.sweetAlert=this.Swal=this.SweetAlert=this.Sweetalert2); |
||
RootStackScreen.js | import React from 'react';
import { createStackNavigator } from '@react-navigation/stack';
|
const RootStack = createStackNavigator();
const RootStackScreen = ({ navigation }) => (
<RootStack.Navigator headerMode="none">
<RootStack.Screen name="LoginScreen" component={LoginScreen} />
</RootStack.Navigator>
);
export default RootStackScreen; | import LoginScreen from './LoginScreenV2'; |
mod.rs | mod response;
use response::Root; | use crate::{utils::http_get, VtClient, VtResult};
impl VtClient {
pub fn ip_info(&self, ip_address: &str) -> VtResult<Root> {
//! Get the report of a given IP Address
//!
//! ## Example Usage
//! ```rust
//! use vt3::VtClient;
//!
//! let vt = VtClient::new("Your API Key");
//! println!("{:?}", vt.ip_info("192.168.2.1"));
//! ```
let url = format!("{}/ip_addresses/{}", &self.endpoint, ip_address);
http_get(&self.api_key, &self.user_agent, &url)
}
} | |
dora.public.js | /*
Shared JS used by both the front end and the admin back end */
$(function(){
// User logout
$('#userLoginOut').click(function () {
loginOut();
});
// Back to top
$('#gotop').click(function(){
$('body,html').animate({scrollTop:0},800); |
// Watch the scrollbar position
$(window).scroll(function(event) {
if(getScrollTop() > 100){
$('#gotop').css('opacity' , 0.3);
}else{
$('#gotop').css('opacity' , 0);
}
});
});
function loginOut(){
$.ajax({
url: "/users/logout",
method: "GET",
success: function (result) {
if (result === "success") {
window.location = "/"
} else {
alert("未知异常,请稍后重试");
}
}
})
}
// Cross-browser way to get and set scrollTop
function getScrollTop() {
var scrollTop = document.documentElement.scrollTop || window.pageYOffset || document.body.scrollTop;
return scrollTop;
}
/* Carousel component
* adsId       ad ID
* containerId custom ad container ID
* showPoint   whether to show the indicator dots, defaults to false
* */
function initSlides(jsonData){
var jsonData = jsonData || {};
jsonData.adsId = jsonData.adsId || '';
jsonData.containerId = jsonData.containerId || '';
jsonData.showPoint = jsonData.showPoint || false;
if(jsonData.adsId && jsonData.containerId){
this.init(jsonData);
}
}
initSlides.prototype = {
init : function(jsonData){
$.ajax({
url:"/content/requestAds/ads/item?uid="+jsonData.adsId,
type:"get",
dataType:"json",
success:function(data){
if(!data){
return;
}
if(data.type === "1"){
var imgContent = data.content;
var imgList = imgContent.replace(/},/g,"};").split(";");
var imgItems = "";
var imgIcons = "";
for(var i=0;i<imgList.length;i++){
var item = JSON.parse(imgList[i]);
if(i==0){
imgIcons += "<li data-target='#carousel-example-generic' data-slide-to='0' class='active'></li>";
imgItems += "<div class='item active'><a href='"+item.link+"' target='"+item.target+"'><img width='"+item.width+"' height='"+item.height+"' src='"+item.sImg+"' alt='"+item.discription+"'></a></div>";
}
else{
imgIcons += "<li data-target='#carousel-example-generic' data-slide-to='"+i+"'></li>";
imgItems += "<div class='item'><a href='"+item.link+"' target='"+item.target+"'><img width='"+item.width+"' height='"+item.height+"' src='"+item.sImg+"' alt='"+item.discription+"'></a></div>";
}
}
adsTemp(imgItems,imgIcons,jsonData,imgList.length);
}else{
var contentObj = JSON.parse(data.content);
var txtHtml = "";
txtHtml += "<a href='"+contentObj.link+"' target='_blank'><i class='fa fa-tags'></i>"+contentObj.title+"</a>";
$("#"+jsonData.containerId).html(txtHtml);
}
}
});
}
};
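/* Example usage (illustrative; the ad ID and container element ID below are
 * hypothetical — pass the IDs used by your own pages):
 *   new initSlides({ adsId: '5a1b2c3d4e', containerId: 'indexBanner', showPoint: true });
 */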
function adsTemp(imgItems,imgIcons,jsonData,imgListLength){
var adsHtml = "";
adsHtml += "<div id='carousel-example-generic' class='carousel slide' data-ride='carousel'>";
adsHtml += " <ol class='carousel-indicators'>";
if(jsonData.showPoint){
adsHtml += imgIcons;
}
adsHtml += " </ol>";
adsHtml += " <div class='carousel-inner' role='listbox'>";
adsHtml += imgItems;
adsHtml += " </div>";
if(imgListLength > 1){
adsHtml += " <a class='left carousel-control' href='#carousel-example-generic' role='button' data-slide='prev'>";
adsHtml += " <span class='glyphicon glyphicon-chevron-left' aria-hidden='true'></span>";
adsHtml += " <span class='sr-only'>Previous</span>";
adsHtml += " </a>";
adsHtml += " <a class='right carousel-control' href='#carousel-example-generic' role='button' data-slide='next'>";
adsHtml += " <span class='glyphicon glyphicon-chevron-right' aria-hidden='true'></span>";
adsHtml += " <span class='sr-only'>Next</span>";
adsHtml += " </a>";
}
adsHtml += "</div>";
$("#"+jsonData.containerId).html(adsHtml);
} | return false;
}); |
main.rs | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/cli/main.rs.mako'
// DO NOT EDIT !
#![allow(unused_variables, unused_imports, dead_code, unused_mut)]
extern crate tokio;
#[macro_use]
extern crate clap;
use std::env;
use std::io::{self, Write};
use clap::{App, SubCommand, Arg};
use google_cloudshell1::{api, Error, oauth2};
mod client;
use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg,
input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol,
calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo};
use std::default::Default;
use std::str::FromStr;
use serde_json as json;
use clap::ArgMatches;
enum DoitError {
IoError(String, io::Error),
ApiError(Error),
}
struct Engine<'n> {
opt: ArgMatches<'n>,
hub: api::CloudShell,
gp: Vec<&'static str>,
gpm: Vec<(&'static str, &'static str)>,
}
impl<'n> Engine<'n> {
async fn _operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec![]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::CancelOperationRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.operations().cancel(request, opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.operations().delete(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn | (&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.operations().get(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.operations().list(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"page-size" => {
call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
},
"filter" => {
call = call.filter(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _users_environments_add_public_key(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"key" => Some(("key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["key"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::AddPublicKeyRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.users().environments_add_public_key(request, opt.value_of("environment").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _users_environments_authorize(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"access-token" => Some(("accessToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"expire-time" => Some(("expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"id-token" => Some(("idToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["access-token", "expire-time", "id-token"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::AuthorizeEnvironmentRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.users().environments_authorize(request, opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _users_environments_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.users().environments_get(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _users_environments_remove_public_key(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"key" => Some(("key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["key"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::RemovePublicKeyRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.users().environments_remove_public_key(request, opt.value_of("environment").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _users_environments_start(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"access-token" => Some(("accessToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"public-keys" => Some(("publicKeys", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["access-token", "public-keys"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::StartEnvironmentRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.users().environments_start(request, opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> {
let mut err = InvalidOptionsError::new();
let mut call_result: Result<(), DoitError> = Ok(());
let mut err_opt: Option<InvalidOptionsError> = None;
match self.opt.subcommand() {
("operations", Some(opt)) => {
match opt.subcommand() {
("cancel", Some(opt)) => {
call_result = self._operations_cancel(opt, dry_run, &mut err).await;
},
("delete", Some(opt)) => {
call_result = self._operations_delete(opt, dry_run, &mut err).await;
},
("get", Some(opt)) => {
call_result = self._operations_get(opt, dry_run, &mut err).await;
},
("list", Some(opt)) => {
call_result = self._operations_list(opt, dry_run, &mut err).await;
},
_ => {
err.issues.push(CLIError::MissingMethodError("operations".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("users", Some(opt)) => {
match opt.subcommand() {
("environments-add-public-key", Some(opt)) => {
call_result = self._users_environments_add_public_key(opt, dry_run, &mut err).await;
},
("environments-authorize", Some(opt)) => {
call_result = self._users_environments_authorize(opt, dry_run, &mut err).await;
},
("environments-get", Some(opt)) => {
call_result = self._users_environments_get(opt, dry_run, &mut err).await;
},
("environments-remove-public-key", Some(opt)) => {
call_result = self._users_environments_remove_public_key(opt, dry_run, &mut err).await;
},
("environments-start", Some(opt)) => {
call_result = self._users_environments_start(opt, dry_run, &mut err).await;
},
_ => {
err.issues.push(CLIError::MissingMethodError("users".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
_ => {
err.issues.push(CLIError::MissingCommandError);
writeln!(io::stderr(), "{}\n", self.opt.usage()).ok();
}
}
if dry_run {
if err.issues.len() > 0 {
err_opt = Some(err);
}
Err(err_opt)
} else {
Ok(call_result)
}
}
// Please note that this call will fail if any part of the opt can't be handled
async fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> {
let (config_dir, secret) = {
let config_dir = match client::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) {
Err(e) => return Err(InvalidOptionsError::single(e, 3)),
Ok(p) => p,
};
match client::application_secret_from_directory(&config_dir, "cloudshell1-secret.json",
"{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") {
Ok(secret) => (config_dir, secret),
Err(e) => return Err(InvalidOptionsError::single(e, 4))
}
};
let auth = oauth2::InstalledFlowAuthenticator::builder(
secret,
oauth2::InstalledFlowReturnMethod::HTTPRedirect,
).persist_tokens_to_disk(format!("{}/cloudshell1", config_dir)).build().await.unwrap();
let client = hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots());
let engine = Engine {
opt: opt,
hub: api::CloudShell::new(client, auth),
gp: vec!["$-xgafv", "access-token", "alt", "callback", "fields", "key", "oauth-token", "pretty-print", "quota-user", "upload-type", "upload-protocol"],
gpm: vec![
("$-xgafv", "$.xgafv"),
("access-token", "access_token"),
("oauth-token", "oauth_token"),
("pretty-print", "prettyPrint"),
("quota-user", "quotaUser"),
("upload-type", "uploadType"),
("upload-protocol", "upload_protocol"),
]
};
match engine._doit(true).await {
Err(Some(err)) => Err(err),
Err(None) => Ok(engine),
Ok(_) => unreachable!(),
}
}
async fn doit(&self) -> Result<(), DoitError> {
match self._doit(false).await {
Ok(res) => res,
Err(_) => unreachable!(),
}
}
}
#[tokio::main]
async fn main() {
let mut exit_status = 0i32;
let arg_data = [
("operations", "methods: 'cancel', 'delete', 'get' and 'list'", vec![
("cancel",
Some(r##"Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/operations_cancel",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource to be cancelled."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("delete",
Some(r##"Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/operations_delete",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource to be deleted."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("get",
Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/operations_get",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("list",
Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/operations_list",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation's parent resource."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("users", "methods: 'environments-add-public-key', 'environments-authorize', 'environments-get', 'environments-remove-public-key' and 'environments-start'", vec![
("environments-add-public-key",
Some(r##"Adds a public SSH key to an environment, allowing clients with the corresponding private key to connect to that environment via SSH. If a key with the same content already exists, this will error with ALREADY_EXISTS."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/users_environments-add-public-key",
vec![
(Some(r##"environment"##),
None,
Some(r##"Environment this key should be added to, e.g. `users/me/environments/default`."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("environments-authorize",
Some(r##"Sends OAuth credentials to a running environment on behalf of a user. When this completes, the environment will be authorized to run various Google Cloud command line tools without requiring the user to manually authenticate."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/users_environments-authorize",
vec![
(Some(r##"name"##),
None,
Some(r##"Name of the resource that should receive the credentials, for example `users/me/environments/default` or `users/[email protected]/environments/default`."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("environments-get",
Some(r##"Gets an environment. Returns NOT_FOUND if the environment does not exist."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/users_environments-get",
vec![
(Some(r##"name"##),
None,
Some(r##"Required. Name of the requested resource, for example `users/me/environments/default` or `users/[email protected]/environments/default`."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("environments-remove-public-key",
Some(r##"Removes a public SSH key from an environment. Clients will no longer be able to connect to the environment using the corresponding private key. If a key with the same content is not present, this will error with NOT_FOUND."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/users_environments-remove-public-key",
vec![
(Some(r##"environment"##),
None,
Some(r##"Environment this key should be removed from, e.g. `users/me/environments/default`."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("environments-start",
Some(r##"Starts an existing environment, allowing clients to connect to it. The returned operation will contain an instance of StartEnvironmentMetadata in its metadata field. Users can wait for the environment to start by polling this operation via GetOperation. Once the environment has finished starting and is ready to accept connections, the operation will contain a StartEnvironmentResponse in its response field."##),
"Details at http://byron.github.io/google-apis-rs/google_cloudshell1_cli/users_environments-start",
vec![
(Some(r##"name"##),
None,
Some(r##"Name of the resource that should be started, for example `users/me/environments/default` or `users/[email protected]/environments/default`."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
];
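// Illustrative invocation of the generated CLI (subcommand and argument
// shapes are taken from `arg_data` above; the resource name is hypothetical):
//   cloudshell1 users environments-get users/me/environments/default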
let mut app = App::new("cloudshell1")
.author("Sebastian Thiel <[email protected]>")
.version("3.0.0+20220301")
.about("Allows users to start, configure, and connect to interactive shell sessions running in the cloud. ")
.after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudshell1_cli")
.arg(Arg::with_name("url")
.long("scope")
.help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli")
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Debug print all errors")
.multiple(false)
.takes_value(false));
for &(main_command_name, about, ref subcommands) in arg_data.iter() {
let mut mcmd = SubCommand::with_name(main_command_name).about(about);
for &(sub_command_name, ref desc, url_info, ref args) in subcommands {
let mut scmd = SubCommand::with_name(sub_command_name);
if let &Some(desc) = desc {
scmd = scmd.about(desc);
}
scmd = scmd.after_help(url_info);
for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args {
let arg_name_str =
match (arg_name, flag) {
(&Some(an), _ ) => an,
(_ , &Some(f)) => f,
_ => unreachable!(),
};
let mut arg = Arg::with_name(arg_name_str)
.empty_values(false);
if let &Some(short_flag) = flag {
arg = arg.short(short_flag);
}
if let &Some(desc) = desc {
arg = arg.help(desc);
}
if arg_name.is_some() && flag.is_some() {
arg = arg.takes_value(true);
}
if let &Some(required) = required {
arg = arg.required(required);
}
if let &Some(multi) = multi {
arg = arg.multiple(multi);
}
scmd = scmd.arg(arg);
}
mcmd = mcmd.subcommand(scmd);
}
app = app.subcommand(mcmd);
}
let matches = app.get_matches();
let debug = matches.is_present("debug");
match Engine::new(matches).await {
Err(err) => {
exit_status = err.exit_code;
writeln!(io::stderr(), "{}", err).ok();
},
Ok(engine) => {
if let Err(doit_err) = engine.doit().await {
exit_status = 1;
match doit_err {
DoitError::IoError(path, err) => {
writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok();
},
DoitError::ApiError(err) => {
if debug {
writeln!(io::stderr(), "{:#?}", err).ok();
} else {
writeln!(io::stderr(), "{}", err).ok();
}
}
}
}
}
}
std::process::exit(exit_status);
}
| _operations_get |
build.rs | fn main() | {
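    // Illustrative: the generation step below is opt-in; running
    // `GENERATE_PROTOS=1 cargo build` (invocation inferred from the
    // env-var gate, so treat it as an assumption) regenerates the prost types.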
if std::env::var("GENERATE_PROTOS").is_ok() {
let prost_protos_file = vec!["./protobuf/exomind/base.proto"];
prost_build::Config::new()
.out_dir("./src/generated")
.compile_protos(&prost_protos_file, &["./protobuf"])
.expect("prost error");
}
} |
|
mock_poster.go | // Code generated by MockGen. DO NOT EDIT.
// Source: github.com/larkox/mattermost-plugin-utils/bot/poster (interfaces: Poster)
// Package mock_bot is a generated GoMock package.
package mock_bot
import (
gomock "github.com/golang/mock/gomock"
model "github.com/mattermost/mattermost-server/v5/model"
reflect "reflect"
)
// MockPoster is a mock of Poster interface
type MockPoster struct {
ctrl *gomock.Controller
recorder *MockPosterMockRecorder
}
// MockPosterMockRecorder is the mock recorder for MockPoster
type MockPosterMockRecorder struct {
mock *MockPoster
}
// NewMockPoster creates a new mock instance
func NewMockPoster(ctrl *gomock.Controller) *MockPoster {
mock := &MockPoster{ctrl: ctrl}
mock.recorder = &MockPosterMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use
func (m *MockPoster) EXPECT() *MockPosterMockRecorder {
return m.recorder
}
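// Illustrative use in a test (standard gomock workflow; the arguments and
// return values below are hypothetical):
//   ctrl := gomock.NewController(t)
//   defer ctrl.Finish()
//   poster := NewMockPoster(ctrl)
//   poster.EXPECT().DM("userID", "hello %s", "world").Return("postID", nil)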
// DM mocks base method
func (m *MockPoster) DM(arg0, arg1 string, arg2 ...interface{}) (string, error) {
m.ctrl.T.Helper()
varargs := []interface{}{arg0, arg1}
for _, a := range arg2 {
varargs = append(varargs, a)
}
ret := m.ctrl.Call(m, "DM", varargs...)
ret0, _ := ret[0].(string)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// DM indicates an expected call of DM
func (mr *MockPosterMockRecorder) DM(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
varargs := append([]interface{}{arg0, arg1}, arg2...)
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DM", reflect.TypeOf((*MockPoster)(nil).DM), varargs...)
}
// DMUpdate mocks base method
func (m *MockPoster) DMUpdate(arg0, arg1 string, arg2 ...interface{}) error {
m.ctrl.T.Helper()
varargs := []interface{}{arg0, arg1}
for _, a := range arg2 {
varargs = append(varargs, a)
}
ret := m.ctrl.Call(m, "DMUpdate", varargs...)
ret0, _ := ret[0].(error)
return ret0
}
// DMUpdate indicates an expected call of DMUpdate
func (mr *MockPosterMockRecorder) DMUpdate(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
varargs := append([]interface{}{arg0, arg1}, arg2...)
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DMUpdate", reflect.TypeOf((*MockPoster)(nil).DMUpdate), varargs...)
}
// DMWithAttachments mocks base method
func (m *MockPoster) DMWithAttachments(arg0 string, arg1 ...*model.SlackAttachment) (string, error) {
m.ctrl.T.Helper()
varargs := []interface{}{arg0}
for _, a := range arg1 {
varargs = append(varargs, a)
}
ret := m.ctrl.Call(m, "DMWithAttachments", varargs...)
ret0, _ := ret[0].(string)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// DMWithAttachments indicates an expected call of DMWithAttachments
func (mr *MockPosterMockRecorder) DMWithAttachments(arg0 interface{}, arg1 ...interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
varargs := append([]interface{}{arg0}, arg1...)
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DMWithAttachments", reflect.TypeOf((*MockPoster)(nil).DMWithAttachments), varargs...)
}
// DeletePost mocks base method
func (m *MockPoster) DeletePost(arg0 string) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeletePost", arg0)
ret0, _ := ret[0].(error)
return ret0
}
// DeletePost indicates an expected call of DeletePost
func (mr *MockPosterMockRecorder) DeletePost(arg0 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeletePost", reflect.TypeOf((*MockPoster)(nil).DeletePost), arg0)
}
// Ephemeral mocks base method
func (m *MockPoster) Ephemeral(arg0, arg1, arg2 string, arg3 ...interface{}) {
m.ctrl.T.Helper() | }
m.ctrl.Call(m, "Ephemeral", varargs...)
}
// Ephemeral indicates an expected call of Ephemeral
func (mr *MockPosterMockRecorder) Ephemeral(arg0, arg1, arg2 interface{}, arg3 ...interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
varargs := append([]interface{}{arg0, arg1, arg2}, arg3...)
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Ephemeral", reflect.TypeOf((*MockPoster)(nil).Ephemeral), varargs...)
}
// UpdatePost mocks base method
func (m *MockPoster) UpdatePost(arg0 *model.Post) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpdatePost", arg0)
ret0, _ := ret[0].(error)
return ret0
}
// UpdatePost indicates an expected call of UpdatePost
func (mr *MockPosterMockRecorder) UpdatePost(arg0 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdatePost", reflect.TypeOf((*MockPoster)(nil).UpdatePost), arg0)
}
// UpdatePosterID mocks base method
func (m *MockPoster) UpdatePosterID(arg0 string) {
m.ctrl.T.Helper()
m.ctrl.Call(m, "UpdatePosterID", arg0)
}
// UpdatePosterID indicates an expected call of UpdatePosterID
func (mr *MockPosterMockRecorder) UpdatePosterID(arg0 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdatePosterID", reflect.TypeOf((*MockPoster)(nil).UpdatePosterID), arg0)
} | varargs := []interface{}{arg0, arg1, arg2}
for _, a := range arg3 {
varargs = append(varargs, a) |
config.py | import os
mongodb_atlas = {
"connection_string": os.environ.get('MONGODB_CONNECTION_STRING'),
"database_name": "info-bot",
"news_collection_name": "wow-news",
"log_collections": {"commands": "user-commands-log", "updater": "push-updates-log"} | article_types = {
"HOTFIXES": "hotfixes",
"LATEST": "latest"
}
article_keys = {
"TYPE": "type",
"ID": "_id",
"TITLE": "title",
"DESCRIPTION": "description",
"DATETIME": "datetime",
"URL": "url",
"IMAGE_URL": "image_url"
}
news_cog = {
"embed_color": {
"r": 252,
"g": 186,
"b": 3
}
}
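# Illustrative consumption of this config (assumes pymongo is installed;
# not part of this module):
#   from pymongo import MongoClient
#   client = MongoClient(mongodb_atlas["connection_string"])
#   news = client[mongodb_atlas["database_name"]][mongodb_atlas["news_collection_name"]]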
updater_cog = {
"news_channel_id": 823082892367364156,
"wow_role_id": 742188088461099148,
"refresh_rate_seconds": 5,
"embed_color": {
"r": 255,
"g": 75,
"b": 35
}
} | }
|
handle.rs | use std::cell::Cell;
use std::fmt;
use std::io::SeekFrom;
use std::path::Path;
use std::ptr;
use std::str;
use std::time::Duration;
use curl_sys;
use libc::c_void;
use crate::easy::handler::{self, InfoType, ReadError, SeekResult, WriteError};
use crate::easy::handler::{Auth, NetRc, ProxyType, SslOpt};
use crate::easy::handler::{HttpVersion, IpResolve, SslVersion, TimeCondition};
use crate::easy::{Easy2, Handler};
use crate::easy::{Form, List};
use crate::Error;
/// Raw bindings to a libcurl "easy session".
///
/// This type is the same as the `Easy2` type in this library except that it
/// does not contain a type parameter. Callbacks from curl are all controlled
/// via closures on this `Easy` type, and this type notably has a `transfer`
/// method as well for ergonomic management of these callbacks.
///
/// There's not necessarily a right answer for which type is correct to use, but
/// as a general rule of thumb `Easy` is typically a reasonable choice for
/// synchronous I/O and `Easy2` is a good choice for asynchronous I/O.
///
/// ## Examples
///
/// Creating a handle which can be used later
///
/// ```
/// use curl::easy::Easy;
///
/// let handle = Easy::new();
/// ```
///
/// Send an HTTP request, writing the response to stdout.
///
/// ```
/// use std::io::{stdout, Write};
///
/// use curl::easy::Easy;
///
/// let mut handle = Easy::new();
/// handle.url("https://www.rust-lang.org/").unwrap();
/// handle.write_function(|data| {
/// stdout().write_all(data).unwrap();
/// Ok(data.len())
/// }).unwrap();
/// handle.perform().unwrap();
/// ```
///
/// Collect all output of an HTTP request to a vector.
///
/// ```
/// use curl::easy::Easy;
///
/// let mut data = Vec::new();
/// let mut handle = Easy::new();
/// handle.url("https://www.rust-lang.org/").unwrap();
/// {
/// let mut transfer = handle.transfer();
/// transfer.write_function(|new_data| {
/// data.extend_from_slice(new_data);
/// Ok(new_data.len())
/// }).unwrap();
/// transfer.perform().unwrap();
/// }
/// println!("{:?}", data);
/// ```
///
/// More examples of various properties of an HTTP request can be found on the
/// specific methods as well.
#[derive(Debug)]
pub struct Easy {
inner: Easy2<EasyData>,
}
/// A scoped transfer of information which borrows an `Easy` and allows
/// referencing stack-local data of the lifetime `'data`.
///
/// Usage of `Easy` requires the `'static` and `Send` bounds on all callbacks
/// registered, but that's not often wanted if all you need is to collect a
/// bunch of data in memory to a vector, for example. The `Transfer` structure,
/// created by the `Easy::transfer` method, is used for this sort of request.
///
/// The callbacks attached to a `Transfer` are only active for that one transfer
/// object, and they allow eliding both the `Send` and `'static` bounds to
/// close over stack-local information.
pub struct Transfer<'easy, 'data> {
easy: &'easy mut Easy,
data: Box<Callbacks<'data>>,
}
pub struct EasyData {
running: Cell<bool>,
owned: Callbacks<'static>,
borrowed: Cell<*mut Callbacks<'static>>,
}
unsafe impl Send for EasyData {}
#[derive(Default)]
struct Callbacks<'a> {
write: Option<Box<dyn FnMut(&[u8]) -> Result<usize, WriteError> + 'a>>,
read: Option<Box<dyn FnMut(&mut [u8]) -> Result<usize, ReadError> + 'a>>,
seek: Option<Box<dyn FnMut(SeekFrom) -> SeekResult + 'a>>,
debug: Option<Box<dyn FnMut(InfoType, &[u8]) + 'a>>,
header: Option<Box<dyn FnMut(&[u8]) -> bool + 'a>>,
progress: Option<Box<dyn FnMut(f64, f64, f64, f64) -> bool + 'a>>,
ssl_ctx: Option<Box<dyn FnMut(*mut c_void) -> Result<(), Error> + 'a>>,
}
impl Easy {
/// Creates a new "easy" handle which is the core of almost all operations
/// in libcurl.
///
/// To use a handle, applications typically configure a number of options
/// followed by a call to `perform`. Options are preserved across calls to
/// `perform` and need to be reset manually (or via the `reset` method) if
/// this is not desired.
pub fn new() -> Easy {
Easy {
inner: Easy2::new(EasyData {
running: Cell::new(false),
owned: Callbacks::default(),
borrowed: Cell::new(ptr::null_mut()),
}),
}
}
// =========================================================================
// Behavior options
/// Same as [`Easy2::verbose`](struct.Easy2.html#method.verbose)
pub fn verbose(&mut self, verbose: bool) -> Result<(), Error> {
self.inner.verbose(verbose)
}
/// Same as [`Easy2::show_header`](struct.Easy2.html#method.show_header)
pub fn show_header(&mut self, show: bool) -> Result<(), Error> {
self.inner.show_header(show)
}
/// Same as [`Easy2::progress`](struct.Easy2.html#method.progress)
pub fn progress(&mut self, progress: bool) -> Result<(), Error> {
self.inner.progress(progress)
}
/// Same as [`Easy2::signal`](struct.Easy2.html#method.signal)
pub fn signal(&mut self, signal: bool) -> Result<(), Error> {
self.inner.signal(signal)
}
/// Same as [`Easy2::wildcard_match`](struct.Easy2.html#method.wildcard_match)
pub fn wildcard_match(&mut self, m: bool) -> Result<(), Error> {
self.inner.wildcard_match(m)
}
/// Same as [`Easy2::unix_socket`](struct.Easy2.html#method.unix_socket)
pub fn unix_socket(&mut self, unix_domain_socket: &str) -> Result<(), Error> {
self.inner.unix_socket(unix_domain_socket)
}
/// Same as [`Easy2::unix_socket_path`](struct.Easy2.html#method.unix_socket_path)
pub fn unix_socket_path<P: AsRef<Path>>(&mut self, path: Option<P>) -> Result<(), Error> {
self.inner.unix_socket_path(path)
}
// =========================================================================
// Callback options
/// Set callback for writing received data.
///
/// This callback function gets called by libcurl as soon as there is data
/// received that needs to be saved.
///
/// The callback function will be passed as much data as possible in each
/// invocation, but you must not make any assumptions. It may be one byte, it
/// may be thousands. If `show_header` is enabled, which makes header data
/// get passed to the write callback, you can get up to
/// `CURL_MAX_HTTP_HEADER` bytes of header data passed into it. This
/// usually means 100K.
///
/// This function may be called with zero bytes of data if the transferred
/// file is empty.
///
/// The callback should return the number of bytes actually taken care of.
/// If that amount differs from the amount passed to your callback function,
/// it'll signal an error condition to the library. This will cause the
/// transfer to get aborted and the libcurl function used will return
/// an error with `is_write_error`.
///
/// If your callback function returns `Err(WriteError::Pause)` it will cause
/// this transfer to become paused. See `unpause_write` for further details.
///
/// By default data is sent into the void, and this corresponds to the
/// `CURLOPT_WRITEFUNCTION` and `CURLOPT_WRITEDATA` options.
///
/// Note that the lifetime bound on this function is `'static`, but that
/// is often too restrictive. To use stack data consider calling the
/// `transfer` method and then using `write_function` to configure a
/// callback that can reference stack-local data.
///
/// # Examples
///
/// ```
/// use std::io::{stdout, Write};
/// use curl::easy::Easy;
///
/// let mut handle = Easy::new();
/// handle.url("https://www.rust-lang.org/").unwrap();
/// handle.write_function(|data| {
/// Ok(stdout().write(data).unwrap())
/// }).unwrap();
/// handle.perform().unwrap();
/// ```
///
/// Writing to a stack-local buffer
///
/// ```
/// use std::io::{stdout, Write};
/// use curl::easy::Easy;
///
/// let mut buf = Vec::new();
/// let mut handle = Easy::new();
/// handle.url("https://www.rust-lang.org/").unwrap();
///
/// let mut transfer = handle.transfer();
/// transfer.write_function(|data| {
/// buf.extend_from_slice(data);
/// Ok(data.len())
/// }).unwrap();
/// transfer.perform().unwrap();
/// ```
pub fn write_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(&[u8]) -> Result<usize, WriteError> + Send + 'static,
{
self.inner.get_mut().owned.write = Some(Box::new(f));
Ok(())
}
/// Read callback for data uploads.
///
/// This callback function gets called by libcurl as soon as it needs to
/// read data in order to send it to the peer - like if you ask it to upload
/// or post data to the server.
///
/// Your function must then return the actual number of bytes that it stored
/// in that memory area. Returning 0 will signal end-of-file to the library
/// and cause it to stop the current transfer.
///
/// If you stop the current transfer by returning 0 "prematurely" (i.e.
/// before the server expected it, like when you've said you will upload N
/// bytes and you upload less than N bytes), you may find that the
/// server "hangs" waiting for the rest of the data that won't come.
///
/// The read callback may return `Err(ReadError::Abort)` to stop the
/// current operation immediately, resulting in a `is_aborted_by_callback`
/// error code from the transfer.
///
/// The callback can return `Err(ReadError::Pause)` to cause reading from
/// this connection to pause. See `unpause_read` for further details.
///
/// By default no data is read, and this corresponds to the
/// `CURLOPT_READFUNCTION` and `CURLOPT_READDATA` options.
///
/// Note that the lifetime bound on this function is `'static`, but that
/// is often too restrictive. To use stack data consider calling the
/// `transfer` method and then using `read_function` to configure a
/// callback that can reference stack-local data.
///
/// # Examples
///
/// Read input from stdin
///
/// ```no_run
/// use std::io::{stdin, Read};
/// use curl::easy::Easy;
///
/// let mut handle = Easy::new();
/// handle.url("https://example.com/login").unwrap();
/// handle.read_function(|into| {
/// Ok(stdin().read(into).unwrap())
/// }).unwrap();
/// handle.post(true).unwrap();
/// handle.perform().unwrap();
/// ```
///
/// Reading from stack-local data:
///
/// ```no_run
/// use std::io::{stdin, Read};
/// use curl::easy::Easy;
///
/// let mut data_to_upload = &b"foobar"[..];
/// let mut handle = Easy::new();
/// handle.url("https://example.com/login").unwrap();
/// handle.post(true).unwrap();
///
/// let mut transfer = handle.transfer();
/// transfer.read_function(|into| {
/// Ok(data_to_upload.read(into).unwrap())
/// }).unwrap();
/// transfer.perform().unwrap();
/// ```
pub fn read_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(&mut [u8]) -> Result<usize, ReadError> + Send + 'static,
{
self.inner.get_mut().owned.read = Some(Box::new(f));
Ok(())
}
/// User callback for seeking in input stream.
///
/// This function gets called by libcurl to seek to a certain position in
/// the input stream and can be used to fast forward a file in a resumed
/// upload (instead of reading all uploaded bytes with the normal read
/// function/callback). It is also called to rewind a stream when data has
/// already been sent to the server and needs to be sent again. This may
    /// happen when doing an HTTP PUT or POST with a multi-pass authentication
/// method, or when an existing HTTP connection is reused too late and the
/// server closes the connection.
///
/// The callback function must return `SeekResult::Ok` on success,
/// `SeekResult::Fail` to cause the upload operation to fail or
/// `SeekResult::CantSeek` to indicate that while the seek failed, libcurl
/// is free to work around the problem if possible. The latter can sometimes
/// be done by instead reading from the input or similar.
///
    /// By default this option is not set, and this corresponds to the
/// `CURLOPT_SEEKFUNCTION` and `CURLOPT_SEEKDATA` options.
///
/// Note that the lifetime bound on this function is `'static`, but that
/// is often too restrictive. To use stack data consider calling the
/// `transfer` method and then using `seek_function` to configure a
/// callback that can reference stack-local data.
pub fn seek_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(SeekFrom) -> SeekResult + Send + 'static,
{
self.inner.get_mut().owned.seek = Some(Box::new(f));
Ok(())
}
/// Callback to progress meter function
///
/// This function gets called by libcurl instead of its internal equivalent
/// with a frequent interval. While data is being transferred it will be
/// called very frequently, and during slow periods like when nothing is
/// being transferred it can slow down to about one call per second.
///
/// The callback gets told how much data libcurl will transfer and has
/// transferred, in number of bytes. The first argument is the total number
/// of bytes libcurl expects to download in this transfer. The second
/// argument is the number of bytes downloaded so far. The third argument is
/// the total number of bytes libcurl expects to upload in this transfer.
/// The fourth argument is the number of bytes uploaded so far.
///
/// Unknown/unused argument values passed to the callback will be set to
/// zero (like if you only download data, the upload size will remain 0).
    /// The callback may also be called one or more times before libcurl
    /// knows the data sizes, so a program must be written to handle that.
///
/// Returning `false` from this callback will cause libcurl to abort the
/// transfer and return `is_aborted_by_callback`.
///
/// If you transfer data with the multi interface, this function will not be
/// called during periods of idleness unless you call the appropriate
/// libcurl function that performs transfers.
///
/// `progress` must be set to `true` to make this function actually get
/// called.
///
/// By default this function calls an internal method and corresponds to
/// `CURLOPT_PROGRESSFUNCTION` and `CURLOPT_PROGRESSDATA`.
///
/// Note that the lifetime bound on this function is `'static`, but that
/// is often too restrictive. To use stack data consider calling the
/// `transfer` method and then using `progress_function` to configure a
/// callback that can reference stack-local data.
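    ///
    /// # Examples
    ///
    /// A minimal sketch (not part of the upstream docs) that prints download
    /// progress; it relies on the `progress` setter mentioned above being
    /// enabled:
    ///
    /// ```
    /// use curl::easy::Easy;
    ///
    /// let mut handle = Easy::new();
    /// handle.url("https://www.rust-lang.org/").unwrap();
    /// handle.progress(true).unwrap();
    /// handle.progress_function(|dl_total, dl_now, _ul_total, _ul_now| {
    ///     println!("downloaded {} of {} bytes", dl_now, dl_total);
    ///     true // returning false would abort the transfer
    /// }).unwrap();
    /// handle.perform().unwrap();
    /// ```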
pub fn progress_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(f64, f64, f64, f64) -> bool + Send + 'static,
{
self.inner.get_mut().owned.progress = Some(Box::new(f));
Ok(())
}
/// Callback to SSL context
///
/// This callback function gets called by libcurl just before the
/// initialization of an SSL connection after having processed all
/// other SSL related options to give a last chance to an
/// application to modify the behaviour of the SSL
/// initialization. The `ssl_ctx` parameter is actually a pointer
/// to the SSL library's SSL_CTX. If an error is returned from the
/// callback no attempt to establish a connection is made and the
/// perform operation will return the callback's error code.
///
/// This function will get called on all new connections made to a
/// server, during the SSL negotiation. The SSL_CTX pointer will
/// be a new one every time.
///
/// To use this properly, a non-trivial amount of knowledge of
/// your SSL library is necessary. For example, you can use this
/// function to call library-specific callbacks to add additional
/// validation code for certificates, and even to change the
    /// actual URI of an HTTPS request.
///
/// By default this function calls an internal method and
/// corresponds to `CURLOPT_SSL_CTX_FUNCTION` and
/// `CURLOPT_SSL_CTX_DATA`.
///
/// Note that the lifetime bound on this function is `'static`, but that
/// is often too restrictive. To use stack data consider calling the
    /// `transfer` method and then using `ssl_ctx_function` to configure a
/// callback that can reference stack-local data.
pub fn ssl_ctx_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(*mut c_void) -> Result<(), Error> + Send + 'static,
{
self.inner.get_mut().owned.ssl_ctx = Some(Box::new(f));
Ok(())
}
/// Specify a debug callback
///
/// `debug_function` replaces the standard debug function used when
/// `verbose` is in effect. This callback receives debug information,
/// as specified in the type argument.
///
/// By default this option is not set and corresponds to the
/// `CURLOPT_DEBUGFUNCTION` and `CURLOPT_DEBUGDATA` options.
///
/// Note that the lifetime bound on this function is `'static`, but that
/// is often too restrictive. To use stack data consider calling the
/// `transfer` method and then using `debug_function` to configure a
/// callback that can reference stack-local data.
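    ///
    /// # Examples
    ///
    /// A minimal sketch (not part of the upstream docs) that logs the
    /// informational text libcurl emits; it assumes `verbose` is enabled so
    /// that debug data is actually produced:
    ///
    /// ```
    /// use curl::easy::{Easy, InfoType};
    ///
    /// let mut handle = Easy::new();
    /// handle.url("https://www.rust-lang.org/").unwrap();
    /// handle.verbose(true).unwrap();
    /// handle.debug_function(|info_type, data| {
    ///     if let InfoType::Text = info_type {
    ///         print!("* {}", String::from_utf8_lossy(data));
    ///     }
    /// }).unwrap();
    /// handle.perform().unwrap();
    /// ```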
pub fn debug_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(InfoType, &[u8]) + Send + 'static,
{
self.inner.get_mut().owned.debug = Some(Box::new(f));
Ok(())
}
/// Callback that receives header data
///
/// This function gets called by libcurl as soon as it has received header
/// data. The header callback will be called once for each header and only
/// complete header lines are passed on to the callback. Parsing headers is
/// very easy using this. If this callback returns `false` it'll signal an
/// error to the library. This will cause the transfer to get aborted and
/// the libcurl function in progress will return `is_write_error`.
///
/// A complete HTTP header that is passed to this function can be up to
/// CURL_MAX_HTTP_HEADER (100K) bytes.
///
/// It's important to note that the callback will be invoked for the headers
/// of all responses received after initiating a request and not just the
/// final response. This includes all responses which occur during
/// authentication negotiation. If you need to operate on only the headers
/// from the final response, you will need to collect headers in the
/// callback yourself and use HTTP status lines, for example, to delimit
/// response boundaries.
///
/// When a server sends a chunked encoded transfer, it may contain a
    /// trailer. That trailer is identical to an HTTP header and if such a
/// trailer is received it is passed to the application using this callback
/// as well. There are several ways to detect it being a trailer and not an
/// ordinary header: 1) it comes after the response-body. 2) it comes after
    /// the final header line (CR LF), 3) a Trailer: header among the regular
    /// response-headers mentions what header(s) to expect in the trailer.
///
/// For non-HTTP protocols like FTP, POP3, IMAP and SMTP this function will
/// get called with the server responses to the commands that libcurl sends.
///
/// By default this option is not set and corresponds to the
/// `CURLOPT_HEADERFUNCTION` and `CURLOPT_HEADERDATA` options.
///
/// Note that the lifetime bound on this function is `'static`, but that
/// is often too restrictive. To use stack data consider calling the
/// `transfer` method and then using `header_function` to configure a
/// callback that can reference stack-local data.
///
/// # Examples
///
/// ```
/// use std::str;
///
/// use curl::easy::Easy;
///
/// let mut handle = Easy::new();
/// handle.url("https://www.rust-lang.org/").unwrap();
/// handle.header_function(|header| {
/// print!("header: {}", str::from_utf8(header).unwrap());
/// true
/// }).unwrap();
/// handle.perform().unwrap();
/// ```
///
/// Collecting headers to a stack local vector
///
/// ```
/// use std::str;
///
/// use curl::easy::Easy;
///
/// let mut headers = Vec::new();
/// let mut handle = Easy::new();
/// handle.url("https://www.rust-lang.org/").unwrap();
///
/// {
/// let mut transfer = handle.transfer();
/// transfer.header_function(|header| {
/// headers.push(str::from_utf8(header).unwrap().to_string());
/// true
/// }).unwrap();
/// transfer.perform().unwrap();
/// }
///
/// println!("{:?}", headers);
/// ```
pub fn header_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(&[u8]) -> bool + Send + 'static,
{
self.inner.get_mut().owned.header = Some(Box::new(f));
Ok(())
}
// =========================================================================
// Error options
// TODO: error buffer and stderr
/// Same as [`Easy2::fail_on_error`](struct.Easy2.html#method.fail_on_error)
pub fn fail_on_error(&mut self, fail: bool) -> Result<(), Error> {
self.inner.fail_on_error(fail)
}
// =========================================================================
// Network options
/// Same as [`Easy2::url`](struct.Easy2.html#method.url)
pub fn url(&mut self, url: &str) -> Result<(), Error> {
self.inner.url(url)
}
/// Same as [`Easy2::port`](struct.Easy2.html#method.port)
pub fn port(&mut self, port: u16) -> Result<(), Error> {
self.inner.port(port)
}
/// Same as [`Easy2::connect_to`](struct.Easy2.html#method.connect_to)
pub fn connect_to(&mut self, list: List) -> Result<(), Error> {
self.inner.connect_to(list)
}
/// Same as [`Easy2::path_as_is`](struct.Easy2.html#method.path_as_is)
pub fn path_as_is(&mut self, as_is: bool) -> Result<(), Error> {
self.inner.path_as_is(as_is)
}
/// Same as [`Easy2::proxy`](struct.Easy2.html#method.proxy)
pub fn proxy(&mut self, url: &str) -> Result<(), Error> {
self.inner.proxy(url)
}
/// Same as [`Easy2::proxy_port`](struct.Easy2.html#method.proxy_port)
pub fn proxy_port(&mut self, port: u16) -> Result<(), Error> {
self.inner.proxy_port(port)
}
/// Same as [`Easy2::proxy_cainfo`](struct.Easy2.html#method.proxy_cainfo)
pub fn proxy_cainfo(&mut self, cainfo: &str) -> Result<(), Error> {
self.inner.proxy_cainfo(cainfo)
}
/// Same as [`Easy2::proxy_capath`](struct.Easy2.html#method.proxy_capath)
pub fn proxy_capath<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> {
self.inner.proxy_capath(path)
}
/// Same as [`Easy2::proxy_sslcert`](struct.Easy2.html#method.proxy_sslcert)
pub fn proxy_sslcert(&mut self, sslcert: &str) -> Result<(), Error> {
self.inner.proxy_sslcert(sslcert)
}
/// Same as [`Easy2::proxy_sslcert_blob`](struct.Easy2.html#method.proxy_sslcert_blob)
pub fn proxy_sslcert_blob(&mut self, blob: &[u8]) -> Result<(), Error> {
self.inner.proxy_sslcert_blob(blob)
}
/// Same as [`Easy2::proxy_sslkey`](struct.Easy2.html#method.proxy_sslkey)
pub fn proxy_sslkey(&mut self, sslkey: &str) -> Result<(), Error> {
self.inner.proxy_sslkey(sslkey)
}
/// Same as [`Easy2::proxy_sslkey_blob`](struct.Easy2.html#method.proxy_sslkey_blob)
pub fn proxy_sslkey_blob(&mut self, blob: &[u8]) -> Result<(), Error> {
self.inner.proxy_sslkey_blob(blob)
}
/// Same as [`Easy2::proxy_type`](struct.Easy2.html#method.proxy_type)
pub fn proxy_type(&mut self, kind: ProxyType) -> Result<(), Error> {
self.inner.proxy_type(kind)
}
/// Same as [`Easy2::noproxy`](struct.Easy2.html#method.noproxy)
pub fn noproxy(&mut self, skip: &str) -> Result<(), Error> {
self.inner.noproxy(skip)
}
/// Same as [`Easy2::http_proxy_tunnel`](struct.Easy2.html#method.http_proxy_tunnel)
pub fn http_proxy_tunnel(&mut self, tunnel: bool) -> Result<(), Error> {
self.inner.http_proxy_tunnel(tunnel)
}
/// Same as [`Easy2::interface`](struct.Easy2.html#method.interface)
pub fn interface(&mut self, interface: &str) -> Result<(), Error> {
self.inner.interface(interface)
}
/// Same as [`Easy2::set_local_port`](struct.Easy2.html#method.set_local_port)
pub fn set_local_port(&mut self, port: u16) -> Result<(), Error> {
self.inner.set_local_port(port)
}
/// Same as [`Easy2::local_port_range`](struct.Easy2.html#method.local_port_range)
pub fn local_port_range(&mut self, range: u16) -> Result<(), Error> {
self.inner.local_port_range(range)
}
/// Same as [`Easy2::dns_servers`](struct.Easy2.html#method.dns_servers)
pub fn dns_servers(&mut self, servers: &str) -> Result<(), Error> {
self.inner.dns_servers(servers)
}
/// Same as [`Easy2::dns_cache_timeout`](struct.Easy2.html#method.dns_cache_timeout)
pub fn dns_cache_timeout(&mut self, dur: Duration) -> Result<(), Error> {
self.inner.dns_cache_timeout(dur)
}
/// Same as [`Easy2::buffer_size`](struct.Easy2.html#method.buffer_size)
pub fn buffer_size(&mut self, size: usize) -> Result<(), Error> {
self.inner.buffer_size(size)
}
/// Same as [`Easy2::upload_buffer_size`](struct.Easy2.html#method.upload_buffer_size)
pub fn upload_buffer_size(&mut self, size: usize) -> Result<(), Error> {
self.inner.upload_buffer_size(size)
}
/// Same as [`Easy2::tcp_nodelay`](struct.Easy2.html#method.tcp_nodelay)
pub fn tcp_nodelay(&mut self, enable: bool) -> Result<(), Error> {
self.inner.tcp_nodelay(enable)
}
/// Same as [`Easy2::tcp_keepalive`](struct.Easy2.html#method.tcp_keepalive)
pub fn tcp_keepalive(&mut self, enable: bool) -> Result<(), Error> {
self.inner.tcp_keepalive(enable)
}
    /// Same as [`Easy2::tcp_keepintvl`](struct.Easy2.html#method.tcp_keepintvl)
pub fn tcp_keepintvl(&mut self, dur: Duration) -> Result<(), Error> {
self.inner.tcp_keepintvl(dur)
}
/// Same as [`Easy2::tcp_keepidle`](struct.Easy2.html#method.tcp_keepidle)
pub fn tcp_keepidle(&mut self, dur: Duration) -> Result<(), Error> {
self.inner.tcp_keepidle(dur)
}
/// Same as [`Easy2::address_scope`](struct.Easy2.html#method.address_scope)
pub fn address_scope(&mut self, scope: u32) -> Result<(), Error> {
self.inner.address_scope(scope)
}
// =========================================================================
// Names and passwords
/// Same as [`Easy2::username`](struct.Easy2.html#method.username)
pub fn username(&mut self, user: &str) -> Result<(), Error> {
self.inner.username(user)
}
/// Same as [`Easy2::password`](struct.Easy2.html#method.password)
pub fn password(&mut self, pass: &str) -> Result<(), Error> {
self.inner.password(pass)
}
/// Same as [`Easy2::http_auth`](struct.Easy2.html#method.http_auth)
pub fn http_auth(&mut self, auth: &Auth) -> Result<(), Error> {
self.inner.http_auth(auth)
}
/// Same as [`Easy2::aws_sigv4`](struct.Easy2.html#method.aws_sigv4)
pub fn aws_sigv4(&mut self, param: &str) -> Result<(), Error> {
self.inner.aws_sigv4(param)
}
/// Same as [`Easy2::proxy_username`](struct.Easy2.html#method.proxy_username)
pub fn proxy_username(&mut self, user: &str) -> Result<(), Error> {
self.inner.proxy_username(user)
}
/// Same as [`Easy2::proxy_password`](struct.Easy2.html#method.proxy_password)
pub fn proxy_password(&mut self, pass: &str) -> Result<(), Error> {
self.inner.proxy_password(pass)
}
/// Same as [`Easy2::proxy_auth`](struct.Easy2.html#method.proxy_auth)
pub fn proxy_auth(&mut self, auth: &Auth) -> Result<(), Error> {
self.inner.proxy_auth(auth)
}
/// Same as [`Easy2::netrc`](struct.Easy2.html#method.netrc)
pub fn netrc(&mut self, netrc: NetRc) -> Result<(), Error> {
self.inner.netrc(netrc)
}
// =========================================================================
// HTTP Options
/// Same as [`Easy2::autoreferer`](struct.Easy2.html#method.autoreferer)
pub fn autoreferer(&mut self, enable: bool) -> Result<(), Error> {
self.inner.autoreferer(enable)
}
/// Same as [`Easy2::accept_encoding`](struct.Easy2.html#method.accept_encoding)
pub fn accept_encoding(&mut self, encoding: &str) -> Result<(), Error> {
self.inner.accept_encoding(encoding)
}
/// Same as [`Easy2::transfer_encoding`](struct.Easy2.html#method.transfer_encoding)
pub fn transfer_encoding(&mut self, enable: bool) -> Result<(), Error> {
self.inner.transfer_encoding(enable)
}
/// Same as [`Easy2::follow_location`](struct.Easy2.html#method.follow_location)
pub fn follow_location(&mut self, enable: bool) -> Result<(), Error> {
self.inner.follow_location(enable)
}
/// Same as [`Easy2::unrestricted_auth`](struct.Easy2.html#method.unrestricted_auth)
pub fn unrestricted_auth(&mut self, enable: bool) -> Result<(), Error> {
self.inner.unrestricted_auth(enable)
}
/// Same as [`Easy2::max_redirections`](struct.Easy2.html#method.max_redirections)
pub fn max_redirections(&mut self, max: u32) -> Result<(), Error> {
self.inner.max_redirections(max)
}
/// Same as [`Easy2::put`](struct.Easy2.html#method.put)
pub fn put(&mut self, enable: bool) -> Result<(), Error> {
self.inner.put(enable)
}
/// Same as [`Easy2::post`](struct.Easy2.html#method.post)
pub fn post(&mut self, enable: bool) -> Result<(), Error> {
self.inner.post(enable)
}
    /// Same as [`Easy2::post_fields_copy`](struct.Easy2.html#method.post_fields_copy)
pub fn post_fields_copy(&mut self, data: &[u8]) -> Result<(), Error> {
self.inner.post_fields_copy(data)
}
/// Same as [`Easy2::post_field_size`](struct.Easy2.html#method.post_field_size)
pub fn post_field_size(&mut self, size: u64) -> Result<(), Error> {
self.inner.post_field_size(size)
}
/// Same as [`Easy2::httppost`](struct.Easy2.html#method.httppost)
pub fn httppost(&mut self, form: Form) -> Result<(), Error> {
self.inner.httppost(form)
}
/// Same as [`Easy2::referer`](struct.Easy2.html#method.referer)
pub fn referer(&mut self, referer: &str) -> Result<(), Error> {
self.inner.referer(referer)
}
/// Same as [`Easy2::useragent`](struct.Easy2.html#method.useragent)
pub fn useragent(&mut self, useragent: &str) -> Result<(), Error> {
self.inner.useragent(useragent)
}
/// Same as [`Easy2::http_headers`](struct.Easy2.html#method.http_headers)
pub fn http_headers(&mut self, list: List) -> Result<(), Error> {
self.inner.http_headers(list)
}
/// Same as [`Easy2::cookie`](struct.Easy2.html#method.cookie)
pub fn cookie(&mut self, cookie: &str) -> Result<(), Error> {
self.inner.cookie(cookie)
}
/// Same as [`Easy2::cookie_file`](struct.Easy2.html#method.cookie_file)
pub fn cookie_file<P: AsRef<Path>>(&mut self, file: P) -> Result<(), Error> {
self.inner.cookie_file(file)
}
/// Same as [`Easy2::cookie_jar`](struct.Easy2.html#method.cookie_jar)
pub fn cookie_jar<P: AsRef<Path>>(&mut self, file: P) -> Result<(), Error> {
self.inner.cookie_jar(file)
}
/// Same as [`Easy2::cookie_session`](struct.Easy2.html#method.cookie_session)
pub fn cookie_session(&mut self, session: bool) -> Result<(), Error> {
self.inner.cookie_session(session)
}
/// Same as [`Easy2::cookie_list`](struct.Easy2.html#method.cookie_list)
pub fn cookie_list(&mut self, cookie: &str) -> Result<(), Error> {
self.inner.cookie_list(cookie)
}
/// Same as [`Easy2::get`](struct.Easy2.html#method.get)
pub fn get(&mut self, enable: bool) -> Result<(), Error> {
self.inner.get(enable)
}
/// Same as [`Easy2::ignore_content_length`](struct.Easy2.html#method.ignore_content_length)
pub fn ignore_content_length(&mut self, ignore: bool) -> Result<(), Error> {
self.inner.ignore_content_length(ignore)
}
/// Same as [`Easy2::http_content_decoding`](struct.Easy2.html#method.http_content_decoding)
pub fn http_content_decoding(&mut self, enable: bool) -> Result<(), Error> {
self.inner.http_content_decoding(enable)
}
/// Same as [`Easy2::http_transfer_decoding`](struct.Easy2.html#method.http_transfer_decoding)
pub fn http_transfer_decoding(&mut self, enable: bool) -> Result<(), Error> {
self.inner.http_transfer_decoding(enable)
}
// =========================================================================
// Protocol Options
/// Same as [`Easy2::range`](struct.Easy2.html#method.range)
pub fn range(&mut self, range: &str) -> Result<(), Error> {
self.inner.range(range)
}
/// Same as [`Easy2::resume_from`](struct.Easy2.html#method.resume_from)
pub fn resume_from(&mut self, from: u64) -> Result<(), Error> {
self.inner.resume_from(from)
}
/// Same as [`Easy2::custom_request`](struct.Easy2.html#method.custom_request)
pub fn custom_request(&mut self, request: &str) -> Result<(), Error> {
self.inner.custom_request(request)
}
/// Same as [`Easy2::fetch_filetime`](struct.Easy2.html#method.fetch_filetime)
pub fn fetch_filetime(&mut self, fetch: bool) -> Result<(), Error> {
self.inner.fetch_filetime(fetch)
}
/// Same as [`Easy2::nobody`](struct.Easy2.html#method.nobody)
pub fn nobody(&mut self, enable: bool) -> Result<(), Error> {
self.inner.nobody(enable)
}
/// Same as [`Easy2::in_filesize`](struct.Easy2.html#method.in_filesize)
pub fn in_filesize(&mut self, size: u64) -> Result<(), Error> {
self.inner.in_filesize(size)
}
/// Same as [`Easy2::upload`](struct.Easy2.html#method.upload)
pub fn upload(&mut self, enable: bool) -> Result<(), Error> {
self.inner.upload(enable)
}
/// Same as [`Easy2::max_filesize`](struct.Easy2.html#method.max_filesize)
pub fn max_filesize(&mut self, size: u64) -> Result<(), Error> {
self.inner.max_filesize(size)
}
/// Same as [`Easy2::time_condition`](struct.Easy2.html#method.time_condition)
pub fn time_condition(&mut self, cond: TimeCondition) -> Result<(), Error> {
self.inner.time_condition(cond)
}
/// Same as [`Easy2::time_value`](struct.Easy2.html#method.time_value)
pub fn time_value(&mut self, val: i64) -> Result<(), Error> {
self.inner.time_value(val)
}
// =========================================================================
// Connection Options
/// Same as [`Easy2::timeout`](struct.Easy2.html#method.timeout)
pub fn timeout(&mut self, timeout: Duration) -> Result<(), Error> {
self.inner.timeout(timeout)
}
/// Same as [`Easy2::low_speed_limit`](struct.Easy2.html#method.low_speed_limit)
pub fn low_speed_limit(&mut self, limit: u32) -> Result<(), Error> {
self.inner.low_speed_limit(limit)
}
/// Same as [`Easy2::low_speed_time`](struct.Easy2.html#method.low_speed_time)
pub fn low_speed_time(&mut self, dur: Duration) -> Result<(), Error> {
self.inner.low_speed_time(dur)
}
/// Same as [`Easy2::max_send_speed`](struct.Easy2.html#method.max_send_speed)
pub fn max_send_speed(&mut self, speed: u64) -> Result<(), Error> {
self.inner.max_send_speed(speed)
}
/// Same as [`Easy2::max_recv_speed`](struct.Easy2.html#method.max_recv_speed)
pub fn max_recv_speed(&mut self, speed: u64) -> Result<(), Error> {
self.inner.max_recv_speed(speed)
}
/// Same as [`Easy2::max_connects`](struct.Easy2.html#method.max_connects)
pub fn max_connects(&mut self, max: u32) -> Result<(), Error> {
self.inner.max_connects(max)
}
/// Same as [`Easy2::maxage_conn`](struct.Easy2.html#method.maxage_conn)
pub fn maxage_conn(&mut self, max_age: Duration) -> Result<(), Error> {
self.inner.maxage_conn(max_age)
}
/// Same as [`Easy2::fresh_connect`](struct.Easy2.html#method.fresh_connect)
pub fn fresh_connect(&mut self, enable: bool) -> Result<(), Error> {
self.inner.fresh_connect(enable)
}
/// Same as [`Easy2::forbid_reuse`](struct.Easy2.html#method.forbid_reuse)
pub fn forbid_reuse(&mut self, enable: bool) -> Result<(), Error> {
self.inner.forbid_reuse(enable)
}
/// Same as [`Easy2::connect_timeout`](struct.Easy2.html#method.connect_timeout)
pub fn connect_timeout(&mut self, timeout: Duration) -> Result<(), Error> {
self.inner.connect_timeout(timeout)
}
/// Same as [`Easy2::ip_resolve`](struct.Easy2.html#method.ip_resolve)
pub fn ip_resolve(&mut self, resolve: IpResolve) -> Result<(), Error> {
self.inner.ip_resolve(resolve)
}
/// Same as [`Easy2::resolve`](struct.Easy2.html#method.resolve)
pub fn resolve(&mut self, list: List) -> Result<(), Error> {
self.inner.resolve(list)
}
/// Same as [`Easy2::connect_only`](struct.Easy2.html#method.connect_only)
pub fn connect_only(&mut self, enable: bool) -> Result<(), Error> {
self.inner.connect_only(enable)
}
// =========================================================================
// SSL/Security Options
/// Same as [`Easy2::ssl_cert`](struct.Easy2.html#method.ssl_cert)
pub fn ssl_cert<P: AsRef<Path>>(&mut self, cert: P) -> Result<(), Error> {
self.inner.ssl_cert(cert)
}
/// Same as [`Easy2::ssl_cert_blob`](struct.Easy2.html#method.ssl_cert_blob)
pub fn ssl_cert_blob(&mut self, blob: &[u8]) -> Result<(), Error> {
self.inner.ssl_cert_blob(blob)
}
/// Same as [`Easy2::ssl_cert_type`](struct.Easy2.html#method.ssl_cert_type)
pub fn ssl_cert_type(&mut self, kind: &str) -> Result<(), Error> {
self.inner.ssl_cert_type(kind)
}
/// Same as [`Easy2::ssl_key`](struct.Easy2.html#method.ssl_key)
pub fn ssl_key<P: AsRef<Path>>(&mut self, key: P) -> Result<(), Error> {
self.inner.ssl_key(key)
}
/// Same as [`Easy2::ssl_key_blob`](struct.Easy2.html#method.ssl_key_blob)
pub fn ssl_key_blob(&mut self, blob: &[u8]) -> Result<(), Error> {
self.inner.ssl_key_blob(blob)
}
/// Same as [`Easy2::ssl_key_type`](struct.Easy2.html#method.ssl_key_type)
pub fn ssl_key_type(&mut self, kind: &str) -> Result<(), Error> {
self.inner.ssl_key_type(kind)
}
/// Same as [`Easy2::key_password`](struct.Easy2.html#method.key_password)
pub fn key_password(&mut self, password: &str) -> Result<(), Error> {
self.inner.key_password(password)
}
/// Same as [`Easy2::ssl_cainfo_blob`](struct.Easy2.html#method.ssl_cainfo_blob)
pub fn ssl_cainfo_blob(&mut self, blob: &[u8]) -> Result<(), Error> {
self.inner.ssl_cainfo_blob(blob)
}
/// Same as [`Easy2::ssl_engine`](struct.Easy2.html#method.ssl_engine)
pub fn ssl_engine(&mut self, engine: &str) -> Result<(), Error> {
self.inner.ssl_engine(engine)
}
/// Same as [`Easy2::ssl_engine_default`](struct.Easy2.html#method.ssl_engine_default)
pub fn ssl_engine_default(&mut self, enable: bool) -> Result<(), Error> {
self.inner.ssl_engine_default(enable)
}
/// Same as [`Easy2::http_version`](struct.Easy2.html#method.http_version)
pub fn http_version(&mut self, version: HttpVersion) -> Result<(), Error> {
self.inner.http_version(version)
}
/// Same as [`Easy2::ssl_version`](struct.Easy2.html#method.ssl_version)
pub fn ssl_version(&mut self, version: SslVersion) -> Result<(), Error> {
self.inner.ssl_version(version)
}
/// Same as [`Easy2::ssl_min_max_version`](struct.Easy2.html#method.ssl_min_max_version)
pub fn ssl_min_max_version(
&mut self,
min_version: SslVersion,
max_version: SslVersion,
) -> Result<(), Error> {
self.inner.ssl_min_max_version(min_version, max_version)
}
/// Same as [`Easy2::ssl_verify_host`](struct.Easy2.html#method.ssl_verify_host)
pub fn ssl_verify_host(&mut self, verify: bool) -> Result<(), Error> {
self.inner.ssl_verify_host(verify)
}
/// Same as [`Easy2::ssl_verify_peer`](struct.Easy2.html#method.ssl_verify_peer)
pub fn ssl_verify_peer(&mut self, verify: bool) -> Result<(), Error> {
self.inner.ssl_verify_peer(verify)
}
/// Same as [`Easy2::cainfo`](struct.Easy2.html#method.cainfo)
pub fn cainfo<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> {
self.inner.cainfo(path)
}
/// Same as [`Easy2::issuer_cert`](struct.Easy2.html#method.issuer_cert)
pub fn issuer_cert<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> {
self.inner.issuer_cert(path)
}
/// Same as [`Easy2::issuer_cert_blob`](struct.Easy2.html#method.issuer_cert_blob)
pub fn issuer_cert_blob(&mut self, blob: &[u8]) -> Result<(), Error> {
self.inner.issuer_cert_blob(blob)
}
/// Same as [`Easy2::capath`](struct.Easy2.html#method.capath)
pub fn capath<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> {
self.inner.capath(path)
}
/// Same as [`Easy2::crlfile`](struct.Easy2.html#method.crlfile)
pub fn crlfile<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> {
self.inner.crlfile(path)
}
/// Same as [`Easy2::certinfo`](struct.Easy2.html#method.certinfo)
pub fn certinfo(&mut self, enable: bool) -> Result<(), Error> {
self.inner.certinfo(enable)
}
/// Same as [`Easy2::random_file`](struct.Easy2.html#method.random_file)
pub fn random_file<P: AsRef<Path>>(&mut self, p: P) -> Result<(), Error> {
self.inner.random_file(p)
}
/// Same as [`Easy2::egd_socket`](struct.Easy2.html#method.egd_socket)
pub fn egd_socket<P: AsRef<Path>>(&mut self, p: P) -> Result<(), Error> {
self.inner.egd_socket(p)
}
/// Same as [`Easy2::ssl_cipher_list`](struct.Easy2.html#method.ssl_cipher_list)
pub fn ssl_cipher_list(&mut self, ciphers: &str) -> Result<(), Error> {
self.inner.ssl_cipher_list(ciphers)
}
/// Same as [`Easy2::ssl_sessionid_cache`](struct.Easy2.html#method.ssl_sessionid_cache)
pub fn ssl_sessionid_cache(&mut self, enable: bool) -> Result<(), Error> {
self.inner.ssl_sessionid_cache(enable)
}
/// Same as [`Easy2::ssl_options`](struct.Easy2.html#method.ssl_options)
pub fn ssl_options(&mut self, bits: &SslOpt) -> Result<(), Error> {
self.inner.ssl_options(bits)
}
/// Same as [`Easy2::pinned_public_key`](struct.Easy2.html#method.pinned_public_key)
pub fn pinned_public_key(&mut self, pubkey: &str) -> Result<(), Error> {
self.inner.pinned_public_key(pubkey)
}
// =========================================================================
// getters
/// Same as [`Easy2::time_condition_unmet`](struct.Easy2.html#method.time_condition_unmet)
pub fn time_condition_unmet(&mut self) -> Result<bool, Error> {
self.inner.time_condition_unmet()
}
/// Same as [`Easy2::effective_url`](struct.Easy2.html#method.effective_url)
pub fn effective_url(&mut self) -> Result<Option<&str>, Error> {
self.inner.effective_url()
}
/// Same as [`Easy2::effective_url_bytes`](struct.Easy2.html#method.effective_url_bytes)
pub fn effective_url_bytes(&mut self) -> Result<Option<&[u8]>, Error> {
self.inner.effective_url_bytes()
}
/// Same as [`Easy2::response_code`](struct.Easy2.html#method.response_code)
pub fn response_code(&mut self) -> Result<u32, Error> {
self.inner.response_code()
}
/// Same as [`Easy2::http_connectcode`](struct.Easy2.html#method.http_connectcode)
pub fn http_connectcode(&mut self) -> Result<u32, Error> {
self.inner.http_connectcode()
}
/// Same as [`Easy2::filetime`](struct.Easy2.html#method.filetime)
pub fn filetime(&mut self) -> Result<Option<i64>, Error> {
self.inner.filetime()
}
/// Same as [`Easy2::download_size`](struct.Easy2.html#method.download_size)
pub fn download_size(&mut self) -> Result<f64, Error> {
self.inner.download_size()
}
/// Same as [`Easy2::content_length_download`](struct.Easy2.html#method.content_length_download)
pub fn content_length_download(&mut self) -> Result<f64, Error> {
self.inner.content_length_download()
}
/// Same as [`Easy2::total_time`](struct.Easy2.html#method.total_time)
pub fn total_time(&mut self) -> Result<Duration, Error> {
self.inner.total_time()
}
/// Same as [`Easy2::namelookup_time`](struct.Easy2.html#method.namelookup_time)
pub fn namelookup_time(&mut self) -> Result<Duration, Error> {
self.inner.namelookup_time()
}
/// Same as [`Easy2::connect_time`](struct.Easy2.html#method.connect_time)
pub fn connect_time(&mut self) -> Result<Duration, Error> {
self.inner.connect_time()
}
/// Same as [`Easy2::appconnect_time`](struct.Easy2.html#method.appconnect_time)
pub fn appconnect_time(&mut self) -> Result<Duration, Error> {
self.inner.appconnect_time()
}
/// Same as [`Easy2::pretransfer_time`](struct.Easy2.html#method.pretransfer_time)
pub fn pretransfer_time(&mut self) -> Result<Duration, Error> {
self.inner.pretransfer_time()
}
/// Same as [`Easy2::starttransfer_time`](struct.Easy2.html#method.starttransfer_time)
pub fn starttransfer_time(&mut self) -> Result<Duration, Error> {
self.inner.starttransfer_time()
}
/// Same as [`Easy2::redirect_time`](struct.Easy2.html#method.redirect_time)
pub fn redirect_time(&mut self) -> Result<Duration, Error> {
self.inner.redirect_time()
}
/// Same as [`Easy2::redirect_count`](struct.Easy2.html#method.redirect_count)
pub fn redirect_count(&mut self) -> Result<u32, Error> {
self.inner.redirect_count()
}
/// Same as [`Easy2::redirect_url`](struct.Easy2.html#method.redirect_url)
pub fn redirect_url(&mut self) -> Result<Option<&str>, Error> {
self.inner.redirect_url()
}
/// Same as [`Easy2::redirect_url_bytes`](struct.Easy2.html#method.redirect_url_bytes)
pub fn redirect_url_bytes(&mut self) -> Result<Option<&[u8]>, Error> {
self.inner.redirect_url_bytes()
}
/// Same as [`Easy2::header_size`](struct.Easy2.html#method.header_size)
pub fn header_size(&mut self) -> Result<u64, Error> {
self.inner.header_size()
}
/// Same as [`Easy2::request_size`](struct.Easy2.html#method.request_size)
pub fn request_size(&mut self) -> Result<u64, Error> {
self.inner.request_size()
}
/// Same as [`Easy2::content_type`](struct.Easy2.html#method.content_type)
pub fn content_type(&mut self) -> Result<Option<&str>, Error> {
self.inner.content_type()
}
/// Same as [`Easy2::content_type_bytes`](struct.Easy2.html#method.content_type_bytes)
pub fn content_type_bytes(&mut self) -> Result<Option<&[u8]>, Error> {
self.inner.content_type_bytes()
}
/// Same as [`Easy2::os_errno`](struct.Easy2.html#method.os_errno)
pub fn os_errno(&mut self) -> Result<i32, Error> {
self.inner.os_errno()
}
/// Same as [`Easy2::primary_ip`](struct.Easy2.html#method.primary_ip)
pub fn primary_ip(&mut self) -> Result<Option<&str>, Error> {
self.inner.primary_ip()
}
/// Same as [`Easy2::primary_port`](struct.Easy2.html#method.primary_port)
pub fn primary_port(&mut self) -> Result<u16, Error> {
self.inner.primary_port()
}
/// Same as [`Easy2::local_ip`](struct.Easy2.html#method.local_ip)
pub fn local_ip(&mut self) -> Result<Option<&str>, Error> {
self.inner.local_ip()
}
/// Same as [`Easy2::local_port`](struct.Easy2.html#method.local_port)
pub fn local_port(&mut self) -> Result<u16, Error> {
self.inner.local_port()
}
/// Same as [`Easy2::cookies`](struct.Easy2.html#method.cookies)
pub fn cookies(&mut self) -> Result<List, Error> {
self.inner.cookies()
}
/// Same as [`Easy2::pipewait`](struct.Easy2.html#method.pipewait)
pub fn pipewait(&mut self, wait: bool) -> Result<(), Error> {
self.inner.pipewait(wait)
}
// =========================================================================
// Other methods
/// Same as [`Easy2::perform`](struct.Easy2.html#method.perform)
pub fn perform(&self) -> Result<(), Error> {
assert!(self.inner.get_ref().borrowed.get().is_null());
self.do_perform()
}
fn do_perform(&self) -> Result<(), Error> {
        // We don't allow recursive invocations of `perform` because we're
        // invoking `FnMut` closures behind a `&self` pointer. This flag acts as
        // our own `RefCell`-style borrow flag.
if self.inner.get_ref().running.get() {
return Err(Error::new(curl_sys::CURLE_FAILED_INIT));
}
self.inner.get_ref().running.set(true);
struct Reset<'a>(&'a Cell<bool>);
impl<'a> Drop for Reset<'a> {
fn drop(&mut self) {
self.0.set(false);
}
}
let _reset = Reset(&self.inner.get_ref().running);
self.inner.perform()
}
/// Creates a new scoped transfer which can be used to set callbacks and
/// data which only live for the scope of the returned object.
///
/// An `Easy` handle is often reused between different requests to cache
/// connections to servers, but often the lifetime of the data as part of
    /// each transfer is unique. This function makes it possible to share an
    /// `Easy` across many transfers while ergonomically using possibly
    /// stack-local data as part of each transfer.
///
/// Configuration can be set on the `Easy` and then a `Transfer` can be
/// created to set scoped configuration (like callbacks). Finally, the
/// `perform` method on the `Transfer` function can be used.
///
    /// When the `Transfer` is dropped, all configuration set on the
/// transfer itself will be reset.
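    ///
    /// # Examples
    ///
    /// A minimal sketch (not part of the upstream docs) that collects a
    /// response body into a stack-local buffer through a scoped transfer:
    ///
    /// ```
    /// use curl::easy::Easy;
    ///
    /// let mut body = Vec::new();
    /// let mut handle = Easy::new();
    /// handle.url("https://www.rust-lang.org/").unwrap();
    /// {
    ///     let mut transfer = handle.transfer();
    ///     transfer.write_function(|data| {
    ///         body.extend_from_slice(data);
    ///         Ok(data.len())
    ///     }).unwrap();
    ///     transfer.perform().unwrap();
    /// } // dropping the transfer resets its scoped callbacks
    /// println!("read {} bytes", body.len());
    /// ```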
pub fn transfer<'data, 'easy>(&'easy mut self) -> Transfer<'easy, 'data> {
assert!(!self.inner.get_ref().running.get());
Transfer {
data: Box::new(Callbacks::default()),
easy: self,
}
}
/// Same as [`Easy2::upkeep`](struct.Easy2.html#method.upkeep)
#[cfg(feature = "upkeep_7_62_0")]
pub fn upkeep(&self) -> Result<(), Error> {
self.inner.upkeep()
}
/// Same as [`Easy2::unpause_read`](struct.Easy2.html#method.unpause_read)
pub fn unpause_read(&self) -> Result<(), Error> {
self.inner.unpause_read()
}
/// Same as [`Easy2::unpause_write`](struct.Easy2.html#method.unpause_write)
pub fn unpause_write(&self) -> Result<(), Error> {
self.inner.unpause_write()
}
/// Same as [`Easy2::url_encode`](struct.Easy2.html#method.url_encode)
pub fn url_encode(&mut self, s: &[u8]) -> String {
self.inner.url_encode(s)
}
/// Same as [`Easy2::url_decode`](struct.Easy2.html#method.url_decode)
pub fn url_decode(&mut self, s: &str) -> Vec<u8> {
self.inner.url_decode(s)
}
/// Same as [`Easy2::reset`](struct.Easy2.html#method.reset)
pub fn reset(&mut self) {
self.inner.reset()
}
/// Same as [`Easy2::recv`](struct.Easy2.html#method.recv)
pub fn recv(&mut self, data: &mut [u8]) -> Result<usize, Error> {
self.inner.recv(data)
}
/// Same as [`Easy2::send`](struct.Easy2.html#method.send)
pub fn send(&mut self, data: &[u8]) -> Result<usize, Error> {
self.inner.send(data)
}
/// Same as [`Easy2::raw`](struct.Easy2.html#method.raw)
pub fn raw(&self) -> *mut curl_sys::CURL {
self.inner.raw()
}
/// Same as [`Easy2::take_error_buf`](struct.Easy2.html#method.take_error_buf)
pub fn take_error_buf(&self) -> Option<String> {
self.inner.take_error_buf()
}
}
impl EasyData {
/// An unsafe function to get the appropriate callback field.
///
    /// Callbacks can be configured from one of two different sources: the
    /// `borrowed` field, which holds callbacks installed on an ephemeral
    /// `Transfer`, or the `owned` field, which holds `'static` callbacks
    /// that live for the lifetime of this `EasyData`.
///
/// The first set of callbacks are unsafe to access because they're actually
/// owned elsewhere and we're just aliasing. Additionally they don't
/// technically live long enough for us to access them, so they're hidden
/// behind unsafe pointers and casts.
///
/// This function returns `&'a mut T` but that's actually somewhat of a lie.
/// The value should **not be stored to** nor should it be used for the full
/// lifetime of `'a`, but rather immediately in the local scope.
///
/// Basically this is just intended to acquire a callback, invoke it, and
/// then stop. Nothing else. Super unsafe.
unsafe fn callback<'a, T, F>(&'a mut self, f: F) -> Option<&'a mut T>
where
F: for<'b> Fn(&'b mut Callbacks<'static>) -> &'b mut Option<T>,
{
let ptr = self.borrowed.get();
if !ptr.is_null() {
let val = f(&mut *ptr);
if val.is_some() {
return val.as_mut();
}
}
f(&mut self.owned).as_mut()
}
}
impl Handler for EasyData {
fn write(&mut self, data: &[u8]) -> Result<usize, WriteError> {
unsafe {
match self.callback(|s| &mut s.write) {
Some(write) => write(data),
None => Ok(data.len()),
}
}
}
fn read(&mut self, data: &mut [u8]) -> Result<usize, ReadError> {
unsafe {
match self.callback(|s| &mut s.read) {
Some(read) => read(data),
None => Ok(0),
}
}
}
fn seek(&mut self, whence: SeekFrom) -> SeekResult {
unsafe {
match self.callback(|s| &mut s.seek) {
Some(seek) => seek(whence),
None => SeekResult::CantSeek,
}
}
}
fn debug(&mut self, kind: InfoType, data: &[u8]) {
unsafe {
match self.callback(|s| &mut s.debug) {
Some(debug) => debug(kind, data),
None => handler::debug(kind, data),
}
}
}
fn header(&mut self, data: &[u8]) -> bool {
unsafe {
match self.callback(|s| &mut s.header) {
Some(header) => header(data),
None => true,
}
}
}
fn progress(&mut self, dltotal: f64, dlnow: f64, ultotal: f64, ulnow: f64) -> bool {
unsafe {
match self.callback(|s| &mut s.progress) {
Some(progress) => progress(dltotal, dlnow, ultotal, ulnow),
None => true,
}
}
}
fn ssl_ctx(&mut self, cx: *mut c_void) -> Result<(), Error> {
unsafe {
match self.callback(|s| &mut s.ssl_ctx) {
Some(ssl_ctx) => ssl_ctx(cx),
None => handler::ssl_ctx(cx),
}
}
}
}
impl fmt::Debug for EasyData {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
"callbacks ...".fmt(f)
}
}
impl<'easy, 'data> Transfer<'easy, 'data> {
/// Same as `Easy::write_function`, just takes a non `'static` lifetime
/// corresponding to the lifetime of this transfer.
pub fn write_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(&[u8]) -> Result<usize, WriteError> + 'data,
{
self.data.write = Some(Box::new(f));
Ok(())
}
/// Same as `Easy::read_function`, just takes a non `'static` lifetime
/// corresponding to the lifetime of this transfer.
pub fn read_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(&mut [u8]) -> Result<usize, ReadError> + 'data,
{
self.data.read = Some(Box::new(f));
Ok(())
}
/// Same as `Easy::seek_function`, just takes a non `'static` lifetime
/// corresponding to the lifetime of this transfer.
pub fn seek_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(SeekFrom) -> SeekResult + 'data,
{
self.data.seek = Some(Box::new(f));
Ok(())
}
/// Same as `Easy::progress_function`, just takes a non `'static` lifetime
/// corresponding to the lifetime of this transfer.
pub fn progress_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(f64, f64, f64, f64) -> bool + 'data,
    {
        self.data.progress = Some(Box::new(f));
        Ok(())
    }
/// Same as `Easy::ssl_ctx_function`, just takes a non `'static`
/// lifetime corresponding to the lifetime of this transfer.
pub fn ssl_ctx_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(*mut c_void) -> Result<(), Error> + Send + 'data,
{
self.data.ssl_ctx = Some(Box::new(f));
Ok(())
}
/// Same as `Easy::debug_function`, just takes a non `'static` lifetime
/// corresponding to the lifetime of this transfer.
pub fn debug_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(InfoType, &[u8]) + 'data,
{
self.data.debug = Some(Box::new(f));
Ok(())
}
/// Same as `Easy::header_function`, just takes a non `'static` lifetime
/// corresponding to the lifetime of this transfer.
pub fn header_function<F>(&mut self, f: F) -> Result<(), Error>
where
F: FnMut(&[u8]) -> bool + 'data,
{
self.data.header = Some(Box::new(f));
Ok(())
}
/// Same as `Easy::perform`.
pub fn perform(&self) -> Result<(), Error> {
let inner = self.easy.inner.get_ref();
// Note that we're casting a `&self` pointer to a `*mut`, and then
// during the invocation of this call we're going to invoke `FnMut`
// closures that we ourselves own.
//
// This should be ok, however, because `do_perform` checks for recursive
// invocations of `perform` and disallows them. Our type also isn't
// `Sync`.
inner.borrowed.set(&*self.data as *const _ as *mut _);
// Make sure to reset everything back to the way it was before when
// we're done.
struct Reset<'a>(&'a Cell<*mut Callbacks<'static>>);
impl<'a> Drop for Reset<'a> {
fn drop(&mut self) {
self.0.set(ptr::null_mut());
}
}
let _reset = Reset(&inner.borrowed);
self.easy.do_perform()
}
/// Same as `Easy::upkeep`
#[cfg(feature = "upkeep_7_62_0")]
pub fn upkeep(&self) -> Result<(), Error> {
self.easy.upkeep()
}
/// Same as `Easy::unpause_read`.
pub fn unpause_read(&self) -> Result<(), Error> {
self.easy.unpause_read()
}
/// Same as `Easy::unpause_write`
pub fn unpause_write(&self) -> Result<(), Error> {
self.easy.unpause_write()
}
}
impl<'easy, 'data> fmt::Debug for Transfer<'easy, 'data> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Transfer")
.field("easy", &self.easy)
.finish()
}
}
impl<'easy, 'data> Drop for Transfer<'easy, 'data> {
fn drop(&mut self) {
// Extra double check to make sure we don't leak a pointer to ourselves.
assert!(self.easy.inner.get_ref().borrowed.get().is_null());
}
}
test.py
#!/usr/bin/python
from Bio import SeqIO
biosample.py
from contentbase.auditor import (
AuditFailure,
audit_checker,
)
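# Term-name to ontology-identifier mapping used by the audits below
# (UBERON anatomy, FBbt fly anatomy, GO cellular components, and NTR
# new-term-request placeholders).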
term_mapping = {
"head": "UBERON:0000033",
"limb": "UBERON:0002101",
"salivary gland": "UBERON:0001044",
"male accessory sex gland": "UBERON:0010147",
"testis": "UBERON:0000473",
"female gonad": "UBERON:0000992",
"digestive system": "UBERON:0001007",
"arthropod fat body": "UBERON:0003917",
"antenna": "UBERON:0000972",
"adult maxillary segment": "FBbt:00003016",
"female reproductive system": "UBERON:0000474",
"male reproductive system": "UBERON:0000079",
"nucleus": "GO:0005634",
"cytosol": "GO:0005829",
"chromatin": "GO:0000785",
"membrane": "GO:0016020",
"mitochondria": "GO:0005739",
"nuclear matrix": "GO:0016363",
"nucleolus": "GO:0005730",
"nucleoplasm": "GO:0005654",
"polysome": "GO:0005844",
"insoluble cytoplasmic fraction": "NTR:0002594"
}
@audit_checker('biosample', frame='object')
def audit_biosample_term(value, system):
'''
    Biosample_term_id, biosample_term_name,
    and biosample_type should all be present.
This should be handled by schemas.
Biosample_term_id should be in the ontology.
Biosample_term_name should match biosample_term_id.
'''
if value['status'] in ['deleted']:
return
if 'biosample_term_id' not in value:
return
ontology = system['registry']['ontology']
term_id = value['biosample_term_id']
term_name = value.get('biosample_term_name')
if term_id.startswith('NTR:'):
detail = 'Biosample {} has a New Term Request {} - {}'.format(
value['@id'],
term_id,
term_name)
raise AuditFailure('NTR biosample', detail, level='DCC_ACTION')
if term_id not in ontology:
detail = 'Biosample {} has biosample_term_id of {} which is not in ontology'.format(
value['@id'],
term_id)
        raise AuditFailure('term_id not in ontology', detail, level='DCC_ACTION')
ontology_term_name = ontology[term_id]['name']
if ontology_term_name != term_name and term_name not in ontology[term_id]['synonyms']:
detail = 'Biosample {} has a mismatch between biosample_term_id "{}" and biosample_term_name "{}"'.format(
value['@id'],
term_id,
term_name,
)
raise AuditFailure('mismatched biosample_term', detail, level='DCC_ACTION')
@audit_checker('biosample', frame='object')
def audit_biosample_culture_date(value, system):
'''
A culture_harvest_date should not precede
a culture_start_date.
This should move to the schema.
'''
if value['status'] in ['deleted']:
return
if ('culture_start_date' not in value) or ('culture_harvest_date' not in value):
return
if value['culture_harvest_date'] <= value['culture_start_date']:
detail = 'Biosample {} has a culture_harvest_date {} which precedes the culture_start_date {}'.format(
value['@id'],
value['culture_harvest_date'],
value['culture_start_date'])
raise AuditFailure('invalid dates', detail, level='ERROR')
@audit_checker('biosample', frame=['organism', 'donor', 'donor.organism', 'donor.mutated_gene', 'donor.mutated_gene.organism'])
def audit_biosample_donor(value, system):
'''
A biosample should have a donor.
The organism of donor and biosample should match.
Pooled_from biosamples do not need donors??
'''
if value['status'] in ['deleted']:
return
if ('donor' not in value) and (value['pooled_from']):
return
if ('donor' not in value) and (not value['pooled_from']):
detail = 'Biosample {} requires a donor'.format(value['@id'])
raise AuditFailure('missing donor', detail, level='ERROR')
return
donor = value['donor']
if value['organism']['name'] != donor['organism']['name']:
detail = 'Biosample {} is organism {}, yet its donor {} is organism {}. Biosamples require a donor of the same species'.format(
value['@id'],
value['organism']['name'],
donor['@id'],
donor['organism']['name'])
raise AuditFailure('mismatched organism', detail, level='ERROR')
if 'mutated_gene' not in donor:
return
if value['organism']['name'] != donor['mutated_gene']['organism']['name']:
detail = 'Biosample {} is organism {}, but its donor {} mutated_gene is in {}. Donor mutated_gene should be of the same species as the donor and biosample'.format(
value['@id'],
value['organism']['name'],
donor['@id'],
donor['mutated_gene']['organism']['name'])
raise AuditFailure('mismatched mutated_gene organism', detail, level='ERROR')
for i in donor['mutated_gene']['investigated_as']:
if i in ['histone modification', 'tag', 'control', 'recombinant protein', 'nucleotide modification', 'other post-translational modification']:
detail = 'Donor {} has an invalid mutated_gene {}. Donor mutated_genes should not be tags, controls, recombinant proteins or modifications'.format(
donor['@id'],
donor['mutated_gene']['name'])
raise AuditFailure('invalid donor mutated_gene', detail, level='ERROR')
@audit_checker('biosample', frame='object')
def audit_biosample_subcellular_term_match(value, system):
'''
The subcellular_fraction_term_name and subcellular_fraction_term_id
    should be concordant. This should be a calculated field.
    If one exists, the other should too. This should be handled in the schema.
'''
if value['status'] in ['deleted']:
return
if ('subcellular_fraction_term_name' not in value) or ('subcellular_fraction_term_id' not in value):
return
    expected_id = term_mapping[value['subcellular_fraction_term_name']]
    if expected_id != value['subcellular_fraction_term_id']:
detail = 'Biosample {} has a mismatch between subcellular_fraction_term_name "{}" and subcellular_fraction_term_id "{}"'.format(
value['@id'],
value['subcellular_fraction_term_name'],
value['subcellular_fraction_term_id'])
raise AuditFailure('mismatched subcellular_fraction_term', detail, level='ERROR')
@audit_checker('biosample', frame='object')
def audit_biosample_depleted_term_match(value, system):
'''
    The depleted_in_term_name and depleted_in_term_id
    should be concordant. This should be a calculated field.
If one exists, the other should. This should be handled in the schema.
'''
if value['status'] == 'deleted':
return
if 'depleted_in_term_name' not in value:
return
if len(value['depleted_in_term_name']) != len(value['depleted_in_term_id']):
detail = 'Biosample {} has a depleted_in_term_name array and depleted_in_term_id array of differing lengths'.format(
value['@id'])
raise AuditFailure('mismatched depleted_in_term length', detail, level='ERROR')
return
for i, dep_term in enumerate(value['depleted_in_term_name']):
if (term_mapping[dep_term]) != (value['depleted_in_term_id'][i]):
detail = 'Biosample {} has a mismatch between {} and {}'.format(
value['@id'],
dep_term,
value['depleted_in_term_id'][i])
raise AuditFailure('mismatched depleted_in_term', detail, level='ERROR')
@audit_checker('biosample', frame='object')
def audit_biosample_transfection_type(value, system):
    '''
    A biosample with constructs or rnais should have a
    transfection_type
    '''
    if value['status'] == 'deleted':
        return
    if (value['rnais']) and ('transfection_type' not in value):
        detail = 'Biosample {} with a value for RNAi requires transfection_type'.format(value['@id'])
        raise AuditFailure('missing transfection_type', detail, level='ERROR')
    if (value['constructs']) and ('transfection_type' not in value):
        detail = 'Biosample {} with a value for construct requires transfection_type'.format(value['@id'])
        raise AuditFailure('missing transfection_type', detail, level='ERROR')
acl.go
package s3
import (
"net/http"
"github.com/emicklei/go-restful"
"github.com/opensds/multi-cloud/api/pkg/common"
. "github.com/opensds/multi-cloud/api/pkg/s3/datatype"
)
func getAclFromHeader(request *restful.Request) (acl Acl, err error) {
acl.CannedAcl = request.HeaderParameter(common.REQUEST_HEADER_ACL)
if acl.CannedAcl == "" {
acl.CannedAcl = "private"
}
err = IsValidCannedAcl(acl)
return
}
func getAclFromFormValues(formValues map[string]string) (acl Acl, err error) {
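	// Mirror the form values into an http.Header so that the "acl" lookup is
	// case-insensitive (http.Header canonicalizes keys on Add and Get).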
headerfiedFormValues := make(http.Header)
for key := range formValues {
headerfiedFormValues.Add(key, formValues[key])
}
acl.CannedAcl = headerfiedFormValues.Get("acl")
	if acl.CannedAcl == "" {
		acl.CannedAcl = "private"
	}
	err = IsValidCannedAcl(acl)
	return
}
Comment.js
const {Model, DataTypes} = require('sequelize');
const sequelize = require('../config/connection');
class Comment extends Model {
}
Comment.init(
{
id: {
type: DataTypes.INTEGER,
primaryKey: true,
autoIncrement: true,
allowNull: false
},
comment_text: {
type: DataTypes.TEXT,
allowNull: false,
validate: {
len: [4]
}
},
user_id: {
type: DataTypes.INTEGER,
allowNull: false,
references: {
model: "user",
                key: "id"
            }
        },
        post_id: {
type: DataTypes.INTEGER,
allowNull: false,
references: {
model: 'post',
key: 'id'
}
}
},
{
sequelize,
timestamps: true,
freezeTableName: true,
underscored: true,
modelName: 'comment'
}
)
module.exports = Comment
test_parameters.py
"""Tests for functions that calculate plasma parameters."""
import numpy as np
import pytest
from astropy import units as u
from astropy.constants import m_e, m_p
from astropy.tests.helper import assert_quantity_allclose
from plasmapy.formulary.parameters import (
Alfven_speed,
betaH_,
Bohm_diffusion,
cs_,
cwp_,
DB_,
Debye_length,
Debye_number,
gyrofrequency,
gyroradius,
Hall_parameter,
inertial_length,
ion_sound_speed,
lambdaD_,
lower_hybrid_frequency,
magnetic_energy_density,
magnetic_pressure,
mass_density,
nD_,
oc_,
plasma_frequency,
pmag_,
pth_,
rc_,
rho_,
rhoc_,
thermal_pressure,
thermal_speed,
ub_,
upper_hybrid_frequency,
va_,
wc_,
wlh_,
wp_,
wuh_,
)
from plasmapy.particles import Particle
from plasmapy.particles.exceptions import InvalidParticleError
from plasmapy.utils.exceptions import (
PhysicsError,
PhysicsWarning,
PlasmaPyFutureWarning,
RelativityError,
RelativityWarning,
)
from plasmapy.utils.pytest_helpers import assert_can_handle_nparray
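# Shared inputs reused across the tests below: a 1 T field in a quasineutral
# proton plasma, plus array variants containing NaN, inf, and negative
# entries for edge-case handling.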
B = 1.0 * u.T
Z = 1
ion = "p"
m_i = m_p
n_i = 5e19 * u.m ** -3
n_e = Z * 5e19 * u.m ** -3
rho = n_i * m_i + n_e * m_e
T_e = 1e6 * u.K
T_i = 1e6 * u.K
k_1 = 3e1 * u.m ** -1
k_2 = 3e7 * u.m ** -1
B_arr = np.array([0.001, 0.002]) * u.T
B_nanarr = np.array([0.001, np.nan]) * u.T
B_allnanarr = np.array([np.nan, np.nan]) * u.T
rho_arr = np.array([5e-10, 2e-10]) * u.kg / u.m ** 3
rho_infarr = np.array([np.inf, 5e19]) * u.m ** -3
rho_negarr = np.array([-5e19, 6e19]) * u.m ** -3
T_arr = np.array([1e6, 2e6]) * u.K
T_nanarr = np.array([1e6, np.nan]) * u.K
T_nanarr2 = np.array([np.nan, 2e6]) * u.K
T_allnanarr = np.array([np.nan, np.nan]) * u.K
T_negarr = np.array([1e6, -5151.0]) * u.K
V = 25.2 * u.m / u.s
V_arr = np.array([25, 50]) * u.m / u.s
V_nanarr = np.array([25, np.nan]) * u.m / u.s
V_allnanarr = np.array([np.nan, np.nan]) * u.m / u.s
mu = m_p.to(u.u).value
class Test_mass_density:
r"""Test the mass_density function in parameters.py."""
@pytest.mark.parametrize(
"args, kwargs, conditional",
[
((-1 * u.kg * u.m ** -3, "He"), {}, pytest.raises(ValueError)),
((-1 * u.m ** -3, "He"), {}, pytest.raises(ValueError)),
(("not a Quantity", "He"), {}, pytest.raises(TypeError)),
((1 * u.m ** -3,), {}, pytest.raises(TypeError)),
((1 * u.J, "He"), {}, pytest.raises(u.UnitTypeError)),
((1 * u.m ** -3, None), {}, pytest.raises(TypeError)),
(
(1 * u.m ** -3, "He"),
{"z_ratio": "not a ratio"},
pytest.raises(TypeError),
),
],
)
def test_raises(self, args, kwargs, conditional):
with conditional:
mass_density(*args, **kwargs)
@pytest.mark.parametrize(
"args, kwargs, expected",
[
((1.0 * u.g * u.m ** -3, ""), {}, 1.0e-3 * u.kg * u.m ** -3),
((5.0e12 * u.cm ** -3, "He"), {}, 3.32323849e-8 * u.kg * u.m ** -3),
(
(5.0e12 * u.cm ** -3, Particle("He")),
{},
3.32323849e-8 * u.kg * u.m ** -3,
),
(
(5.0e12 * u.cm ** -3, "He"),
{"z_ratio": 0.5},
1.66161925e-08 * u.kg * u.m ** -3,
),
(
(5.0e12 * u.cm ** -3, "He"),
{"z_ratio": -0.5},
1.66161925e-08 * u.kg * u.m ** -3,
),
],
)
    def test_values(self, args, kwargs, expected):
assert np.isclose(mass_density(*args, **kwargs), expected)
def test_handle_nparrays(self):
"""Test for ability to handle numpy array quantities"""
assert_can_handle_nparray(mass_density)
# Assertions below that are in CGS units with 2-3 significant digits
# are generally from the NRL Plasma Formulary.
class TestAlfvenSpeed:
"""Test `~plasmapy.formulary.parameters.Alfven_speed`."""
@pytest.mark.parametrize("alias", [va_])
def test_aliases(self, alias):
assert alias is Alfven_speed
@pytest.mark.parametrize(
"args, kwargs, _error",
[
# scenarios that raise RelativityError
((10 * u.T, 1.0e-10 * u.kg * u.m ** -3), {}, RelativityError),
((np.inf * u.T, 1 * u.m ** -3), {"ion": "p"}, RelativityError),
((-np.inf * u.T, 1 * u.m ** -3), {"ion": "p"}, RelativityError),
#
# scenarios that raise InvalidParticleError
((1 * u.T, 5e19 * u.m ** -3), {"ion": "spacecats"}, InvalidParticleError),
#
# scenarios that raise TypeError
(("not a Bfield", 1.0e-10 * u.kg * u.m ** -3), {}, TypeError),
((10 * u.T, "not a density"), {}, TypeError),
((10 * u.T, 5), {"ion": "p"}, TypeError),
((1 * u.T, 1.0e18 * u.m ** -3), {"ion": ["He"]}, TypeError),
((1 * u.T, 1.0e18 * u.m ** -3), {"ion": "He", "z_mean": "nope"}, TypeError),
#
# scenarios that raise UnitTypeError
((1 * u.T, 1.0e18 * u.cm), {"ion": "He"}, u.UnitTypeError),
((1 * u.T, 5 * u.m ** -2), {"ion": "p"}, u.UnitTypeError),
((1 * u.cm, 1.0e18 * u.m ** -3), {"ion": "He"}, u.UnitTypeError),
((5 * u.A, 5e19 * u.m ** -3), {"ion": "p"}, u.UnitTypeError),
#
# scenarios that raise ValueError
((1 * u.T, -1.0e18 * u.m ** -3), {"ion": "He"}, ValueError),
(
(np.array([5, 6, 7]) * u.T, np.array([5, 6]) * u.m ** -3),
{"ion": "p"},
ValueError,
),
(
(np.array([0.001, 0.002]) * u.T, np.array([-5e19, 6e19]) * u.m ** -3),
{"ion": "p"},
ValueError,
),
],
)
def test_raises(self, args, kwargs, _error):
"""Test scenarios that raise exceptions or warnings."""
with pytest.raises(_error):
Alfven_speed(*args, **kwargs)
@pytest.mark.parametrize(
"args, kwargs, expected, isclose_kw, _warning",
[
# scenarios that issue RelativityWarning
(
(5 * u.T, 5e19 * u.m ** -3),
{"ion": "H"},
15413707.39,
{},
RelativityWarning,
),
(
(5 * u.T, 5e19 * u.m ** -3),
{"ion": "H+"},
15413707.39,
{"rtol": 3.0e-4},
RelativityWarning,
),
(
(5 * u.T, 5e19 * u.m ** -3),
{"ion": "p"},
15413707.39,
{"rtol": 4.0e-4},
RelativityWarning,
),
#
# scenarios that issue UnitsWarning
((0.5, 1.0e18 * u.m ** -3), {"ion": "He"}, 5470657.93, {}, u.UnitsWarning),
],
)
def test_warns(self, args, kwargs, expected, isclose_kw, _warning):
"""Test scenarios that issue warnings"""
with pytest.warns(_warning):
val = Alfven_speed(*args, **kwargs)
assert isinstance(val, u.Quantity)
assert val.unit == u.m / u.s
assert np.isclose(val.value, expected, **isclose_kw)
@pytest.mark.parametrize(
"args, kwargs, expected, isclose_kw",
[
(
(1 * u.T, 1e-8 * u.kg * u.m ** -3),
{"ion": "p"},
8920620.58 * u.m / u.s,
{"rtol": 1e-6},
),
(
(1 * u.T, 1e-8 * u.kg * u.m ** -3),
{},
8920620.58 * u.m / u.s,
{"rtol": 1e-6},
),
(
(0.05 * u.T, 1e18 * u.m ** -3),
{"ion": "He"},
Alfven_speed(0.05 * u.T, 6.64738793e-09 * u.kg * u.m ** -3),
{},
),
(
(0.05 * u.T, 1e18 * u.m ** -3),
{"ion": "He+"},
Alfven_speed(0.05 * u.T, 1e18 * u.m ** -3, ion="He"),
{"rtol": 7e-5},
),
(
(0.05 * u.T, 1e18 * u.m ** -3),
{"ion": "He", "z_mean": 2},
Alfven_speed(0.05 * u.T, 1e18 * u.m ** -3, ion="He +2"),
{"rtol": 1.4e-4},
),
(
(0.05 * u.T, 1e18 * u.m ** -3),
{"ion": Particle("He+")},
Alfven_speed(0.05 * u.T, 1e18 * u.m ** -3, ion="He+"),
{},
),
(
([0.001, 0.002] * u.T, 5e-10 * u.kg * u.m ** -3),
{},
[
va_(0.001 * u.T, 5e-10 * u.kg * u.m ** -3).value,
va_(0.002 * u.T, 5e-10 * u.kg * u.m ** -3).value,
]
* (u.m / u.s),
{},
),
(
([0.001, 0.002] * u.T, [5e-10, 2e-10] * u.kg * u.m ** -3),
{},
[
va_(0.001 * u.T, 5e-10 * u.kg * u.m ** -3).value,
va_(0.002 * u.T, 2e-10 * u.kg * u.m ** -3).value,
]
* (u.m / u.s),
{},
),
(
(0.001 * u.T, [1.0e18, 2e18] * u.m ** -3),
{"ion": "p"},
[
va_(0.001 * u.T, 1e18 * u.m ** -3, ion="p").value,
va_(0.001 * u.T, 2e18 * u.m ** -3, ion="p").value,
]
* (u.m / u.s),
{},
),
],
)
def test_values(self, args, kwargs, expected, isclose_kw):
"""Test expected values."""
assert np.allclose(Alfven_speed(*args, **kwargs), expected, **isclose_kw)
@pytest.mark.parametrize(
"args, kwargs, nan_mask",
[
((np.nan * u.T, 1 * u.kg * u.m ** -3), {}, []),
((0.001 * u.T, np.nan * u.kg * u.m ** -3), {}, []),
(([np.nan, 0.001] * u.T, 1 * u.kg * u.m ** -3), {}, [True, False]),
(
(0.001 * u.T, [np.nan, 1.0, np.nan] * u.kg * u.m ** -3),
{},
[True, False, True],
),
(([np.nan, 0.001] * u.T, [1, np.nan] * u.kg * u.m ** -3), {}, [True, True]),
(
(0.001 * u.T, [np.nan, 1e18, np.nan] * u.m ** -3),
{"ion": "Ar+"},
[True, False, True],
),
],
)
def test_nan_values(self, args, kwargs, nan_mask):
"""Input scenarios that leat to `numpy.nan` values being returned."""
val = Alfven_speed(*args, **kwargs)
if np.isscalar(val.value):
assert np.isnan(val)
else:
nan_arr = np.isnan(val)
assert np.all(nan_arr[nan_mask])
assert np.all(np.logical_not(nan_arr[np.logical_not(nan_mask)]))
def test_handle_nparrays(self):
"""Test for ability to handle numpy array quantities"""
assert_can_handle_nparray(Alfven_speed)
class Test_Ion_Sound_Speed:
r"""Test the ion_sound_speed function in parameters.py."""
@pytest.mark.parametrize(
"args, kwargs, expected, isclose_kw",
[
(
(),
{
"T_i": 1.3232 * u.MK,
"T_e": 1.831 * u.MK,
"ion": "p",
"gamma_e": 1,
"gamma_i": 3,
},
218816.06086407552 * (u.m / u.s),
{},
),
(
(1.831 * u.MK, 1.3232 * u.MK, "p"),
{},
218816.06086407552 * (u.m / u.s),
{},
), # Test that a call using positional arguments works correctly
(
(),
{
"T_i": 1.3232 * u.MK,
"T_e": 1.831 * u.MK,
"n_e": n_e,
"k": k_1,
"ion": "p",
"gamma_e": 1,
"gamma_i": 3,
},
218816.06086407552 * (u.m / u.s),
{},
),
(
(),
{
"T_i": 1.3232 * u.MK,
"T_e": 1.831 * u.MK,
"n_e": n_e,
"k": k_2,
"ion": "p",
"gamma_e": 1,
"gamma_i": 3,
},
552.3212936293337 * (u.m / u.s),
{},
),
(
(),
{
"T_i": 0.88 * u.MK,
"T_e": 1.28 * u.MK,
"n_e": n_e,
"k": 0 * u.m ** -1,
"ion": "p",
"gamma_e": 1.2,
"gamma_i": 3.4,
},
193328.52857788358 * (u.m / u.s),
{},
),
(
(),
{"T_i": T_i, "T_e": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p+"},
ion_sound_speed(T_i=T_i, T_e=0 * u.K, n_e=n_e, k=k_1, ion="p+").value
* (u.m / u.s),
{},
),
(
(),
{
"T_e": 1.2e6 * u.K,
"T_i": 0 * u.K,
"n_e": n_e,
"k": 0 * u.m ** -1,
"z_mean": 0.8,
"ion": "p",
},
89018.09 * (u.m / u.s),
{"atol": 0.0, "rtol": 1e-6},
), # testing for user input z_mean
],
)
def test_values(self, args, kwargs, expected, isclose_kw):
assert np.isclose(ion_sound_speed(*args, **kwargs), expected, **isclose_kw)
# case when Z=1 is assumed
# assert ion_sound_speed(T_i=T_i, T_e=T_e, ion='p+') == ion_sound_speed(T_i=T_i, T_e=T_e,
# ion='H-1')
@pytest.mark.parametrize(
"kwargs1, kwargs2, _warning",
[
({"T_i": T_i, "T_e": T_e, "n_e": n_e, "ion": "p"}, {}, PhysicsWarning),
({"T_i": T_i, "T_e": T_e, "k": k_1, "ion": "p"}, {}, PhysicsWarning),
({"T_i": 5e11 * u.K, "T_e": 0 * u.K, "ion": "p"}, {}, RelativityWarning),
(
{"T_e": 1.2e6, "T_i": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"},
{"T_e": 1.2e6 * u.K, "T_i": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"},
u.UnitsWarning,
),
(
{"T_i": 1.3e6, "T_e": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"},
{"T_i": 1.3e6 * u.K, "T_e": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"},
u.UnitsWarning,
),
],
)
def test_warns(self, kwargs1, kwargs2, _warning):
with pytest.warns(_warning):
val = ion_sound_speed(**kwargs1)
if kwargs2 != {}:
assert val == ion_sound_speed(**kwargs2)
@pytest.mark.parametrize(
"args, kwargs, _error",
[
(
(),
{
"T_i": T_i,
"T_e": T_e,
"n_e": n_e,
"k": k_1,
"ion": "p",
"gamma_i": np.inf,
},
RelativityError,
),
(
(),
{
"T_i": np.array([5, 6, 5]) * u.K,
"T_e": np.array([3, 4]) * u.K,
"n_e": np.array([5, 6, 5]) * u.m ** -3,
"k": np.array([3, 4]) * u.m ** -3,
"ion": "p",
},
u.UnitTypeError,
),
((5 * u.T), {"ion": "p"}, TypeError), # Is this test right??????
((), {"ion": "p"}, TypeError),
(
(),
{"T_i": T_i, "T_e": 0 * u.K, "gamma_i": 0.9999, "ion": "p"},
PhysicsError,
),
(
(),
{"T_i": T_i, "T_e": 0 * u.K, "gamma_e": 0.9999, "ion": "p"},
PhysicsError,
),
(
(),
{"T_i": T_i, "T_e": 0 * u.K, "gamma_e": "sdjklsf", "ion": "p"},
TypeError,
),
(
(),
{"T_i": T_i, "T_e": 0 * u.K, "gamma_i": "fsdfas", "ion": "p"},
TypeError,
),
((), {"T_i": T_i, "T_e": 0 * u.K, "ion": "cupcakes"}, InvalidParticleError),
((), {"T_i": -np.abs(T_i), "T_e": 0 * u.K, "ion": "p"}, ValueError),
(
(),
{"T_i": T_i, "T_e": 0 * u.K, "n_e": -np.abs(n_e), "k": k_1, "ion": "p"},
ValueError,
),
(
(),
{"T_i": T_i, "T_e": 0 * u.K, "n_e": n_e, "k": -np.abs(k_1), "ion": "p"},
ValueError,
),
((), {"T_i": 5e19 * u.K, "T_e": 0 * u.K, "ion": "p"}, RelativityError),
(
(),
{"T_i": 5 * u.A, "T_e": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"},
u.UnitTypeError,
),
(
(),
{"T_i": T_negarr, "T_e": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"},
ValueError,
),
(
(),
{"T_e": T_negarr, "T_i": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"},
ValueError,
),
],
)
def test_raises(self, args, kwargs, _error):
with pytest.raises(_error):
ion_sound_speed(*args, **kwargs)
@pytest.mark.parametrize(
"kwargs",
[
({"T_i": T_nanarr, "T_e": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"}),
({"T_e": T_nanarr, "T_i": 0 * u.K, "n_e": n_e, "k": k_1, "ion": "p"}),
],
)
def test_nan_values(self, kwargs):
assert np.isnan(ion_sound_speed(**kwargs)[1])
def test_handle_nparrays(self):
assert_can_handle_nparray(ion_sound_speed)
def test_thermal_pressure():
assert thermal_pressure(T_e, n_i).unit.is_equivalent(u.Pa)
# TODO: there may be array issues with the "mass" argument
assert_can_handle_nparray(thermal_pressure)
def test_gyrofrequency():
r"""Test the gyrofrequency function in parameters.py."""
assert gyrofrequency(B, "e-").unit.is_equivalent(u.rad / u.s)
assert gyrofrequency(B, "e-", to_hz=True).unit.is_equivalent(u.Hz)
assert np.isclose(gyrofrequency(1 * u.T, "e-").value, 175882008784.72018)
assert np.isclose(gyrofrequency(2.4 * u.T, "e-").value, 422116821083.3284)
assert np.isclose(
gyrofrequency(1 * u.T, "e-", to_hz=True).value, 27992490076.528206
)
assert np.isclose(
gyrofrequency(2.4 * u.T, "e-", signed=True).value, -422116821083.3284
)
assert np.isclose(gyrofrequency(1 * u.G, "e-").cgs.value, 1.76e7, rtol=1e-3)
with pytest.raises(TypeError):
with pytest.warns(u.UnitsWarning):
gyrofrequency(u.m, "e-")
with pytest.raises(u.UnitTypeError):
gyrofrequency(u.m * 1, "e-")
assert np.isnan(gyrofrequency(B_nanarr, "e-")[-1])
# The following is a test to check that equivalencies from astropy
# are working.
omega_ce = gyrofrequency(2.2 * u.T, "e-")
f_ce = (omega_ce / (2 * np.pi)) / u.rad
f_ce_use_equiv = omega_ce.to(u.Hz, equivalencies=[(u.cy / u.s, u.Hz)])
assert np.isclose(f_ce.value, f_ce_use_equiv.value)
with pytest.warns(u.UnitsWarning):
assert gyrofrequency(5.0, "e-") == gyrofrequency(5.0 * u.T, "e-")
assert gyrofrequency(B, particle=ion).unit.is_equivalent(u.rad / u.s)
assert np.isclose(gyrofrequency(1 * u.T, particle="p").value, 95788335.834874)
assert np.isclose(gyrofrequency(2.4 * u.T, particle="p").value, 229892006.00369796)
assert np.isclose(gyrofrequency(1 * u.G, particle="p").cgs.value, 9.58e3, rtol=2e-3)
assert gyrofrequency(-5 * u.T, "p") == gyrofrequency(5 * u.T, "p")
# Case when Z=1 is assumed
# assert gyrofrequency(B, particle='p+') == gyrofrequency(B, particle='H-1')
assert gyrofrequency(B, particle="e+") == gyrofrequency(B, "e-")
with pytest.warns(u.UnitsWarning):
gyrofrequency(8, "p")
with pytest.raises(u.UnitTypeError):
gyrofrequency(5 * u.m, "p")
with pytest.raises(InvalidParticleError):
gyrofrequency(8 * u.T, particle="asdfasd")
with pytest.warns(u.UnitsWarning):
# TODO: this should warn rather than raise; confirm the UnitsWarning is still issued
assert gyrofrequency(5.0, "p") == gyrofrequency(5.0 * u.T, "p")
gyrofrequency(1 * u.T, particle="p")
# testing for user input Z
testMeth1 = gyrofrequency(1 * u.T, particle="p", Z=0.8).si.value
testTrue1 = 76630665.79318453
errStr = f"gyrofrequency() gave {testMeth1}, should be {testTrue1}."
assert np.isclose(testMeth1, testTrue1, atol=0.0, rtol=1e-5), errStr
assert_can_handle_nparray(gyrofrequency, kwargs={"signed": True})
assert_can_handle_nparray(gyrofrequency, kwargs={"signed": False})
def test_gyroradius():
r"""Test the gyroradius function in parameters.py."""
assert gyroradius(B, "e-", T=T_e).unit.is_equivalent(u.m)
assert gyroradius(B, "e-", Vperp=25 * u.m / u.s).unit.is_equivalent(u.m)
# Test that NaN input values are allowed and propagate to the output
assert np.isnan(gyroradius(np.nan * u.T, particle="e-", T=1 * u.K))
assert np.isnan(gyroradius(1 * u.T, particle="e-", T=np.nan * u.K))
assert np.isnan(gyroradius(1 * u.T, particle="e-", Vperp=np.nan * u.m / u.s))
Vperp = 1e6 * u.m / u.s
Bmag = 1 * u.T
omega_ce = gyrofrequency(Bmag, "e-")
analytical_result = (Vperp / omega_ce).to(
u.m, equivalencies=u.dimensionless_angles()
)
assert gyroradius(Bmag, "e-", Vperp=Vperp) == analytical_result
with pytest.raises(TypeError):
with pytest.warns(u.UnitsWarning):
gyroradius(u.T, "e-")
with pytest.raises(u.UnitTypeError):
gyroradius(5 * u.A, "e-", Vperp=8 * u.m / u.s)
with pytest.raises(u.UnitTypeError):
gyroradius(5 * u.T, "e-", Vperp=8 * u.m)
with pytest.raises(ValueError):
gyroradius(np.array([5, 6]) * u.T, "e-", Vperp=np.array([5, 6, 7]) * u.m / u.s)
assert np.isnan(gyroradius(np.nan * u.T, "e-", Vperp=1 * u.m / u.s))
with pytest.raises(ValueError):
gyroradius(3.14159 * u.T, "e-", T=-1 * u.K)
with pytest.warns(u.UnitsWarning):
assert gyroradius(1.0, "e-", Vperp=1.0) == gyroradius(
1.0 * u.T, "e-", Vperp=1.0 * u.m / u.s
)
with pytest.warns(u.UnitsWarning):
assert gyroradius(1.1, "e-", T=1.2) == gyroradius(1.1 * u.T, "e-", T=1.2 * u.K)
with pytest.raises(ValueError):
gyroradius(1.1 * u.T, "e-", Vperp=1 * u.m / u.s, T=1.2 * u.K)
with pytest.raises(u.UnitTypeError):
gyroradius(1.1 * u.T, "e-", Vperp=1.1 * u.m, T=1.2 * u.K)
# Check for Deprecation warning when using T_i instead of T
with pytest.warns(PlasmaPyFutureWarning):
gyroradius(1.1 * u.T, "e-", T_i=1.2 * u.K)
assert gyroradius(B, particle="p", T=T_i).unit.is_equivalent(u.m)
assert gyroradius(B, particle="p", Vperp=25 * u.m / u.s).unit.is_equivalent(u.m)
# Case when Z=1 is assumed
assert np.isclose(
gyroradius(B, particle="p", T=T_i),
gyroradius(B, particle="H+", T=T_i),
atol=1e-6 * u.m,
)
gyroPos = gyroradius(B, particle="p", Vperp=V)
gyroNeg = gyroradius(B, particle="p", Vperp=-V)
assert gyroPos == gyroNeg
Vperp = 1e6 * u.m / u.s
Bmag = 1 * u.T
omega_ci = gyrofrequency(Bmag, particle="p")
analytical_result = (Vperp / omega_ci).to(
u.m, equivalencies=u.dimensionless_angles()
)
assert gyroradius(Bmag, particle="p", Vperp=Vperp) == analytical_result
T2 = 1.2 * u.MK
B2 = 123 * u.G
particle2 = "alpha"
Vperp2 = thermal_speed(T2, particle=particle2)
gyro_by_vperp = gyroradius(B2, particle="alpha", Vperp=Vperp2)
assert gyro_by_vperp == gyroradius(B2, particle="alpha", T=T2)
explicit_positron_gyro = gyroradius(1 * u.T, particle="positron", T=1 * u.MK)
assert explicit_positron_gyro == gyroradius(1 * u.T, "e-", T=1 * u.MK)
with pytest.raises(TypeError):
with pytest.warns(u.UnitsWarning):
gyroradius(u.T, particle="p", Vperp=8 * u.m / u.s)
with pytest.raises(ValueError):
gyroradius(B, particle="p", T=-1 * u.K)
with pytest.warns(u.UnitsWarning):
gyro_without_units = gyroradius(1.0, particle="p", Vperp=1.0)
gyro_with_units = gyroradius(1.0 * u.T, particle="p", Vperp=1.0 * u.m / u.s)
assert gyro_without_units == gyro_with_units
with pytest.warns(u.UnitsWarning):
gyro_t_without_units = gyroradius(1.1, particle="p", T=1.2)
gyro_t_with_units = gyroradius(1.1 * u.T, particle="p", T=1.2 * u.K)
assert gyro_t_with_units == gyro_t_without_units
with pytest.raises(ValueError):
gyroradius(1.1 * u.T, particle="p", Vperp=1 * u.m / u.s, T=1.2 * u.K)
with pytest.raises(u.UnitTypeError):
gyroradius(1.1 * u.T, particle="p", Vperp=1.1 * u.m, T=1.2 * u.K)
with pytest.raises(u.UnitTypeError):
gyroradius(1.1 * u.T, particle="p", Vperp=1.2 * u.m, T=1.1 * u.K)
class Test_gyroradius:
# Custom numpy array tests, because T and Vperp are mutually exclusive inputs
def test_handle_numpy_array(self):
# Tests to verify that can handle Quantities with numpy array as the value:
assert gyroradius(B_arr, "e-", Vperp=V_arr)[0] == gyroradius(
B_arr[0], "e-", Vperp=V_arr[0]
)
assert gyroradius(B_arr, "e-", T=T_arr)[0] == gyroradius(
B_arr[0], "e-", T=T_arr[0]
)
def test_handle_mixed_Qarrays(self):
# If both Vperp and T are input as Qarrays, but only one of the two is valid
# at each element, the function should still work:
assert gyroradius(B_arr, "e-", Vperp=V_nanarr, T=T_nanarr2)[0] == gyroradius(
B_arr[0], "e-", Vperp=V_nanarr[0], T=T_nanarr2[0]
)
def test_raise_two_valid_inputs(self):
# If both Vperp and T are NaN-free, whether Qarrays or not, a ValueError should be raised:
with pytest.raises(ValueError):
gyroradius(B_arr, "e-", Vperp=V, T=T_arr)
with pytest.raises(ValueError):
gyroradius(B_arr, "e-", Vperp=V_arr, T=T_i)
def test_all_valid_and_one_valid(self):
# If one of (Vperp, T) is a valid scalar and the other is a Qarray with at
# least one valid element, a ValueError should be raised:
with pytest.raises(ValueError):
gyroradius(B_arr, "e-", Vperp=V, T=T_nanarr)
with pytest.raises(ValueError):
gyroradius(B_arr, "e-", Vperp=V_nanarr, T=T_i)
def test_scalar_and_nan_qarray(self):
# If either Vperp or T is a valid scalar and the other is a Qarray of all NaNs,
# the function should compute finite values from the scalar and not raise a ValueError
assert np.all(np.isfinite(gyroradius(B_arr, "e-", Vperp=V, T=T_allnanarr)))
assert np.all(np.isfinite(gyroradius(B_arr, "e-", Vperp=V_allnanarr, T=T_i)))
def test_keeps_arguments_unchanged(self):
Vperp1 = u.Quantity([np.nan, 1], unit=u.m / u.s)
Vperp2 = u.Quantity([np.nan, 1], unit=u.m / u.s) # an exact copy
T_i = u.Quantity([1, np.nan], unit=u.K)
gyroradius(B_arr, "e-", Vperp=Vperp1, T=T_i)
assert_quantity_allclose(Vperp1, Vperp2)
def test_plasma_frequency():
r"""Test the plasma_frequency function in parameters.py."""
assert plasma_frequency(n_e, "e-").unit.is_equivalent(u.rad / u.s)
assert plasma_frequency(n_e, "e-", to_hz=True).unit.is_equivalent(u.Hz)
assert np.isclose(plasma_frequency(1 * u.cm ** -3, "e-").value, 5.64e4, rtol=1e-2)
assert np.isclose(
plasma_frequency(1 * u.cm ** -3, particle="N").value, 3.53e2, rtol=1e-1
)
assert np.isclose(
plasma_frequency(1 * u.cm ** -3, particle="N", to_hz=True).value,
56.19000195094519,
)
with pytest.raises(TypeError):
with pytest.warns(u.UnitsWarning):
plasma_frequency(u.m ** -3, "e-")
with pytest.raises(u.UnitTypeError):
plasma_frequency(5 * u.m ** -2, "e-")
assert np.isnan(plasma_frequency(np.nan * u.m ** -3, "e-"))
with pytest.warns(u.UnitsWarning):
assert plasma_frequency(1e19, "e-") == plasma_frequency(1e19 * u.m ** -3, "e-")
assert plasma_frequency(n_i, particle="p").unit.is_equivalent(u.rad / u.s)
# Case where Z=1 is assumed
assert plasma_frequency(n_i, particle="H-1+") == plasma_frequency(n_i, particle="p")
assert np.isclose(
plasma_frequency(mu * u.cm ** -3, particle="p").value, 1.32e3, rtol=1e-2
)
with pytest.raises(ValueError):
plasma_frequency(n=5 * u.m ** -3, particle="sdfas")
with pytest.warns(u.UnitsWarning):
plasma_freq_no_units = plasma_frequency(1e19, particle="p")
assert plasma_freq_no_units == plasma_frequency(1e19 * u.m ** -3, particle="p")
plasma_frequency(1e17 * u.cm ** -3, particle="p")
# testing for user input z_mean
testMeth1 = plasma_frequency(1e17 * u.cm ** -3, particle="p", z_mean=0.8).si.value
testTrue1 = 333063562455.4028
errStr = f"plasma_frequency() gave {testMeth1}, should be {testTrue1}."
assert np.isclose(testMeth1, testTrue1, atol=0.0, rtol=1e-6), errStr
assert_can_handle_nparray(plasma_frequency)
def test_Debye_length():
r"""Test the Debye_length function in parameters.py."""
assert Debye_length(T_e, n_e).unit.is_equivalent(u.m)
assert np.isclose(Debye_length(1 * u.eV, 1 * u.cm ** -3).value, 7.43, atol=0.005)
with pytest.warns(u.UnitsWarning):
Debye_length(5, 5 * u.m ** -3)
with pytest.raises(u.UnitTypeError):
Debye_length(56 * u.kg, 5 * u.m ** -3)
with pytest.raises(ValueError):
Debye_length(5 * u.eV, -5 * u.m ** -3)
with pytest.raises(ValueError):
Debye_length(-45 * u.K, 5 * u.m ** -3)
Tarr2 = np.array([1, 2]) * u.K
narr3 = np.array([1, 2, 3]) * u.m ** -3
with pytest.raises(ValueError):
Debye_length(Tarr2, narr3)
with pytest.warns(u.UnitsWarning):
assert Debye_length(2.0, 2.0) == Debye_length(2.0 * u.K, 2.0 * u.m ** -3)
with pytest.warns(u.UnitsWarning):
assert Debye_length(2.0 * u.K, 2.0) == Debye_length(2.0, 2.0 * u.m ** -3)
assert_can_handle_nparray(Debye_length)
def test_Debye_number():
r"""Test the Debye_number function in parameters.py."""
assert Debye_number(T_e, n_e).unit.is_equivalent(u.dimensionless_unscaled)
T_e_eV = T_e.to(u.eV, equivalencies=u.temperature_energy())
assert np.isclose(Debye_number(T_e, n_e).value, Debye_number(T_e_eV, n_e).value)
assert np.isclose(Debye_number(1 * u.eV, 1 * u.cm ** -3).value, 1720862385.43342)
with pytest.warns(u.UnitsWarning):
Debye_number(T_e, 4)
with pytest.raises(ValueError):
Debye_number(None, n_e)
with pytest.raises(u.UnitTypeError):
Debye_number(5 * u.m, 5 * u.m ** -3)
with pytest.raises(u.UnitTypeError):
Debye_number(5 * u.K, 5 * u.m ** 3)
with pytest.raises(ValueError):
Debye_number(5j * u.K, 5 * u.cm ** -3)
Tarr2 = np.array([1, 2]) * u.K
narr3 = np.array([1, 2, 3]) * u.m ** -3
with pytest.raises(ValueError):
Debye_number(Tarr2, narr3)
with pytest.warns(u.UnitsWarning):
assert Debye_number(1.1, 1.1) == Debye_number(1.1 * u.K, 1.1 * u.m ** -3)
with pytest.warns(u.UnitsWarning):
assert Debye_number(1.1 * u.K, 1.1) == Debye_number(1.1, 1.1 * u.m ** -3)
assert_can_handle_nparray(Debye_number)
def test_inertial_length():
r"""Test the inertial_length function in parameters.py."""
assert inertial_length(n_i, particle="p").unit.is_equivalent(u.m)
assert np.isclose(
inertial_length(mu * u.cm ** -3, particle="p").cgs.value, 2.28e7, rtol=0.01
)
inertial_length_electron_plus = inertial_length(5.351 * u.m ** -3, particle="e+")
assert inertial_length_electron_plus == inertial_length(
5.351 * u.m ** -3, particle="e"
)
assert inertial_length(n_i, particle="p") == inertial_length(n_i, particle="p")
with pytest.warns(u.UnitsWarning):
inertial_length(4, particle="p")
with pytest.raises(u.UnitTypeError):
inertial_length(4 * u.m ** -2, particle="p")
with pytest.raises(ValueError):
inertial_length(-5 * u.m ** -3, particle="p")
with pytest.raises(InvalidParticleError):
inertial_length(n_i, particle=-135)
with pytest.warns(u.UnitsWarning):
inertial_length_no_units = inertial_length(1e19, particle="p")
assert inertial_length_no_units == inertial_length(
1e19 * u.m ** -3, particle="p"
)
assert inertial_length(n_e, "e-").unit.is_equivalent(u.m)
assert np.isclose(
inertial_length(1 * u.cm ** -3, "e-").cgs.value, 5.31e5, rtol=1e-3
)
with pytest.warns(u.UnitsWarning):
inertial_length(5, "e-")
with pytest.raises(u.UnitTypeError):
inertial_length(5 * u.m, "e-")
with pytest.raises(ValueError):
inertial_length(-5 * u.m ** -3, "e-")
with pytest.warns(u.UnitsWarning):
assert inertial_length(1e19, "e-") == inertial_length(1e19 * u.m ** -3, "e-")
assert_can_handle_nparray(inertial_length)
def test_magnetic_pressure():
r"""Test the magnetic_pressure function in parameters.py."""
assert magnetic_pressure(B_arr).unit.is_equivalent(u.Pa)
assert magnetic_pressure(B).unit.is_equivalent(u.Pa)
assert magnetic_pressure(B).unit.name == "Pa"
assert magnetic_pressure(B).value == magnetic_energy_density(B).value
assert magnetic_pressure(B) == magnetic_energy_density(B.to(u.G))
assert np.isclose(magnetic_pressure(B).value, 397887.35772973835)
with pytest.warns(u.UnitsWarning):
magnetic_pressure(5)
with pytest.raises(u.UnitTypeError):
magnetic_pressure(5 * u.m)
assert np.isnan(magnetic_pressure(np.nan * u.T))
with pytest.raises(ValueError):
magnetic_pressure(5j * u.T)
assert np.isnan(magnetic_pressure(B_nanarr)[-1])
with pytest.warns(u.UnitsWarning):
assert magnetic_pressure(22.2) == magnetic_pressure(22.2 * u.T)
assert_can_handle_nparray(magnetic_pressure)
def test_magnetic_energy_density():
r"""Test the magnetic_energy_density function in parameters.py."""
assert magnetic_energy_density(B_arr).unit.is_equivalent(u.J / u.m ** 3)
assert magnetic_energy_density(B).unit.is_equivalent("J / m3")
assert magnetic_energy_density(B).value == magnetic_pressure(B).value
assert_quantity_allclose(
magnetic_energy_density(2 * B), 4 * magnetic_energy_density(B)
)
assert_quantity_allclose(magnetic_energy_density(B).value, 397887.35772973835)
assert_quantity_allclose(
magnetic_energy_density(B), magnetic_energy_density(B.to(u.G))
)
assert isinstance(magnetic_energy_density(B_arr), u.Quantity)
with pytest.warns(u.UnitsWarning):
magnetic_energy_density(5)
with pytest.raises(u.UnitTypeError):
magnetic_energy_density(5 * u.m)
assert np.isnan(magnetic_energy_density(np.nan * u.T))
with pytest.raises(ValueError):
magnetic_energy_density(5j * u.T)
assert np.isnan(magnetic_energy_density(B_nanarr)[-1])
with pytest.warns(u.UnitsWarning):
assert magnetic_energy_density(22.2) == magnetic_energy_density(22.2 * u.T)
assert_can_handle_nparray(magnetic_energy_density)
def test_upper_hybrid_frequency():
r"""Test the upper_hybrid_frequency function in parameters.py."""
omega_uh = upper_hybrid_frequency(B, n_e=n_e)
omega_uh_hz = upper_hybrid_frequency(B, n_e=n_e, to_hz=True)
omega_ce = gyrofrequency(B, "e-")
omega_pe = plasma_frequency(n=n_e, particle="e-")
assert omega_ce.unit.is_equivalent(u.rad / u.s)
assert omega_pe.unit.is_equivalent(u.rad / u.s)
assert omega_uh.unit.is_equivalent(u.rad / u.s)
assert omega_uh_hz.unit.is_equivalent(u.Hz)
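# The upper hybrid frequency should satisfy w_uh**2 == w_ce**2 + w_pe**2,
# which the next three lines verify numerically.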
left_hand_side = omega_uh ** 2
right_hand_side = omega_ce ** 2 + omega_pe ** 2
assert np.isclose(left_hand_side.value, right_hand_side.value)
assert np.isclose(omega_uh_hz.value, 69385868857.90918)
with pytest.raises(ValueError):
upper_hybrid_frequency(5 * u.T, n_e=-1 * u.m ** -3)
with pytest.warns(u.UnitsWarning):
assert upper_hybrid_frequency(1.2, 1.3) == upper_hybrid_frequency(
1.2 * u.T, 1.3 * u.m ** -3
)
with pytest.warns(u.UnitsWarning):
assert upper_hybrid_frequency(1.4 * u.T, 1.3) == upper_hybrid_frequency(
1.4, 1.3 * u.m ** -3
)
assert_can_handle_nparray(upper_hybrid_frequency)
def test_lower_hybrid_frequency():
r"""Test the lower_hybrid_frequency function in parameters.py."""
ion = "He-4 1+"
omega_ci = gyrofrequency(B, particle=ion)
omega_pi = plasma_frequency(n=n_i, particle=ion)
omega_ce = gyrofrequency(B, "e-")
omega_lh = lower_hybrid_frequency(B, n_i=n_i, ion=ion)
omega_lh_hz = lower_hybrid_frequency(B, n_i=n_i, ion=ion, to_hz=True)
assert omega_ci.unit.is_equivalent(u.rad / u.s)
assert omega_pi.unit.is_equivalent(u.rad / u.s)
assert omega_ce.unit.is_equivalent(u.rad / u.s)
assert omega_lh.unit.is_equivalent(u.rad / u.s)
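# The lower hybrid frequency should satisfy
# 1 / w_lh**2 == 1 / (w_ci**2 + w_pi**2) + 1 / (w_ci * w_ce),
# which the next lines verify numerically.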
left_hand_side = omega_lh ** -2
right_hand_side = (
1 / (omega_ci ** 2 + omega_pi ** 2) + omega_ci ** -1 * omega_ce ** -1
)
assert np.isclose(left_hand_side.value, right_hand_side.value)
assert np.isclose(omega_lh_hz.value, 299878691.3223296)
with pytest.raises(ValueError):
lower_hybrid_frequency(0.2 * u.T, n_i=5e19 * u.m ** -3, ion="asdfasd")
with pytest.raises(ValueError):
lower_hybrid_frequency(0.2 * u.T, n_i=-5e19 * u.m ** -3, ion="asdfasd")
with pytest.raises(ValueError):
lower_hybrid_frequency(np.nan * u.T, n_i=-5e19 * u.m ** -3, ion="asdfasd")
with pytest.warns(u.UnitsWarning):
assert lower_hybrid_frequency(1.3, 1e19, "p+") == lower_hybrid_frequency(
1.3 * u.T, 1e19 * u.m ** -3, "p+"
)
assert_can_handle_nparray(lower_hybrid_frequency)
def test_Bohm_diffusion():
r"""Test Mag_Reynolds in dimensionless.py"""
T_e = 5000 * u.K
B = 10 * u.T
assert (Bohm_diffusion(T_e, B)).unit == u.m ** 2 / u.s
with pytest.warns(u.UnitsWarning):
Bohm_diffusion(5000, B)
with pytest.raises(u.UnitTypeError):
Bohm_diffusion(2.2 * u.kg, B)
@pytest.mark.parametrize(
"alias, parent",
[
(rho_, mass_density),
(va_, Alfven_speed),
(cs_, ion_sound_speed),
(pth_, thermal_pressure),
(betaH_, Hall_parameter),
(oc_, gyrofrequency),
(wc_, gyrofrequency),
(rc_, gyroradius),
(rhoc_, gyroradius),
(wp_, plasma_frequency),
(lambdaD_, Debye_length),
(nD_, Debye_number),
(cwp_, inertial_length),
(pmag_, magnetic_pressure),
(ub_, magnetic_energy_density),
(wuh_, upper_hybrid_frequency),
(wlh_, lower_hybrid_frequency),
(DB_, Bohm_diffusion),
],
)
def test_parameters_aliases(alias, parent):
"""Test all aliases defined in parameters.py"""
assert alias is parent
| test_values |
YouTubeThumbnail.ts | import { ModalBox as ModalBoxModule } from '../../ExternalModulesShim';
import { exportGlobally } from '../../GlobalExports';
import { IQueryResult } from '../../rest/QueryResult';
import { $$, Dom } from '../../utils/Dom';
import { DomUtils } from '../../utils/DomUtils';
import { SVGDom } from '../../utils/SVGDom';
import { SVGIcons } from '../../utils/SVGIcons';
import { Utils } from '../../utils/Utils';
import { Component } from '../Base/Component';
import { ComponentOptions } from '../Base/ComponentOptions';
import { Initialization } from '../Base/Initialization';
import { get } from '../Base/RegisteredNamedMethods';
import { IResultsComponentBindings } from '../Base/ResultsComponentBindings';
import { ResultLink } from '../ResultLink/ResultLink';
export interface IYouTubeThumbnailOptions {
width: string;
height: string;
embed: boolean;
}
/**
* The YouTubeThumbnail component automatically fetches the thumbnail of a YouTube video.
*
* This component differs from the standard {@link Thumbnail} component because the thumbnail it outputs is always
* clickable.
*
* Depending on the component configuration, clicking a YouTube thumbnail can either automatically open a modal box
* containing the `iframe` from YouTube, or open the target URL in the current window (see
* {@link YouTubeThumbnail.options.embed}).
*
* This component is a result template component (see [Result Templates](https://developers.coveo.com/x/aIGfAQ)).
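*
* As an illustrative sketch (the `data-*` attribute names are assumed to
* mirror the component options defined below):
*
* ```html
* <div class="CoveoYouTubeThumbnail" data-width="200px" data-height="112px" data-embed="true"></div>
* ```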
*/
export class | extends Component {
static ID = 'YouTubeThumbnail';
static doExport = () => {
exportGlobally({
YouTubeThumbnail: YouTubeThumbnail
});
};
/**
* @componentOptions
*/
static options: IYouTubeThumbnailOptions = {
/**
* Specifies the width (in pixels) of the YouTube thumbnail.
*
* Default value is `200px`.
*/
width: ComponentOptions.buildStringOption({ defaultValue: '200px' }),
/**
* Specifies the height (in pixels) of the YouTube thumbnail.
*
* Default value is `112px`.
*/
height: ComponentOptions.buildStringOption({ defaultValue: '112px' }),
/**
* Specifies whether clicking on the YouTube thumbnail loads the video in a modal box.
*
* Setting this option to `false` causes the browser to change its current location to that of the target URL when
* the end user clicks the YouTube thumbnail.
*
* Default value is `true`.
*/
embed: ComponentOptions.buildBooleanOption({ defaultValue: true })
};
public resultLink: Dom;
private modalbox: Coveo.ModalBox.ModalBox;
constructor(
public element: HTMLElement,
public options?: IYouTubeThumbnailOptions,
public bindings?: IResultsComponentBindings,
public result?: IQueryResult,
public ModalBox = ModalBoxModule
) {
super(element, YouTubeThumbnail.ID, bindings);
this.options = ComponentOptions.initComponentOptions(element, YouTubeThumbnail, options);
this.resultLink = $$('a', {
className: Component.computeCssClassName(ResultLink)
});
const thumbnailDiv = $$('div', {
className: 'coveo-youtube-thumbnail-container'
});
this.resultLink.append(thumbnailDiv.el);
const img = $$('img', {
src: Utils.getFieldValue(this.result, 'ytthumbnailurl'),
className: 'coveo-youtube-thumbnail-img',
alt: this.result.title,
title: this.result.title
});
img.el.style.width = this.options.width;
img.el.style.height = this.options.height;
img.el.onerror = () => {
const svgVideo = $$('div', {}, SVGIcons.icons.video).el;
SVGDom.addStyleToSVGInContainer(svgVideo, {
width: this.options.width
});
$$(img).remove();
thumbnailDiv.append(svgVideo);
};
thumbnailDiv.append(img.el);
const span = $$('span', {
className: 'coveo-youtube-thumbnail-play-button'
});
thumbnailDiv.append(span.el);
$$(this.element).append(this.resultLink.el);
Initialization.automaticallyCreateComponentsInsideResult(element, result, {
ResultLink: this.options.embed ? { onClick: () => this.openYoutubeIframe() } : null
});
}
/**
* Open the result link embedded in this component.
*
* With a standard configuration of this component, this will open an iframe that automatically plays the video.
*/
public openResultLink() {
let resultLinkComponent = <ResultLink>get(this.resultLink.el);
resultLinkComponent.openLinkAsConfigured();
}
private openYoutubeIframe() {
// The iframe must be wrapped in a div: an iframe with absolute positioning and left/right/bottom set to 0 is not consistently supported across browsers
const iframe = $$('iframe', {
src: `https://www.youtube.com/embed/${this.extractVideoId()}?autoplay=1`,
allowfullscreen: 'allowfullscreen',
width: '100%',
height: '100%'
});
const div = $$('div');
div.append(iframe.el);
this.modalbox = this.ModalBox.open(div.el, {
overlayClose: true,
title: DomUtils.getQuickviewHeader(this.result, { showDate: true, title: this.result.title }, this.bindings).el,
className: 'coveo-youtube-player',
validation: () => true,
body: this.element.ownerDocument.body,
sizeMod: 'big'
});
$$($$(this.modalbox.wrapper).find('.coveo-quickview-close-button')).on('click', () => {
this.modalbox.close();
});
}
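// Note: extractVideoId below assumes the result's clickUri is a standard
// YouTube watch URL of the form ".../watch?v=<id>"; other URL shapes
// (e.g. youtu.be short links) are not handled here.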
private extractVideoId() {
return this.result.clickUri.split('watch?v=')[1];
}
}
Initialization.registerAutoCreateComponent(YouTubeThumbnail);
| YouTubeThumbnail |
hostfile.go | package hostess
import (
"fmt"
"io/ioutil"
"os"
"runtime"
"strings"
)
const defaultOSX = `
##
# Host Database
#
# localhost is used to configure the loopback interface
# when the system is booting. Do not change this entry.
##
127.0.0.1 localhost
255.255.255.255 broadcasthost
::1 localhost
fe80::1%lo0 localhost
`
const defaultLinux = `
127.0.0.1 localhost
127.0.1.1 HOSTNAME
# The following lines are desirable for IPv6 capable hosts
::1 localhost ip6-localhost ip6-loopback
fe00::0 ip6-localnet
ff00::0 ip6-mcastprefix
ff02::1 ip6-allnodes
ff02::2 ip6-allrouters
ff02::3 ip6-allhosts
`
// Hostfile represents /etc/hosts (or a similar file, depending on OS). It
// holds the file path, a list of Hostnames, and the raw file data.
type Hostfile struct {
Path string
Hosts Hostlist
data []byte
}
// NewHostfile creates a new Hostfile object from the specified file.
func NewHostfile() *Hostfile {
return &Hostfile{GetHostsPath(), Hostlist{}, []byte{}}
}
// GetHostsPath returns the location of the hostfile; either env HOSTESS_PATH
// or /etc/hosts if HOSTESS_PATH is not set.
func GetHostsPath() string {
path := os.Getenv("HOSTESS_PATH")
if path == "" {
if runtime.GOOS == "windows" {
path = "C:\\Windows\\System32\\drivers\\etc\\hosts"
} else {
path = "/etc/hosts"
}
}
return path
}
// TrimWS (Trim Whitespace) removes leading and trailing space, newline, and
// tab characters from a string using strings.TrimSpace()
func TrimWS(s string) string {
return strings.TrimSpace(s)
}
// ParseLine parses an individual line in a hostfile, which may contain one
// (un)commented ip and one or more hostnames. For example
//
// 127.0.0.1 localhost mysite1 mysite2
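//
// which, for illustration, yields three enabled Hostnames ("localhost",
// "mysite1", "mysite2") that all map to 127.0.0.1.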
func ParseLine(line string) (Hostlist, error) {
var hostnames Hostlist
if len(line) == 0 {
return hostnames, fmt.Errorf("line is blank")
}
// Parse leading # for disabled lines
enabled := true
if line[0:1] == "#" {
enabled = false
line = TrimWS(line[1:])
}
// Parse other #s for actual comments
line = strings.Split(line, "#")[0]
// Replace tabs and multispaces with single spaces throughout
line = strings.Replace(line, "\t", " ", -1)
for strings.Contains(line, " ") {
line = strings.Replace(line, " ", " ", -1)
}
line = TrimWS(line)
// Break line into words
words := strings.Split(line, " ")
for idx, word := range words {
words[idx] = TrimWS(word)
}
// Separate the first bit (the ip) from the other bits (the domains)
ip := words[0]
domains := words[1:]
// if LooksLikeIPv4(ip) || LooksLikeIPv6(ip) {
for _, v := range domains {
hostname, err := NewHostname(v, ip, enabled)
if err != nil {
return nil, err
}
hostnames = append(hostnames, hostname)
}
// }
return hostnames, nil
}
// MustParseLine is like ParseLine but panics instead of errors.
func MustParseLine(line string) Hostlist |
// Parse parses the raw hostfile data into Hostnames, adding each to the
// Hosts list and collecting any errors encountered along the way.
func (h *Hostfile) Parse() []error {
var errs []error
var line = 1
for _, v := range strings.Split(string(h.data), "\n") {
hostnames, _ := ParseLine(v)
// if err != nil {
// log.Printf("Error parsing line %d: %s\n", line, err)
// }
for _, hostname := range hostnames {
err := h.Hosts.UnsafeAdd(hostname)
if err != nil {
errs = append(errs, err)
}
}
line++
}
return errs
}
// Read the contents of the hostfile from disk
func (h *Hostfile) Read() error {
data, err := ioutil.ReadFile(h.Path)
if err == nil {
h.data = data
}
return err
}
// LoadHostfile creates a new Hostfile struct and tries to populate it from
// disk. Read and/or parse errors are returned as a slice.
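//
// A minimal usage sketch:
//
//	hostfile, errs := LoadHostfile()
//	if len(errs) > 0 {
//		// handle read/parse errors
//	}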
func LoadHostfile() (hostfile *Hostfile, errs []error) {
hostfile = NewHostfile()
readErr := hostfile.Read()
if readErr != nil {
errs = []error{readErr}
return
}
errs = hostfile.Parse()
hostfile.Hosts.Sort()
return
}
// GetData returns the internal snapshot of the hostfile we read when we loaded
// this hostfile from disk (if we ever did that). This is implemented for
// testing and you probably won't need to use it.
func (h *Hostfile) GetData() []byte {
return h.data
}
// Format takes the current list of Hostnames in this Hostfile and turns it
// into a string suitable for use as an /etc/hosts file.
// Sorting uses the following logic:
// 1. List is sorted by IP address
// 2. Commented items are left in place
// 3. 127.* appears at the top of the list (so boot resolvers don't break)
// 4. When present, localhost will always appear first in the domain list
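//
// For illustration (a sketch, not verbatim output), the formatted file could
// look like:
//
//	127.0.0.1 localhost mysite
//	10.0.0.5 internal.example.com
//	# 192.168.0.1 disabled.example.com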
func (h *Hostfile) Format() []byte {
return h.Hosts.Format()
}
// Save writes the Hostfile to disk to /etc/hosts or to the location specified
// by the HOSTESS_PATH environment variable (if set).
func (h *Hostfile) Save() error {
file, err := os.OpenFile(h.Path, os.O_RDWR|os.O_APPEND|os.O_TRUNC, 0644)
if err != nil {
return err
}
defer file.Close()
_, err = file.Write(h.Format())
return err
}
| {
hostlist, err := ParseLine(line)
if err != nil {
panic(err)
}
return hostlist
} |
aql-builder-contains.component.ts | /**
* Copyright 2021 Vitagroup AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Component, Input, OnInit } from '@angular/core'
import { AqbContainsCompositionUiModel } from '../../../../shared/models/aqb/aqb-contains-composition-ui.model'
import { AqbUiModel } from '../../../../shared/models/aqb/aqb-ui.model'
@Component({
selector: 'num-aql-builder-contains',
templateUrl: './aql-builder-contains.component.html',
styleUrls: ['./aql-builder-contains.component.scss'],
})
export class | implements OnInit {
constructor() {}
@Input()
aqbModel: AqbUiModel
@Input()
compositions: AqbContainsCompositionUiModel[] = []
ngOnInit(): void {}
deleteCompositionByReferenceId(compositionReferenceId: number): void {
this.aqbModel.handleDeletionByCompositionReferenceIds([compositionReferenceId])
this.compositions = this.compositions.filter(
(composition) => composition.compositionReferenceId !== compositionReferenceId
)
}
deleteArchetypesByReferenceIds(archetypeReferenceIds: number[]): void {
this.aqbModel.handleDeletionByArchetypeReferenceIds(archetypeReferenceIds)
}
}
| AqlBuilderContainsComponent |
0025_auto__add_field_userban_is_unbanned.py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
| def forwards(self, orm):
# Adding field 'UserBan.is_unbanned'
db.add_column('base_userban', 'is_unbanned', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'UserBan.is_unbanned'
db.delete_column('base_userban', 'is_unbanned')
models = {
'atlas.city': {
'Meta': {'ordering': "('name',)", 'object_name': 'City'},
'coordinates': ('atlas.fields.CoordinateField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Region']", 'null': 'True', 'blank': 'True'})
},
'atlas.country': {
'Meta': {'ordering': "('name',)", 'object_name': 'Country'},
'border': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'null': 'True', 'blank': 'True'}),
'coordinates': ('atlas.fields.CoordinateField', [], {'null': 'True', 'blank': 'True'}),
'country_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'atlas.location': {
'Meta': {'object_name': 'Location'},
'address': ('django.db.models.fields.TextField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.City']"}),
'coordinates': ('atlas.fields.CoordinateField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Country']"}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'photo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['photologue.Photo']", 'null': 'True', 'blank': 'True'})
},
'atlas.region': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('country', 'code'),)", 'object_name': 'Region'},
'border': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'null': 'True', 'blank': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}),
'coordinates': ('atlas.fields.CoordinateField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 5, 20, 14, 40, 19, 580727)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 5, 20, 14, 40, 19, 580636)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'base.banner': {
'Meta': {'ordering': "('-created',)", 'object_name': 'Banner', '_ormbases': ['jmbo.ModelBase']},
'banner_type': ('django.db.models.fields.CharField', [], {'default': "'banner'", 'max_length': '10'}),
'modelbase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['jmbo.ModelBase']", 'unique': 'True', 'primary_key': 'True'}),
'time_off': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'time_on': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'base.certificate': {
'Meta': {'object_name': 'Certificate'},
'duration': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institution': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'year': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'base.curriculumvitae': {
'Meta': {'object_name': 'CurriculumVitae'},
'about_me': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'address': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'certificates': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['base.Certificate']", 'symmetrical': 'False', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'comment_as_anon': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'connection_requests': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'connection_requests'", 'blank': 'True', 'to': "orm['auth.User']"}),
'date_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'highest_grade': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'highest_grade_year': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['base.Language']", 'symmetrical': 'False', 'blank': 'True'}),
'nr_of_faxes_sent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'preferred_skill': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'profiles_preferred'", 'null': 'True', 'to': "orm['base.Skill']"}),
'province': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'references': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['base.Reference']", 'symmetrical': 'False', 'blank': 'True'}),
'school': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'show_address': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'show_contact_number': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'skills': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'profiles'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['base.Skill']"}),
'surname': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'telephone_number': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'work_experiences': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['base.WorkExperience']", 'symmetrical': 'False', 'blank': 'True'})
},
'base.language': {
'Meta': {'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'read_write': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'base.reference': {
'Meta': {'object_name': 'Reference'},
'contact_no': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'relationship': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'})
},
'base.skill': {
'Meta': {'object_name': 'Skill'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'primary': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'skill': ('django.db.models.fields.CharField', [], {'max_length': '45'})
},
'base.userban': {
'Meta': {'object_name': 'UserBan'},
'ban_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_unbanned': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'unban_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'base.workexperience': {
'Meta': {'object_name': 'WorkExperience'},
'company': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'end_year': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_year': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '45'})
},
'category.category': {
'Meta': {'ordering': "('title',)", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'subtitle': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'category.tag': {
'Meta': {'ordering': "('title',)", 'object_name': 'Tag'},
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'jmbo.modelbase': {
'Meta': {'ordering': "('-created',)", 'object_name': 'ModelBase'},
'anonymous_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'anonymous_likes': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'comments_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comments_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modelbase_related'", 'null': 'True', 'to': "orm['photologue.PhotoEffect']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'likes_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'likes_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Location']", 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'primary_category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'primary_modelbase_set'", 'null': 'True', 'to': "orm['category.Category']"}),
'publish_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publishers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['publisher.Publisher']", 'null': 'True', 'blank': 'True'}),
'retract_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'null': 'True', 'symmetrical': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'default': "'unpublished'", 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'subtitle': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Tag']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'photologue.photo': {
'Meta': {'ordering': "['-date_added']", 'object_name': 'Photo'},
'caption': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photo_related'", 'null': 'True', 'to': "orm['photologue.PhotoEffect']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'tags': ('photologue.models.TagField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'title_slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'photologue.photoeffect': {
'Meta': {'object_name': 'PhotoEffect'},
'background_color': ('django.db.models.fields.CharField', [], {'default': "'#FFFFFF'", 'max_length': '7'}),
'brightness': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'color': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'contrast': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'filters': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'reflection_size': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'reflection_strength': ('django.db.models.fields.FloatField', [], {'default': '0.6'}),
'sharpness': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'transpose_method': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'})
},
'publisher.publisher': {
'Meta': {'object_name': 'Publisher'},
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'secretballot.vote': {
'Meta': {'unique_together': "(('token', 'content_type', 'object_id'),)", 'object_name': 'Vote'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
        complete_apps = ['base']

client_auth_test.go
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssh
import (
"bytes"
"crypto/rand"
"errors"
"fmt"
"io"
"log"
"net"
"os"
"runtime"
"strings"
"testing"
)
type keyboardInteractive map[string]string
func (cr keyboardInteractive) Challenge(user string, instruction string, questions []string, echos []bool) ([]string, error) {
var answers []string
for _, q := range questions {
answers = append(answers, cr[q])
}
return answers, nil
}
// reused internally by tests
var clientPassword = "tiger"
// tryAuth runs a handshake with a given config against an SSH server
// with config serverConfig. It returns the client-side error, if any.
func tryAuth(t *testing.T, config *ClientConfig) error {
err, _ := tryAuthBothSides(t, config, nil)
return err
}
// tryAuthWithGSSAPIWithMICConfig runs a handshake with a given client config and
// GSSAPIWithMICConfig against an SSH server. It returns the client-side error, if any.
func tryAuthWithGSSAPIWithMICConfig(t *testing.T, clientConfig *ClientConfig, gssAPIWithMICConfig *GSSAPIWithMICConfig) error {
err, _ := tryAuthBothSides(t, clientConfig, gssAPIWithMICConfig)
return err
}
// tryAuthBothSides runs the handshake and returns the resulting errors from both sides of the connection.
func tryAuthBothSides(t *testing.T, config *ClientConfig, gssAPIWithMICConfig *GSSAPIWithMICConfig) (clientError error, serverAuthErrors []error) {
c1, c2, err := netPipe()
if err != nil {
t.Fatalf("netPipe: %v", err)
}
defer c1.Close()
defer c2.Close()
certChecker := CertChecker{
IsUserAuthority: func(k PublicKey) bool {
return bytes.Equal(k.Marshal(), testPublicKeys["ecdsa"].Marshal())
},
UserKeyFallback: func(conn ConnMetadata, key PublicKey) (*Permissions, error) {
if conn.User() == "testuser" && bytes.Equal(key.Marshal(), testPublicKeys["rsa"].Marshal()) {
return nil, nil
}
return nil, fmt.Errorf("pubkey for %q not acceptable", conn.User())
},
IsRevoked: func(c *Certificate) bool {
return c.Serial == 666
},
}
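	// The test server accepts three methods: a fixed password, public-key or
	// certificate auth via certChecker, and a two-question keyboard-interactive
	// exchange.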
serverConfig := &ServerConfig{
PasswordCallback: func(conn ConnMetadata, pass []byte) (*Permissions, error) {
if conn.User() == "testuser" && string(pass) == clientPassword {
return nil, nil
}
return nil, errors.New("password auth failed")
},
PublicKeyCallback: certChecker.Authenticate,
KeyboardInteractiveCallback: func(conn ConnMetadata, challenge KeyboardInteractiveChallenge) (*Permissions, error) {
ans, err := challenge("user",
"instruction",
[]string{"question1", "question2"},
[]bool{true, true})
if err != nil {
return nil, err
}
ok := conn.User() == "testuser" && ans[0] == "answer1" && ans[1] == "answer2"
if ok {
challenge("user", "motd", nil, nil)
return nil, nil
}
return nil, errors.New("keyboard-interactive failed")
},
GSSAPIWithMICConfig: gssAPIWithMICConfig,
}
serverConfig.AddHostKey(testSigners["rsa"])
serverConfig.AuthLogCallback = func(conn ConnMetadata, method string, err error) {
serverAuthErrors = append(serverAuthErrors, err)
}
go newServer(c1, serverConfig)
_, _, _, err = NewClientConn(c2, "", config)
return err, serverAuthErrors
}
func TestClientAuthPublicKey(t *testing.T) {
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(testSigners["rsa"]),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("unable to dial remote side: %s", err)
}
}
func TestAuthMethodPassword(t *testing.T) {
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
Password(clientPassword),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("unable to dial remote side: %s", err)
}
}
func TestAuthMethodFallback(t *testing.T) {
var passwordCalled bool
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(testSigners["rsa"]),
PasswordCallback(
func() (string, error) {
passwordCalled = true
return "WRONG", nil
}),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("unable to dial remote side: %s", err)
}
if passwordCalled {
t.Errorf("password auth tried before public-key auth.")
}
}
func TestAuthMethodWrongPassword(t *testing.T) {
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
Password("wrong"),
PublicKeys(testSigners["rsa"]),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("unable to dial remote side: %s", err)
}
}
func TestAuthMethodKeyboardInteractive(t *testing.T) {
answers := keyboardInteractive(map[string]string{
"question1": "answer1",
"question2": "answer2",
})
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
KeyboardInteractive(answers.Challenge),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("unable to dial remote side: %s", err)
}
}
func TestAuthMethodWrongKeyboardInteractive(t *testing.T) {
answers := keyboardInteractive(map[string]string{
"question1": "answer1",
"question2": "WRONG",
})
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
KeyboardInteractive(answers.Challenge),
},
}
if err := tryAuth(t, config); err == nil {
t.Fatalf("wrong answers should not have authenticated with KeyboardInteractive")
}
}
// the mock server will only authenticate ssh-rsa keys
func TestAuthMethodInvalidPublicKey(t *testing.T) {
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(testSigners["dsa"]),
},
}
if err := tryAuth(t, config); err == nil {
t.Fatalf("dsa private key should not have authenticated with rsa public key")
}
}
// the client should authenticate with the second key
func TestAuthMethodRSAandDSA(t *testing.T) {
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(testSigners["dsa"], testSigners["rsa"]),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("client could not authenticate with rsa key: %v", err)
}
}
type invalidAlgSigner struct {
Signer
}
func (s *invalidAlgSigner) Sign(rand io.Reader, data []byte) (*Signature, error) {
sig, err := s.Signer.Sign(rand, data)
if sig != nil {
sig.Format = "invalid"
}
return sig, err
}
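// TestMethodInvalidAlgorithm checks that the server rejects a signature whose
// declared format does not match the negotiated public-key algorithm.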
func TestMethodInvalidAlgorithm(t *testing.T) {
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(&invalidAlgSigner{testSigners["rsa"]}),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
err, serverErrors := tryAuthBothSides(t, config, nil)
if err == nil {
t.Fatalf("login succeeded")
}
found := false
want := "algorithm \"invalid\""
var errStrings []string
for _, err := range serverErrors {
found = found || (err != nil && strings.Contains(err.Error(), want))
errStrings = append(errStrings, err.Error())
}
if !found {
t.Errorf("server got error %q, want substring %q", errStrings, want)
}
}
func TestClientHMAC(t *testing.T) {
for _, mac := range supportedMACs {
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(testSigners["rsa"]),
},
Config: Config{
MACs: []string{mac},
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("client could not authenticate with mac algo %s: %v", mac, err)
}
}
}
// issue 4285.
func TestClientUnsupportedCipher(t *testing.T) {
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(),
},
Config: Config{
Ciphers: []string{"aes128-cbc"}, // not currently supported
},
}
if err := tryAuth(t, config); err == nil {
t.Errorf("expected no ciphers in common")
}
}
func TestClientUnsupportedKex(t *testing.T) {
if os.Getenv("GO_BUILDER_NAME") != "" {
t.Skip("skipping known-flaky test on the Go build dashboard; see golang.org/issue/15198")
}
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(),
},
Config: Config{
KeyExchanges: []string{"non-existent-kex"},
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err == nil || !strings.Contains(err.Error(), "common algorithm") {
t.Errorf("got %v, expected 'common algorithm'", err)
}
}
func TestClientLoginCert(t *testing.T) {
cert := &Certificate{
Key: testPublicKeys["rsa"],
ValidBefore: CertTimeInfinity,
CertType: UserCert,
}
cert.SignCert(rand.Reader, testSigners["ecdsa"])
certSigner, err := NewCertSigner(cert, testSigners["rsa"])
if err != nil {
t.Fatalf("NewCertSigner: %v", err)
}
clientConfig := &ClientConfig{
User: "user",
HostKeyCallback: InsecureIgnoreHostKey(),
}
clientConfig.Auth = append(clientConfig.Auth, PublicKeys(certSigner))
// should succeed
if err := tryAuth(t, clientConfig); err != nil {
t.Errorf("cert login failed: %v", err)
}
// corrupted signature
cert.Signature.Blob[0]++
if err := tryAuth(t, clientConfig); err == nil {
t.Errorf("cert login passed with corrupted sig")
}
// revoked
cert.Serial = 666
cert.SignCert(rand.Reader, testSigners["ecdsa"])
if err := tryAuth(t, clientConfig); err == nil {
t.Errorf("revoked cert login succeeded")
}
cert.Serial = 1
// sign with wrong key
cert.SignCert(rand.Reader, testSigners["dsa"])
if err := tryAuth(t, clientConfig); err == nil {
t.Errorf("cert login passed with non-authoritative key")
}
// host cert
cert.CertType = HostCert
cert.SignCert(rand.Reader, testSigners["ecdsa"])
if err := tryAuth(t, clientConfig); err == nil {
t.Errorf("cert login passed with wrong type")
}
cert.CertType = UserCert
// principal specified
cert.ValidPrincipals = []string{"user"}
cert.SignCert(rand.Reader, testSigners["ecdsa"])
if err := tryAuth(t, clientConfig); err != nil {
t.Errorf("cert login failed: %v", err)
}
// wrong principal specified
cert.ValidPrincipals = []string{"fred"}
cert.SignCert(rand.Reader, testSigners["ecdsa"])
if err := tryAuth(t, clientConfig); err == nil {
t.Errorf("cert login passed with wrong principal")
}
cert.ValidPrincipals = nil
// added critical option
cert.CriticalOptions = map[string]string{"root-access": "yes"}
cert.SignCert(rand.Reader, testSigners["ecdsa"])
if err := tryAuth(t, clientConfig); err == nil {
t.Errorf("cert login passed with unrecognized critical option")
}
// allowed source address
cert.CriticalOptions = map[string]string{"source-address": "127.0.0.42/24,::42/120"}
cert.SignCert(rand.Reader, testSigners["ecdsa"])
if err := tryAuth(t, clientConfig); err != nil {
t.Errorf("cert login with source-address failed: %v", err)
}
// disallowed source address
cert.CriticalOptions = map[string]string{"source-address": "127.0.0.42,::42"}
cert.SignCert(rand.Reader, testSigners["ecdsa"])
if err := tryAuth(t, clientConfig); err == nil {
t.Errorf("cert login with source-address succeeded")
}
}
func testPermissionsPassing(withPermissions bool, t *testing.T) {
serverConfig := &ServerConfig{
PublicKeyCallback: func(conn ConnMetadata, key PublicKey) (*Permissions, error) {
if conn.User() == "nopermissions" {
return nil, nil
}
return &Permissions{}, nil
},
}
serverConfig.AddHostKey(testSigners["rsa"])
clientConfig := &ClientConfig{
Auth: []AuthMethod{
PublicKeys(testSigners["rsa"]),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if withPermissions {
clientConfig.User = "permissions"
} else {
clientConfig.User = "nopermissions"
}
c1, c2, err := netPipe()
if err != nil {
t.Fatalf("netPipe: %v", err)
}
defer c1.Close()
defer c2.Close()
go NewClientConn(c2, "", clientConfig)
serverConn, err := newServer(c1, serverConfig)
if err != nil {
t.Fatal(err)
}
if p := serverConn.Permissions; (p != nil) != withPermissions {
t.Fatalf("withPermissions is %t, but Permissions object is %#v", withPermissions, p)
}
}
func TestPermissionsPassing(t *testing.T) {
testPermissionsPassing(true, t)
}
func TestNoPermissionsPassing(t *testing.T) {
testPermissionsPassing(false, t)
}
func TestRetryableAuth(t *testing.T) {
n := 0
passwords := []string{"WRONG1", "WRONG2"}
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
RetryableAuthMethod(PasswordCallback(func() (string, error) {
p := passwords[n]
n++
return p, nil
}), 2),
PublicKeys(testSigners["rsa"]),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("unable to dial remote side: %s", err)
}
if n != 2 {
t.Fatalf("Did not try all passwords")
}
}
func ExampleRetryableAuthMethod() {
user := "testuser"
NumberOfPrompts := 3
// Normally this would be a callback that prompts the user to answer the
// provided questions
Cb := func(user, instruction string, questions []string, echos []bool) (answers []string, err error) {
return []string{"answer1", "answer2"}, nil
}
config := &ClientConfig{
HostKeyCallback: InsecureIgnoreHostKey(),
User: user,
Auth: []AuthMethod{
RetryableAuthMethod(KeyboardInteractiveChallenge(Cb), NumberOfPrompts),
},
}
host := "mysshserver"
netConn, err := net.Dial("tcp", host)
if err != nil {
log.Fatal(err)
}
sshConn, _, _, err := NewClientConn(netConn, host, config)
if err != nil {
log.Fatal(err)
}
_ = sshConn
}
// Test if username is received on server side when NoClientAuth is used
func TestClientAuthNone(t *testing.T) {
user := "testuser"
serverConfig := &ServerConfig{
NoClientAuth: true,
}
serverConfig.AddHostKey(testSigners["rsa"])
clientConfig := &ClientConfig{
User: user,
HostKeyCallback: InsecureIgnoreHostKey(),
}
c1, c2, err := netPipe()
if err != nil {
t.Fatalf("netPipe: %v", err)
}
defer c1.Close()
defer c2.Close()
go NewClientConn(c2, "", clientConfig)
serverConn, err := newServer(c1, serverConfig)
if err != nil {
t.Fatalf("newServer: %v", err)
}
if serverConn.User() != user {
t.Fatalf("server: got %q, want %q", serverConn.User(), user)
}
}
// Test if authentication attempts are limited on server when MaxAuthTries is set
func TestClientAuthMaxAuthTries(t *testing.T) {
user := "testuser"
serverConfig := &ServerConfig{
MaxAuthTries: 2,
PasswordCallback: func(conn ConnMetadata, pass []byte) (*Permissions, error) {
if conn.User() == "testuser" && string(pass) == "right" {
return nil, nil
}
return nil, errors.New("password auth failed")
},
}
serverConfig.AddHostKey(testSigners["rsa"])
expectedErr := fmt.Errorf("ssh: handshake failed: %v", &disconnectMsg{
Reason: 2,
Message: "too many authentication failures",
})
for tries := 2; tries < 4; tries++ {
n := tries
clientConfig := &ClientConfig{
User: user,
Auth: []AuthMethod{
RetryableAuthMethod(PasswordCallback(func() (string, error) {
n--
if n == 0 {
return "right", nil
}
return "wrong", nil
}), tries),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
c1, c2, err := netPipe()
if err != nil {
t.Fatalf("netPipe: %v", err)
}
defer c1.Close()
defer c2.Close()
go newServer(c1, serverConfig)
_, _, _, err = NewClientConn(c2, "", clientConfig)
if tries > 2 {
if err == nil {
t.Fatalf("client: got no error, want %s", expectedErr)
} else if err.Error() != expectedErr.Error() {
t.Fatalf("client: got %s, want %s", err, expectedErr)
}
} else {
if err != nil {
t.Fatalf("client: got %s, want no error", err)
}
}
}
}
// Test if authentication attempts are correctly limited on server
// when more public keys are provided than MaxAuthTries
func TestClientAuthMaxAuthTriesPublicKey(t *testing.T) {
signers := []Signer{}
for i := 0; i < 6; i++ {
signers = append(signers, testSigners["dsa"])
}
validConfig := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(append([]Signer{testSigners["rsa"]}, signers...)...),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, validConfig); err != nil {
t.Fatalf("unable to dial remote side: %s", err)
}
expectedErr := fmt.Errorf("ssh: handshake failed: %v", &disconnectMsg{
Reason: 2,
Message: "too many authentication failures",
})
invalidConfig := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(append(signers, testSigners["rsa"])...),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, invalidConfig); err == nil {
t.Fatalf("client: got no error, want %s", expectedErr)
} else if err.Error() != expectedErr.Error() {
// On Windows we can see a WSAECONNABORTED error
// if the client writes another authentication request
// before the client goroutine reads the disconnection
// message. See issue 50805.
if runtime.GOOS == "windows" && strings.Contains(err.Error(), "wsarecv: An established connection was aborted") {
// OK.
} else {
t.Fatalf("client: got %s, want %s", err, expectedErr)
}
}
}
// Test whether authentication errors are being properly logged if all
// authentication methods have been exhausted
func TestClientAuthErrorList(t *testing.T) {
publicKeyErr := errors.New("This is an error from PublicKeyCallback")
clientConfig := &ClientConfig{
Auth: []AuthMethod{
PublicKeys(testSigners["rsa"]),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
serverConfig := &ServerConfig{
PublicKeyCallback: func(_ ConnMetadata, _ PublicKey) (*Permissions, error) {
return nil, publicKeyErr
},
}
serverConfig.AddHostKey(testSigners["rsa"])
c1, c2, err := netPipe()
if err != nil {
t.Fatalf("netPipe: %v", err)
}
defer c1.Close()
defer c2.Close()
go NewClientConn(c2, "", clientConfig)
_, err = newServer(c1, serverConfig)
if err == nil {
t.Fatal("newServer: got nil, expected errors")
}
authErrs, ok := err.(*ServerAuthError)
if !ok {
t.Fatalf("errors: got %T, want *ssh.ServerAuthError", err)
}
for i, e := range authErrs.Errors {
switch i {
case 0:
if e != ErrNoAuth {
t.Fatalf("errors: got error %v, want ErrNoAuth", e)
}
case 1:
if e != publicKeyErr {
t.Fatalf("errors: got %v, want %v", e, publicKeyErr)
}
default:
t.Fatalf("errors: got %v, expected 2 errors", authErrs.Errors)
}
}
}
func TestAuthMethodGSSAPIWithMIC(t *testing.T) {
type testcase struct {
config *ClientConfig
gssConfig *GSSAPIWithMICConfig
clientWantErr string
serverWantErr string
}
testcases := []*testcase{
{
config: &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
GSSAPIWithMICAuthMethod(
&FakeClient{
exchanges: []*exchange{
{
outToken: "client-valid-token-1",
},
{
expectedToken: "server-valid-token-1",
},
},
mic: []byte("valid-mic"),
maxRound: 2,
}, "testtarget",
),
},
HostKeyCallback: InsecureIgnoreHostKey(),
},
gssConfig: &GSSAPIWithMICConfig{
AllowLogin: func(conn ConnMetadata, srcName string) (*Permissions, error) {
if srcName != conn.User()+"@DOMAIN" {
return nil, fmt.Errorf("srcName is %s, conn user is %s", srcName, conn.User())
}
return nil, nil
},
Server: &FakeServer{
exchanges: []*exchange{
{
outToken: "server-valid-token-1",
expectedToken: "client-valid-token-1",
},
},
maxRound: 1,
expectedMIC: []byte("valid-mic"),
srcName: "testuser@DOMAIN",
},
},
},
{
config: &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
GSSAPIWithMICAuthMethod(
&FakeClient{
exchanges: []*exchange{
{
outToken: "client-valid-token-1",
},
{
expectedToken: "server-valid-token-1",
},
},
mic: []byte("valid-mic"),
maxRound: 2,
}, "testtarget",
),
},
HostKeyCallback: InsecureIgnoreHostKey(),
},
gssConfig: &GSSAPIWithMICConfig{
AllowLogin: func(conn ConnMetadata, srcName string) (*Permissions, error) {
return nil, fmt.Errorf("user is not allowed to login")
},
Server: &FakeServer{
exchanges: []*exchange{
{
outToken: "server-valid-token-1",
expectedToken: "client-valid-token-1",
},
},
maxRound: 1,
expectedMIC: []byte("valid-mic"),
srcName: "testuser@DOMAIN",
},
},
serverWantErr: "user is not allowed to login",
clientWantErr: "ssh: handshake failed: ssh: unable to authenticate",
},
{
config: &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
GSSAPIWithMICAuthMethod(
&FakeClient{
exchanges: []*exchange{
{
outToken: "client-valid-token-1",
},
{
expectedToken: "server-valid-token-1",
},
},
mic: []byte("valid-mic"),
maxRound: 2,
}, "testtarget",
),
},
HostKeyCallback: InsecureIgnoreHostKey(),
},
gssConfig: &GSSAPIWithMICConfig{
AllowLogin: func(conn ConnMetadata, srcName string) (*Permissions, error) {
if srcName != conn.User() {
return nil, fmt.Errorf("srcName is %s, conn user is %s", srcName, conn.User())
}
return nil, nil
},
Server: &FakeServer{
exchanges: []*exchange{
{
outToken: "server-invalid-token-1",
expectedToken: "client-valid-token-1",
},
},
maxRound: 1,
expectedMIC: []byte("valid-mic"),
srcName: "testuser@DOMAIN",
},
},
clientWantErr: "ssh: handshake failed: got \"server-invalid-token-1\", want token \"server-valid-token-1\"",
},
{
config: &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
GSSAPIWithMICAuthMethod(
&FakeClient{
exchanges: []*exchange{
{
outToken: "client-valid-token-1",
},
{
expectedToken: "server-valid-token-1",
},
},
mic: []byte("invalid-mic"),
maxRound: 2,
}, "testtarget",
),
},
HostKeyCallback: InsecureIgnoreHostKey(),
},
gssConfig: &GSSAPIWithMICConfig{
AllowLogin: func(conn ConnMetadata, srcName string) (*Permissions, error) {
if srcName != conn.User() {
return nil, fmt.Errorf("srcName is %s, conn user is %s", srcName, conn.User())
}
return nil, nil
},
Server: &FakeServer{
exchanges: []*exchange{
{
outToken: "server-valid-token-1",
expectedToken: "client-valid-token-1",
},
},
maxRound: 1,
expectedMIC: []byte("valid-mic"),
srcName: "testuser@DOMAIN",
},
},
serverWantErr: "got MICToken \"invalid-mic\", want \"valid-mic\"",
clientWantErr: "ssh: handshake failed: ssh: unable to authenticate",
},
}
for i, c := range testcases {
clientErr, serverErrs := tryAuthBothSides(t, c.config, c.gssConfig)
if (c.clientWantErr == "") != (clientErr == nil) {
t.Fatalf("client got %v, want %s, case %d", clientErr, c.clientWantErr, i)
}
if (c.serverWantErr == "") != (len(serverErrs) == 2 && serverErrs[1] == nil || len(serverErrs) == 1) {
t.Fatalf("server got err %v, want %s", serverErrs, c.serverWantErr)
}
if c.clientWantErr != "" {
if clientErr != nil && !strings.Contains(clientErr.Error(), c.clientWantErr) {
t.Fatalf("client got %v, want %s, case %d", clientErr, c.clientWantErr, i)
}
}
found := false
var errStrings []string
if c.serverWantErr != "" {
for _, err := range serverErrs {
found = found || (err != nil && strings.Contains(err.Error(), c.serverWantErr))
errStrings = append(errStrings, err.Error())
}
if !found {
t.Errorf("server got error %q, want substring %q, case %d", errStrings, c.serverWantErr, i)
}
}
}
}

error.go
package httputil
import (
"fmt"
"io"
"io/ioutil"
"net/http"
"strings"
)
const maxBody = 512
type httpError struct {
format string
args []interface{}
code int
status string
body []byte
}
// Error satisfies the "error" interface.
func (e httpError) Error() string {
r := strings.NewReplacer("%S", e.status, "%B", string(e.body))
return fmt.Sprintf(r.Replace(e.format), e.args...)
}
// IsHTTPStatus returns true if the given error is caused by an HTTP
// response with the given HTTP status.
func IsHTTPStatus(err error, status int) bool {
	if he, ok := err.(httpError); ok {
		return he.code == status
	}
	return false
}
// HTTPError converts an http response into an error.
//
// Note that this reads the body, so only use it when the response
// exists and you don't believe it's valid for your needs.
func HTTPError(res *http.Response) error {
return HTTPErrorf(res, "HTTP Error %S - %B")
}
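// checkFetch is an illustrative sketch, not part of the original file: it
// shows how HTTPErrorf and IsHTTPStatus compose. The URL and the function
// itself are hypothetical.
func checkFetch() error {
	res, err := http.Get("http://example.invalid/thing")
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		// %S expands to the status line, %B to (up to 512 bytes of) the body.
		ferr := HTTPErrorf(res, "fetching thing: %S - %B")
		if IsHTTPStatus(ferr, http.StatusNotFound) {
			return nil // missing is acceptable for this caller
		}
		return ferr
	}
	return nil
}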
// HTTPErrorf converts an http response into an error.
//
// This allows for standard printf-style formatting with the addition
// of %S for the http status (e.g. "404 Not Found") and %B for the
// body that was returned along with the error.
//
// Note that this reads the body, so only use it when the response
// exists and you don't believe it's valid for your needs.
func HTTPErrorf(res *http.Response, format string, args ...interface{}) error {
// The read error is explicitly ignored here because we're
// only trying to use it to produce a prettier error.
body, _ := ioutil.ReadAll(io.LimitReader(res.Body, maxBody))
return httpError{
format: format,
args: args,
code: res.StatusCode,
status: res.Status,
body: body,
}
}

vertexAttribPointer.ts
import { BaseCommand } from "./baseCommand";
import { WebGlConstants } from "../types/webglConstants";
export class VertexAttribPointer extends BaseCommand {
    public static readonly commandName = "vertexAttribPointer";

    protected get spiedCommandName(): string {
        return VertexAttribPointer.commandName;
    }

    protected stringifyArgs(args: IArguments): string[] {
const stringified = [];
stringified.push(args[0]);
stringified.push(args[1]);
stringified.push(WebGlConstants.stringifyWebGlConstant(args[2], "vertexAttribPointer"));
stringified.push(args[3]);
stringified.push(args[4]);
stringified.push(args[5]);
return stringified;
}
}

get-files.ts
import fs, { Dirent } from 'fs';
const getFiles = (dir: string, suffix: string): string[] => {
const files: Dirent[] = fs.readdirSync(dir, {
withFileTypes: true,
})
let commandFiles: string[] = [];
for (const file of files) {
if (file.isDirectory()){
commandFiles = [
...commandFiles,
...getFiles(`${dir}/${file.name}`, suffix),
];
        } else if (file.name.endsWith(suffix)) {
            commandFiles.push(`${dir}/${file.name}`);
        }
    }
return commandFiles;
}
export default getFiles;
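// Illustrative usage (not part of the original module; the './commands'
// directory and '.ts' suffix are hypothetical). Nested folders are walked
// recursively, depth-first:
//
//   import getFiles from './get-files';
//   const commandPaths = getFiles('./commands', '.ts');
//   for (const p of commandPaths) console.log(`command file: ${p}`);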

camera_stream.tsx
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as React from 'react';
import * as tf from '@tensorflow/tfjs-core';
import {
StyleSheet,
PixelRatio,
LayoutChangeEvent,
Platform
} from 'react-native';
import { Camera } from 'expo-camera';
import { GLView, ExpoWebGLRenderingContext } from 'expo-gl';
import { fromTexture, renderToGLView, detectGLCapabilities } from './camera';
interface WrappedComponentProps {
onLayout?: (event: LayoutChangeEvent) => void;
// tslint:disable-next-line: no-any
[index: string]: any;
}
interface Props {
cameraTextureWidth: number;
cameraTextureHeight: number;
resizeWidth: number;
resizeHeight: number;
resizeDepth: number;
autorender: boolean;
onReady: (
images: IterableIterator<tf.Tensor3D>,
updateCameraPreview: () => void,
gl: ExpoWebGLRenderingContext,
cameraTexture: WebGLTexture,
) => void;
}
interface State {
cameraLayout: { x: number; y: number; width: number; height: number };
}
const DEFAULT_AUTORENDER = true;
const DEFAULT_RESIZE_DEPTH = 3;

/**
 * A higher-order-component (HOC) that augments the [Expo.Camera](https://docs.expo.io/versions/latest/sdk/camera/)
* component with the ability to yield tensors representing the camera stream.
*
* Because the camera data will be consumed in the process, the original
 * camera component will not render any content. A GLView provided by this
 * component is used to render the camera preview.
*
* Notably the component allows on-the-fly resizing of the camera image to
* smaller dimensions, this speeds up data transfer between the native and
* javascript threads immensely.
*
* __In addition to__ all the props taken by Expo.Camera. The returned
* component takes the following props
*
 * - __cameraTextureWidth__: number — the width of the camera preview texture
 *   (see example and note below)
 * - __cameraTextureHeight__: number — the height of the camera preview texture
* (see example and note below)
* - __resizeWidth__: number — the width of the output tensor
* - __resizeHeight__: number — the height of the output tensor
* - __resizeDepth__: number — the depth (num of channels) of the output tensor.
* Should be 3 or 4.
* - __autorender__: boolean — if true the view will be automatically updated
* with the contents of the camera. Set this to false if you want more direct
* control on when rendering happens.
* - __onReady__: (
* images: IterableIterator<tf.Tensor3D>,
* updateCameraPreview: () => void,
* gl: ExpoWebGLRenderingContext,
* cameraTexture: WebGLTexture
* ) => void — When the component is mounted and ready this callback will
 *    be called and receive the following 3 elements:
 *       - __images__ is an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators)
 *         that yields tensors representing the camera image on demand.
* - __updateCameraPreview__ is a function that will update the WebGL render
* buffer with the contents of the camera. Not needed when `autorender`
* is true
* - __gl__ is the ExpoWebGl context used to do the rendering. After calling
* `updateCameraPreview` and any other operations you want to synchronize
* to the camera rendering you must call gl.endFrameExp() to display it
* on the screen. This is also provided in case you want to do other
* rendering using WebGL. Not needed when `autorender` is true.
* - __cameraTexture__ The underlying cameraTexture. This can be used to
* implement your own __updateCameraPreview__.
*
* ```js
* import { Camera } from 'expo-camera';
* import { cameraWithTensors } from '@tensorflow/tfjs-react-native';
*
* const TensorCamera = cameraWithTensors(Camera);
*
* class MyComponent {
*
* handleCameraStream(images, updatePreview, gl) {
* const loop = async () => {
* const nextImageTensor = images.next().value
*
* //
* // do something with tensor here
* //
*
* // if autorender is false you need the following two lines.
* // updatePreview();
* // gl.endFrameEXP();
*
* requestAnimationFrame(loop);
* }
* loop();
* }
*
* render() {
* // Currently expo does not support automatically determining the
* // resolution of the camera texture used. So it must be determined
* // empirically for the supported devices and preview size.
*
* let textureDims;
* if (Platform.OS === 'ios') {
* textureDims = {
* height: 1920,
* width: 1080,
* };
* } else {
* textureDims = {
* height: 1200,
* width: 1600,
* };
* }
*
* return <View>
* <TensorCamera
* // Standard Camera props
* style={styles.camera}
* type={Camera.Constants.Type.front}
* // Tensor related props
* cameraTextureHeight={textureDims.height}
* cameraTextureWidth={textureDims.width}
* resizeHeight={200}
* resizeWidth={152}
* resizeDepth={3}
* onReady={this.handleCameraStream}
* autorender={true}
* />
* </View>
* }
* }
* ```
*
* @param CameraComponent an expo Camera component constructor
*/
/** @doc {heading: 'Media', subheading: 'Camera'} */
export function cameraWithTensors<T extends WrappedComponentProps>(
// tslint:disable-next-line: variable-name
CameraComponent: React.ComponentType<T>,
) {
return class CameraWithTensorStream
extends React.Component<T & Props, State> {
camera: Camera;
glView: GLView;
glContext: ExpoWebGLRenderingContext;
rafID: number;
constructor(props: T & Props) {
super(props);
this.onCameraLayout = this.onCameraLayout.bind(this);
this.onGLContextCreate = this.onGLContextCreate.bind(this);
this.state = {
cameraLayout: null,
};
}
componentWillUnmount() {
cancelAnimationFrame(this.rafID);
if(this.glContext) {
GLView.destroyContextAsync(this.glContext);
}
this.camera = null;
this.glView = null;
this.glContext = null;
}
/*
* Measure the camera component when it is laid out so that we can overlay
* the GLView.
*/
onCameraLayout(event: LayoutChangeEvent) {
const { x, y, width, height } = event.nativeEvent.layout;
this.setState({
cameraLayout: { x, y, width, height },
});
}
/**
* Creates a WebGL texture that is updated by the underlying platform to
* contain the contents of the camera.
*/
async createCameraTexture(): Promise<WebGLTexture> {
if (this.glView != null && this.camera != null) {
//@ts-ignore
return this.glView.createCameraTextureAsync(this.camera);
} else {
throw new Error('Expo GL context or camera not available');
}
}
/**
     * Callback for GL context creation. We do most of the work of setting
* up the component here.
* @param gl
*/
async onGLContextCreate(gl: ExpoWebGLRenderingContext) {
this.glContext = gl;
const cameraTexture = await this.createCameraTexture();
await detectGLCapabilities(gl);
// Optionally set up a render loop that just displays the camera texture
// to the GLView.
const autorender =
this.props.autorender != null
? this.props.autorender
: DEFAULT_AUTORENDER;
const updatePreview = this.previewUpdateFunc(gl, cameraTexture);
if (autorender) {
const renderLoop = () => {
updatePreview();
gl.endFrameEXP();
this.rafID = requestAnimationFrame(renderLoop);
};
renderLoop();
}
const {
resizeHeight,
resizeWidth,
resizeDepth,
cameraTextureHeight,
cameraTextureWidth,
} = this.props;
//
// Set up a generator function that yields tensors representing the
// camera on demand.
//
const cameraStreamView = this;
function* nextFrameGenerator() {
const RGBA_DEPTH = 4;
const textureDims = {
height: cameraTextureHeight,
width: cameraTextureWidth,
depth: RGBA_DEPTH,
};
const targetDims = {
height: resizeHeight,
width: resizeWidth,
depth: resizeDepth || DEFAULT_RESIZE_DEPTH,
};
while (cameraStreamView.glContext != null) {
const imageTensor = fromTexture(
gl,
cameraTexture,
textureDims,
targetDims
);
yield imageTensor;
}
}
const nextFrameIterator = nextFrameGenerator();
// Pass the utility functions to the caller provided callback
this.props.onReady(nextFrameIterator, updatePreview, gl, cameraTexture);
}
/**
* Helper function that can be used to update the GLView framebuffer.
*
* @param gl the open gl texture to render to
* @param cameraTexture the texture to draw.
*/
previewUpdateFunc(
gl: ExpoWebGLRenderingContext,
cameraTexture: WebGLTexture
) {
const renderFunc = () => {
const { cameraLayout } = this.state;
const width = PixelRatio.getPixelSizeForLayoutSize(cameraLayout.width);
const height = PixelRatio.getPixelSizeForLayoutSize(
cameraLayout.height
);
const isFrontCamera =
this.camera.props.type === Camera.Constants.Type.front;
const flipHorizontal =
Platform.OS === 'ios' && isFrontCamera ? false : true;
renderToGLView(gl, cameraTexture, { width, height }, flipHorizontal);
};
return renderFunc.bind(this);
}
/**
* Render the component
*/
render() {
const { cameraLayout } = this.state;
// Before passing props into the original wrapped component we want to
// remove the props that we augment the component with.
// Use this object to use typescript to check that we are removing
// all the tensorCamera properties.
const tensorCameraPropMap: Props = {
cameraTextureWidth: null,
cameraTextureHeight: null,
resizeWidth: null,
resizeHeight: null,
resizeDepth: null,
autorender: null,
onReady: null,
};
const tensorCameraPropKeys = Object.keys(tensorCameraPropMap);
const cameraProps: WrappedComponentProps = {};
const allProps = Object.keys(this.props);
for (let i = 0; i < allProps.length; i++) {
const key = allProps[i];
if (!tensorCameraPropKeys.includes(key)) {
cameraProps[key] = this.props[key];
}
}
// Set up an on layout handler
const onlayout = this.props.onLayout ? (e: LayoutChangeEvent) => {
this.props.onLayout(e);
this.onCameraLayout(e);
} : this.onCameraLayout;
cameraProps.onLayout = onlayout;
const cameraComp = (
//@ts-ignore see https://github.com/microsoft/TypeScript/issues/30650
<CameraComponent
key='camera-with-tensor-camera-view'
{...(cameraProps)}
ref={(ref: Camera) => (this.camera = ref)}
/>
);
// Create the glView if the camera has mounted.
let glViewComponent = null;
if (cameraLayout != null) {
const styles = StyleSheet.create({
glView: {
position: 'absolute',
left: cameraLayout.x,
top: cameraLayout.y,
width: cameraLayout.width,
height: cameraLayout.height,
zIndex: this.props.style.zIndex ?
parseInt(this.props.style.zIndex, 10) + 10 : 10,
}
});
glViewComponent = (
<GLView
key='camera-with-tensor-gl-view'
style={styles.glView}
onContextCreate={this.onGLContextCreate}
ref={ref => (this.glView = ref)}
/>
);
}
return [cameraComp, glViewComponent];
}
  };
}

base.go
// Copyright 2021 Flant CJSC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"regexp"
"strings"
"time"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"sigs.k8s.io/yaml"
"github.com/deckhouse/deckhouse/dhctl/pkg/kubernetes/client"
"github.com/deckhouse/deckhouse/dhctl/pkg/log"
"github.com/deckhouse/deckhouse/dhctl/pkg/util/retry"
)
const (
candiDir = "/deckhouse/candi"
providerSchemaFilenameSuffix = "_configuration.yaml"
)
func numerateManifestLines(manifest []byte) string {
manifestLines := strings.Split(string(manifest), "\n")
builder := strings.Builder{}
for index, line := range manifestLines {
builder.WriteString(fmt.Sprintf("%d\t%s\n", index+1, line))
}
return builder.String()
}
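// For example (illustrative), numerateManifestLines([]byte("a: 1\nb: 2"))
// returns "1\ta: 1\n2\tb: 2\n"; the validation errors below embed this
// numbered dump so schema failures point at a specific manifest line.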
func ParseConfig(path string) (*MetaConfig, error) {
fileContent, err := ioutil.ReadFile(path)
if err != nil {
return nil, fmt.Errorf("loading config file: %v", err)
}
return ParseConfigFromData(string(fileContent))
}
func ParseConfigFromCluster(kubeCl *client.KubernetesClient) (*MetaConfig, error) {
var metaConfig *MetaConfig
var err error
err = log.Process("common", "Get Cluster configuration", func() error {
return retry.NewLoop("Get Cluster configuration from Kubernetes cluster", 10, 5*time.Second).Run(func() error {
metaConfig, err = parseConfigFromCluster(kubeCl)
return err
})
})
if err != nil {
return nil, err
}
return metaConfig, nil
}
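// ParseConfigInCluster is the silent variant of ParseConfigFromCluster, used
// when dhctl itself runs inside the cluster: same retrieval logic, but
// without the interactive log wrapper.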
func ParseConfigInCluster(kubeCl *client.KubernetesClient) (*MetaConfig, error) {
var metaConfig *MetaConfig
var err error
err = retry.NewSilentLoop("Get Cluster configuration from inside Kubernetes cluster", 5, 5*time.Second).Run(func() error {
metaConfig, err = parseConfigFromCluster(kubeCl)
return err
})
if err != nil {
return nil, err
}
return metaConfig, nil
}
func parseConfigFromCluster(kubeCl *client.KubernetesClient) (*MetaConfig, error) {
metaConfig := MetaConfig{}
schemaStore := NewSchemaStore()
clusterConfig, err := kubeCl.CoreV1().Secrets("kube-system").Get(context.TODO(), "d8-cluster-configuration", metav1.GetOptions{})
if err != nil {
return nil, err
}
clusterConfigData := clusterConfig.Data["cluster-configuration.yaml"]
_, err = schemaStore.Validate(&clusterConfigData)
if err != nil {
return nil, err
}
var parsedClusterConfig map[string]json.RawMessage
if err := yaml.Unmarshal(clusterConfigData, &parsedClusterConfig); err != nil {
return nil, err
}
metaConfig.ClusterConfig = parsedClusterConfig
var clusterType string
if err := json.Unmarshal(parsedClusterConfig["clusterType"], &clusterType); err != nil {
return nil, err
}
	if clusterType == CloudClusterType {
		providerClusterConfig, err := kubeCl.CoreV1().Secrets("kube-system").Get(context.TODO(), "d8-provider-cluster-configuration", metav1.GetOptions{})
		if err != nil {
			return nil, err
		}
providerClusterConfigData := providerClusterConfig.Data["cloud-provider-cluster-configuration.yaml"]
_, err = schemaStore.Validate(&providerClusterConfigData)
if err != nil {
return nil, err
}
var parsedProviderClusterConfig map[string]json.RawMessage
if err := yaml.Unmarshal(providerClusterConfigData, &parsedProviderClusterConfig); err != nil {
return nil, err
}
metaConfig.ProviderClusterConfig = parsedProviderClusterConfig
}
return metaConfig.Prepare()
}
func ParseConfigFromData(configData string) (*MetaConfig, error) {
schemaStore := NewSchemaStore()
bigFileTmp := strings.TrimSpace(configData)
docs := regexp.MustCompile(`(?:^|\s*\n)---\s*`).Split(bigFileTmp, -1)
metaConfig := MetaConfig{}
for _, doc := range docs {
doc = strings.TrimSpace(doc)
if doc == "" {
continue
}
docData := []byte(doc)
index, err := schemaStore.Validate(&docData)
if err != nil {
return nil, fmt.Errorf("config validation: %v\ndata: \n%s\n", err, numerateManifestLines(docData))
}
var data map[string]json.RawMessage
if err = yaml.Unmarshal(docData, &data); err != nil {
return nil, fmt.Errorf("config unmarshal: %v\ndata: \n%s\n", err, numerateManifestLines(docData))
}
switch {
case index.Kind == "InitConfiguration":
metaConfig.InitClusterConfig = data
case index.Kind == "ClusterConfiguration":
metaConfig.ClusterConfig = data
case index.Kind == "StaticClusterConfiguration":
metaConfig.StaticClusterConfig = data
case strings.HasSuffix(index.Kind, "ClusterConfiguration"):
metaConfig.ProviderClusterConfig = data
}
}
return metaConfig.Prepare()
}

ajoutEtudiant_20200709004553.js
function val() {
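    // Toggle scholarship-dependent fields: "boursier" reveals the grant
    // amount and housing inputs and hides the address; "nonBoursier" does
    // the reverse and clears the now-hidden values.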
etat = $("#etudiant_Etat").val();
if (etat == "boursier") {
//alert("bb");
document.getElementById("etudiant_montant").style.display = "block";
document.getElementById("etudiant_isHoused").style.display = "block";
document.getElementById("etudiant_adresse").style.display = "none";
$("#etudiant_adresse").val() = "";
} else if (etat == "nonBoursier") {
document.getElementById("etudiant_montant").style.display = "none";
document.getElementById("etudiant_isHoused").style.display = "none";
document.getElementById("etudiant_loger").style.display = "none";
$("#etudiant_montant").val() = "";
$("#etudiant_isHoused").val() = "";
$("#etudiant_loger").val() = "";
document.getElementById("etudiant_adresse").style.display = "block";
//alert("nnbb");
}
}
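// Show the housing-details field only when the student reports being housed ("oui").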
function valLoge() {
loge = $("#etudiant_isHoused").val();
if (loge == "oui") {
document.getElementById("etudiant_loger").style.display = "block";
} else if (loge == "non") {
document.getElementById("etudiant_loger").style.display = "none";
}
}

mod.rs
// Copyright (c) The Dijets Core Contributors
// SPDX-License-Identifier: Apache-2.0
mod state;
use super::absint::*;
use crate::{
diagnostics::Diagnostics,
hlir::ast::*,
parser::ast::{BinOp_, StructName, Var},
shared::{unique_map::UniqueMap, CompilationEnv},
};
use move_ir_types::location::*;
use state::*;
use std::collections::BTreeMap;
//**************************************************************************************************
// Entry and trait bindings
//**************************************************************************************************
struct BorrowSafety {
local_numbers: UniqueMap<Var, usize>,
}
impl BorrowSafety {
fn new<T>(local_types: &UniqueMap<Var, T>) -> Self {
let mut local_numbers = UniqueMap::new();
for (idx, (v, _)) in local_types.key_cloned_iter().enumerate() {
local_numbers.add(v, idx).unwrap();
}
Self { local_numbers }
}
}
struct Context<'a, 'b> {
local_numbers: &'a UniqueMap<Var, usize>,
borrow_state: &'b mut BorrowState,
diags: Diagnostics,
}
impl<'a, 'b> Context<'a, 'b> {
fn new(safety: &'a BorrowSafety, borrow_state: &'b mut BorrowState) -> Self {
let local_numbers = &safety.local_numbers;
Self {
local_numbers,
borrow_state,
diags: Diagnostics::new(),
}
}
fn get_diags(self) -> Diagnostics {
self.diags
}
    fn add_diags(&mut self, additional: Diagnostics) {
        self.diags.extend(additional);
    }
}
impl TransferFunctions for BorrowSafety {
type State = BorrowState;
fn execute(
&mut self,
pre: &mut Self::State,
_lbl: Label,
_idx: usize,
cmd: &Command,
) -> Diagnostics {
let mut context = Context::new(self, pre);
command(&mut context, cmd);
context
.borrow_state
.canonicalize_locals(context.local_numbers);
context.get_diags()
}
}
impl AbstractInterpreter for BorrowSafety {}
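/// Runs the borrow-safety analysis over one function: seeds the initial
/// borrow state from the signature and acquires set, canonicalizes locals to
/// stable indices, abstractly interprets the CFG, and reports diagnostics via
/// the compilation environment. Returns the final borrow state per block.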
pub fn verify(
compilation_env: &mut CompilationEnv,
signature: &FunctionSignature,
acquires: &BTreeMap<StructName, Loc>,
locals: &UniqueMap<Var, SingleType>,
cfg: &super::cfg::BlockCFG,
) -> BTreeMap<Label, BorrowState> {
let mut initial_state =
BorrowState::initial(locals, acquires.clone(), compilation_env.has_diags());
initial_state.bind_arguments(&signature.parameters);
let mut safety = BorrowSafety::new(locals);
initial_state.canonicalize_locals(&safety.local_numbers);
let (final_state, ds) = safety.analyze_function(cfg, initial_state);
compilation_env.add_diags(ds);
final_state
}
//**************************************************************************************************
// Command
//**************************************************************************************************
fn command(context: &mut Context, sp!(loc, cmd_): &Command) {
use Command_ as C;
match cmd_ {
C::Assign(ls, e) => {
let values = exp(context, e);
lvalues(context, ls, values);
}
C::Mutate(el, er) => {
let value = assert_single_value(exp(context, er));
assert!(!value.is_ref());
let lvalue = assert_single_value(exp(context, el));
let diags = context.borrow_state.mutate(*loc, lvalue);
context.add_diags(diags);
}
C::JumpIf { cond: e, .. } => {
let value = assert_single_value(exp(context, e));
assert!(!value.is_ref());
}
C::IgnoreAndPop { exp: e, .. } => {
let values = exp(context, e);
context.borrow_state.release_values(values);
}
C::Return { exp: e, .. } => {
let values = exp(context, e);
let diags = context.borrow_state.return_(*loc, values);
context.add_diags(diags);
}
C::Abort(e) => {
let value = assert_single_value(exp(context, e));
assert!(!value.is_ref());
context.borrow_state.abort()
}
C::Jump { .. } => (),
C::Break | C::Continue => panic!("ICE break/continue not translated to jumps"),
}
}
fn lvalues(context: &mut Context, ls: &[LValue], values: Values) {
assert!(ls.len() == values.len());
ls.iter()
.zip(values)
.for_each(|(l, value)| lvalue(context, l, value))
}
fn lvalue(context: &mut Context, sp!(loc, l_): &LValue, value: Value) {
use LValue_ as L;
match l_ {
L::Ignore => {
context.borrow_state.release_value(value);
}
L::Var(v, _) => {
let diags = context.borrow_state.assign_local(*loc, v, value);
context.add_diags(diags)
}
L::Unpack(_, _, fields) => {
assert!(!value.is_ref());
fields
.iter()
.for_each(|(_, l)| lvalue(context, l, Value::NonRef))
}
}
}
fn exp(context: &mut Context, parent_e: &Exp) -> Values {
use UnannotatedExp_ as E;
let eloc = &parent_e.exp.loc;
let svalue = || vec![Value::NonRef];
match &parent_e.exp.value {
E::Move { var, .. } => {
let (diags, value) = context.borrow_state.move_local(*eloc, var);
context.add_diags(diags);
vec![value]
}
E::Copy { var, .. } => {
let (diags, value) = context.borrow_state.copy_local(*eloc, var);
context.add_diags(diags);
vec![value]
}
E::BorrowLocal(mut_, var) => {
let (diags, value) = context.borrow_state.borrow_local(*eloc, *mut_, var);
context.add_diags(diags);
assert!(value.is_ref());
vec![value]
}
E::Freeze(e) => {
let evalue = assert_single_value(exp(context, e));
let (diags, value) = context.borrow_state.freeze(*eloc, evalue);
context.add_diags(diags);
vec![value]
}
E::Dereference(e) => {
let evalue = assert_single_value(exp(context, e));
let (errors, value) = context.borrow_state.dereference(*eloc, evalue);
context.add_diags(errors);
vec![value]
}
E::Borrow(mut_, e, f) => {
let evalue = assert_single_value(exp(context, e));
let (diags, value) = context.borrow_state.borrow_field(*eloc, *mut_, evalue, f);
context.add_diags(diags);
vec![value]
}
E::Builtin(b, e) => {
let evalues = exp(context, e);
let b: &BuiltinFunction = b;
match b {
sp!(_, BuiltinFunction_::BorrowGlobal(mut_, t)) => {
assert!(!assert_single_value(evalues).is_ref());
let (diags, value) = context.borrow_state.borrow_global(*eloc, *mut_, t);
context.add_diags(diags);
vec![value]
}
sp!(_, BuiltinFunction_::MoveFrom(t)) => {
assert!(!assert_single_value(evalues).is_ref());
let (diags, value) = context.borrow_state.move_from(*eloc, t);
assert!(!value.is_ref());
context.add_diags(diags);
vec![value]
}
_ => {
let ret_ty = &parent_e.ty;
let (diags, values) =
context
.borrow_state
.call(*eloc, evalues, &BTreeMap::new(), ret_ty);
context.add_diags(diags);
values
}
}
}
E::ModuleCall(mcall) => {
let evalues = exp(context, &mcall.arguments);
let ret_ty = &parent_e.ty;
let (diags, values) =
context
.borrow_state
.call(*eloc, evalues, &mcall.acquires, ret_ty);
context.add_diags(diags);
values
}
E::Unit { .. } | E::Value(_) | E::Constant(_) | E::Spec(_, _) | E::UnresolvedError => {
svalue()
}
E::Cast(e, _) | E::UnaryExp(_, e) => {
let v = exp(context, e);
assert!(!assert_single_value(v).is_ref());
svalue()
}
E::BinopExp(e1, sp!(_, BinOp_::Eq), e2) | E::BinopExp(e1, sp!(_, BinOp_::Neq), e2) => {
let v1 = assert_single_value(exp(context, e1));
let v2 = assert_single_value(exp(context, e2));
            // must check separately in case a local with an unassigned value is used
if v1.is_ref() {
let (errors, _) = context.borrow_state.dereference(e1.exp.loc, v1);
assert!(errors.is_empty(), "ICE eq freezing failed");
}
if v2.is_ref() {
let (errors, _) = context.borrow_state.dereference(e1.exp.loc, v2);
assert!(errors.is_empty(), "ICE eq freezing failed");
}
svalue()
}
E::BinopExp(e1, _, e2) => {
let v1 = assert_single_value(exp(context, e1));
let v2 = assert_single_value(exp(context, e2));
assert!(!v1.is_ref());
assert!(!v2.is_ref());
svalue()
}
E::Pack(_, _, fields) => {
fields.iter().for_each(|(_, _, e)| {
let arg = exp(context, e);
assert!(!assert_single_value(arg).is_ref());
});
svalue()
}
E::ExpList(es) => es
.iter()
.flat_map(|item| exp_list_item(context, item))
.collect(),
E::Unreachable => panic!("ICE should not analyze dead code"),
}
}
fn exp_list_item(context: &mut Context, item: &ExpListItem) -> Values {
match item {
ExpListItem::Single(e, _) | ExpListItem::Splat(_, e, _) => exp(context, e),
}
}

dev_grpc.pb.go
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
package devrpc
import (
context "context"
lnrpc "github.com/lightningnetwork/lnd/lnrpc"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// DevClient is the client API for Dev service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type DevClient interface {
//
//ImportGraph imports a ChannelGraph into the graph database. Should only be
//used for development.
ImportGraph(ctx context.Context, in *lnrpc.ChannelGraph, opts ...grpc.CallOption) (*ImportGraphResponse, error)
}
type devClient struct {
cc grpc.ClientConnInterface
}
func NewDevClient(cc grpc.ClientConnInterface) DevClient {
return &devClient{cc}
}
func (c *devClient) ImportGraph(ctx context.Context, in *lnrpc.ChannelGraph, opts ...grpc.CallOption) (*ImportGraphResponse, error) {
out := new(ImportGraphResponse)
err := c.cc.Invoke(ctx, "/devrpc.Dev/ImportGraph", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// DevServer is the server API for Dev service.
// All implementations must embed UnimplementedDevServer
// for forward compatibility
type DevServer interface {
//
//ImportGraph imports a ChannelGraph into the graph database. Should only be
//used for development.
ImportGraph(context.Context, *lnrpc.ChannelGraph) (*ImportGraphResponse, error)
mustEmbedUnimplementedDevServer()
}
// UnimplementedDevServer must be embedded to have forward compatible implementations.
type UnimplementedDevServer struct {
}
func (UnimplementedDevServer) ImportGraph(context.Context, *lnrpc.ChannelGraph) (*ImportGraphResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ImportGraph not implemented")
}
func (UnimplementedDevServer) mustEmbedUnimplementedDevServer() {}
// UnsafeDevServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to DevServer will
// result in compilation errors.
type UnsafeDevServer interface {
mustEmbedUnimplementedDevServer()
}
func RegisterDevServer(s grpc.ServiceRegistrar, srv DevServer) {
s.RegisterService(&Dev_ServiceDesc, srv)
}
func _Dev_ImportGraph_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(lnrpc.ChannelGraph)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DevServer).ImportGraph(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/devrpc.Dev/ImportGraph",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DevServer).ImportGraph(ctx, req.(*lnrpc.ChannelGraph))
}
return interceptor(ctx, in, info, handler)
}
// Dev_ServiceDesc is the grpc.ServiceDesc for Dev service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Dev_ServiceDesc = grpc.ServiceDesc{
ServiceName: "devrpc.Dev",
HandlerType: (*DevServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "ImportGraph",
Handler: _Dev_ImportGraph_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "devrpc/dev.proto",
}

shell.go
// package shell implements a remote API interface for a running ipfs daemon
package shell
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
gohttp "net/http"
"os"
"path"
"strings"
"time"
files "github.com/ipfs/go-ipfs-cmdkit/files"
homedir "github.com/mitchellh/go-homedir"
ma "github.com/multiformats/go-multiaddr"
manet "github.com/multiformats/go-multiaddr-net"
tar "github.com/whyrusleeping/tar-utils"
p2pmetrics "github.com/libp2p/go-libp2p-metrics"
)
const (
DefaultPathName = ".ipfs"
DefaultPathRoot = "~/" + DefaultPathName
DefaultApiFile = "api"
EnvDir = "IPFS_PATH"
)
type Shell struct {
url string
httpcli *gohttp.Client
}
func NewLocalShell() *Shell {
baseDir := os.Getenv(EnvDir)
if baseDir == "" {
baseDir = DefaultPathRoot
}
baseDir, err := homedir.Expand(baseDir)
if err != nil {
return nil
}
apiFile := path.Join(baseDir, DefaultApiFile)
if _, err := os.Stat(apiFile); err != nil {
return nil
}
api, err := ioutil.ReadFile(apiFile)
if err != nil {
return nil
}
return NewShell(strings.TrimSpace(string(api)))
}
func NewShell(url string) *Shell {
c := &gohttp.Client{
Transport: &gohttp.Transport{
Proxy: gohttp.ProxyFromEnvironment,
DisableKeepAlives: true,
},
}
return NewShellWithClient(url, c)
}
func NewShellWithClient(url string, c *gohttp.Client) *Shell {
if a, err := ma.NewMultiaddr(url); err == nil {
_, host, err := manet.DialArgs(a)
if err == nil {
url = host
}
}
return &Shell{
url: url,
httpcli: c,
}
}
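// exampleRoundTrip is an illustrative sketch, not part of the original file:
// it adds a small blob to a local daemon and reads it back. The API address
// below is the daemon's conventional default and may differ in your setup.
func exampleRoundTrip() (string, error) {
	sh := NewShell("localhost:5001")
	// Add pins the content by default and returns its multihash.
	hash, err := sh.Add(strings.NewReader("hello"))
	if err != nil {
		return "", err
	}
	// Cat returns a ReadCloser; the caller must drain and close it.
	rc, err := sh.Cat("/ipfs/" + hash)
	if err != nil {
		return "", err
	}
	defer rc.Close()
	data, err := ioutil.ReadAll(rc)
	return string(data), err
}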
func (s *Shell) SetTimeout(d time.Duration) {
s.httpcli.Timeout = d
}
func (s *Shell) Request(command string, args ...string) *RequestBuilder {
return &RequestBuilder{
command: command,
args: args,
shell: s,
}
}
type IdOutput struct {
ID string
PublicKey string
Addresses []string
AgentVersion string
ProtocolVersion string
}
// ID gets information about a given peer. Arguments:
//
// peer: peer.ID of the node to look up. If no peer is specified,
// return information about the local peer.
func (s *Shell) ID(peer ...string) (*IdOutput, error) {
if len(peer) > 1 {
return nil, fmt.Errorf("Too many peer arguments")
}
var out IdOutput
if err := s.Request("id", peer...).Exec(context.Background(), &out); err != nil {
return nil, err
}
return &out, nil
}
// Cat the content at the given path. Callers need to drain and close the returned reader after usage.
func (s *Shell) Cat(path string) (io.ReadCloser, error) {
resp, err := s.Request("cat", path).Send(context.Background())
if err != nil {
return nil, err
}
if resp.Error != nil {
return nil, resp.Error
}
return resp.Output, nil
}
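// Sketch of the drain-and-close contract noted above; the path is a placeholder:
//
//	rc, err := sh.Cat("/ipfs/<cid>/readme")
//	if err != nil {
//		// handle error
//	}
//	defer rc.Close()
//	data, err := ioutil.ReadAll(rc) // draining lets the HTTP connection be reused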
type object struct {
Hash string
}
// Add adds a file to ipfs from the given reader and returns the hash of the added file
func (s *Shell) Add(r io.Reader) (string, error) {
return s.AddWithOpts(r, true, false)
}
// AddNoPin adds a file to ipfs from the given reader and returns the hash of the added file, without pinning it
func (s *Shell) AddNoPin(r io.Reader) (string, error) {
return s.AddWithOpts(r, false, false)
}
func (s *Shell) AddWithOpts(r io.Reader, pin bool, rawLeaves bool) (string, error) {
var rc io.ReadCloser
if rclose, ok := r.(io.ReadCloser); ok {
rc = rclose
} else {
rc = ioutil.NopCloser(r)
}
// handler expects an array of files
fr := files.NewReaderFile("", "", rc, nil)
slf := files.NewSliceFile("", "", []files.File{fr})
fileReader := files.NewMultiFileReader(slf, true)
var out object
return out.Hash, s.Request("add").
Option("progress", false).
Option("pin", pin).
Option("raw-leaves", rawLeaves).
Body(fileReader).
Exec(context.Background(), &out)
}
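// Sketch: adding in-memory content; any io.Reader works, no file required.
//
//	hash, err := sh.Add(strings.NewReader("hello from go-ipfs-api"))
//	// or, explicitly: sh.AddWithOpts(r, true /* pin */, true /* raw leaves */)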
func (s *Shell) AddLink(target string) (string, error) {
link := files.NewLinkFile("", "", target, nil)
slf := files.NewSliceFile("", "", []files.File{link})
reader := files.NewMultiFileReader(slf, true)
var out object
return out.Hash, s.Request("add").Body(reader).Exec(context.Background(), &out)
}
// AddDir adds a directory recursively with all of the files under it
func (s *Shell) AddDir(dir string) (string, error) {
stat, err := os.Lstat(dir)
if err != nil {
return "", err
}
sf, err := files.NewSerialFile(path.Base(dir), dir, false, stat)
if err != nil {
return "", err
}
slf := files.NewSliceFile("", dir, []files.File{sf})
reader := files.NewMultiFileReader(slf, true)
resp, err := s.Request("add").
Option("recursive", true).
Body(reader).
Send(context.Background())
if err != nil {
return "", nil
}
defer resp.Close()
if resp.Error != nil {
return "", resp.Error
}
dec := json.NewDecoder(resp.Output)
var final string
for {
var out object
err = dec.Decode(&out)
if err != nil {
if err == io.EOF {
break
}
return "", err
}
final = out.Hash
}
if final == "" |
return final, nil
}
const (
TRaw = iota
TDirectory
TFile
TMetadata
TSymlink
)
// List entries at the given path
func (s *Shell) List(path string) ([]*LsLink, error) {
var out struct{ Objects []LsObject }
err := s.Request("ls", path).Exec(context.Background(), &out)
if err != nil {
return nil, err
}
if len(out.Objects) != 1 {
return nil, errors.New("bad response from server")
}
return out.Objects[0].Links, nil
}
type LsLink struct {
Hash string
Name string
Size uint64
Type int
}
type LsObject struct {
Links []*LsLink
LsLink
}
// Pin the given path
func (s *Shell) Pin(path string) error {
return s.Request("pin/add", path).
Option("recursive", true).
Exec(context.Background(), nil)
}
// Unpin the given path
func (s *Shell) Unpin(path string) error {
return s.Request("pin/rm", path).
Option("recursive", true).
Exec(context.Background(), nil)
}
const (
DirectPin = "direct"
RecursivePin = "recursive"
IndirectPin = "indirect"
)
type PinInfo struct {
Type string
}
// Pins returns a map of the pin hashes to their info (currently just the
// pin type, one of DirectPin, RecursivePin, or IndirectPin). A map is returned
// instead of a slice because it is easier to do existence lookup by map key
// than unordered array searching. The map is likely to be more useful to a
// client than a flat list.
func (s *Shell) Pins() (map[string]PinInfo, error) {
var raw struct{ Keys map[string]PinInfo }
return raw.Keys, s.Request("pin/ls").Exec(context.Background(), &raw)
}
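// Sketch of a lookup against the returned map:
//
//	pins, err := sh.Pins()
//	if err != nil {
//		// handle error
//	}
//	if info, ok := pins[hash]; ok && info.Type == RecursivePin {
//		// hash is pinned recursively
//	}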
type PeerInfo struct {
Addrs []string
ID string
}
func (s *Shell) FindPeer(peer string) (*PeerInfo, error) {
var peers struct{ Responses []PeerInfo }
err := s.Request("dht/findpeer", peer).Exec(context.Background(), &peers)
if err != nil {
return nil, err
}
if len(peers.Responses) == 0 {
return nil, errors.New("peer not found")
}
return &peers.Responses[0], nil
}
func (s *Shell) Refs(hash string, recursive bool) (<-chan string, error) {
resp, err := s.Request("refs", hash).
Option("recursive", recursive).
Send(context.Background())
if err != nil {
return nil, err
}
	if resp.Error != nil {
		resp.Close()
		return nil, resp.Error
	}
out := make(chan string)
go func() {
var ref struct {
Ref string
}
		defer close(out)
		defer resp.Close() // keep the response open until the decoder goroutine finishes
dec := json.NewDecoder(resp.Output)
for {
err := dec.Decode(&ref)
if err != nil {
return
}
if len(ref.Ref) > 0 {
out <- ref.Ref
}
}
}()
return out, nil
}
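// Sketch: the goroutine above closes the channel when decoding stops, so a
// plain range loop terminates on its own:
//
//	refs, err := sh.Refs(hash, true)
//	if err != nil {
//		// handle error
//	}
//	for r := range refs {
//		fmt.Println(r)
//	}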
func (s *Shell) Patch(root, action string, args ...string) (string, error) {
var out object
return out.Hash, s.Request("object/patch/"+action, root).
Arguments(args...).
Exec(context.Background(), &out)
}
func (s *Shell) PatchData(root string, set bool, data interface{}) (string, error) {
var read io.Reader
switch d := data.(type) {
case io.Reader:
read = d
case []byte:
read = bytes.NewReader(d)
case string:
read = strings.NewReader(d)
default:
return "", fmt.Errorf("unrecognized type: %#v", data)
}
cmd := "append-data"
if set {
cmd = "set-data"
}
fr := files.NewReaderFile("", "", ioutil.NopCloser(read), nil)
slf := files.NewSliceFile("", "", []files.File{fr})
fileReader := files.NewMultiFileReader(slf, true)
var out object
return out.Hash, s.Request("object/patch/"+cmd, root).
Body(fileReader).
Exec(context.Background(), &out)
}
func (s *Shell) PatchLink(root, path, childhash string, create bool) (string, error) {
var out object
return out.Hash, s.Request("object/patch/add-link", root, path, childhash).
Option("create", true).
Exec(context.Background(), &out)
}
func (s *Shell) Get(hash, outdir string) error {
resp, err := s.Request("get", hash).Option("create", true).Send(context.Background())
if err != nil {
return err
}
defer resp.Close()
if resp.Error != nil {
return resp.Error
}
extractor := &tar.Extractor{Path: outdir}
return extractor.Extract(resp.Output)
}
func (s *Shell) NewObject(template string) (string, error) {
var out object
req := s.Request("object/new")
if template != "" {
req.Arguments(template)
}
return out.Hash, req.Exec(context.Background(), &out)
}
func (s *Shell) ResolvePath(path string) (string, error) {
var out struct {
Path string
}
err := s.Request("resolve", path).Exec(context.Background(), &out)
if err != nil {
return "", err
}
return strings.TrimPrefix(out.Path, "/ipfs/"), nil
}
// Version returns the ipfs version and commit sha.
func (s *Shell) Version() (string, string, error) {
ver := struct {
Version string
Commit string
}{}
if err := s.Request("version").Exec(context.Background(), &ver); err != nil {
return "", "", err
}
return ver.Version, ver.Commit, nil
}
func (s *Shell) IsUp() bool {
_, _, err := s.Version()
return err == nil
}
func (s *Shell) BlockStat(path string) (string, int, error) {
var inf struct {
Key string
Size int
}
if err := s.Request("block/stat", path).Exec(context.Background(), &inf); err != nil {
return "", 0, err
}
return inf.Key, inf.Size, nil
}
func (s *Shell) BlockGet(path string) ([]byte, error) {
resp, err := s.Request("block/get", path).Send(context.Background())
if err != nil {
return nil, err
}
defer resp.Close()
if resp.Error != nil {
return nil, resp.Error
}
return ioutil.ReadAll(resp.Output)
}
func (s *Shell) BlockPut(block []byte, format, mhtype string, mhlen int) (string, error) {
var out struct {
Key string
}
data := bytes.NewReader(block)
rc := ioutil.NopCloser(data)
fr := files.NewReaderFile("", "", rc, nil)
slf := files.NewSliceFile("", "", []files.File{fr})
fileReader := files.NewMultiFileReader(slf, true)
return out.Key, s.Request("block/put").
Option("mhtype", mhtype).
Option("format", format).
Option("mhlen", mhlen).
Body(fileReader).
Exec(context.Background(), &out)
}
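// Sketch of a raw block round trip; the format/mhtype/mhlen values mirror
// common go-ipfs defaults but are assumptions here:
//
//	key, err := sh.BlockPut([]byte("raw data"), "v0", "sha2-256", -1)
//	data, err := sh.BlockGet(key)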
type IpfsObject struct {
Links []ObjectLink
Data string
}
type ObjectLink struct {
Name, Hash string
Size uint64
}
func (s *Shell) ObjectGet(path string) (*IpfsObject, error) {
var obj IpfsObject
if err := s.Request("object/get", path).Exec(context.Background(), &obj); err != nil {
return nil, err
}
return &obj, nil
}
func (s *Shell) ObjectPut(obj *IpfsObject) (string, error) {
var data bytes.Buffer
err := json.NewEncoder(&data).Encode(obj)
if err != nil {
return "", err
}
rc := ioutil.NopCloser(&data)
fr := files.NewReaderFile("", "", rc, nil)
slf := files.NewSliceFile("", "", []files.File{fr})
fileReader := files.NewMultiFileReader(slf, true)
var out object
return out.Hash, s.Request("object/put").
Body(fileReader).
Exec(context.Background(), &out)
}
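// Sketch of a get/modify/put round trip; the CID is a placeholder:
//
//	obj, err := sh.ObjectGet("<cid>")
//	if err != nil {
//		// handle error
//	}
//	obj.Data = "updated"
//	newHash, err := sh.ObjectPut(obj) // hash of the modified DAG node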
func (s *Shell) PubSubSubscribe(topic string) (*PubSubSubscription, error) {
// connect
resp, err := s.Request("pubsub/sub", topic).Send(context.Background())
if err != nil {
return nil, err
}
return newPubSubSubscription(resp), nil
}
func (s *Shell) PubSubPublish(topic, data string) (err error) {
resp, err := s.Request("pubsub/pub", topic, data).Send(context.Background())
if err != nil {
return err
}
defer resp.Close()
if resp.Error != nil {
return resp.Error
}
return nil
}
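// Round-trip sketch. Receiving messages is assumed to go through the
// subscription type returned above (its reader methods are outside this excerpt):
//
//	sub, err := sh.PubSubSubscribe("my-topic")
//	if err != nil {
//		// handle error
//	}
//	err = sh.PubSubPublish("my-topic", "hello")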
type ObjectStats struct {
Hash string
BlockSize int
CumulativeSize int
DataSize int
LinksSize int
NumLinks int
}
// ObjectStat gets stats for the DAG object named by key. It returns
// the stats of the requested Object or an error.
func (s *Shell) ObjectStat(key string) (*ObjectStats, error) {
var stat ObjectStats
err := s.Request("object/stat", key).Exec(context.Background(), &stat)
if err != nil {
return nil, err
}
return &stat, nil
}
// StatsBW returns the node's current bandwidth statistics (the stats/bw
// endpoint), or an error.
func (s *Shell) StatsBW(ctx context.Context) (*p2pmetrics.Stats, error) {
v := &p2pmetrics.Stats{}
err := s.Request("stats/bw").Exec(ctx, &v)
return v, err
}
type SwarmStreamInfo struct {
Protocol string
}
type SwarmConnInfo struct {
Addr string
Peer string
Latency string
Muxer string
Streams []SwarmStreamInfo
}
type SwarmConnInfos struct {
Peers []SwarmConnInfo
}
// SwarmPeers gets all the swarm peers
func (s *Shell) SwarmPeers(ctx context.Context) (*SwarmConnInfos, error) {
v := &SwarmConnInfos{}
err := s.Request("swarm/peers").Exec(ctx, &v)
return v, err
}
type swarmConnection struct {
Strings []string
}
// SwarmConnect opens a swarm connection to a specific address.
func (s *Shell) SwarmConnect(ctx context.Context, addr ...string) error {
var conn *swarmConnection
err := s.Request("swarm/connect").
Arguments(addr...).
Exec(ctx, &conn)
return err
}
// RepoGC triggers a garbage collection sweep of the local repo.
func (s *Shell) RepoGC() error {
return s.Request("repo/gc").
Option("recursive", true).
Exec(context.Background(), nil)
}
| {
return "", errors.New("no results received")
} |
LockdropIntroducer.ts | import { FullClaimData } from './EventTypes';
import _ from 'lodash';
import BigNumber from 'bignumber.js';
type IntroducerParams = {
ethAddress: string;
    locks: FullClaimData[]; // the introducer's own locks, if the affiliate has locked themselves
    references: FullClaimData[]; // locks that reference this introducer
};
| constructor(private _params: IntroducerParams) {
if (_params.references.length < 1) throw new Error(`Introducer ${_params.ethAddress} does not have any references`);
this._totalBonus = this._calculateTotalBonus();
}
public get bonusRate() {
return 0.01;
}
public get ethAddress() {
return this._params.ethAddress;
}
public get plmAddress() {
// note: a user can claim from multiple PLM addresses
// note: if the introducer did not participate, we don't know the public key
return _.map(this._params.locks, (i) => i.claimedAddress);
}
public get locks() {
return this._params.locks;
}
public get references() {
return this._params.references;
}
public get totalBonus() {
return this._totalBonus;
}
public get totalBonusToRefs() {
return new BigNumber(this._totalBonus).multipliedBy(this.bonusRate);
}
private _totalBonus: string;
private _calculateTotalBonus() {
const totalBonus = _.reduce(
_.map(this._params.references, (i) => {
const claimedPlm = new BigNumber(i.amount.toString());
return claimedPlm.multipliedBy(this.bonusRate);
}),
(sum, current) => {
return sum.plus(current);
},
new BigNumber(0),
);
return totalBonus.toFixed();
}
} | export default class Introducer { |
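// A worked sketch of the bonus math above, with illustrative numbers only:
// two references claiming 100 PLM and 250 PLM at bonusRate = 0.01 give
// totalBonus = 100 * 0.01 + 250 * 0.01 = 3.5, and totalBonusToRefs applies
// the rate once more to that sum.
//
//   const introducer = new Introducer({ ethAddress: '0x...', locks, references });
//   console.log(introducer.totalBonus); // '3.5' (BigNumber serialized via toFixed)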
getWebService.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package latest
import (
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// Instance of an Azure ML web service resource.
// Latest API Version: 2017-01-01.
//
// Deprecated: The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-nextgen:machinelearning:getWebService'.
func | (ctx *pulumi.Context, args *LookupWebServiceArgs, opts ...pulumi.InvokeOption) (*LookupWebServiceResult, error) {
var rv LookupWebServiceResult
err := ctx.Invoke("azure-nextgen:machinelearning/latest:getWebService", args, &rv, opts...)
if err != nil {
return nil, err
}
return &rv, nil
}
type LookupWebServiceArgs struct {
// The region for which encrypted credential parameters are valid.
Region *string `pulumi:"region"`
// Name of the resource group in which the web service is located.
ResourceGroupName string `pulumi:"resourceGroupName"`
// The name of the web service.
WebServiceName string `pulumi:"webServiceName"`
}
// Instance of an Azure ML web service resource.
type LookupWebServiceResult struct {
// Specifies the resource ID.
Id string `pulumi:"id"`
// Specifies the location of the resource.
Location string `pulumi:"location"`
// Specifies the name of the resource.
Name string `pulumi:"name"`
// Contains the property payload that describes the web service.
Properties WebServicePropertiesForGraphResponse `pulumi:"properties"`
// Contains resource tags defined as key/value pairs.
Tags map[string]string `pulumi:"tags"`
// Specifies the type of the resource.
Type string `pulumi:"type"`
}
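// A minimal invocation sketch inside a Pulumi program; the resource group and
// service names are placeholders:
//
//	pulumi.Run(func(ctx *pulumi.Context) error {
//		ws, err := LookupWebService(ctx, &LookupWebServiceArgs{
//			ResourceGroupName: "my-rg",
//			WebServiceName:    "my-webservice",
//		})
//		if err != nil {
//			return err
//		}
//		ctx.Export("location", pulumi.String(ws.Location))
//		return nil
//	})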
| LookupWebService |
spawn.rs | use itertools::Itertools;
use tracing::{debug, error, warn};
use ibc::{
ics02_client::client_state::{ClientState, IdentifiedAnyClientState},
ics03_connection::connection::{IdentifiedConnectionEnd, State as ConnectionState},
ics04_channel::channel::{IdentifiedChannelEnd, State as ChannelState},
ics24_host::identifier::{ChainId, ConnectionId},
Height,
};
use ibc_proto::ibc::core::{
channel::v1::QueryConnectionChannelsRequest, client::v1::QueryClientStatesRequest,
connection::v1::QueryClientConnectionsRequest,
};
use crate::{
chain::{
counterparty::{channel_on_destination, connection_state_on_destination},
handle::ChainHandle,
},
config::Config,
object::{Channel, Client, Connection, Object, Packet},
registry::Registry,
supervisor::client_state_filter::{FilterPolicy, Permission},
supervisor::error::Error as SupervisorError,
worker::WorkerMap,
};
use super::{Error, RwArc};
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum SpawnMode {
Startup,
Reload,
}
/// A context for spawning workers within the [`crate::supervisor::Supervisor`].
pub struct SpawnContext<'a> {
config: &'a RwArc<Config>,
registry: &'a mut Registry,
workers: &'a mut WorkerMap,
client_state_filter: &'a mut FilterPolicy,
mode: SpawnMode,
}
impl<'a> SpawnContext<'a> {
pub fn new(
config: &'a RwArc<Config>,
registry: &'a mut Registry,
client_state_filter: &'a mut FilterPolicy,
workers: &'a mut WorkerMap,
mode: SpawnMode,
) -> Self {
Self {
config,
registry,
workers,
client_state_filter,
mode,
}
}
fn client_filter_enabled(&self) -> bool {
// Currently just a wrapper over the global filter.
self.config.read().expect("poisoned lock").global.filter
}
pub fn spawn_workers(&mut self) {
let chain_ids = self
.config
.read()
.expect("poisoned lock")
.chains
.iter()
.map(|c| &c.id)
.cloned()
.collect_vec();
for chain_id in chain_ids {
self.spawn_workers_for_chain(&chain_id);
}
}
pub fn spawn_workers_from_chain_to_chain(
&mut self,
from_chain_id: &ChainId,
to_chain_id: &ChainId,
) {
let clients_req = QueryClientStatesRequest {
pagination: ibc_proto::cosmos::base::query::pagination::all(),
};
let chain = match self.registry.get_or_spawn(from_chain_id) {
Ok(chain_handle) => chain_handle,
Err(e) => {
error!(
"skipping workers for chain {}, reason: failed to spawn chain runtime with error: {}",
from_chain_id, e
);
return;
}
};
let clients = match chain.query_clients(clients_req) {
Ok(clients) => clients,
Err(e) => {
error!(
"skipping workers for chain {}, reason: failed to query clients with error: {}",
from_chain_id, e
);
return;
}
};
for client in clients {
if &client.client_state.chain_id() == to_chain_id {
self.spawn_workers_for_client(chain.clone(), client);
}
}
}
pub fn spawn_workers_for_chain(&mut self, chain_id: &ChainId) {
let clients_req = QueryClientStatesRequest {
pagination: ibc_proto::cosmos::base::query::pagination::all(),
};
let chain = match self.registry.get_or_spawn(chain_id) {
Ok(chain_handle) => chain_handle,
Err(e) => {
error!(
"skipping workers for chain {}, reason: failed to spawn chain runtime with error: {}",
chain_id, e
);
return;
}
};
let clients = match chain.query_clients(clients_req) {
Ok(clients) => clients,
Err(e) => {
error!(
"skipping workers for chain {}, reason: failed to query clients with error: {}",
chain_id, e
);
return;
}
};
for client in clients {
self.spawn_workers_for_client(chain.clone(), client);
}
if self.mode != SpawnMode::Reload {
return;
}
let chain_ids = self
.config
.read()
.expect("poisoned lock")
.chains
.iter()
.map(|c| &c.id)
.cloned()
.collect_vec();
for id in chain_ids {
if chain_id == &id {
continue;
}
self.spawn_workers_from_chain_to_chain(&id, chain_id);
}
}
pub fn spawn_workers_for_client(
&mut self,
chain: Box<dyn ChainHandle>,
client: IdentifiedAnyClientState,
) {
// Potentially ignore the client
if self.client_filter_enabled()
&& matches!(
self.client_state_filter.control_client(
&chain.id(),
&client.client_id,
&client.client_state
),
Permission::Deny
)
{
warn!(
"skipping workers for chain {}, client {}. \
reason: client is not allowed (client trust level={:?})",
chain.id(),
client.client_id,
client.client_state.trust_threshold()
);
return;
}
let counterparty_chain_id = client.client_state.chain_id();
let has_counterparty = self
.config
.read()
.expect("poisoned lock")
.has_chain(&counterparty_chain_id);
if !has_counterparty {
debug!(
"skipping client worker for client {} on chain {} has its counterparty ({}) is not present in config",
client.client_id, chain.id(), counterparty_chain_id
);
return;
}
let chain_id = chain.id();
let conns_req = QueryClientConnectionsRequest {
client_id: client.client_id.to_string(),
};
let client_connections = match chain.query_client_connections(conns_req) {
Ok(connections) => connections,
Err(e) => {
error!(
"skipping workers for chain {}, reason: failed to query client connections for client {}: {}",
chain_id, client.client_id, e
);
return;
}
};
for connection_id in client_connections {
self.spawn_workers_for_connection(chain.clone(), &client, connection_id);
}
}
pub fn spawn_workers_for_connection(
&mut self,
chain: Box<dyn ChainHandle>,
client: &IdentifiedAnyClientState,
connection_id: ConnectionId,
) {
let chain_id = chain.id();
let connection_end = match chain.query_connection(&connection_id, Height::zero()) {
Ok(connection_end) => connection_end,
Err(e) => {
error!(
"skipping workers for chain {} and connection {}, reason: failed to query connection end: {}",
chain_id, connection_id, e
);
return;
}
};
let connection = IdentifiedConnectionEnd {
connection_id: connection_id.clone(),
connection_end: connection_end.clone(),
};
// Apply the client state filter
if self.client_filter_enabled() {
match self.client_state_filter.control_connection_end_and_client(
&mut self.registry,
&chain_id,
&client.client_state,
&connection_end,
&connection_id,
) {
Ok(Permission::Deny) => {
warn!(
"skipping workers for chain {}, client {} & conn {}. \
reason: client or counterparty client is not allowed",
chain_id, client.client_id, connection_id
);
return;
}
Err(e) => {
error!("skipping workers for chain {}. reason: {}", chain_id, e);
return;
}
_ => {} // allowed
}
}
match self.spawn_connection_workers(chain.clone(), client.clone(), connection.clone()) {
Ok(()) => debug!(
"done spawning workers for connection {} on chain {}",
connection.connection_id,
chain.id(),
),
Err(e) => error!(
"skipped workers for connection {} on chain {} due to error {}",
                connection.connection_id,
                chain.id(),
e
),
}
if !connection_end.is_open() {
debug!(
"connection {} not open, skip workers for channels over this connection",
connection.connection_id
);
return;
}
let connection = IdentifiedConnectionEnd {
connection_id: connection_id.clone(),
connection_end: connection_end.clone(),
};
match self.counterparty_connection_state(client.clone(), connection.clone()) {
Err(e) => {
debug!("error with counterparty: reason {}", e);
return;
}
Ok(state) => {
if !state.eq(&ConnectionState::Open) {
                    debug!(
                        "counterparty of connection {} is not open, skip workers for channels over this connection",
                        connection.connection_id
                    );
return;
}
}
};
| };
let connection_channels = match chain.query_connection_channels(chans_req) {
Ok(channels) => channels,
Err(e) => {
error!(
"skipping workers for chain {} and connection {}, reason: failed to query its channels: {}",
chain.id(), connection_id, e
);
return;
}
};
let connection = IdentifiedConnectionEnd::new(connection_id, connection_end);
for channel in connection_channels {
let channel_id = channel.channel_id.clone();
match self.spawn_workers_for_channel(chain.clone(), client, &connection, channel) {
Ok(()) => debug!(
"done spawning workers for chain {} and channel {}",
chain.id(),
channel_id,
),
Err(e) => error!(
"skipped workers for chain {} and channel {} due to error {}",
chain.id(),
channel_id,
e
),
}
}
}
fn counterparty_connection_state(
&mut self,
client: IdentifiedAnyClientState,
connection: IdentifiedConnectionEnd,
) -> Result<ConnectionState, Error> {
let counterparty_chain = self
.registry
.get_or_spawn(&client.client_state.chain_id())
.map_err(Error::spawn)?;
connection_state_on_destination(connection, counterparty_chain.as_ref())
}
fn spawn_connection_workers(
&mut self,
chain: Box<dyn ChainHandle>,
client: IdentifiedAnyClientState,
connection: IdentifiedConnectionEnd,
) -> Result<(), Error> {
let handshake_enabled = self
.config
.read()
.expect("poisoned lock")
.handshake_enabled();
let counterparty_chain = self
.registry
.get_or_spawn(&client.client_state.chain_id())
.map_err(Error::spawn)?;
let conn_state_src = connection.connection_end.state;
let conn_state_dst =
connection_state_on_destination(connection.clone(), counterparty_chain.as_ref())?;
debug!(
"connection {} on chain {} is: {:?}, state on dest. chain ({}) is: {:?}",
connection.connection_id,
chain.id(),
conn_state_src,
counterparty_chain.id(),
conn_state_dst
);
if conn_state_src.is_open() && conn_state_dst.is_open() {
debug!(
"connection {} on chain {} is already open, not spawning Client worker",
connection.connection_id,
chain.id()
);
} else if !conn_state_dst.is_open()
&& conn_state_dst.less_or_equal_progress(conn_state_src)
&& handshake_enabled
{
// create worker for connection handshake that will advance the remote state
let connection_object = Object::Connection(Connection {
dst_chain_id: client.client_state.chain_id(),
src_chain_id: chain.id(),
src_connection_id: connection.connection_id,
});
self.workers
.spawn(
chain.clone(),
counterparty_chain.clone(),
&connection_object,
&self.config.read().expect("poisoned lock"),
)
.then(|| {
debug!(
"spawning Connection worker: {}",
connection_object.short_name()
);
});
}
Ok(())
}
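    // State sketch for the branch above (illustrative pairs; src is this chain,
    // dst its counterparty):
    //
    //   src = TryOpen, dst = Init -> spawn a Connection worker (dst can progress)
    //   src = Open,    dst = Open -> handshake finished, nothing to spawn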
/// Spawns all the [`Worker`]s that will handle a given channel for a given source chain.
pub fn spawn_workers_for_channel(
&mut self,
chain: Box<dyn ChainHandle>,
client: &IdentifiedAnyClientState,
connection: &IdentifiedConnectionEnd,
channel: IdentifiedChannelEnd,
) -> Result<(), Error> {
let handshake_enabled = self
.config
.read()
.expect("poisoned lock")
.handshake_enabled();
let counterparty_chain = self
.registry
.get_or_spawn(&client.client_state.chain_id())
.map_err(SupervisorError::spawn)?;
let counterparty_channel =
channel_on_destination(&channel, connection, counterparty_chain.as_ref())?;
let chan_state_src = channel.channel_end.state;
let chan_state_dst = counterparty_channel
.as_ref()
.map_or(ChannelState::Uninitialized, |c| c.state);
debug!(
"channel {} on chain {} is: {}; state on dest. chain ({}) is: {}",
channel.channel_id,
chain.id(),
chan_state_src,
counterparty_chain.id(),
chan_state_dst
);
if chan_state_src.is_open()
&& chan_state_dst.is_open()
&& self.relay_packets_on_channel(chain.as_ref(), &channel)
{
// spawn the client worker
let client_object = Object::Client(Client {
dst_client_id: client.client_id.clone(),
dst_chain_id: chain.id(),
src_chain_id: client.client_state.chain_id(),
});
self.workers
.spawn(
counterparty_chain.clone(),
chain.clone(),
&client_object,
&self.config.read().expect("poisoned lock"),
)
.then(|| debug!("spawned Client worker: {}", client_object.short_name()));
// create the Packet object and spawn worker
let path_object = Object::Packet(Packet {
dst_chain_id: counterparty_chain.id(),
src_chain_id: chain.id(),
src_channel_id: channel.channel_id,
src_port_id: channel.port_id,
});
self.workers
.spawn(
chain.clone(),
counterparty_chain.clone(),
&path_object,
&self.config.read().expect("poisoned lock"),
)
.then(|| debug!("spawned Path worker: {}", path_object.short_name()));
} else if !chan_state_dst.is_open()
&& chan_state_dst.less_or_equal_progress(chan_state_src)
&& handshake_enabled
{
// create worker for channel handshake that will advance the remote state
let channel_object = Object::Channel(Channel {
dst_chain_id: counterparty_chain.id(),
src_chain_id: chain.id(),
src_channel_id: channel.channel_id,
src_port_id: channel.port_id,
});
self.workers
.spawn(
chain,
counterparty_chain,
&channel_object,
&self.config.read().expect("poisoned lock"),
)
.then(|| debug!("spawned Channel worker: {}", channel_object.short_name()));
}
Ok(())
}
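    // Decision summary for spawn_workers_for_channel (a sketch of the code above):
    //
    //   both ends Open and packet relaying allowed   -> spawn Client + Packet workers
    //   dst not Open, at or behind src, handshake on -> spawn a Channel worker
    //   anything else                                -> spawn nothing for this channel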
fn relay_packets_on_channel(
&mut self,
chain: &dyn ChainHandle,
channel: &IdentifiedChannelEnd,
) -> bool {
let config = self.config.read().expect("poisoned lock");
config.packets_on_channel_allowed(&chain.id(), &channel.port_id, &channel.channel_id)
}
pub fn shutdown_workers_for_chain(&mut self, chain_id: &ChainId) {
let affected_workers = self.workers.objects_for_chain(chain_id);
for object in affected_workers {
self.workers.shutdown_worker(&object);
}
}
} | let chans_req = QueryConnectionChannelsRequest {
connection: connection_id.to_string(),
pagination: ibc_proto::cosmos::base::query::pagination::all(), |
hellebit_el_GR.ts | <TS language="el_GR" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Δεξί-κλικ για επεξεργασία της διεύθυνσης ή της ετικέτας</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Δημιουργία νέας διεύθυνσης</translation>
</message>
<message>
<source>&New</source>
<translation>&Νέo</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Αντέγραψε την επιλεγμένη διεύθυνση στο πρόχειρο του συστήματος</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Αντιγραφή</translation>
</message>
<message>
<source>C&lose</source>
<translation>Κ&λείσιμο</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Αντιγραφή διεύθυνσης</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
        <translation>Διαγραφή της τρέχουσας επιλεγμένης διεύθυνσης από τη λίστα</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation>
</message>
<message>
<source>&Export</source>
<translation>&Εξαγωγή</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Διαγραφή</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Επιλογή διεύθυνσης όπου θα σταλθούν νομίσματα</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Επιλογή διεύθυνσης απ' όπου θα ληφθούν νομίσματα</translation>
</message>
<message>
<source>C&hoose</source>
<translation>Ε&πιλογή</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Διευθύνσεις αποστολής</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Διευθύνσεις λήψης</translation>
</message>
<message>
<source>These are your Hellebit addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
        <translation>Αυτές είναι οι Hellebit διευθύνσεις σας για την αποστολή πληρωμών. Ελέγχετε πάντα το ποσό και τη διεύθυνση παραλαβής πριν από την αποστολή νομισμάτων.</translation>
</message>
<message>
<source>These are your Hellebit addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Αυτές είναι οι Hellebit διευθύνσεις σας για να λαμβάνετε πληρωμές. Δίνοντας μία ξεχωριστή διεύθυνση σε κάθε αποστολέα, θα μπορείτε να ελέγχετε ποιος σας πληρώνει.</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Αντιγραφή &επιγραφής</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Επεξεργασία</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Εξαγωγή της λίστας διευθύνσεων</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Η Εξαγωγή Απέτυχε</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Παρουσιάστηκε σφάλμα κατά την αποθήκευση της λίστας πορτοφολιών στο %1. Παρακαλώ δοκιμάστε ξανά</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Επιγραφή</translation>
</message>
<message>
<source>Address</source>
<translation>Διεύθυνση</translation>
</message>
<message>
<source>(no label)</source>
<translation>(χωρίς ετικέτα)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Φράση πρόσβασης </translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Βάλτε κωδικό πρόσβασης</translation>
</message>
<message>
<source>New passphrase</source>
<translation>&Αλλαγή κωδικού</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Επανέλαβε τον νέο κωδικό πρόσβασης</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>&Κρυπτογράφηση πορτοφολιού</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Αυτη η ενεργεία χρειάζεται τον κωδικό του πορτοφολιού για να ξεκλειδώσει το πορτοφόλι.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Ξεκλειδωσε το πορτοφολι</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Αυτη η ενεργεια χρειάζεται τον κωδικο του πορτοφολιου για να αποκρυπτογραφησειι το πορτοφολι.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Αποκρυπτογράφησε το πορτοφολι</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Άλλαξε κωδικο πρόσβασης</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Επιβεβαίωσε την κρυπτογραφηση του πορτοφολιού</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR HELLEBITS</b>!</source>
        <translation>Προσοχή: Εάν κρυπτογραφήσεις το πορτοφόλι σου και χάσεις τον κωδικό σου, θα χάσεις <b>ΟΛΑ ΣΟΥ ΤΑ HELLEBITS</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Είστε σίγουροι ότι θέλετε να κρυπτογραφήσετε το πορτοφόλι σας;</translation>
</message>
<message>
<source>Hellebit Core will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your hellebits from being stolen by malware infecting your computer.</source>
        <translation>Το Hellebit Core θα κλείσει τώρα για να ολοκληρώσει τη διαδικασία κρυπτογράφησης. Θυμήσου ότι κρυπτογραφώντας το πορτοφόλι σου δεν μπορείς να προστατέψεις πλήρως τα hellebits σου από κλοπή, στην περίπτωση που μολυνθεί ο υπολογιστής σου με κακόβουλο λογισμικό.</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>ΣΗΜΑΝΤΙΚΟ: Τα προηγούμενα αντίγραφα ασφαλείας που έχετε κάνει από το αρχείο του πορτοφόλιου σας θα πρέπει να αντικατασταθουν με το νέο που δημιουργείται, κρυπτογραφημένο αρχείο πορτοφόλιου. Για λόγους ασφαλείας, τα προηγούμενα αντίγραφα ασφαλείας του μη κρυπτογραφημένου αρχείου πορτοφόλιου θα καταστουν άχρηστα μόλις αρχίσετε να χρησιμοποιείτε το νέο κρυπτογραφημένο πορτοφόλι. </translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Προσοχη: το πλήκτρο Caps Lock είναι ενεργο.</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Κρυπτογραφημενο πορτοφολι</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Εισάγετε τον νέο κωδικό πρόσβασης στον πορτοφόλι <br/> Παρακαλώ χρησιμοποιείστε ένα κωδικό με <b> 10 ή περισσότερους τυχαίους χαρακτήρες</b> ή <b> οχτώ ή παραπάνω λέξεις</b>.</translation>
</message>
<message>
<source>Enter the old passphrase and new passphrase to the wallet.</source>
<translation>Πληκτρολόγησε τον παλιό και τον νέο κωδικό στο πορτοφολι.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Η κρυπτογραφηση του πορτοφολιού απέτυχε</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Η κρυπτογράφηση του πορτοφολιού απέτυχε λογω εσωτερικού σφάλματος. Το πορτοφολι δεν κρυπτογραφηθηκε.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Οι εισαχθέντες κωδικοί δεν ταιριάζουν.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>το ξεκλείδωμα του πορτοφολιού απέτυχε</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Ο κωδικος που εισήχθη για την αποκρυπτογραφηση του πορτοφολιού ήταν λαθος.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Η αποκρυπτογραφηση του πορτοφολιού απέτυχε</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Ο κωδικος του πορτοφολιού άλλαξε με επιτυχία.</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>HellebitGUI</name>
<message>
<source>Sign &message...</source>
<translation>Υπογραφή &Μηνύματος...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Συγχρονισμός με το δίκτυο...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Επισκόπηση</translation>
</message>
<message>
<source>Node</source>
<translation>Κόμβος</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Εμφάνισε τη γενική εικόνα του πορτοφολιού</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Συναλλαγές</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Περιήγηση στο ιστορικό συναλλαγών</translation>
</message>
<message>
<source>E&xit</source>
<translation>Έ&ξοδος</translation>
</message>
<message>
<source>Quit application</source>
<translation>Εξοδος από την εφαρμογή</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Σχετικά με &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Εμφάνισε πληροφορίες σχετικά με Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Επιλογές...</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Κρυπτογράφησε το πορτοφόλι</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Αντίγραφο ασφαλείας του πορτοφολιού</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Άλλαξε κωδικο πρόσβασης</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>Διευθύνσεις αποστολής</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>Διευθύνσεις λήψης</translation>
</message>
<message>
<source>Open &URI...</source>
        <translation>Άνοιγμα &URI...</translation>
</message>
<message>
<source>Hellebit Core client</source>
<translation>Εφαρμογή Hellebit Core</translation>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>Εισαγωγή μπλοκ από τον σκληρο δίσκο ... </translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Φόρτωση ευρετηρίου μπλοκ στον σκληρο δισκο...</translation>
</message>
<message>
<source>Send coins to a Hellebit address</source>
<translation>Στείλε νομίσματα σε μια διεύθυνση hellebit</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Δημιουργία αντιγράφου ασφαλείας πορτοφολιού σε άλλη τοποθεσία</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Αλλαγή του κωδικού κρυπτογράφησης του πορτοφολιού</translation>
</message>
<message>
<source>&Debug window</source>
<translation>&Παράθυρο αποσφαλμάτωσης</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Άνοιγμα κονσόλας αποσφαλμάτωσης και διαγνωστικών</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Επιβεβαίωση μηνύματος</translation>
</message>
<message>
<source>Hellebit</source>
<translation>Hellebit</translation>
</message>
<message>
<source>Wallet</source>
<translation>Πορτοφόλι</translation>
</message>
<message>
<source>&Send</source>
<translation>&Αποστολή</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Παραλαβή </translation>
</message>
<message>
<source>Show information about Hellebit Core</source>
<translation>Σχετικά με το Hellebit Core</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Εμφάνισε/Κρύψε</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Εμφάνιση ή αποκρύψη του κεντρικου παράθυρου </translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Κρυπτογραφήστε τα ιδιωτικά κλειδιά που ανήκουν στο πορτοφόλι σας </translation>
</message>
<message>
<source>Sign messages with your Hellebit addresses to prove you own them</source>
<translation>Υπογράψτε ένα μήνυμα για να βεβαιώσετε πως είστε ο κάτοχος αυτής της διεύθυνσης</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Hellebit addresses</source>
<translation>Υπογράψτε ένα μήνυμα για ν' αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Hellebit</translation>
</message>
<message>
<source>&File</source>
<translation>&Αρχείο</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Ρυθμίσεις</translation>
</message>
<message>
<source>&Help</source>
<translation>&Βοήθεια</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Εργαλειοθήκη καρτελών</translation>
</message>
<message>
<source>Hellebit Core</source>
<translation>Hellebit Core</translation>
</message>
<message>
<source>Request payments (generates QR codes and hellebit: URIs)</source>
<translation>Αίτηση πληρωμών (δημιουργεί QR codes και διευθύνσεις hellebit: )</translation>
</message>
<message>
<source>&About Hellebit Core</source>
<translation>&Σχετικά με το Hellebit Core</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Προβολή της λίστας των χρησιμοποιημένων διευθύνσεων και ετικετών αποστολής</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Προβολή της λίστας των χρησιμοποιημένων διευθύνσεων και ετικετών λήψεως</translation>
</message>
<message>
<source>Open a hellebit: URI or payment request</source>
<translation>Άνοιγμα hellebit: URI αίτησης πληρωμής</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>&Επιλογές γραμμής εντολών</translation>
</message>
<message>
<source>Show the Hellebit Core help message to get a list with possible Hellebit command-line options</source>
        <translation>Εμφάνιση του μηνύματος βοήθειας του Hellebit Core για να λάβετε μια λίστα με τις πιθανές επιλογές γραμμής εντολών του Hellebit</translation>
</message>
<message>
<source>No block source available...</source>
<translation>Η πηγή του μπλοκ δεν ειναι διαθέσιμη... </translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 και %2</translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 πίσω</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Το τελευταίο μπλοκ που ελήφθη δημιουργήθηκε %1 πριν.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Οι συναλλαγές μετά από αυτό δεν θα είναι ακόμη ορατες.</translation>
</message>
<message>
<source>Error</source>
<translation>Σφάλμα</translation>
</message>
<message>
<source>Warning</source>
<translation>Προειδοποίηση</translation>
</message>
<message>
<source>Information</source>
<translation>Πληροφορία</translation>
</message>
<message>
<source>Up to date</source>
<translation>Ενημερωμένο</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Ενημέρωση...</translation>
</message>
<message>
<source>Date: %1
</source>
<translation>Ημερομηνία: %1
</translation>
</message>
<message>
<source>Amount: %1
</source>
<translation>Ποσό: %1
</translation>
</message>
<message>
<source>Type: %1
</source>
<translation>Τύπος: %1
</translation>
</message>
<message>
<source>Label: %1
</source>
<translation>Ετικέτα: %1
</translation>
</message>
<message>
<source>Address: %1
</source>
<translation>Διεύθυνση: %1
</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Η συναλλαγή απεστάλη</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Εισερχόμενη συναλλαγή</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Το πορτοφόλι είναι <b>κρυπτογραφημένο</b> και <b>ξεκλείδωτο</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Το πορτοφόλι είναι <b>κρυπτογραφημένο</b> και <b>κλειδωμένο</b></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>Ειδοποίηση Δικτύου</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>Επιλογή κερμάτων</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Ποσότητα:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Ποσό:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Προτεραιότητα:</translation>
</message>
<message>
<source>Fee:</source>
        <translation>Ταρίφα:</translation>
</message>
<message>
<source>Dust:</source>
        <translation>Σκόνη:</translation>
</message>
<message>
<source>After Fee:</source>
        <translation>Μετά την ταρίφα:</translation>
</message>
<message>
<source>Change:</source>
<translation>Ρέστα:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>(από)επιλογή όλων</translation>
</message>
<message> | <source>Tree mode</source>
<translation>Εμφάνιση τύπου δέντρο</translation>
</message>
<message>
<source>List mode</source>
<translation>Λίστα εντολών</translation>
</message>
<message>
<source>Amount</source>
<translation>Ποσό</translation>
</message>
<message>
<source>Received with label</source>
<translation>Παραλήφθηκε με επιγραφή</translation>
</message>
<message>
<source>Received with address</source>
<translation>Παραλείφθηκε με την εξής διεύθυνση</translation>
</message>
<message>
<source>Date</source>
<translation>Ημερομηνία</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Επικυρώσεις</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Επικυρωμένες</translation>
</message>
<message>
<source>Priority</source>
<translation>Προτεραιότητα</translation>
</message>
<message>
<source>Copy address</source>
<translation>Αντιγραφή διεύθυνσης</translation>
</message>
<message>
<source>Copy label</source>
<translation>Αντιγραφή επιγραφής</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Αντιγραφή ποσού</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Αντιγραφη του ID Συναλλαγής</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Κλείδωμα αξόδευτων</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Ξεκλείδωμα αξόδευτων</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Αντιγραφή ποσότητας</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Αντιγραφή ταρίφας</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Αντιγραφή μετα-ταρίφας</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Αντιγραφή των byte</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Αντιγραφή προτεραιότητας</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Αντιγραφή 'σκόνης'</translation>
</message>
<message>
<source>Copy change</source>
<translation>Αντιγραφή των ρέστων</translation>
</message>
<message>
<source>highest</source>
<translation>ύψιστη</translation>
</message>
<message>
<source>higher</source>
<translation>υψηλότερη</translation>
</message>
<message>
<source>high</source>
<translation>ψηλή</translation>
</message>
<message>
<source>medium-high</source>
<translation>μεσαία-ψηλή</translation>
</message>
<message>
<source>medium</source>
<translation>μεσαία</translation>
</message>
<message>
<source>low-medium</source>
<translation>μεσαία-χαμηλή</translation>
</message>
<message>
<source>low</source>
<translation>χαμηλή</translation>
</message>
<message>
<source>lower</source>
<translation>χαμηλότερη</translation>
</message>
<message>
<source>lowest</source>
<translation>χαμηλότατη</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 κλειδωμένο)</translation>
</message>
<message>
<source>none</source>
<translation>κανένα</translation>
</message>
<message>
<source>This label turns red if the transaction size is greater than 1000 bytes.</source>
<translation>Αυτή η ετικέτα γίνεται κόκκινη αν το μέγεθος της συναλλαγής είναι μεγαλύτερο από 1000 bytes.</translation>
</message>
<message>
<source>This label turns red if the priority is smaller than "medium".</source>
<translation>Αυτή η ετικέτα γίνεται κόκκινη αν η προτεραιότητα είναι μικρότερη από "μεσαία".</translation>
</message>
<message>
<source>This label turns red if any recipient receives an amount smaller than %1.</source>
<translation>Αυτή η ετικέτα γίνεται κόκκινη αν οποιοσδήποτε παραλήπτης λάβει ποσό μικρότερο από %1.</translation>
</message>
<message>
<source>Can vary +/- %1 satoshi(s) per input.</source>
<translation>Μπορεί να διαφέρει +/- %1 Satoshi (ες) ανά εγγραφή.</translation>
</message>
<message>
<source>yes</source>
<translation>ναι</translation>
</message>
<message>
<source>no</source>
<translation>όχι</translation>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
        <translation>Αυτό σημαίνει ότι απαιτείται ταρίφα τουλάχιστον %1 ανά kB.</translation>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation>Μπορεί να διαφέρει +/- 1 byte ανά εγγραφή.</translation>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Συναλλαγές με υψηλότερη προτεραιότητα είναι πιο πιθανό να περιλαμβάνονται σε ένα μπλοκ.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(χωρίς ετικέτα)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>ρέστα από %1 (%2) </translation>
</message>
<message>
<source>(change)</source>
        <translation>(ρέστα)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Επεξεργασία Διεύθυνσης</translation>
</message>
<message>
<source>&Label</source>
<translation>&Επιγραφή</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>Η ετικέτα που συνδέεται με αυτήν την καταχώρηση στο βιβλίο διευθύνσεων</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>Η διεύθυνση σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων. Μπορεί να τροποποιηθεί μόνο για τις διευθύνσεις αποστολής.</translation>
</message>
<message>
<source>&Address</source>
<translation>&Διεύθυνση</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Νέα διεύθυνση λήψης</translation>
</message>
<message>
<source>New sending address</source>
<translation>Νέα διεύθυνση αποστολής</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Επεξεργασία διεύθυνσης λήψης</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Επεξεργασία διεύθυνσης αποστολής</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>Η διεύθυνση "%1" βρίσκεται ήδη στο βιβλίο διευθύνσεων.</translation>
</message>
<message>
<source>The entered address "%1" is not a valid Hellebit address.</source>
<translation>Η διεύθυνση "%1" δεν είναι έγκυρη Hellebit διεύθυνση.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Δεν είναι δυνατό το ξεκλείδωμα του πορτοφολιού.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Η δημιουργία νέου κλειδιού απέτυχε.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Θα δημιουργηθεί ένας νέος φάκελος δεδομένων.</translation>
</message>
<message>
<source>name</source>
<translation>όνομα</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Κατάλογος ήδη υπάρχει. Προσθήκη %1, αν σκοπεύετε να δημιουργήσετε έναν νέο κατάλογο εδώ.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Η διαδρομή υπάρχει ήδη αλλά δεν είναι φάκελος</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Δεν μπορεί να δημιουργηθεί φάκελος δεδομένων εδώ.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>Hellebit Core</source>
<translation>Hellebit Core</translation>
</message>
<message>
<source>version</source>
<translation>έκδοση</translation>
</message>
<message>
<source>(%1-bit)</source>
<translation>(%1-bit)</translation>
</message>
<message>
<source>About Hellebit Core</source>
<translation>Σχετικά με το Hellebit Core</translation>
</message>
<message>
<source>Command-line options</source>
<translation>επιλογής γραμμής εντολών</translation>
</message>
<message>
<source>Usage:</source>
<translation>Χρήση:</translation>
</message>
<message>
<source>command-line options</source>
<translation>επιλογής γραμμής εντολών</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Καλώς ήρθατε</translation>
</message>
<message>
<source>Welcome to Hellebit Core.</source>
<translation>Καλώς ήρθατε στο Hellebit Core.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where Hellebit Core will store its data.</source>
<translation>Καθώς αυτή είναι η πρώτη φορά που εκκινείται το πρόγραμμα, μπορείτε να διαλέξετε πού θα αποθηκεύει το Hellebit Core τα δεδομένα του.</translation>
</message>
<message>
<source>Hellebit Core will download and store a copy of the Hellebit block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>O πυρήνας Hellebit θα κατεβάσει και να αποθηκεύσει ένα αντίγραφο της αλυσίδας μπλοκ Hellebit. Τουλάχιστον %1GB δεδομένων θα αποθηκευτούν σε αυτόν τον κατάλογο, και θα αυξηθεί με την πάροδο του χρόνου. Το πορτοφόλι θα αποθηκευτεί σε αυτόν τον κατάλογο.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Χρήση του προεπιλεγμένου φακέλου δεδομένων</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Προσαρμογή του φακέλου δεδομένων: </translation>
</message>
<message>
<source>Hellebit Core</source>
<translation>Hellebit Core</translation>
</message>
<message>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Σφάλμα: Ο καθορισμένος φάκελος δεδομένων "%1" δεν μπορεί να δημιουργηθεί.</translation>
</message>
<message>
<source>Error</source>
<translation>Σφάλμα</translation>
</message>
<message numerus="yes">
<source>%n GB of free space available</source>
<translation><numerusform>%n GB ελεύθερου χώρου διαθέσιμα</numerusform><numerusform>%n GB ελεύθερου χώρου διαθέσιμα</numerusform></translation>
</message>
<message numerus="yes">
<source>(of %n GB needed)</source>
<translation><numerusform>(από το %n GB που απαιτείται)</numerusform><numerusform>(από τα %n GB που απαιτούνται)</numerusform></translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
        <translation>Άνοιγμα URI</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Ανοιχτό αίτημα πληρωμής από URI ή απο αρχείο</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
        <translation>Επιλέξτε αρχείο αίτησης πληρωμής</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Επιλέξτε αρχείο πληρωμής για άνοιγμα.</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Ρυθμίσεις</translation>
</message>
<message>
<source>&Main</source>
<translation>&Κύριο</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Μέγεθος κρυφής μνήμης βάσης δεδομένων.</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Number of script &verification threads</source>
        <translation>Αριθμός νημάτων επαλήθευσης script</translation>
</message>
<message>
<source>Accept connections from outside</source>
<translation>Αποδοχή συνδέσεων απο έξω</translation>
</message>
<message>
<source>Allow incoming connections</source>
<translation>Αποδοχή εισερχόμενων συνδέσεων</translation>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>Διεύθυνση IP του διαμεσολαβητή (π.χ. IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Exit in the menu.</source>
<translation>Ελαχιστοποίηση αντί για έξοδο κατά το κλείσιμο του παραθύρου. Όταν αυτή η επιλογή είναι ενεργοποιημένη, η εφαρμογή θα κλείνει μόνο αν επιλεχθεί η Έξοδος στο μενού.</translation>
</message>
<message>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>URLs από τρίτους (π.χ. ένας εξερευνητής μπλοκ) τα οποία εμφανίζονται στην καρτέλα συναλλαγών ως στοιχεία μενού. Το %s στα URL αντικαθίσταται από την τιμή της κατατεμαχισμένης συναλλαγής. Πολλαπλά URL διαχωρίζονται με την κάθετη μπάρα |.</translation>
</message>
<message>
<source>Third party transaction URLs</source>
<translation>Διευθύνσεις τρίτων συναλλαγών.</translation>
</message>
<message>
<source>Active command-line options that override above options:</source>
<translation>Ενεργές επιλογές γραμμής-εντολών που παρακάμπτουν τις παραπάνω επιλογές:</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Επαναφορά όλων των επιλογών του πελάτη στις προεπιλογές.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&Επαναφορά ρυθμίσεων</translation>
</message>
<message>
<source>&Network</source>
<translation>&Δίκτυο</translation>
</message>
<message>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation>(0 = αυτόματο, <0 = ελεύθεροι πυρήνες)</translation>
</message>
<message>
<source>W&allet</source>
<translation>Π&ορτοφόλι</translation>
</message>
<message>
<source>Expert</source>
<translation>Έμπειρος</translation>
</message>
<message>
<source>Enable coin &control features</source>
<translation>Ενεργοποίηση δυνατοτήτων ελέγχου &κερμάτων</translation>
</message>
<message>
<source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source>
<translation>Εάν απενεργοποιήσετε το ξόδεμα μη επικυρωμένων ρέστων, τα ρέστα από μια συναλλαγή δεν μπορούν να χρησιμοποιηθούν έως ότου αυτή η συναλλαγή έχει έστω μια επικύρωση. Αυτό επίσης επηρεάζει το πως υπολογίζεται το υπόλοιπό σας.</translation>
</message>
<message>
<source>&Spend unconfirmed change</source>
<translation>&Ξόδεμα μη επικυρωμένων ρέστων</translation>
</message>
<message>
<source>Automatically open the Hellebit client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Αυτόματο άνοιγμα των θυρών Hellebit στον δρομολογητή. Λειτουργεί μόνο αν ο δρομολογητής σας υποστηρίζει τη λειτουργία UPnP.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Απόδοση θυρών με χρήστη &UPnP</translation>
</message>
<message>
<source>Connect to the Hellebit network through a SOCKS5 proxy.</source>
<translation>Σύνδεση στο δίκτυο Hellebit μέσω διαμεσολαβητή SOCKS5.</translation>
</message>
<message>
<source>&Connect through SOCKS5 proxy (default proxy):</source>
<translation>&Σύνδεση μέσω διαμεσολαβητή SOCKS5 (προεπιλεγμένος διαμεσολαβητής):</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>&IP διαμεσολαβητή:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Θύρα:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Θύρα διαμεσολαβητή (π.χ. 9050)</translation>
</message>
<message>
<source>&Window</source>
<translation>&Παράθυρο</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Εμφάνιση μόνο εικονιδίου στην περιοχή ειδοποιήσεων κατά την ελαχιστοποίηση</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Ελαχιστοποίηση στην περιοχή ειδοποιήσεων αντί της γραμμής εργασιών</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>Ε&λαχιστοποίηση κατά το κλείσιμο</translation>
</message>
<message>
<source>&Display</source>
<translation>&Απεικόνιση</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>Γλώσσα περιβάλλοντος εργασίας: </translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Μονάδα μέτρησης:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Διαλέξτε την προεπιλεγμένη υποδιαίρεση που θα εμφανίζεται όταν στέλνετε νομίσματα.</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Επιλογή κατα πόσο να αναδείχνονται οι δυνατότητες ελέγχου κερμάτων.
</translation>
</message>
<message>
<source>&OK</source>
<translation>&ΟΚ</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Ακύρωση</translation>
</message>
<message>
<source>default</source>
<translation>προεπιλογή</translation>
</message>
<message>
<source>none</source>
<translation>κανένα</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Επιβεβαίωση επαναφοράς επιλογών</translation>
</message>
<message>
<source>Client restart required to activate changes.</source>
<translation>Χρειάζεται επανεκκίνηση του προγράμματος για να ενεργοποιηθούν οι αλλαγές.</translation>
</message>
<message>
<source>This change would require a client restart.</source>
<translation>Η αλλαγή αυτή θα χρειαστεί επανεκκίνηση του προγράμματος</translation>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Φόρμα</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Hellebit network after a connection is established, but this process has not completed yet.</source>
<translation>Οι πληροφορίες που εμφανίζονται μπορεί να είναι ξεπερασμένες. Το πορτοφόλι σας συγχρονίζεται αυτόματα με το δίκτυο Hellebit μετά από μια σύνδεση, αλλά αυτή η διαδικασία δεν έχει ακόμη ολοκληρωθεί. </translation>
</message>
<message>
<source>Watch-only:</source>
<translation>Επίβλεψη μόνο:</translation>
</message>
<message>
<source>Available:</source>
<translation>Διαθέσιμο:</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>Το τρέχον διαθέσιμο υπόλοιπο</translation>
</message>
<message>
<source>Pending:</source>
<translation>Εκκρεμούν:</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Το άθροισμα των συναλλαγών που δεν έχουν ακόμα επιβεβαιωθεί και δεν προσμετρώνται στο τρέχον διαθέσιμο υπόλοιπό σας</translation>
</message>
<message>
<source>Immature:</source>
<translation>Ανώριμο:</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
<translation>Εξορυγμένο υπόλοιπο που δεν έχει ακόμα ωριμάσει</translation>
</message>
<message>
<source>Balances</source>
<translation>Υπόλοιπα</translation>
</message>
<message>
<source>Total:</source>
<translation>Σύνολο:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>Το τρέχον συνολικό υπόλοιπο</translation>
</message>
<message>
<source>Your current balance in watch-only addresses</source>
<translation>Το τρέχον υπόλοιπο σας σε διευθύνσεις παρακολούθησης μόνο</translation>
</message>
<message>
<source>Spendable:</source>
<translation>Δαπανήσιμα:</translation>
</message>
<message>
<source>Recent transactions</source>
<translation>Πρόσφατες συναλλαγές</translation>
</message>
<message>
<source>Unconfirmed transactions to watch-only addresses</source>
<translation>Μη επικυρωμένες συναλλαγές σε διευθύνσεις παρακολούθησης μόνο</translation>
</message>
<message>
<source>Mined balance in watch-only addresses that has not yet matured</source>
<translation>Εξορυγμένο υπόλοιπο σε διευθύνσεις παρακολούθησης μόνο που δεν έχει ωριμάσει ακόμα</translation>
</message>
<message>
<source>Current total balance in watch-only addresses</source>
<translation>Το τρέχον συνολικό υπόλοιπο σε διευθύνσεις παρακολούθησης μόνο</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>URI handling</source>
<translation>Χειρισμός URI</translation>
</message>
<message>
<source>Invalid payment address %1</source>
<translation>Μη έγκυρη διεύθυνση πληρωμής %1</translation>
</message>
<message>
<source>Payment request rejected</source>
<translation>Η αίτηση πληρωμής απορρίφθηκε.</translation>
</message>
<message>
<source>Payment request is not initialized.</source>
<translation>Η αίτηση πληρωμής δεν έχει αρχικοποιηθεί.</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>Το ζητούμενο ποσό πληρωμής του %1 είναι πολύ μικρό (θεωρείται σκόνη)</translation>
</message>
<message>
<source>Payment request error</source>
<translation>Σφάλμα αιτήματος πληρωμής</translation>
</message>
<message>
<source>Cannot start hellebit: click-to-pay handler</source>
<translation>Δεν είναι δυνατή η εκκίνηση του Hellebit: click-to-pay handler</translation>
</message>
<message>
<source>Payment request fetch URL is invalid: %1</source>
<translation>Η διεύθυνση πληρωμής (URL) δεν είναι έγκυρη: %1</translation>
</message>
<message>
<source>Payment request file handling</source>
<translation>Διαχείριση αρχείου αιτήματος πληρωμής</translation>
</message>
<message>
<source>Refund from %1</source>
<translation>Επιστροφή ποσού από %1</translation>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation>Σφάλμα επικοινωνίας με %1: %2</translation>
</message>
<message>
<source>Payment request cannot be parsed!</source>
<translation>Η αίτηση πληρωμής δεν μπορεί να αναλυθεί!</translation>
</message>
<message>
<source>Bad response from server %1</source>
<translation>Κακή απάντηση από διακομιστή %1</translation>
</message>
<message>
<source>Payment acknowledged</source>
<translation>Πληρωμή αναγνωρίστηκε</translation>
</message>
<message>
<source>Network request error</source>
<translation>Σφάλμα αιτήματος δικτύου</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>Ping Time</source>
<translation>Χρόνος καθυστέρησης</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Ποσό</translation>
</message>
<message>
<source>Enter a Hellebit address (e.g. %1)</source>
<translation>Εισάγετε μια διεύθυνση Hellebit (π.χ. %1)</translation>
</message>
<message>
<source>%1 d</source>
<translation>%1 d</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 ώ</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 λ</translation>
</message>
<message>
<source>%1 s</source>
<translation>%1 s</translation>
</message>
<message>
<source>None</source>
<translation>Κανένα</translation>
</message>
<message>
<source>N/A</source>
<translation>Μη διαθέσιμο</translation>
</message>
<message>
<source>%1 ms</source>
<translation>%1 ms</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>&Αποθήκευση εικόνας...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>&Αντιγραφή εικόνας</translation>
</message>
<message>
<source>Save QR Code</source>
<translation>Αποθήκευση κώδικα QR</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation>Εικόνες PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Client name</source>
<translation>Όνομα Πελάτη</translation>
</message>
<message>
<source>N/A</source>
<translation>Μη διαθέσιμο</translation>
</message>
<message>
<source>Client version</source>
<translation>Έκδοση Πελάτη</translation>
</message>
<message>
<source>&Information</source>
<translation>&Πληροφορία</translation>
</message>
<message>
<source>Debug window</source>
<translation>Παράθυρο αποσφαλμάτωσης</translation>
</message>
<message>
<source>General</source>
<translation>Γενικά</translation>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>Χρησιμοποιηση της OpenSSL εκδοσης</translation>
</message>
<message>
<source>Using BerkeleyDB version</source>
<translation>Χρήση BerkeleyDB έκδοσης</translation>
</message>
<message>
<source>Startup time</source>
<translation>Χρόνος εκκίνησης</translation>
</message>
<message>
<source>Network</source>
<translation>Δίκτυο</translation>
</message>
<message>
<source>Name</source>
<translation>Όνομα</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Αριθμός συνδέσεων</translation>
</message>
<message>
<source>Block chain</source>
<translation>Αλυσίδα μπλοκ</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Τρέχον αριθμός μπλοκ</translation>
</message>
<message>
<source>Received</source>
<translation>Παραλήφθησαν</translation>
</message>
<message>
<source>Sent</source>
<translation>Αποστολή</translation>
</message>
<message>
<source>&Peers</source>
<translation>&Χρήστες</translation>
</message>
<message>
<source>Select a peer to view detailed information.</source>
<translation>Επιλέξτε ένα χρήστη για να δείτε αναλυτικές πληροφορίες.</translation>
</message>
<message>
<source>Version</source>
<translation>Έκδοση</translation>
</message>
<message>
<source>Services</source>
<translation>Υπηρεσίες</translation>
</message>
<message>
<source>Ban Score</source>
<translation>Σκορ αποκλεισμού</translation>
</message>
<message>
<source>Connection Time</source>
<translation>Χρόνος σύνδεσης</translation>
</message>
<message>
<source>Last Send</source>
<translation>Τελευταία αποστολή</translation>
</message>
<message>
<source>Last Receive</source>
<translation>Τελευταία λήψη</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Χρόνος καθυστέρησης</translation>
</message>
<message>
<source>Last block time</source>
<translation>Χρόνος τελευταίου μπλοκ</translation>
</message>
<message>
<source>&Open</source>
<translation>&Άνοιγμα</translation>
</message>
<message>
<source>&Console</source>
<translation>&Κονσόλα</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>&Κίνηση δικτύου</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Εκκαθάριση</translation>
</message>
<message>
<source>Totals</source>
<translation>Σύνολα</translation>
</message>
<message>
<source>In:</source>
<translation>Εισερχόμενα:</translation>
</message>
<message>
<source>Out:</source>
<translation>Εξερχόμενα:</translation>
</message>
<message>
<source>Build date</source>
<translation>Ημερομηνία κατασκευής</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Αρχείο καταγραφής εντοπισμού σφαλμάτων </translation>
</message>
<message>
<source>Clear console</source>
<translation>Καθαρισμός κονσόλας</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Χρησιμοποιήστε το πάνω και κάτω βέλος για να περιηγηθείτε στο ιστορικο, και <b>Ctrl-L</b> για εκκαθαριση οθονης.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Γράψτε <b>help</b> για μια επισκόπηση των διαθέσιμων εντολών</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
<message>
<source>via %1</source>
<translation>μέσω %1</translation>
</message>
<message>
<source>never</source>
<translation>ποτέ</translation>
</message>
<message>
<source>Inbound</source>
<translation>Εισερχόμενα</translation>
</message>
<message>
<source>Outbound</source>
<translation>Εξερχόμενα</translation>
</message>
<message>
<source>Unknown</source>
<translation>Άγνωστο(α)</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Ποσό:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Επιγραφή:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Μήνυμα:</translation>
</message>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>Ε&παναχρησιμοποίηση υπάρχουσας διεύθυνσης λήψης (δεν συνιστάται)</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Καθαρισμός όλων των πεδίων της φόρμας.</translation>
</message>
<message>
<source>Clear</source>
<translation>Καθαρισμός</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Αίτηση πληρωμής</translation>
</message>
<message>
<source>Show</source>
<translation>Εμφάνιση</translation>
</message>
<message>
<source>Remove the selected entries from the list</source>
<translation>Αφαίρεση επιλεγμένων καταχωρίσεων από τη λίστα</translation>
</message>
<message>
<source>Remove</source>
<translation>Αφαίρεση</translation>
</message>
<message>
<source>Copy label</source>
<translation>Αντιγραφή επιγραφής</translation>
</message>
<message>
<source>Copy message</source>
<translation>Αντιγραφή μηνύματος</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Αντιγραφή ποσού</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>Κώδικας QR</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Αντιγραφή &URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Αντιγραφή &Διεύθυνσης</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>&Αποθήκευση εικόνας...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Αίτηση πληρωμής για %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Πληροφορίες πληρωμής</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Διεύθυνση</translation>
</message>
<message>
<source>Amount</source>
<translation>Ποσό</translation>
</message>
<message>
<source>Label</source>
<translation>Επιγραφή</translation>
</message>
<message>
<source>Message</source>
<translation>Μήνυμα</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Το αποτέλεσμα της διεύθυνσης είναι πολύ μεγάλο. Μειώστε το μέγεθος για το κείμενο της ετικέτας/ μηνύματος.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Σφάλμα κατά την κωδικοποίηση του URI σε κώδικα QR</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Ημερομηνία</translation>
</message>
<message>
<source>Label</source>
<translation>Επιγραφή</translation>
</message>
<message>
<source>Message</source>
<translation>Μήνυμα</translation>
</message>
<message>
<source>Amount</source>
<translation>Ποσό</translation>
</message>
<message>
<source>(no label)</source>
<translation>(χωρίς ετικέτα)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(κανένα μήνυμα)</translation>
</message>
<message>
<source>(no amount)</source>
<translation>(κανένα ποσό)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Αποστολή νομισμάτων</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>Χαρακτηριστικά ελέγχου κερμάτων</translation>
</message>
<message>
<source>Inputs...</source>
<translation>Εισροές...</translation>
</message>
<message>
<source>automatically selected</source>
<translation>επιλεγμένο αυτόματα</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Ανεπαρκές κεφάλαιο!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Ποσότητα:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Ποσό:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Προτεραιότητα:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Ταρίφα:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Μετά την ταρίφα:</translation>
</message>
<message>
<source>Change:</source>
<translation>Ρέστα:</translation>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>Όταν ενεργό, αλλά η διεύθυνση ρέστων είναι κενή ή άκυρη, τα ρέστα θα σταλούν σε μία πρόσφατα δημιουργημένη διεύθυνση.</translation>
</message>
<message>
<source>Custom change address</source>
<translation>Προσαρμοσμένη διεύθυνση ρέστων</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Τέλος συναλλαγής:</translation>
</message>
<message>
<source>Choose...</source>
<translation>Επιλογή...</translation>
</message>
<message>
<source>per kilobyte</source>
<translation>ανά kilobyte</translation>
</message>
<message>
<source>Hide</source>
<translation>Απόκρυψη</translation>
</message>
<message>
<source>total at least</source>
<translation>συνολικά τουλάχιστον</translation>
</message>
<message>
<source>Recommended:</source>
<translation>Προτεινόμενο: </translation>
</message>
<message>
<source>Custom:</source>
<translation>Προσαρμογή:</translation>
</message>
<message>
<source>Confirmation time:</source>
<translation>Χρόνος επικύρωσης:</translation>
</message>
<message>
<source>normal</source>
<translation>κανονικό</translation>
</message>
<message>
<source>fast</source>
<translation>Γρήγορο</translation>
</message>
<message>
<source>(confirmation may take longer)</source>
<translation>(η επικύρωση ίσως χρειαστεί περισσότερο χρόνο)</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Αποστολή σε πολλούς αποδέκτες ταυτόχρονα</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>&Προσθήκη αποδέκτη</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Καθαρισμός όλων των πεδίων της φόρμας.</translation>
</message>
<message>
<source>Dust:</source>
<translation>Σκόνη:</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Καθαρισμός &Όλων</translation>
</message>
<message>
<source>Balance:</source>
<translation>Υπόλοιπο:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Επιβεβαίωση αποστολής</translation>
</message>
<message>
<source>S&end</source>
<translation>Απο&στολή</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Επιβεβαίωση αποστολής νομισμάτων</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 σε %2</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Αντιγραφή ποσότητας</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Αντιγραφή ποσού</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Αντιγραφή ταρίφας</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Αντιγραφή μετα-ταρίφας</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Αντιγραφή των byte</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Αντιγραφή προτεραιότητας</translation>
</message>
<message>
<source>Copy change</source>
<translation>Αντιγραφή των ρέστων</translation>
</message>
<message>
<source>or</source>
<translation>ή</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>Το ποσό πληρωμής πρέπει να είναι μεγαλύτερο από 0.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>Το ποσό ξεπερνάει το διαθέσιμο υπόλοιπο</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Το σύνολο υπερβαίνει το υπόλοιπό σας όταν συμπεριληφθεί και η αμοιβή %1</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>Η δημιουργία της συναλλαγής απέτυχε!</translation>
</message>
<message>
<source>Warning: Invalid Hellebit address</source>
<translation>Προειδοποίηση: Μη έγκυρη διεύθυνση Hellebit</translation>
</message>
<message>
<source>(no label)</source>
<translation>(χωρίς ετικέτα)</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Αντιγραφή 'σκόνης'</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Είστε βέβαιοι για την αποστολή;</translation>
</message>
<message>
<source>added as transaction fee</source>
<translation>προστέθηκαν ως αμοιβή συναλλαγής</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>&Ποσό:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>Πληρωμή &σε:</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Εισάγετε μια επιγραφή για αυτή τη διεύθυνση ώστε να καταχωρηθεί στο βιβλίο διευθύνσεων</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Επιγραφή:</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Επιλογή διεύθυνσης που έχει ήδη χρησιμοποιηθεί</translation>
</message>
<message>
<source>This is a normal payment.</source>
<translation>Αυτή είναι μια απλή πληρωμή.</translation>
</message>
<message>
<source>The Hellebit address to send the payment to</source>
<translation>Η διεύθυνση Hellebit στην οποία θα σταλεί η πληρωμή</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Επικόλληση διεύθυνσης από το πρόχειρο</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Αφαίρεση αυτής της καταχώρησης</translation>
</message>
<message>
<source>Message:</source>
<translation>Μήνυμα:</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Εισάγετε μία ετικέτα για αυτή την διεύθυνση για να προστεθεί στη λίστα με τις χρησιμοποιημένες διευθύνσεις</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Πληρωμή σε:</translation>
</message>
<message>
<source>Memo:</source>
<translation>Σημείωση:</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Hellebit Core is shutting down...</source>
<translation>Το Hellebit Core τερματίζεται...</translation>
</message>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Μην απενεργοποιήσετε τον υπολογιστή μέχρι να κλείσει αυτό το παράθυρο.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Υπογραφές - Υπογραφή / Επαλήθευση μηνύματος</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>&Υπογραφή Μηνύματος</translation>
</message>
<message>
<source>The Hellebit address to sign the message with</source>
<translation>Η διεύθυνση Hellebit με την οποία θα υπογραφεί το μήνυμα</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Επιλογή διεύθυνσης που έχει ήδη χρησιμοποιηθεί</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Επικόλληση διεύθυνσης από το πρόχειρο</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Εισάγετε εδώ το μήνυμα που θέλετε να υπογράψετε</translation>
</message>
<message>
<source>Signature</source>
<translation>Υπογραφή</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Αντιγραφή της τρέχουσας υπογραφής στο πρόχειρο του συστήματος</translation>
</message>
<message>
<source>Sign the message to prove you own this Hellebit address</source>
<translation>Υπογράψτε ένα μήνυμα για ν' αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Hellebit</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Υπογραφή &μηνύματος</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Επαναφορά όλων των πεδίων υπογραφής μηνύματος</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Καθαρισμός &Όλων</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&Επιβεβαίωση μηνύματος</translation>
</message>
<message>
<source>The Hellebit address the message was signed with</source>
<translation>Η διεύθυνση Hellebit με την οποία υπογράφηκε το μήνυμα</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified Hellebit address</source>
<translation>Επαληθεύστε το μήνυμα για να βεβαιωθείτε ότι υπογράφηκε με τη συγκεκριμένη διεύθυνση Hellebit</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>Επιβεβαίωση μηνύματος</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Επαναφορά όλων των πεδίων επαλήθευσης μηνύματος</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Κάντε κλικ στο "Υπογραφή Μηνύματος" για να λάβετε την υπογραφή</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>Η διεύθυνση που εισήχθη είναι λάθος.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Παρακαλούμε ελέγξτε την διεύθυνση και δοκιμάστε ξανά.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>Η διεύθυνση που εισήχθη δεν αναφέρεται σε κλειδί.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Το ξεκλείδωμα του πορτοφολιού ακυρώθηκε.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>Το ιδιωτικό κλειδί για τη διεύθυνση που εισήχθη δεν είναι διαθέσιμο.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>Η υπογραφή του μηνύματος απέτυχε.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Μήνυμα υπεγράφη.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>Η υπογραφή δεν μπόρεσε να αποκωδικοποιηθεί.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Παρακαλούμε ελέγξτε την υπογραφή και δοκιμάστε ξανά.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>Η υπογραφή δεν ταιριάζει με το μήνυμα. </translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Η επιβεβαίωση του μηνύματος απέτυχε</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Μήνυμα επιβεβαιώθηκε.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>Hellebit Core</source>
<translation>Hellebit Core</translation>
</message>
<message>
<source>The Bitcoin Core developers</source>
<translation>Οι προγραμματιστές του Hellebit Core</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Ανοιχτό μέχρι %1</translation>
</message>
<message>
<source>conflicted</source>
<translation>σύγκρουση</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/χωρίς σύνδεση</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/χωρίς επιβεβαίωση</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 επιβεβαιώσεις</translation>
</message>
<message>
<source>Status</source>
<translation>Κατάσταση</translation>
</message>
<message>
<source>Date</source>
<translation>Ημερομηνία</translation>
</message>
<message>
<source>Source</source>
<translation>Πηγή</translation>
</message>
<message>
<source>Generated</source>
<translation>Δημιουργήθηκε</translation>
</message>
<message>
<source>From</source>
<translation>Από</translation>
</message>
<message>
<source>To</source>
<translation>Προς</translation>
</message>
<message>
<source>own address</source>
<translation> δική σας διεύθυνση </translation>
</message>
<message>
<source>watch-only</source>
<translation>επίβλεψη μόνο</translation>
</message>
<message>
<source>label</source>
<translation>επιγραφή</translation>
</message>
<message>
<source>Credit</source>
<translation>Πίστωση </translation>
</message>
<message>
<source>not accepted</source>
<translation>μη αποδεκτό</translation>
</message>
<message>
<source>Debit</source>
<translation>Χρέωση</translation>
</message>
<message>
<source>Total debit</source>
<translation>Σύνολο χρέωσης</translation>
</message>
<message>
<source>Total credit</source>
<translation>Συνολική πίστωση</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Τέλος συναλλαγής </translation>
</message>
<message>
<source>Net amount</source>
<translation>Καθαρό ποσό</translation>
</message>
<message>
<source>Message</source>
<translation>Μήνυμα</translation>
</message>
<message>
<source>Comment</source>
<translation>Σχόλιο</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>ID Συναλλαγής</translation>
</message>
<message>
<source>Merchant</source>
<translation>Έμπορος</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Πρέπει να περιμένετε %1 μπλοκ πριν μπορέσετε να χρησιμοποιήσετε τα νομίσματα που έχετε δημιουργήσει. Το μπλοκ που δημιουργήσατε μεταδόθηκε στο δίκτυο για να συμπεριληφθεί στην αλυσίδα των μπλοκ. Αν δεν μπει σε αυτή θα μετατραπεί σε "μη αποδεκτό" και δε θα μπορεί να καταναλωθεί. Αυτό συμβαίνει σπάνια όταν κάποιος άλλος κόμβος δημιουργήσει ένα μπλοκ λίγα δευτερόλεπτα πριν από εσάς.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Πληροφορίες αποσφαλμάτωσης</translation>
</message>
<message>
<source>Transaction</source>
<translation>Συναλλαγή</translation>
</message>
<message>
<source>Inputs</source>
<translation>Εισροές</translation>
</message>
<message>
<source>Amount</source>
<translation>Ποσό</translation>
</message>
<message>
<source>true</source>
<translation>αληθής</translation>
</message>
<message>
<source>false</source>
<translation>αναληθής </translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, δεν έχει ακόμα μεταδοθεί μ' επιτυχία</translation>
</message>
<message>
<source>unknown</source>
<translation>άγνωστο</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>Λεπτομέρειες συναλλαγής</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Αυτό το παράθυρο δείχνει μια λεπτομερή περιγραφή της συναλλαγής</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Ημερομηνία</translation>
</message>
<message>
<source>Type</source>
<translation>Τύπος</translation>
</message>
<message>
<source>Open until %1</source>
<translation>Ανοιχτό μέχρι %1</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Επικυρωμένη (%1 επικυρώσεις)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Αυτό το μπλοκ δεν έχει παραληφθεί από κανέναν άλλο κόμβο και κατά πάσα πιθανότητα θα απορριφθεί!</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>Δημιουργήθηκε αλλά απορρίφθηκε</translation>
</message>
<message>
<source>Offline</source>
<translation>Offline</translation>
</message>
<message>
<source>Label</source>
<translation>Επιγραφή</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Ανεπιβεβαίωτες</translation>
</message>
<message>
<source>Conflicted</source>
<translation>Σύγκρουση</translation>
</message>
<message>
<source>Received with</source>
<translation>Ελήφθη με</translation>
</message>
<message>
<source>Received from</source>
<translation>Ελήφθη από</translation>
</message>
<message>
<source>Sent to</source>
<translation>Απεστάλη προς</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Πληρωμή προς εσάς</translation>
</message>
<message>
<source>Mined</source>
<translation>Εξόρυξη</translation>
</message>
<message>
<source>watch-only</source>
<translation>επίβλεψη μόνο</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(δ/α)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Κατάσταση συναλλαγής. Πηγαίνετε το ποντίκι πάνω από αυτό το πεδίο για να δείτε τον αριθμό των επικυρώσεων</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Ημερομηνία κι ώρα λήψης της συναλλαγής.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Είδος συναλλαγής.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Ποσό που αφαιρέθηκε ή προστέθηκε στο υπόλοιπο.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Όλα</translation>
</message>
<message>
<source>Today</source>
<translation>Σήμερα</translation>
</message>
<message>
<source>This week</source>
<translation>Αυτή την εβδομάδα</translation>
</message>
<message>
<source>This month</source>
<translation>Αυτόν τον μήνα</translation>
</message>
<message>
<source>Last month</source>
<translation>Τον προηγούμενο μήνα</translation>
</message>
<message>
<source>This year</source>
<translation>Αυτό το έτος</translation>
</message>
<message>
<source>Range...</source>
<translation>Έκταση...</translation>
</message>
<message>
<source>Received with</source>
<translation>Ελήφθη με</translation>
</message>
<message>
<source>Sent to</source>
<translation>Απεστάλη προς</translation>
</message>
<message>
<source>To yourself</source>
<translation>Προς εσάς</translation>
</message>
<message>
<source>Mined</source>
<translation>Εξόρυξη</translation>
</message>
<message>
<source>Other</source>
<translation>Άλλο</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Αναζήτηση με βάση τη διεύθυνση ή την επιγραφή</translation>
</message>
<message>
<source>Min amount</source>
<translation>Ελάχιστο ποσό</translation>
</message>
<message>
<source>Copy address</source>
<translation>Αντιγραφή διεύθυνσης</translation>
</message>
<message>
<source>Copy label</source>
<translation>Αντιγραφή επιγραφής</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Αντιγραφή ποσού</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Αντιγραφη του ID Συναλλαγής</translation>
</message>
<message>
<source>Edit label</source>
<translation>Επεξεργασία επιγραφής</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Εμφάνιση λεπτομερειών συναλλαγής</translation>
</message>
<message>
<source>Export Transaction History</source>
<translation>Εξαγωγή Ιστορικού Συναλλαγών</translation>
</message>
<message>
<source>Watch-only</source>
<translation>Επίβλεψη μόνο</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Η Εξαγωγή Απέτυχε</translation>
</message>
<message>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>Yπήρξε σφάλμα κατά την προσπάθεια αποθήκευσης του ιστορικού συναλλαγών στο %1.</translation>
</message>
<message>
<source>Exporting Successful</source>
<translation>Επιτυχής εξαγωγή</translation>
</message>
<message>
<source>The transaction history was successfully saved to %1.</source>
<translation>Το ιστορικό συναλλαγών αποθηκεύτηκε επιτυχώς στο %1.</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Επικυρωμένες</translation>
</message>
<message>
<source>Date</source>
<translation>Ημερομηνία</translation>
</message>
<message>
<source>Type</source>
<translation>Τύπος</translation>
</message>
<message>
<source>Label</source>
<translation>Επιγραφή</translation>
</message>
<message>
<source>Address</source>
<translation>Διεύθυνση</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Range:</source>
<translation>Έκταση:</translation>
</message>
<message>
<source>to</source>
<translation>έως</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
<message>
<source>Unit to show amounts in. Click to select another unit.</source>
<translation>Μονάδα μέτρησης προβολής ποσών. Κάντε κλικ για επιλογή άλλης μονάδας.</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>No wallet has been loaded.</source>
<translation>Δεν έχει φορτωθεί πορτοφόλι</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Αποστολή νομισμάτων</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Εξαγωγή</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Αντίγραφο ασφαλείας του πορτοφολιού</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Αρχεία δεδομένων πορτοφολιού (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Αποτυχία κατά τη δημιουργία αντιγράφου</translation>
</message>
<message>
<source>There was an error trying to save the wallet data to %1.</source>
<translation>Παρουσιάστηκε σφάλμα κατά την αποθήκευση των δεδομένων πορτοφολιού στο %1.</translation>
</message>
<message>
<source>The wallet data was successfully saved to %1.</source>
<translation>Τα δεδομένα πορτοφολιού αποθηκεύτηκαν με επιτυχία στο %1.</translation>
</message>
<message>
<source>Backup Successful</source>
<translation>Η δημιουργία αντιγράφου ασφαλείας πέτυχε</translation>
</message>
</context>
<context>
<name>hellebit-core</name>
<message>
<source>Options:</source>
<translation>Επιλογές:</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Ορισμός φακέλου δεδομένων</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Σύνδεση σε έναν κόμβο για την ανάκτηση διευθύνσεων από ομοτίμους, και αποσύνδεση</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Ορίστε τη δική σας δημόσια διεύθυνση</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Αποδοχή εντολών κονσόλας και JSON-RPC</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Εκτέλεση στο παρασκήνιο κι αποδοχή εντολών</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Αποδοχή συνδέσεων από έξω (προεπιλογή: 1 αν δεν υπάρχει -proxy ή -connect)</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Δέσμευση στη δοθείσα διεύθυνση και μόνιμη ακρόαση σε αυτήν. Χρησιμοποιήστε τον συμβολισμό [host]:port για IPv6</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Εκτέλεση εντολής όταν αλλάζει μια συναλλαγή του πορτοφολιού (το %s στην εντολή αντικαθίσταται από το TxID)</translation>
</message>
<message>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Αυτή είναι μια δοκιμαστική προ-έκδοση - χρησιμοποιήστε την με δική σας ευθύνη - μην τη χρησιμοποιείτε για εξόρυξη ή εμπορικές εφαρμογές</translation>
</message>
<message>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Προειδοποίηση: το αρχείο wallet.dat είναι κατεστραμμένο, τα δεδομένα διασώθηκαν! Το αρχικό wallet.dat αποθηκεύτηκε ως wallet.{timestamp}.bak στο %s· αν το υπόλοιπο ή οι συναλλαγές σας είναι λανθασμένες, θα πρέπει να κάνετε επαναφορά από αντίγραφο ασφαλείας.</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Επιλογές δημιουργίας μπλοκ:</translation>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>Σύνδεση μόνο με ορισμένους κόμβους</translation>
</message>
<message>
<source>Connection options:</source>
<translation>Επιλογές σύνδεσης:</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>Εντοπίστηκε κατεστραμμένη βάση δεδομένων των μπλοκ</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Θέλετε να ανακατασκευαστεί τώρα η βάση δεδομένων των μπλοκ;</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων μπλοκ</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων πορτοφόλιου %s!</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Σφάλμα φόρτωσης της βάσης δεδομένων των μπλοκ</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Σφάλμα ανοίγματος της βάσης δεδομένων των μπλοκ</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Σφάλμα: Ο χώρος στο δίσκο είναι χαμηλός!</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Αποτυχία ακρόασης σε οποιαδήποτε θύρα. Χρησιμοποιήστε -listen=0 αν το επιθυμείτε.</translation>
</message>
<message>
<source>Importing...</source>
<translation>Εισαγωγή...</translation>
</message>
<message>
<source>Invalid -onion address: '%s'</source>
<translation>Άκυρη διεύθυνση -onion : '%s'</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>Δεν υπάρχουν αρκετοί διαθέσιμοι περιγραφείς αρχείων.</translation>
</message>
<message>
<source>Only connect to nodes in network <net> (ipv4, ipv6 or onion)</source>
<translation>Μόνο σύνδεση σε κόμβους του δικτύου <net> (ipv4, ipv6 ή onion)</translation>
</message>
<message>
<source>Set maximum block size in bytes (default: %d)</source>
<translation>Ορίστε το μέγιστο μέγεθος block σε bytes (προεπιλογή: %d)</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Επιλέξτε αρχείο πορτοφολιού (μέσα απο κατάλογο δεδομένων)</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Επαλήθευση των μπλοκ... </translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>Επαλήθευση πορτοφολιού...</translation>
</message>
<message>
<source>Wallet %s resides outside data directory %s</source>
<translation>Το πορτοφόλι %s βρίσκεται έξω από το φάκελο δεδομένων %s</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Επιλογές πορτοφολιού:</translation>
</message>
<message>
<source>Cannot obtain a lock on data directory %s. Hellebit Core is probably already running.</source>
<translation>Αδυναμία κλειδώματος του φακέλου δεδομένων %s. Πιθανώς το Hellebit να είναι ήδη ενεργό.</translation>
</message>
<message>
<source>Connect through SOCKS5 proxy</source>
<translation>Σύνδεση μέσω διαμεσολαβητή SOCKS5</translation>
</message>
<message>
<source>Copyright (C) 2009-%i The Hellebit Core Developers</source>
<translation>Πνευματικά δικαιώματα 2009-%i Οι προγραμματιστές του Hellebit Core</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet requires newer version of Hellebit Core</source>
<translation>Σφάλμα φόρτωσης wallet.dat: Το Πορτοφόλι απαιτεί μια νεότερη έκδοση του Hellebit</translation>
</message>
<message>
<source>Error reading from database, shutting down.</source>
<translation>Σφάλμα ανάγνωσης από τη βάση δεδομένων, γίνεται τερματισμός.</translation>
</message>
<message>
<source>Information</source>
<translation>Πληροφορία</translation>
</message>
<message>
<source>Initialization sanity check failed. Hellebit Core is shutting down.</source>
<translation>Η εκκίνηση ελέγχου ορθότητας απέτυχε. Γίνεται τερματισμός του Hellebit Core.</translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s'</source>
<translation>Μη έγκυρο ποσό για την παράμετρο -maxtxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Μη έγκυρο ποσό για την παράμετρο -minrelaytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Μη έγκυρο ποσό για την παράμετρο -mintxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Node relay options:</source>
<translation>Επιλογές αναμετάδοσης κόμβου: </translation>
</message>
<message>
<source>RPC server options:</source>
<translation>Επιλογές διακομιστή RPC:</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στην κονσόλα αντί του αρχείου debug.log</translation>
</message>
<message>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation>Προβολή όλων των επιλογών εντοπισμού σφαλμάτων (χρήση: --help -help-debug)</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Συρρίκνωση του αρχείου debug.log κατά την εκκίνηση του πελάτη (προεπιλογή: 1 όταν δεν υπάρχει -debug)</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>Η υπογραφή συναλλαγής απέτυχε </translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>Η εφαρμογή είναι σε πειραματικό στάδιο.</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Το ποσό της συναλλαγής είναι πολύ μικρό</translation>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation>Τα ποσά των συναλλαγών πρέπει να είναι θετικά</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Η συναλλαγή είναι πολύ μεγάλη</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Όνομα χρήστη για τις συνδέσεις JSON-RPC</translation>
</message>
<message>
<source>Warning</source>
<translation>Προειδοποίηση</translation>
</message>
<message>
<source>Zapping all transactions from wallet...</source>
<translation>Διαγραφή όλων των συναλλαγών από το πορτοφόλι...</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
<translation>Το αρχείο wallet.dat είναι κατεστραμμένο, η διάσωση απέτυχε</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Κωδικός για τις συνδέσεις JSON-RPC</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Εκτέλεσε την εντολή όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation>
</message>
<message>
<source>This help message</source>
<translation>Αυτό το κείμενο βοήθειας</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Να επιτρέπονται αναζητήσεις DNS για τα -addnode, -seednode και -connect</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Φόρτωση διευθύνσεων...</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Σφάλμα φόρτωσης wallet.dat: Κατεστραμμένο Πορτοφόλι</translation>
</message>
<message>
<source>How thorough the block verification of -checkblocks is (0-4, default: %u)</source>
<translation>Πόσο εξονυχιστική να είναι η επιβεβαίωση του μπλοκ (0-4, προεπιλογή: %u)</translation>
</message>
<message>
<source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source>
<translation>Διατήρηση πλήρους ευρετηρίου συναλλαγών, που χρησιμοποιείται από την κλήση rpc getrawtransaction (προεπιλογή: %u)</translation>
</message>
<message>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source>
<translation>Δευτερόλεπτα πριν επιτραπεί ξανά η σύνδεση των προβληματικών peers (προεπιλογή: %u)</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>Σφάλμα φόρτωσης αρχείου wallet.dat</translation>
</message>
<message>
<source>Generate coins (default: %u)</source>
<translation>Δημιουργία νομισμάτων (προεπιλογή: %u)</translation>
</message>
<message>
<source>How many blocks to check at startup (default: %u, 0 = all)</source>
<translation>Πόσα μπλοκ να ελεγχθούν κατά την εκκίνηση (προεπιλογή: %u, 0 = όλα)</translation>
</message>
<message>
<source>Include IP addresses in debug output (default: %u)</source>
<translation>Συμπερίληψη διευθύνσεων IP στην έξοδο αποσφαλμάτωσης (προεπιλογή: %u)</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: '%s'</translation>
</message>
<message>
<source>Maintain at most <n> connections to peers (default: %u)</source>
<translation>Μέγιστος αριθμός συνδέσεων με τους peers <n> (προεπιλογή: %u)</translation>
</message>
<message>
<source>Specify configuration file (default: %s)</source>
<translation>Ορίστε αρχείο ρυθμίσεων (προεπιλογή: %s)</translation>
</message>
<message>
<source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source>
<translation>Ορισμός χρονικού ορίου σύνδεσης σε χιλιοστά του δευτερολέπτου (ελάχιστο: 1, προεπιλογή: %d)</translation>
</message>
<message>
<source>Specify pid file (default: %s)</source>
<translation>Ορίστε αρχείο pid (προεπιλογή: %s)</translation>
</message>
<message>
<source>Threshold for disconnecting misbehaving peers (default: %u)</source>
<translation>Όριο αποσύνδεσης προβληματικών peers (προεπιλογή: %u)</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Άγνωστο δίκτυο ορίζεται στο -onlynet: '%s'</translation>
</message>
<message>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Αδυναμία ανάλυσης της διεύθυνσης -bind: '%s'</translation>
</message>
<message>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Αδυναμία ανάλυσης της διεύθυνσης -externalip: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Ανεπαρκές κεφάλαιο</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Φόρτωση ευρετηρίου μπλοκ...</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Προσέθεσε ένα κόμβο για σύνδεση και προσπάθησε να κρατήσεις την σύνδεση ανοιχτή</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Φόρτωση πορτοφολιού...</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>Δεν μπορώ να υποβαθμίσω το πορτοφόλι</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Ανίχνευση...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Η φόρτωση ολοκληρώθηκε</translation>
</message>
<message>
<source>Error</source>
<translation>Σφάλμα</translation>
</message>
</context>
</TS>
main.rs | use anyhow::Result;
use nom::{
bytes::complete::{tag, take},
combinator::{map, map_res},
multi::{length_count, many_till},
sequence::preceded,
IResult,
};
pub fn main() -> Result<()> {
let start = std::time::Instant::now();
let soln_a = solve_a()?;
eprintln!("Part A elapsed {:?}", start.elapsed());
println!("solution part A: {}", soln_a);
let start = std::time::Instant::now();
let soln_b = solve_b()?;
eprintln!("Part B elapsed {:?}", start.elapsed());
println!("solution part B: {}", soln_b);
Ok(())
}
#[derive(Debug, PartialEq)]
enum Packet {
LiteralPacket {
version: u64,
type_id: u64,
num: u64,
},
OperatorPacket {
version: u64,
type_id: u64,
subpackets: Vec<Packet>,
},
}
fn hex2binary(s: &str) -> Result<String> {
let bytes: Vec<u8> = hex::decode(s)?;
let mut bin_string: String = String::new();
for byte in bytes {
bin_string.push_str(&format!("{:08b}", byte));
}
Ok(bin_string)
}
fn from_binary(s: &str) -> Result<u64, std::num::ParseIntError> {
u64::from_str_radix(s, 2)
}
/// Parse a literal value: 5-bit groups where a leading 1 bit means another
/// group follows and a leading 0 bit marks the final 4-bit group.
fn parse_literal_packet(s: &str) -> IResult<&str, u64> {
let (s, parts) = map(
many_till(
preceded(tag("1"), take(4usize)),
preceded(tag("0"), take(4usize)),
),
|(mut parts, final_part)| {
parts.push(final_part);
parts
},
)(s)?;
let literal = parts.iter().enumerate().fold(0u64, |acc, (i, p)| {
let mut x = u64::from_str_radix(p, 2).unwrap();
x <<= 4 * (parts.len() - i - 1);
acc + x
});
Ok((s, literal))
}
fn u16_bin11(s: &str) -> IResult<&str, u16> {
map_res(take(11usize), |s| u16::from_str_radix(s, 2))(s)
}
/// Parse an operator packet body. The leading length-type-ID bit selects
/// between "next 15 bits give the total bit length of the sub-packets" (0)
/// and "next 11 bits give the number of sub-packets" (1).
fn parse_operator_packet(s: &str) -> IResult<&str, Vec<Packet>> {
let (s, length_id) = map_res(take(1usize), from_binary)(s)?;
let sub_packets_from_len = |s| {
let (ns, subpacket_len) = map_res(take(15usize), from_binary)(s)?;
let mut packets = vec![];
let mut consumed_len = 0;
let mut remaining_s = ns;
while consumed_len < subpacket_len {
let (ns, p) = parse_packet(remaining_s)?;
packets.push(p);
consumed_len += (remaining_s.len() - ns.len()) as u64;
remaining_s = ns;
}
Ok((remaining_s, packets))
};
match length_id {
0 => sub_packets_from_len(s),
1 => length_count(u16_bin11, parse_packet)(s),
_ => unreachable!(),
}
}
fn parse_packet(s: &str) -> IResult<&str, Packet> {
let (s, version) = map_res(take(3usize), from_binary)(s)?;
let (s, type_id) = map_res(take(3usize), from_binary)(s)?;
let (s, p) = match type_id {
4 => {
let (s, lit) = parse_literal_packet(s)?;
(
s,
Packet::LiteralPacket {
version,
type_id,
num: lit,
},
)
}
_ => {
let (s, packets) = parse_operator_packet(s)?;
(
s,
Packet::OperatorPacket {
version,
type_id,
subpackets: packets,
},
)
}
};
Ok((s, p))
}
fn sum_versions(p: &Packet) -> u64 {
match p {
Packet::LiteralPacket { version, .. } => *version,
Packet::OperatorPacket {
version,
subpackets,
..
} => version + subpackets.iter().map(sum_versions).sum::<u64>(),
}
}
fn evaluate_packet(p: &Packet) -> u64 {
match p {
Packet::LiteralPacket { num, .. } => *num,
Packet::OperatorPacket {
type_id,
subpackets,
..
} => {
let x = subpackets.iter().map(evaluate_packet).collect::<Vec<_>>();
match type_id {
0 => x.iter().sum(),
1 => x.iter().product(),
2 => *x.iter().min().unwrap(),
3 => *x.iter().max().unwrap(),
5 => {
if x[0] > x[1] {
1
} else {
0
}
}
6 => {
if x[0] < x[1] {
1
} else {
0
}
}
7 => {
if x[0] == x[1] {
1
} else {
0
}
}
_ => unreachable!(),
}
}
}
}
pub fn solve_a() -> Result<u64> {
let x = hex2binary(include_str!("../input"))?;
let (_, p) = parse_packet(&x).map_err(|e| e.map(|e| (e.input.to_string(), e.code)))?;
let sver = sum_versions(&p);
Ok(sver)
}
pub fn solve_b() -> Result<u64> {
let x = hex2binary(include_str!("../input"))?;
let (_, p) = parse_packet(&x).map_err(|e| e.map(|e| (e.input.to_string(), e.code)))?;
let soln = evaluate_packet(&p);
Ok(soln)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn convert_hex2bin() {
assert_eq!(
hex2binary("D2FE28").unwrap(),
"110100101111111000101000".to_string()
)
}
#[test]
fn test_literal() {
let x = hex2binary("D2FE28").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(
p,
Packet::LiteralPacket {
version: 6,
type_id: 4,
num: 2021,
}
)
}
#[test]
fn test_operator() {
let x = hex2binary("38006F45291200").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(
p,
Packet::OperatorPacket {
version: 1,
type_id: 6,
subpackets: vec![
Packet::LiteralPacket {
version: 6,
type_id: 4,
num: 10,
},
Packet::LiteralPacket {
version: 2,
type_id: 4,
num: 20
}
]
}
)
}
#[test]
fn test_sum_versions() {
let x = hex2binary("8A004A801A8002F478").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(sum_versions(&p), 16);
let x = hex2binary("620080001611562C8802118E34").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(sum_versions(&p), 12);
let x = hex2binary("C0015000016115A2E0802F182340").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(sum_versions(&p), 23);
let x = hex2binary("A0016C880162017C3686B18A3D4780").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(sum_versions(&p), 31);
}
#[test]
fn test_evaluate() {
let x = hex2binary("C200B40A82").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(evaluate_packet(&p), 3);
let x = hex2binary("04005AC33890").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(evaluate_packet(&p), 54);
let x = hex2binary("880086C3E88112").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(evaluate_packet(&p), 7);
let x = hex2binary("CE00C43D881120").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(evaluate_packet(&p), 9);
let x = hex2binary("D8005AC2A8F0").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(evaluate_packet(&p), 1);
let x = hex2binary("F600BC2D8F").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(evaluate_packet(&p), 0);
let x = hex2binary("9C005AC2F8F0").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(evaluate_packet(&p), 0);
let x = hex2binary("9C0141080250320F1802104A08").unwrap();
let (_, p) = parse_packet(&x).unwrap();
assert_eq!(evaluate_packet(&p), 1);
}
}
client_test.go | package main
import (
"net/http"
"net/http/httptest"
"net/url"
"testing"
)
var (
mux *http.ServeMux
client *Client
server *httptest.Server
)
// setup starts an httptest server backed by mux and points the
// package-level client at it.
func setup() {
mux = http.NewServeMux()
server = httptest.NewServer(mux)
client = NewClient("FAKE_API_KEY")
url, _ := url.Parse(server.URL)
client.BaseURL = url
}
func teardown() {
server.Close()
}
func TestClient_TranslateFormData(t *testing.T) {
setup()
defer teardown()
fromLang := ""
toLang := "en"
text := []string{"blahblah"}
mux.HandleFunc("/language/translate/v2", func(w http.ResponseWriter, r *http.Request) {
r.ParseForm()
assertEqual(t, r.Form.Get("key"), "FAKE_API_KEY")
assertEqual(t, r.Form.Get("q"), text[0])
assertEqual(t, r.Form.Get("target"), toLang)
assertEqual(t, r.Form.Get("source"), fromLang)
assertEqual(t, r.Form.Get("format"), "text")
})
client.Translate(fromLang, toLang, text)
}
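// assertEqual is referenced above but not defined in this file; this is a
// minimal sketch of the helper these tests assume (name and signature
// inferred from usage, the real helper may differ):
func assertEqual(t *testing.T, got, want string) {
	t.Helper()
	if got != want {
		t.Errorf("got %q, want %q", got, want)
	}
}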
faucet_pipeline.py | """Standard FAUCET pipeline."""
# Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer.
# Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2019 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from faucet.faucet_metadata import EGRESS_METADATA_MASK
class ValveTableConfig: # pylint: disable=too-few-public-methods,too-many-instance-attributes
"""Configuration for a single table."""
def __init__(self, name, table_id, # pylint: disable=too-many-arguments
exact_match=None, meter=None, output=True, miss_goto=None,
size=None, match_types=None, set_fields=None, dec_ttl=None,
vlan_scale=None, vlan_port_scale=None,
next_tables=None, metadata_match=0, metadata_write=0):
self.name = name
self.table_id = table_id
self.exact_match = exact_match
self.meter = meter
self.output = output
self.miss_goto = miss_goto
self.size = size
self.match_types = match_types
self.set_fields = set_fields
self.dec_ttl = dec_ttl
self.vlan_scale = vlan_scale
self.vlan_port_scale = vlan_port_scale
self.metadata_match = metadata_match
self.metadata_write = metadata_write
if next_tables:
assert isinstance(next_tables, (list, tuple))
self.next_tables = next_tables
else:
self.next_tables = ()
def __str__(self):
field_strs = ' '.join([
'%s: %s' % (key, val)
for key, val in sorted(self.__dict__.items())
if val])
return 'table config %s' % field_strs
def __repr__(self):
return self.__str__()
def __hash__(self):
return hash(self.__str__())
def __eq__(self, other):
return self.__hash__() == other.__hash__()
def __lt__(self, other):
return self.__hash__() < other.__hash__()
_NEXT_ETH = ('eth_dst_hairpin', 'eth_dst', 'flood')
_NEXT_VIP = ('vip',) + _NEXT_ETH
def _fib_table(ipv, table_id):
    """Return a ValveTableConfig for an IPv4/IPv6 FIB table."""
    return ValveTableConfig(
        'ipv%u_fib' % ipv,
        table_id,
        match_types=(('eth_type', False), ('ipv%u_dst' % ipv, True), ('vlan_vid', False)),
        set_fields=('eth_dst', 'eth_src', 'vlan_vid'),
        dec_ttl=True,
        vlan_port_scale=3.1,
        next_tables=_NEXT_VIP
    )
PORT_ACL_DEFAULT_CONFIG = ValveTableConfig(
'port_acl',
0,
match_types=(('in_port', False),),
next_tables=(('vlan',) + _NEXT_VIP)
)
VLAN_DEFAULT_CONFIG = ValveTableConfig(
'vlan',
PORT_ACL_DEFAULT_CONFIG.table_id + 1,
match_types=(('eth_dst', True), ('eth_type', False),
('in_port', False), ('vlan_vid', False)),
set_fields=('vlan_vid',),
vlan_port_scale=3,
next_tables=('copro', 'vlan_acl', 'classification', 'eth_src')
)
COPRO_DEFAULT_CONFIG = ValveTableConfig(
'copro',
VLAN_DEFAULT_CONFIG.table_id + 1,
match_types=(('in_port', False), ('eth_type', False), ('vlan_vid', False)),
vlan_port_scale=1.5,
miss_goto='eth_dst',
    next_tables=('eth_dst',),
)
VLAN_ACL_DEFAULT_CONFIG = ValveTableConfig(
'vlan_acl',
VLAN_DEFAULT_CONFIG.table_id + 1,
next_tables=(('classification', 'eth_src') + _NEXT_ETH))
CLASSIFICATION_DEFAULT_CONFIG = ValveTableConfig(
'classification',
VLAN_ACL_DEFAULT_CONFIG.table_id + 1,
miss_goto='eth_src',
next_tables=(('eth_src', 'ipv4_fib', 'ipv6_fib') + _NEXT_VIP)
)
ETH_SRC_DEFAULT_CONFIG = ValveTableConfig(
'eth_src',
CLASSIFICATION_DEFAULT_CONFIG.table_id + 1,
miss_goto='eth_dst',
next_tables=(('ipv4_fib', 'ipv6_fib') + _NEXT_VIP),
match_types=(('eth_dst', True), ('eth_src', False), ('eth_type', False),
('in_port', False), ('vlan_vid', False)),
set_fields=('vlan_vid', 'eth_dst'),
vlan_port_scale=4.1,
)
IPV4_FIB_DEFAULT_CONFIG = _fib_table(4, ETH_SRC_DEFAULT_CONFIG.table_id + 1)
IPV6_FIB_DEFAULT_CONFIG = _fib_table(6, IPV4_FIB_DEFAULT_CONFIG.table_id + 1)
VIP_DEFAULT_CONFIG = ValveTableConfig(
'vip',
IPV6_FIB_DEFAULT_CONFIG.table_id + 1,
match_types=(('arp_tpa', False), ('eth_dst', False), ('eth_type', False),
('icmpv6_type', False), ('ip_proto', False)),
next_tables=_NEXT_ETH,
vlan_scale=8,
)
ETH_DST_HAIRPIN_DEFAULT_CONFIG = ValveTableConfig(
'eth_dst_hairpin',
VIP_DEFAULT_CONFIG.table_id + 1,
match_types=(('in_port', False), ('eth_dst', False), ('vlan_vid', False)),
miss_goto='eth_dst',
exact_match=True,
vlan_port_scale=4.1,
)
ETH_DST_DEFAULT_CONFIG = ValveTableConfig(
'eth_dst',
ETH_DST_HAIRPIN_DEFAULT_CONFIG.table_id + 1,
exact_match=True,
miss_goto='flood', # Note: when using egress acls the miss goto will be
# egress acl table
match_types=(('eth_dst', False), ('vlan_vid', False)),
next_tables=('egress', 'egress_acl'),
vlan_port_scale=4.1,
metadata_write=EGRESS_METADATA_MASK
)
EGRESS_ACL_DEFAULT_CONFIG = ValveTableConfig(
'egress_acl',
ETH_DST_DEFAULT_CONFIG.table_id + 1,
next_tables=('egress',)
)
EGRESS_DEFAULT_CONFIG = ValveTableConfig(
'egress',
EGRESS_ACL_DEFAULT_CONFIG.table_id + 1,
match_types=(('metadata', True), ('vlan_vid', False)),
vlan_port_scale=1.5,
next_tables=('flood',),
miss_goto='flood',
metadata_match=EGRESS_METADATA_MASK
)
FLOOD_DEFAULT_CONFIG = ValveTableConfig(
'flood',
EGRESS_DEFAULT_CONFIG.table_id + 1,
match_types=(('eth_dst', True), ('in_port', False), ('vlan_vid', False)),
vlan_port_scale=8.0,
)
MINIMUM_FAUCET_PIPELINE_TABLES = {
'vlan', 'eth_src', 'eth_dst', 'flood'}
# TODO: implement an eth_type table before VLAN. This would enable interception
# of control protocols and simplify matches in vlan/eth_src, enabling use of
# exact_match.
FAUCET_PIPELINE = (
PORT_ACL_DEFAULT_CONFIG,
VLAN_DEFAULT_CONFIG,
COPRO_DEFAULT_CONFIG,
VLAN_ACL_DEFAULT_CONFIG,
CLASSIFICATION_DEFAULT_CONFIG,
ETH_SRC_DEFAULT_CONFIG,
IPV4_FIB_DEFAULT_CONFIG,
IPV6_FIB_DEFAULT_CONFIG,
VIP_DEFAULT_CONFIG,
ETH_DST_HAIRPIN_DEFAULT_CONFIG,
ETH_DST_DEFAULT_CONFIG,
EGRESS_ACL_DEFAULT_CONFIG,
EGRESS_DEFAULT_CONFIG,
FLOOD_DEFAULT_CONFIG,
)
DEFAULT_CONFIGS = {
'port_acl': PORT_ACL_DEFAULT_CONFIG,
'vlan': VLAN_DEFAULT_CONFIG,
'copro': COPRO_DEFAULT_CONFIG,
'vlan_acl': VLAN_ACL_DEFAULT_CONFIG,
'eth_src': ETH_SRC_DEFAULT_CONFIG,
'ipv4_fib': IPV4_FIB_DEFAULT_CONFIG,
'ipv6_fib': IPV6_FIB_DEFAULT_CONFIG,
'vip': VIP_DEFAULT_CONFIG,
'eth_dst_hairpin': ETH_DST_HAIRPIN_DEFAULT_CONFIG,
'eth_dst': ETH_DST_DEFAULT_CONFIG,
'egress_acl': EGRESS_ACL_DEFAULT_CONFIG,
'egress': EGRESS_DEFAULT_CONFIG,
'flood': FLOOD_DEFAULT_CONFIG,
}
index.js | module.exports = new Date(1992, 11, 9) |
stack.go | package stack
// Stack implemented on top of an array (slice).
//Stack ...
type Stack struct {
items []string
current int
}
//NewStack ...
func NewStack() *Stack {
return &Stack{
items: make([]string, 10),
current: 0,
}
}
func (s *Stack) Push(item string) {
s.current++
if len(s.items) == s.current {
s.items = append(s.items, item)
return
}
s.items[s.current] = item
}
// Pop removes and returns the item on top of the stack, or "" when the
// stack is empty.
func (s *Stack) Pop() string {
	if s.current == 0 {
		return ""
	}
	item := s.items[s.current]
	s.current--
	return item
}
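stack_example.go | // A usage sketch for the stack package above (hypothetical file and import
// path, not part of the original package): Push/Pop are LIFO, and Pop
// returns "" for an empty stack.
package main

import (
	"fmt"

	"example.com/stack" // assumed module path
)

func main() {
	s := stack.NewStack()
	s.Push("a")
	s.Push("b")
	fmt.Println(s.Pop()) // "b": last in, first out
	fmt.Println(s.Pop()) // "a"
	fmt.Println(s.Pop()) // "": the stack is empty again
}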
file.rs | // Copyright 2020 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs::{File, OpenOptions};
use std::io::{BufReader, BufWriter, Read};
use std::path::Path;
use anyhow::{anyhow, bail, Context, Result};
use bincode::Options;
use clap::crate_version;
use serde::{Deserialize, Serialize};
use xz2::read::XzDecoder;
use crate::io::BUFFER_SIZE;
use super::*;
/// Magic header value for osmet binary.
const OSMET_FILE_HEADER_MAGIC: [u8; 8] = *b"OSMET\0\0\0";
/// Basic versioning. Used as a safety check that we're unpacking something we understand. Bump
/// this when making changes to the format.
const OSMET_FILE_VERSION: u32 = 1;
/// We currently use bincode for serialization. Note bincode does not support backwards compatible
/// changes well. However we do not currently care about backcompatibility. If that changes, we
/// should change serializer.
#[derive(Serialize, Deserialize, Debug)]
pub(super) struct OsmetFileHeader {
magic: [u8; 8],
version: u32,
/// For informational purposes only.
app_version: String,
/// Required sector size of target block device during unpacking.
pub(super) sector_size: u32,
pub(super) os_description: String,
pub(super) os_architecture: String,
}
impl OsmetFileHeader {
pub(super) fn new(sector_size: u32, os_description: &str) -> Self {
Self {
magic: OSMET_FILE_HEADER_MAGIC,
version: OSMET_FILE_VERSION,
app_version: crate_version!().into(),
sector_size,
os_description: os_description.into(),
// There's an assumption here that the OS we're packing is for the same
// architecture on which we're running. This holds, because packing is done by cosa,
// which today doesn't support cross-building. But the osmet format and algorithm
// itself actually doesn't care about the target architecture. In the future, a more
// correct approach is to read this directly from the e.g. coreos-assembler.basearch
// in the commit metadata on the source disk.
os_architecture: nix::sys::utsname::uname().machine().into(),
}
}
}
pub(super) fn osmet_file_write(
path: &Path,
header: OsmetFileHeader,
osmet: Osmet,
mut xzpacked_image: File,
) -> Result<()> {
    validate_osmet(&osmet).context("validating before writing")?;
    // would be nice to opportunistically do open(O_TMPFILE) then linkat here, but the tempfile API
    // doesn't provide that API: https://github.com/Stebalien/tempfile/pull/31
    let mut f = BufWriter::with_capacity(
        BUFFER_SIZE,
        tempfile::Builder::new()
            .prefix("coreos-installer-osmet")
            .suffix(".partial")
            .tempfile_in(path.parent().unwrap())?,
    );
    bincoder()
        .serialize_into(&mut f, &header)
        .context("failed to serialize osmet file header")?;
    bincoder()
        .serialize_into(&mut f, &osmet)
        .context("failed to serialize osmet")?;
    // and followed by the xz-compressed packed image
    copy(&mut xzpacked_image, &mut f)?;
    f.into_inner()
        .context("failed to flush write buffer")?
        .persist(path)
        .with_context(|| format!("failed to persist tempfile to {:?}", path))?;
    Ok(())
}
/// Reads in the header, and does some basic sanity checking.
fn read_and_check_header(mut f: &mut impl Read) -> Result<OsmetFileHeader> {
let header: OsmetFileHeader = bincoder()
.deserialize_from(&mut f)
.context("failed to deserialize osmet file")?;
if header.magic != OSMET_FILE_HEADER_MAGIC {
bail!("not an OSMET file!");
}
if header.version != OSMET_FILE_VERSION {
bail!("incompatible OSMET file version {}", header.version);
}
Ok(header)
}
pub(super) fn osmet_file_read_header(path: &Path) -> Result<OsmetFileHeader> {
let mut f = BufReader::with_capacity(
BUFFER_SIZE,
OpenOptions::new()
.read(true)
.open(path)
.with_context(|| format!("opening {:?}", path))?,
);
read_and_check_header(&mut f)
}
pub(super) fn osmet_file_read(path: &Path) -> Result<(OsmetFileHeader, Osmet, impl Read + Send)> {
let mut f = BufReader::with_capacity(
BUFFER_SIZE,
OpenOptions::new()
.read(true)
.open(path)
.with_context(|| format!("opening {:?}", path))?,
);
let header = read_and_check_header(&mut f)?;
let osmet: Osmet = bincoder()
.deserialize_from(&mut f)
.context("failed to deserialize osmet file")?;
validate_osmet(&osmet).context("validating after reading")?;
Ok((header, osmet, XzDecoder::new(f)))
}
fn validate_osmet(osmet: &Osmet) -> Result<()> {
if osmet.partitions.is_empty() {
bail!("OSMET file has no partitions!");
}
// sanity-check partitions and mappings are in canonical form
let mut cursor: u64 = 0;
for (i, partition) in osmet.partitions.iter().enumerate() {
if cursor > partition.start_offset {
bail!(
"cursor past partition start: {} vs {}",
cursor,
partition.start_offset
);
}
cursor = cursor
.checked_add(
verify_canonical(&partition.mappings)
.with_context(|| format!("partition {}", i))?,
)
.ok_or_else(|| anyhow!("overflow after partition {}", i))?;
if cursor > partition.end_offset {
bail!(
"cursor past partition end: {} vs {}",
cursor,
partition.end_offset
);
}
cursor = partition.end_offset;
}
Ok(())
}
fn verify_canonical(mappings: &[Mapping]) -> Result<u64> {
let mut cursor: u64 = 0;
for (i, mapping) in mappings.iter().enumerate() {
if cursor > mapping.extent.physical {
bail!(
"cursor past mapping start: {} vs {}",
cursor,
mapping.extent.physical
);
}
cursor = mapping
.extent
.physical
.checked_add(mapping.extent.length)
.ok_or_else(|| anyhow!("overflow after mapping {}", i))?;
}
Ok(cursor)
}
fn bincoder() -> impl bincode::Options {
bincode::options()
.allow_trailing_bytes()
// make the defaults explicit
.with_no_limit()
.with_little_endian()
.with_varint_encoding()
}
attach_detach_controller.go | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package attachdetach implements a controller to manage volume attach and detach
// operations.
package attachdetach
import (
"fmt"
"net"
"time"
authenticationv1 "k8s.io/api/authentication/v1"
"k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/util/wait"
coreinformers "k8s.io/client-go/informers/core/v1"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/client-go/kubernetes/scheme"
v1core "k8s.io/client-go/kubernetes/typed/core/v1"
corelisters "k8s.io/client-go/listers/core/v1"
kcache "k8s.io/client-go/tools/cache"
"k8s.io/client-go/tools/record"
"k8s.io/client-go/util/workqueue"
cloudprovider "k8s.io/cloud-provider"
csiclient "k8s.io/csi-api/pkg/client/clientset/versioned"
"k8s.io/klog"
"k8s.io/kubernetes/pkg/controller"
"k8s.io/kubernetes/pkg/controller/volume/attachdetach/cache"
"k8s.io/kubernetes/pkg/controller/volume/attachdetach/metrics"
"k8s.io/kubernetes/pkg/controller/volume/attachdetach/populator"
"k8s.io/kubernetes/pkg/controller/volume/attachdetach/reconciler"
"k8s.io/kubernetes/pkg/controller/volume/attachdetach/statusupdater"
"k8s.io/kubernetes/pkg/controller/volume/attachdetach/util"
"k8s.io/kubernetes/pkg/util/mount"
"k8s.io/kubernetes/pkg/volume"
volumeutil "k8s.io/kubernetes/pkg/volume/util"
"k8s.io/kubernetes/pkg/volume/util/operationexecutor"
"k8s.io/kubernetes/pkg/volume/util/volumepathhandler"
)
// TimerConfig contains configuration of internal attach/detach timers and
// should be used only to speed up tests. DefaultTimerConfig is the suggested
// timer configuration for production.
type TimerConfig struct {
// ReconcilerLoopPeriod is the amount of time the reconciler loop waits
// between successive executions
ReconcilerLoopPeriod time.Duration
// ReconcilerMaxWaitForUnmountDuration is the maximum amount of time the
// attach detach controller will wait for a volume to be safely unmounted
// from its node. Once this time has expired, the controller will assume the
// node or kubelet are unresponsive and will detach the volume anyway.
ReconcilerMaxWaitForUnmountDuration time.Duration
// DesiredStateOfWorldPopulatorLoopSleepPeriod is the amount of time the
// DesiredStateOfWorldPopulator loop waits between successive executions
DesiredStateOfWorldPopulatorLoopSleepPeriod time.Duration
// DesiredStateOfWorldPopulatorListPodsRetryDuration is the amount of
// time the DesiredStateOfWorldPopulator loop waits between list pods
// calls.
DesiredStateOfWorldPopulatorListPodsRetryDuration time.Duration
}
// DefaultTimerConfig is the default configuration of Attach/Detach controller
// timers.
var DefaultTimerConfig TimerConfig = TimerConfig{
ReconcilerLoopPeriod: 100 * time.Millisecond,
ReconcilerMaxWaitForUnmountDuration: 6 * time.Minute,
DesiredStateOfWorldPopulatorLoopSleepPeriod: 1 * time.Minute,
DesiredStateOfWorldPopulatorListPodsRetryDuration: 3 * time.Minute,
}
// AttachDetachController defines the operations supported by this controller.
type AttachDetachController interface {
Run(stopCh <-chan struct{})
GetDesiredStateOfWorld() cache.DesiredStateOfWorld
}
// NewAttachDetachController returns a new instance of AttachDetachController.
func NewAttachDetachController(
kubeClient clientset.Interface,
csiClient csiclient.Interface,
podInformer coreinformers.PodInformer,
nodeInformer coreinformers.NodeInformer,
pvcInformer coreinformers.PersistentVolumeClaimInformer,
pvInformer coreinformers.PersistentVolumeInformer,
cloud cloudprovider.Interface,
plugins []volume.VolumePlugin,
prober volume.DynamicPluginProber,
disableReconciliationSync bool,
reconcilerSyncDuration time.Duration,
	timerConfig TimerConfig) (AttachDetachController, error) {
	// TODO: The default resyncPeriod for shared informers is 12 hours, this is
	// unacceptable for the attach/detach controller. For example, if a pod is
	// skipped because the node it is scheduled to didn't set its annotation in
	// time, we don't want to have to wait 12hrs before processing the pod
	// again.
	// Luckily https://github.com/kubernetes/kubernetes/issues/23394 is being
	// worked on and will split resync in to resync and relist. Once that
	// happens the resync period can be set to something much faster (30
	// seconds).
	// If that issue is not resolved in time, then this controller will have to
	// consider some unappealing alternate options: use a non-shared informer
	// and set a faster resync period even if it causes relist, or requeue
	// dropped pods so they are continuously processed until it is accepted or
	// deleted (probably can't do this with sharedInformer), etc.
	adc := &attachDetachController{
		kubeClient:  kubeClient,
		csiClient:   csiClient,
		pvcLister:   pvcInformer.Lister(),
		pvcsSynced:  pvcInformer.Informer().HasSynced,
		pvLister:    pvInformer.Lister(),
		pvsSynced:   pvInformer.Informer().HasSynced,
		podLister:   podInformer.Lister(),
		podsSynced:  podInformer.Informer().HasSynced,
		podIndexer:  podInformer.Informer().GetIndexer(),
		nodeLister:  nodeInformer.Lister(),
		nodesSynced: nodeInformer.Informer().HasSynced,
		cloud:       cloud,
		pvcQueue:    workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "pvcs"),
	}
	if err := adc.volumePluginMgr.InitPlugins(plugins, prober, adc); err != nil {
		return nil, fmt.Errorf("Could not initialize volume plugins for Attach/Detach Controller: %+v", err)
	}
	eventBroadcaster := record.NewBroadcaster()
	eventBroadcaster.StartLogging(klog.Infof)
	eventBroadcaster.StartRecordingToSink(&v1core.EventSinkImpl{Interface: kubeClient.CoreV1().Events("")})
	recorder := eventBroadcaster.NewRecorder(scheme.Scheme, v1.EventSource{Component: "attachdetach-controller"})
	blkutil := volumepathhandler.NewBlockVolumePathHandler()
	adc.desiredStateOfWorld = cache.NewDesiredStateOfWorld(&adc.volumePluginMgr)
	adc.actualStateOfWorld = cache.NewActualStateOfWorld(&adc.volumePluginMgr)
	adc.attacherDetacher =
		operationexecutor.NewOperationExecutor(operationexecutor.NewOperationGenerator(
			kubeClient,
			&adc.volumePluginMgr,
			recorder,
			false, // flag for experimental binary check for volume mount
			blkutil))
	adc.nodeStatusUpdater = statusupdater.NewNodeStatusUpdater(
		kubeClient, nodeInformer.Lister(), adc.actualStateOfWorld)
	// Default these to values in options
	adc.reconciler = reconciler.NewReconciler(
		timerConfig.ReconcilerLoopPeriod,
		timerConfig.ReconcilerMaxWaitForUnmountDuration,
		reconcilerSyncDuration,
		disableReconciliationSync,
		adc.desiredStateOfWorld,
		adc.actualStateOfWorld,
		adc.attacherDetacher,
		adc.nodeStatusUpdater,
		recorder)
	adc.desiredStateOfWorldPopulator = populator.NewDesiredStateOfWorldPopulator(
		timerConfig.DesiredStateOfWorldPopulatorLoopSleepPeriod,
		timerConfig.DesiredStateOfWorldPopulatorListPodsRetryDuration,
		podInformer.Lister(),
		adc.desiredStateOfWorld,
		&adc.volumePluginMgr,
		pvcInformer.Lister(),
		pvInformer.Lister())
	podInformer.Informer().AddEventHandler(kcache.ResourceEventHandlerFuncs{
		AddFunc:    adc.podAdd,
		UpdateFunc: adc.podUpdate,
		DeleteFunc: adc.podDelete,
	})
	// This custom indexer will index pods by its PVC keys. Then we don't need
	// to iterate all pods every time to find pods which reference given PVC.
	adc.podIndexer.AddIndexers(kcache.Indexers{
		pvcKeyIndex: indexByPVCKey,
	})
	nodeInformer.Informer().AddEventHandler(kcache.ResourceEventHandlerFuncs{
		AddFunc:    adc.nodeAdd,
		UpdateFunc: adc.nodeUpdate,
		DeleteFunc: adc.nodeDelete,
	})
	pvcInformer.Informer().AddEventHandler(kcache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			adc.enqueuePVC(obj)
		},
		UpdateFunc: func(old, new interface{}) {
			adc.enqueuePVC(new)
		},
	})
	return adc, nil
}
const (
pvcKeyIndex string = "pvcKey"
)
// indexByPVCKey returns PVC keys for given pod. Note that the index is only
// used for attaching, so we are only interested in active pods with nodeName
// set.
func indexByPVCKey(obj interface{}) ([]string, error) {
pod, ok := obj.(*v1.Pod)
if !ok {
return []string{}, nil
}
if len(pod.Spec.NodeName) == 0 || volumeutil.IsPodTerminated(pod, pod.Status) {
return []string{}, nil
}
keys := []string{}
for _, podVolume := range pod.Spec.Volumes {
if pvcSource := podVolume.VolumeSource.PersistentVolumeClaim; pvcSource != nil {
keys = append(keys, fmt.Sprintf("%s/%s", pod.Namespace, pvcSource.ClaimName))
}
}
return keys, nil
}
type attachDetachController struct {
// kubeClient is the kube API client used by volumehost to communicate with
// the API server.
kubeClient clientset.Interface
// csiClient is the csi.storage.k8s.io API client used by volumehost to communicate with
// the API server.
csiClient csiclient.Interface
// pvcLister is the shared PVC lister used to fetch and store PVC
// objects from the API server. It is shared with other controllers and
// therefore the PVC objects in its store should be treated as immutable.
pvcLister corelisters.PersistentVolumeClaimLister
pvcsSynced kcache.InformerSynced
// pvLister is the shared PV lister used to fetch and store PV objects
// from the API server. It is shared with other controllers and therefore
// the PV objects in its store should be treated as immutable.
pvLister corelisters.PersistentVolumeLister
pvsSynced kcache.InformerSynced
podLister corelisters.PodLister
podsSynced kcache.InformerSynced
podIndexer kcache.Indexer
nodeLister corelisters.NodeLister
nodesSynced kcache.InformerSynced
// cloud provider used by volume host
cloud cloudprovider.Interface
// volumePluginMgr used to initialize and fetch volume plugins
volumePluginMgr volume.VolumePluginMgr
// desiredStateOfWorld is a data structure containing the desired state of
// the world according to this controller: i.e. what nodes the controller
// is managing, what volumes it wants be attached to these nodes, and which
// pods are scheduled to those nodes referencing the volumes.
// The data structure is populated by the controller using a stream of node
// and pod API server objects fetched by the informers.
desiredStateOfWorld cache.DesiredStateOfWorld
// actualStateOfWorld is a data structure containing the actual state of
// the world according to this controller: i.e. which volumes are attached
// to which nodes.
// The data structure is populated upon successful completion of attach and
// detach actions triggered by the controller and a periodic sync with
// storage providers for the "true" state of the world.
actualStateOfWorld cache.ActualStateOfWorld
	// attacherDetacher is used to start asynchronous attach and detach operations
attacherDetacher operationexecutor.OperationExecutor
// reconciler is used to run an asynchronous periodic loop to reconcile the
// desiredStateOfWorld with the actualStateOfWorld by triggering attach
// detach operations using the attacherDetacher.
reconciler reconciler.Reconciler
// nodeStatusUpdater is used to update node status with the list of attached
// volumes
nodeStatusUpdater statusupdater.NodeStatusUpdater
// desiredStateOfWorldPopulator runs an asynchronous periodic loop to
// populate the current pods using podInformer.
desiredStateOfWorldPopulator populator.DesiredStateOfWorldPopulator
// recorder is used to record events in the API server
recorder record.EventRecorder
// pvcQueue is used to queue pvc objects
pvcQueue workqueue.RateLimitingInterface
}
func (adc *attachDetachController) Run(stopCh <-chan struct{}) {
defer runtime.HandleCrash()
defer adc.pvcQueue.ShutDown()
klog.Infof("Starting attach detach controller")
defer klog.Infof("Shutting down attach detach controller")
if !controller.WaitForCacheSync("attach detach", stopCh, adc.podsSynced, adc.nodesSynced, adc.pvcsSynced, adc.pvsSynced) {
return
}
err := adc.populateActualStateOfWorld()
if err != nil {
klog.Errorf("Error populating the actual state of world: %v", err)
}
err = adc.populateDesiredStateOfWorld()
if err != nil {
klog.Errorf("Error populating the desired state of world: %v", err)
}
go adc.reconciler.Run(stopCh)
go adc.desiredStateOfWorldPopulator.Run(stopCh)
go wait.Until(adc.pvcWorker, time.Second, stopCh)
metrics.Register(adc.pvcLister,
adc.pvLister,
adc.podLister,
adc.actualStateOfWorld,
adc.desiredStateOfWorld,
&adc.volumePluginMgr)
<-stopCh
}
func (adc *attachDetachController) populateActualStateOfWorld() error {
klog.V(5).Infof("Populating ActualStateOfworld")
nodes, err := adc.nodeLister.List(labels.Everything())
if err != nil {
return err
}
for _, node := range nodes {
nodeName := types.NodeName(node.Name)
for _, attachedVolume := range node.Status.VolumesAttached {
uniqueName := attachedVolume.Name
// The nil VolumeSpec is safe only in the case the volume is not in use by any pod.
// In such a case it should be detached in the first reconciliation cycle and the
			// volume spec is not needed to detach a volume. If the volume is used by a pod,
			// its spec will be filled in later by populateDesiredStateOfWorld, which scans
			// the pods and updates their volumes in the ActualStateOfWorld too.
err = adc.actualStateOfWorld.MarkVolumeAsAttached(uniqueName, nil /* VolumeSpec */, nodeName, attachedVolume.DevicePath)
if err != nil {
klog.Errorf("Failed to mark the volume as attached: %v", err)
continue
}
adc.processVolumesInUse(nodeName, node.Status.VolumesInUse)
adc.addNodeToDswp(node, types.NodeName(node.Name))
}
}
return nil
}
func (adc *attachDetachController) getNodeVolumeDevicePath(
volumeName v1.UniqueVolumeName, nodeName types.NodeName) (string, error) {
var devicePath string
var found bool
node, err := adc.nodeLister.Get(string(nodeName))
if err != nil {
return devicePath, err
}
for _, attachedVolume := range node.Status.VolumesAttached {
if volumeName == attachedVolume.Name {
devicePath = attachedVolume.DevicePath
found = true
break
}
}
if !found {
err = fmt.Errorf("Volume %s not found on node %s", volumeName, nodeName)
}
return devicePath, err
}
func (adc *attachDetachController) populateDesiredStateOfWorld() error {
klog.V(5).Infof("Populating DesiredStateOfworld")
pods, err := adc.podLister.List(labels.Everything())
if err != nil {
return err
}
for _, pod := range pods {
podToAdd := pod
adc.podAdd(podToAdd)
for _, podVolume := range podToAdd.Spec.Volumes {
			// The volume specs present in the ActualStateOfWorld are nil, let's replace those
			// with the correct ones found on pods. The volumes present in the ASW with no
			// corresponding pod will be detached and the spec is irrelevant.
volumeSpec, err := util.CreateVolumeSpec(podVolume, podToAdd.Namespace, adc.pvcLister, adc.pvLister)
if err != nil {
klog.Errorf(
"Error creating spec for volume %q, pod %q/%q: %v",
podVolume.Name,
podToAdd.Namespace,
podToAdd.Name,
err)
continue
}
nodeName := types.NodeName(podToAdd.Spec.NodeName)
plugin, err := adc.volumePluginMgr.FindAttachablePluginBySpec(volumeSpec)
if err != nil || plugin == nil {
klog.V(10).Infof(
"Skipping volume %q for pod %q/%q: it does not implement attacher interface. err=%v",
podVolume.Name,
podToAdd.Namespace,
podToAdd.Name,
err)
continue
}
volumeName, err := volumeutil.GetUniqueVolumeNameFromSpec(plugin, volumeSpec)
if err != nil {
klog.Errorf(
"Failed to find unique name for volume %q, pod %q/%q: %v",
podVolume.Name,
podToAdd.Namespace,
podToAdd.Name,
err)
continue
}
if adc.actualStateOfWorld.VolumeNodeExists(volumeName, nodeName) {
devicePath, err := adc.getNodeVolumeDevicePath(volumeName, nodeName)
if err != nil {
klog.Errorf("Failed to find device path: %v", err)
continue
}
err = adc.actualStateOfWorld.MarkVolumeAsAttached(volumeName, volumeSpec, nodeName, devicePath)
if err != nil {
klog.Errorf("Failed to update volume spec for node %s: %v", nodeName, err)
}
}
}
}
return nil
}
func (adc *attachDetachController) podAdd(obj interface{}) {
pod, ok := obj.(*v1.Pod)
if pod == nil || !ok {
return
}
if pod.Spec.NodeName == "" {
// Ignore pods without NodeName, indicating they are not scheduled.
return
}
volumeActionFlag := util.DetermineVolumeAction(
pod,
adc.desiredStateOfWorld,
true /* default volume action */)
util.ProcessPodVolumes(pod, volumeActionFlag, /* addVolumes */
adc.desiredStateOfWorld, &adc.volumePluginMgr, adc.pvcLister, adc.pvLister)
}
// GetDesiredStateOfWorld returns desired state of world associated with controller
func (adc *attachDetachController) GetDesiredStateOfWorld() cache.DesiredStateOfWorld {
return adc.desiredStateOfWorld
}
func (adc *attachDetachController) podUpdate(oldObj, newObj interface{}) {
pod, ok := newObj.(*v1.Pod)
if pod == nil || !ok {
return
}
if pod.Spec.NodeName == "" {
// Ignore pods without NodeName, indicating they are not scheduled.
return
}
volumeActionFlag := util.DetermineVolumeAction(
pod,
adc.desiredStateOfWorld,
true /* default volume action */)
util.ProcessPodVolumes(pod, volumeActionFlag, /* addVolumes */
adc.desiredStateOfWorld, &adc.volumePluginMgr, adc.pvcLister, adc.pvLister)
}
func (adc *attachDetachController) podDelete(obj interface{}) {
pod, ok := obj.(*v1.Pod)
if pod == nil || !ok {
return
}
util.ProcessPodVolumes(pod, false, /* addVolumes */
adc.desiredStateOfWorld, &adc.volumePluginMgr, adc.pvcLister, adc.pvLister)
}
func (adc *attachDetachController) nodeAdd(obj interface{}) {
node, ok := obj.(*v1.Node)
// TODO: investigate if nodeName is empty then if we can return
// kubernetes/kubernetes/issues/37777
if node == nil || !ok {
return
}
nodeName := types.NodeName(node.Name)
adc.nodeUpdate(nil, obj)
// kubernetes/kubernetes/issues/37586
// This is to workaround the case when a node add causes to wipe out
// the attached volumes field. This function ensures that we sync with
// the actual status.
adc.actualStateOfWorld.SetNodeStatusUpdateNeeded(nodeName)
}
func (adc *attachDetachController) nodeUpdate(oldObj, newObj interface{}) {
node, ok := newObj.(*v1.Node)
// TODO: investigate if nodeName is empty then if we can return
if node == nil || !ok {
return
}
nodeName := types.NodeName(node.Name)
adc.addNodeToDswp(node, nodeName)
adc.processVolumesInUse(nodeName, node.Status.VolumesInUse)
}
func (adc *attachDetachController) nodeDelete(obj interface{}) {
node, ok := obj.(*v1.Node)
if node == nil || !ok {
return
}
nodeName := types.NodeName(node.Name)
if err := adc.desiredStateOfWorld.DeleteNode(nodeName); err != nil {
// This might happen during drain, but we still want it to appear in our logs
klog.Infof("error removing node %q from desired-state-of-world: %v", nodeName, err)
}
adc.processVolumesInUse(nodeName, node.Status.VolumesInUse)
}
func (adc *attachDetachController) enqueuePVC(obj interface{}) {
key, err := kcache.DeletionHandlingMetaNamespaceKeyFunc(obj)
if err != nil {
runtime.HandleError(fmt.Errorf("Couldn't get key for object %+v: %v", obj, err))
return
}
adc.pvcQueue.Add(key)
}
// pvcWorker processes items from pvcQueue
func (adc *attachDetachController) pvcWorker() {
for adc.processNextItem() {
}
}
func (adc *attachDetachController) processNextItem() bool {
keyObj, shutdown := adc.pvcQueue.Get()
if shutdown {
return false
}
defer adc.pvcQueue.Done(keyObj)
if err := adc.syncPVCByKey(keyObj.(string)); err != nil {
// Rather than wait for a full resync, re-add the key to the
// queue to be processed.
adc.pvcQueue.AddRateLimited(keyObj)
runtime.HandleError(fmt.Errorf("Failed to sync pvc %q, will retry again: %v", keyObj.(string), err))
return true
}
// Finally, if no error occurs we Forget this item so it does not
// get queued again until another change happens.
adc.pvcQueue.Forget(keyObj)
return true
}
func (adc *attachDetachController) syncPVCByKey(key string) error {
klog.V(5).Infof("syncPVCByKey[%s]", key)
namespace, name, err := kcache.SplitMetaNamespaceKey(key)
if err != nil {
klog.V(4).Infof("error getting namespace & name of pvc %q to get pvc from informer: %v", key, err)
return nil
}
pvc, err := adc.pvcLister.PersistentVolumeClaims(namespace).Get(name)
if apierrors.IsNotFound(err) {
klog.V(4).Infof("error getting pvc %q from informer: %v", key, err)
return nil
}
if err != nil {
return err
}
if pvc.Status.Phase != v1.ClaimBound || pvc.Spec.VolumeName == "" {
// Skip unbound PVCs.
return nil
}
objs, err := adc.podIndexer.ByIndex(pvcKeyIndex, key)
if err != nil {
return err
}
for _, obj := range objs {
pod, ok := obj.(*v1.Pod)
if !ok {
continue
}
volumeActionFlag := util.DetermineVolumeAction(
pod,
adc.desiredStateOfWorld,
true /* default volume action */)
util.ProcessPodVolumes(pod, volumeActionFlag, /* addVolumes */
adc.desiredStateOfWorld, &adc.volumePluginMgr, adc.pvcLister, adc.pvLister)
}
return nil
}
// processVolumesInUse processes the list of volumes marked as "in-use"
// according to the specified Node's Status.VolumesInUse and updates the
// corresponding volume in the actual state of the world to indicate that it is
// mounted.
func (adc *attachDetachController) processVolumesInUse(
nodeName types.NodeName, volumesInUse []v1.UniqueVolumeName) {
klog.V(4).Infof("processVolumesInUse for node %q", nodeName)
for _, attachedVolume := range adc.actualStateOfWorld.GetAttachedVolumesForNode(nodeName) {
mounted := false
for _, volumeInUse := range volumesInUse {
if attachedVolume.VolumeName == volumeInUse {
mounted = true
break
}
}
err := adc.actualStateOfWorld.SetVolumeMountedByNode(attachedVolume.VolumeName, nodeName, mounted)
if err != nil {
klog.Warningf(
"SetVolumeMountedByNode(%q, %q, %v) returned an error: %v",
attachedVolume.VolumeName, nodeName, mounted, err)
}
}
}
// VolumeHost implementation
// This is an unfortunate requirement of the current factoring of volume plugin
// initializing code. It requires kubelet specific methods used by the mounting
// code to be implemented by all initializers even if the initializer does not
// do mounting (like this attach/detach controller).
// Issue kubernetes/kubernetes/issues/14217 to fix this.
func (adc *attachDetachController) GetPluginDir(podUID string) string {
return ""
}
func (adc *attachDetachController) GetVolumeDevicePluginDir(podUID string) string {
return ""
}
func (adc *attachDetachController) GetPodsDir() string {
return ""
}
func (adc *attachDetachController) GetPodVolumeDir(podUID types.UID, pluginName, volumeName string) string {
return ""
}
func (adc *attachDetachController) GetPodPluginDir(podUID types.UID, pluginName string) string {
return ""
}
func (adc *attachDetachController) GetPodVolumeDeviceDir(podUID types.UID, pluginName string) string {
return ""
}
func (adc *attachDetachController) GetKubeClient() clientset.Interface {
return adc.kubeClient
}
func (adc *attachDetachController) NewWrapperMounter(volName string, spec volume.Spec, pod *v1.Pod, opts volume.VolumeOptions) (volume.Mounter, error) {
return nil, fmt.Errorf("NewWrapperMounter not supported by Attach/Detach controller's VolumeHost implementation")
}
func (adc *attachDetachController) NewWrapperUnmounter(volName string, spec volume.Spec, podUID types.UID) (volume.Unmounter, error) {
return nil, fmt.Errorf("NewWrapperUnmounter not supported by Attach/Detach controller's VolumeHost implementation")
}
func (adc *attachDetachController) GetCloudProvider() cloudprovider.Interface {
return adc.cloud
}
func (adc *attachDetachController) GetMounter(pluginName string) mount.Interface {
return nil
}
func (adc *attachDetachController) GetHostName() string {
return ""
}
func (adc *attachDetachController) GetHostIP() (net.IP, error) {
return nil, fmt.Errorf("GetHostIP() not supported by Attach/Detach controller's VolumeHost implementation")
}
func (adc *attachDetachController) GetNodeAllocatable() (v1.ResourceList, error) {
return v1.ResourceList{}, nil
}
func (adc *attachDetachController) GetSecretFunc() func(namespace, name string) (*v1.Secret, error) {
return func(_, _ string) (*v1.Secret, error) {
return nil, fmt.Errorf("GetSecret unsupported in attachDetachController")
}
}
func (adc *attachDetachController) GetConfigMapFunc() func(namespace, name string) (*v1.ConfigMap, error) {
return func(_, _ string) (*v1.ConfigMap, error) {
return nil, fmt.Errorf("GetConfigMap unsupported in attachDetachController")
}
}
func (adc *attachDetachController) GetServiceAccountTokenFunc() func(_, _ string, _ *authenticationv1.TokenRequest) (*authenticationv1.TokenRequest, error) {
return func(_, _ string, _ *authenticationv1.TokenRequest) (*authenticationv1.TokenRequest, error) {
return nil, fmt.Errorf("GetServiceAccountToken unsupported in attachDetachController")
}
}
func (adc *attachDetachController) DeleteServiceAccountTokenFunc() func(types.UID) {
return func(types.UID) {
klog.Errorf("DeleteServiceAccountToken unsupported in attachDetachController")
}
}
func (adc *attachDetachController) GetExec(pluginName string) mount.Exec {
return mount.NewOsExec()
}
func (adc *attachDetachController) addNodeToDswp(node *v1.Node, nodeName types.NodeName) {
if _, exists := node.Annotations[volumeutil.ControllerManagedAttachAnnotation]; exists {
keepTerminatedPodVolumes := false
if t, ok := node.Annotations[volumeutil.KeepTerminatedPodVolumesAnnotation]; ok {
keepTerminatedPodVolumes = (t == "true")
}
// Node specifies annotation indicating it should be managed by attach
// detach controller. Add it to desired state of world.
adc.desiredStateOfWorld.AddNode(nodeName, keepTerminatedPodVolumes)
}
}
func (adc *attachDetachController) GetNodeLabels() (map[string]string, error) {
return nil, fmt.Errorf("GetNodeLabels() unsupported in Attach/Detach controller")
}
func (adc *attachDetachController) GetNodeName() types.NodeName {
return ""
}
func (adc *attachDetachController) GetEventRecorder() record.EventRecorder {
return adc.recorder
}
func (adc *attachDetachController) GetCSIClient() csiclient.Interface {
return adc.csiClient
}
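attachdetach_test_config.go | // A sketch (hypothetical file, not part of the upstream controller): a
// sped-up TimerConfig that a unit test might pass to NewAttachDetachController,
// per the TimerConfig documentation above. The values are illustrative only.
package attachdetach

import "time"

var testTimerConfig = TimerConfig{
	ReconcilerLoopPeriod:                              10 * time.Millisecond,
	ReconcilerMaxWaitForUnmountDuration:               50 * time.Millisecond,
	DesiredStateOfWorldPopulatorLoopSleepPeriod:       100 * time.Millisecond,
	DesiredStateOfWorldPopulatorListPodsRetryDuration: 100 * time.Millisecond,
}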
HiwinRT605_test_20190619131630.py | #!/usr/bin/env python3
# license removed for brevity
# Strategy: the robot arm cycles back and forth between four points
import rospy
import os
import numpy as np
from std_msgs.msg import String
from ROS_Socket.srv import *
from ROS_Socket.msg import *
import math
import enum
import Hiwin_RT605_ROS as ArmTask
pos_feedback_times = 0
mode_feedback_times = 0
msg_feedback = 1
Arm_state_flag = 0
Strategy_flag = 0
arm_move_times = 1
##-----------server feedback arm state----------
def Arm_state(req):
global CurrentMissionType,Strategy_flag,Arm_state_flag
Arm_state_flag = int('%s'%req.Arm_state)
    if Arm_state_flag == 1:   # arm is busy
        Strategy_flag = 0
        return(1)
    if Arm_state_flag == 0:   # arm is ready
        Strategy_flag = 1
        return(0)
    if Arm_state_flag == 6:   # program interrupted
        Strategy_flag = 6
        return(6)
def strategy_server():
#rospy.init_node(NAME)
s = rospy.Service('arm_state',arm_state, Arm_state) ##server arm state
#rospy.spin() ## spin one
##-----------switch define------------##
class switch(object):
def __init__(self, value):
self.value = value
self.fall = False
    def __iter__(self):
        """Return the match method once, then stop"""
        yield self.match
        return  # a bare return ends the generator; raising StopIteration is an error under PEP 479 (Python 3.7+)
def match(self, *args):
"""Indicate whether or not to enter a case suite"""
if self.fall or not args:
return True
elif self.value in args: # changed for v1.5, see below
self.fall = True
return True
else:
return False
##------------class-------
class point():
def __init__(self,x,y,z,pitch,roll,yaw):
self.x = x
self.y = y
self.z = z
self.pitch = pitch
self.roll = roll
self.yaw = yaw
##-------------------------strategy---------------------
##-----Mission parameters
GetInfoFlag = False
ExecuteFlag = False
GetKeyFlag = False
MotionSerialKey = []
MissionType_Flag = 0
MotionStep = 0
##-----Arm motion and position info
angle_SubCue = 0
LinePtpFlag = False
MoveFlag = False
PushBallHeight = 6
ObjAboveHeight = 10
SpeedValue = 10
MissionEndFlag = False
CurrentMissionType = 0
##---------------Enum---------------##
class ArmMotionCommand(enum.IntEnum):
Arm_Stop = 0
Arm_MoveToTargetUpside = 1
Arm_MoveFowardDown = 2
Arm_MoveVision = 3
Arm_PushBall = 4
Arm_LineUp = 5
Arm_LineDown = 6
Arm_Angle = 7
Arm_StopPush = 8
class MissionType(enum.IntEnum):
Get_Img = 0
PushBall = 1
Pushback = 2
Mission_End = 3
##-----------switch define------------##
class pos():
def __init__(self, x, y, z, pitch, roll, yaw):
self.x = 0
self.y = 36.8
self.z = 11.35
self.pitch = -90
self.roll = 0
self.yaw = 0
class Target_pos():
def __init__(self, x, y, z, pitch, roll, yaw):
self.x = 0
self.y = 36.8
self.z = 11.35
self.pitch = -90
self.roll = 0
self.yaw = 0
class TargetPush_pos():
def __init__(self, x, y, z, pitch, roll, yaw):
self.x = 0
self.y = 36.8
self.z = 11.35
self.pitch = -90
self.roll = 0
self.yaw = 0
class Item():
def __init__(self,x,y,label):
self.x = x
self.y = y
self.label = label
def Mission_Trigger():
if GetInfoFlag == True and GetKeyFlag == False and ExecuteFlag == False:
GetInfo_Mission()
if GetInfoFlag == False and GetKeyFlag == True and ExecuteFlag == False:
GetKey_Mission()
if GetInfoFlag == False and GetKeyFlag == False and ExecuteFlag == True:
Execute_Mission()
def GetInfo_Mission():
global GetInfoFlag,GetKeyFlag,ExecuteFlag
#Billiards_Calculation()
GetInfoFlag = False
GetKeyFlag = True
ExecuteFlag = False
def GetKey_Mission():
global GetInfoFlag,GetKeyFlag,ExecuteFlag,MotionKey,MotionSerialKey
Mission = Get_MissionType()
MissionItem(Mission)
MotionSerialKey = MotionKey
GetInfoFlag = False
GetKeyFlag = False
ExecuteFlag = True
def Get_MissionType():
global MissionType_Flag,CurrentMissionType
    for case in switch(MissionType_Flag): # send command over the socket to select the arm motion
if case(0):
Type = MissionType.PushBall
MissionType_Flag +=1
break
if case(1):
Type = MissionType.Pushback
MissionType_Flag -=1
break
CurrentMissionType = Type
return Type
def MissionItem(ItemNo):
global MotionKey
Key_PushBallCommand = [\
ArmMotionCommand.Arm_MoveToTargetUpside,\
ArmMotionCommand.Arm_LineDown,\
ArmMotionCommand.Arm_PushBall,\
ArmMotionCommand.Arm_LineUp,\
ArmMotionCommand.Arm_Stop,\
]
Key_PushBackCommand = [\
ArmMotionCommand.Arm_MoveVision,\
ArmMotionCommand.Arm_Stop,\
ArmMotionCommand.Arm_StopPush,\
]
    for case in switch(ItemNo): # send command over the socket to select the arm motion
if case(MissionType.PushBall):
MotionKey = Key_PushBallCommand
break
if case(MissionType.Pushback):
MotionKey = Key_PushBackCommand
break
return MotionKey
def Execute_Mission():
    global GetInfoFlag,GetKeyFlag,ExecuteFlag,MotionKey,MotionStep,MotionSerialKey,MissionEndFlag,CurrentMissionType,Strategy_flag,Arm_state_flag
# print("strategy :" ,Strategy_flag)
# print("state :" ,Arm_state_flag)
# if Arm_state_flag == 0 and Strategy_flag == 1:
print(11111)
# Strategy_flag = 0
if ArmTask.state_flag.Arm_feedback == 0 and ArmTask.state_flag.Strategy_feedback == 1:
print(11111)
ArmTask.state_flag.Strategy_feedback = 0
if MotionKey[MotionStep] == ArmMotionCommand.Arm_Stop:
if MissionEndFlag == True:
CurrentMissionType = MissionType.Mission_End
GetInfoFlag = False
GetKeyFlag = False
ExecuteFlag = False
print("Mission_End")
elif CurrentMissionType == MissionType.PushBall:
GetInfoFlag = False
GetKeyFlag = True
ExecuteFlag = False
MotionStep = 0
print("PushBall")
else:
GetInfoFlag = True
GetKeyFlag = False
ExecuteFlag = False
MotionStep = 0
else:
MotionItem(MotionSerialKey[MotionStep])
MotionStep += 1
def MotionItem(ItemNo):
global angle_SubCue,SpeedValue,PushFlag,LinePtpFlag,MissionEndFlag
    SpeedValue = 5
    MoveFlag = False  # default so the post-switch check is safe even for unknown commands
    for case in switch(ItemNo): # send command over the socket to select the arm motion
if case(ArmMotionCommand.Arm_Stop):
MoveFlag = False
print("Arm_Stop")
break
if case(ArmMotionCommand.Arm_StopPush):
MoveFlag = False
            PushFlag = True # rescan objects
print("Arm_StopPush")
break
if case(ArmMotionCommand.Arm_MoveToTargetUpside):
pos.x = 10
pos.y = 36.8
pos.z = 11.35
pos.pitch = -90
pos.roll = 0
pos.yaw = 10
MoveFlag = True
LinePtpFlag = False
SpeedValue = 10
print("Arm_MoveToTargetUpside")
break
if case(ArmMotionCommand.Arm_LineUp):
pos.z = ObjAboveHeight
MoveFlag = True
LinePtpFlag = True
SpeedValue = 5
print("Arm_LineUp")
break
if case(ArmMotionCommand.Arm_LineDown):
pos.z = PushBallHeight
MoveFlag = True
LinePtpFlag = True
SpeedValue = 5
print("Arm_LineDown")
break
if case(ArmMotionCommand.Arm_PushBall):
pos.x = -10
pos.y = 36.8
pos.z = 11.35
pos.pitch = -90
pos.roll = 0
pos.yaw = -10
            SpeedValue = 10 ## speed still to be tested
MoveFlag = True
LinePtpFlag = False
print("Arm_PushBall")
break
if case(ArmMotionCommand.Arm_MoveVision):
pos.x = 0
pos.y = 36.8
pos.z = 11.35
pos.pitch = -90
pos.roll = 0
pos.yaw = 0
SpeedValue = 10
MoveFlag = True
LinePtpFlag = False
            ## mission-end flag
MissionEndFlag = True
print("Arm_MoveVision")
break
if case(ArmMotionCommand.Arm_MoveFowardDown):
pos.x = 0
pos.y = 36.8
pos.z = 11.35
pos.pitch = -90
pos.roll = 0
pos.yaw = 0
MoveFlag = True
LinePtpFlag = False
print("Arm_MoveFowardDown")
break
if case(): # default, could also just omit condition or 'if True'
print ("something else!")
# No need to break here, it'll stop anyway
if MoveFlag == True:
if LinePtpFlag == False:
print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
#strategy_client_Arm_Mode(0,1,0,30,2)#action,ra,grip,vel,both
ArmTask.strategy_client_Arm_Mode(2,1,0,SpeedValue,2)#action,ra,grip,vel,both
ArmTask.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
elif LinePtpFlag == True:
#strategy_client_Arm_Mode(0,1,0,40,2)#action,ra,grip,vel,both
print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
ArmTask.strategy_client_Arm_Mode(3,1,0,SpeedValue,2)#action,ra,grip,vel,both
ArmTask.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
#action: ptp line
#ra : abs rel
    #grip : gripper
#vel speed
#both : Ctrl_Mode
##-------------strategy end ------------
def myhook():
print ("shutdown time!")
if __name__ == '__main__':
argv = rospy.myargv()
rospy.init_node('strategy', anonymous=True)
GetInfoFlag = True #Test no data
#strategy_server()
ArmTask.strategy_client_Arm_Mode(0,1,0,20,2)#action,ra,grip,vel,both
while 1:
Mission_Trigger()
if CurrentMissionType == MissionType.Mission_End:
ArmTask.rospy.on_shutdown(myhook)
ArmTask.rospy.spin()
rospy.spin()
secretpass.go | package cmd
import (
"bytes"
"fmt"
"os"
"os/exec"
"github.com/spf13/cobra"
"github.com/twpayne/chezmoi/internal/chezmoi"
)
var passCmd = &cobra.Command{
Use: "pass [args...]",
Short: "Execute the pass CLI",
PreRunE: config.ensureNoError,
RunE: config.runSecretPassCmd,
}
type passCmdConfig struct {
Command string
}
var passCache = make(map[string]string)
// init registers the pass subcommand and the "pass" template function.
func init() {
secretCmd.AddCommand(passCmd)
config.Pass.Command = "pass"
config.addTemplateFunc("pass", config.passFunc)
}
func (c *Config) runSecretPassCmd(cmd *cobra.Command, args []string) error {
return c.run("", c.Pass.Command, args...)
}
func (c *Config) passFunc(id string) string {
if s, ok := passCache[id]; ok {
return s
}
name := c.Pass.Command
args := []string{"show", id}
cmd := exec.Command(name, args...)
cmd.Stdin = os.Stdin
cmd.Stderr = os.Stderr
output, err := c.mutator.IdempotentCmdOutput(cmd)
if err != nil {
panic(fmt.Errorf("%s %s: %w", name, chezmoi.ShellQuoteArgs(args), err))
}
var password string
if index := bytes.IndexByte(output, '\n'); index != -1 {
password = string(output[:index])
} else {
password = string(output)
}
passCache[id] = password
return passCache[id]
}
common_test.go | // Copyright 2020, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package splunk
import (
"encoding/json"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
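// The tests below exercise an Event shaped roughly like the sketch here -- a
// hedged reconstruction inferred from usage, since the real definition lives
// in the splunk package rather than in this test file:
//
//	type Event struct {
//		Time   *float64               `json:"time,omitempty"`
//		Event  interface{}            `json:"event,omitempty"`
//		Fields map[string]interface{} `json:"fields,omitempty"`
//	}
//
// IsMetric reports true when Event equals "metric" or, when no event payload
// is set, when any Fields key carries the "metric_name:" prefix that
// GetMetricValues strips while collecting metric values.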
func TestGetValues(t *testing.T) {
metric := Event{
Fields: map[string]interface{}{},
}
assert.Equal(t, map[string]interface{}{}, metric.GetMetricValues())
metric.Fields["metric_name:foo"] = "bar"
assert.Equal(t, map[string]interface{}{"foo": "bar"}, metric.GetMetricValues())
metric.Fields["metric_name:foo2"] = "foobar"
assert.Equal(t, map[string]interface{}{"foo": "bar", "foo2": "foobar"}, metric.GetMetricValues())
}
func TestIsMetric(t *testing.T) {
ev := Event{
Event: map[string]interface{}{},
}
assert.False(t, ev.IsMetric())
metric := Event{
Event: "metric",
}
assert.True(t, metric.IsMetric())
arr := Event{
Event: []interface{}{"foo", "bar"},
}
assert.False(t, arr.IsMetric())
yo := Event{
Event: "yo",
}
assert.False(t, yo.IsMetric())
}
func TestIsMetric_WithoutEventField(t *testing.T) {
fieldsOnly := Event{
Fields: map[string]interface{}{
"foo": "bar",
},
}
assert.False(t, fieldsOnly.IsMetric())
fieldsWithMetrics := Event{
Fields: map[string]interface{}{
"foo": "bar",
"metric_name:foo": 123,
"foobar": "foobar",
},
}
assert.True(t, fieldsWithMetrics.IsMetric())
}
func TestDecodeJsonWithNoTime(t *testing.T) {
dec := json.NewDecoder(strings.NewReader("{\"event\":\"hello\"}"))
dec.More()
var msg Event
err := dec.Decode(&msg)
assert.NoError(t, err)
assert.Nil(t, msg.Time)
}
func TestDecodeJsonWithNumberTime(t *testing.T) {
dec := json.NewDecoder(strings.NewReader("{\"time\":1610760752.606,\"event\":\"hello\"}"))
dec.More()
var msg Event
err := dec.Decode(&msg)
assert.NoError(t, err)
assert.Equal(t, 1610760752.606, *msg.Time)
}
func TestDecodeJsonWithStringTime(t *testing.T) {
dec := json.NewDecoder(strings.NewReader("{\"time\":\"1610760752.606\",\"event\":\"hello\"}"))
dec.More()
var msg Event
err := dec.Decode(&msg)
assert.NoError(t, err)
assert.Equal(t, 1610760752.606, *msg.Time)
}
func TestDecodeJsonWithInvalidStringTime(t *testing.T) {
dec := json.NewDecoder(strings.NewReader("{\"time\":\"1610760752.606\\\"\",\"event\":\"hello\"}"))
dec.More()
var msg Event
err := dec.Decode(&msg)
assert.Error(t, err)
}
func TestDecodeJsonWithInvalidNumberStringTime(t *testing.T) {
dec := json.NewDecoder(strings.NewReader("{\"time\":\"0xdeadbeef\",\"event\":\"hello\"}"))
dec.More()
var msg Event
err := dec.Decode(&msg)
assert.Error(t, err)
}
func TestDecodeJsonWithInvalidNumberTime(t *testing.T) {
dec := json.NewDecoder(strings.NewReader("{\"time\":1e1024,\"event\":\"hello\"}"))
dec.More()
var msg Event
err := dec.Decode(&msg)
assert.Error(t, err)
}
plugin.py | import os
import sys
import json
import yaml
import mkdocs
import logging
from mkdocs.plugins import BasePlugin
from mkdocs.utils import warning_filter
from jinja2 import Template
from pathlib import Path
from itertools import chain
log = logging.getLogger(__name__)
log.addFilter(warning_filter)
CONFIG_KEYS = ["site_name", "site_author", "site_url", "repo_url", "repo_name"]
if sys.version_info[0] >= 3:
str_type = str
else:
str_type = mkdocs.utils.string_types
class MarkdownExtraDataPlugin(BasePlugin):
"""
Inject certain config variables into the markdown
"""
config_scheme = (
("data", mkdocs.config.config_options.Type(str_type, default=None)),
)
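    # Hypothetical mkdocs.yml wiring for this plugin (folder names are
    # illustrative; a comma-separated `data` value lists several folders):
    #
    #   plugins:
    #     - markdownextradata:
    #         data: _data,theme/_data
    #
    # Each *.yaml/*.yml/*.json file found there becomes available to pages
    # under the `extra` namespace, keyed by its path inside the data folder.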
def __add_data__(self, config, namespace, data):
# creates the namespace and adds the data there
namespace = ["extra"] + namespace.split(os.sep)
holder = config
while len(namespace) > 1:
if not namespace[0] in holder:
holder[namespace[0]] = {}
holder = holder[namespace[0]]
del namespace[0]
holder[namespace[0]] = data
def on_pre_build(self, config):
# Loads all data from the supplied data directories
# or, otherwise a _data directory next to mkdocs.yml and/or inside the docs_dir.
# Does nothing if the dir does not exist.
# assume an empty list if not defined
data_source_folders = self.config.get("data")
# cast as a list if is defined but is a string
if isinstance(data_source_folders, str):
data_source_folders = data_source_folders.split(',')
# if we have not value, then proceed to look in default folders
# and assume a _data folder, add to list of folders to check
if not data_source_folders:
for datadir in [
os.path.dirname(config["config_file_path"]),
config["docs_dir"],
]:
ds_folder = os.path.join(datadir, "_data")
if os.path.exists(ds_folder):
data_source_folders.append(ds_folder)
if not data_source_folders:
return
# iterate of a list of folders and look for data files
for ds_folder in data_source_folders:
if os.path.exists(ds_folder):
                path = Path(ds_folder)
                for filename in chain(
                    path.glob("**/*.yaml"),
                    path.glob("**/*.yml"),
                    path.glob("**/*.json"),
                ):
                    namespace = os.path.splitext(os.path.relpath(filename, ds_folder))[0]
                    # add data into dict based on its path as a namespace
                    self.__add_data__(
                        config,
                        namespace,
                        (
                            yaml.load(filename.read_bytes(), Loader=yaml.FullLoader)
                            if filename.suffix in [".yml", ".yaml"]
                            else json.loads(filename.read_bytes())
                        ),
                    )
def on_page_read_source(self, page, config, **kwargs):
context = {key: config.get(key) for key in CONFIG_KEYS if key in config}
context.update(config.get("extra", {}))
try:
with open(page.file.abs_src_path, 'r', encoding='utf-8-sig', errors='strict') as f:
md_template = Template(f.read())
                return md_template.render(**context)
        except OSError:
            log.error('File not found: {}'.format(page.file.src_path))
            raise
        except ValueError:
            log.error('Encoding error reading file: {}'.format(page.file.src_path))
            raise
11.3.1-2-2-s.js | /// Copyright (c) 2012 Ecma International. All rights reserved.
/// Ecma International makes this code available under the terms and conditions set
/// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
/// "Use Terms"). Any redistribution of this code must retain the above
/// copyright and this notice and otherwise comply with the Use Terms.
/**
* @path ch11/11.3/11.3.1/11.3.1-2-2-s.js
* @description Strict Mode - SyntaxError is thrown if the identifier 'eval' appear as a PostfixExpression(eval++)
* @onlyStrict
*/
function testcase() {
"use strict";
var blah = eval;
try {
eval("eval++;");
return false;
} catch (e) {
return e instanceof SyntaxError && blah === eval;
}
}
runTestCase(testcase);
fetch.rs | use crate::commands::UnevaluatedCallInfo;
use crate::context::SpanSource;
use crate::data::meta::Span;
use crate::data::Value;
use crate::errors::ShellError;
use crate::parser::hir::SyntaxShape;
use crate::parser::registry::Signature;
use crate::prelude::*;
use mime::Mime;
use std::path::PathBuf;
use std::str::FromStr;
use surf::mime;
use uuid::Uuid;

pub struct Fetch;

impl PerItemCommand for Fetch {
fn name(&self) -> &str {
"fetch"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("path", SyntaxShape::Path)
.switch("raw")
}
fn usage(&self) -> &str {
"Load from a URL into a cell, convert to table if possible (avoid by appending '--raw')"
}
fn run(
&self,
call_info: &CallInfo,
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
_input: Tagged<Value>,
) -> Result<OutputStream, ShellError> {
run(call_info, registry, raw_args)
}
}
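// Hypothetical shell usage (URLs are illustrative only):
//
//     fetch https://example.com/data.json        # converted via from-json
//     fetch https://example.com/blob.bin --raw   # kept as raw binary
//
// Without --raw, the response MIME type (or, failing that, the path
// extension) selects a matching from-* converter from the command registry.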
fn run(
call_info: &CallInfo,
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
    let path = call_info
        .args
        .nth(0)
        .ok_or_else(|| ShellError::string("No file or directory specified"))?;
let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string();
let path_span = path.span();
let has_raw = call_info.args.has("raw");
let registry = registry.clone();
let raw_args = raw_args.clone();
let stream = async_stream_block! {
let result = fetch(&path_str, path_span).await;
if let Err(e) = result {
yield Err(e);
return;
}
let (file_extension, contents, contents_tag, span_source) = result.unwrap();
let file_extension = if has_raw {
None
} else {
// If the extension could not be determined via mimetype, try to use the path
// extension. Some file types do not declare their mimetypes (such as bson files).
file_extension.or(path_str.split('.').last().map(String::from))
};
if contents_tag.origin != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddSpanSource(
contents_tag.origin,
span_source,
));
}
let tagged_contents = contents.tagged(contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: raw_args.host,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
head: raw_args.call_info.args.head,
positional: None,
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
for res in result_vec {
match res {
Ok(ReturnSuccess::Value(Tagged { item: Value::Table(list), ..})) => {
for l in list {
yield Ok(ReturnSuccess::Value(l));
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
}
x => yield x,
}
}
} else {
yield ReturnSuccess::value(tagged_contents);
}
} else {
yield ReturnSuccess::value(tagged_contents);
}
};
Ok(stream.to_output_stream())
}
pub async fn fetch(
location: &str,
span: Span,
) -> Result<(Option<String>, Value, Tag, SpanSource), ShellError> {
if let Err(_) = url::Url::parse(location) {
return Err(ShellError::labeled_error(
"Incomplete or incorrect url",
"expected a full url",
span,
));
}
let response = surf::get(location).await;
match response {
Ok(mut r) => match r.headers().get("content-type") {
Some(content_type) => {
let content_type = Mime::from_str(content_type).unwrap();
match (content_type.type_(), content_type.subtype()) {
(mime::APPLICATION, mime::XML) => Ok((
Some("xml".to_string()),
Value::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
span,
)
})?),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
)),
(mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()),
Value::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
span,
)
})?),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
)),
(mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
ShellError::labeled_error(
"Could not load binary file",
"could not load",
span,
)
})?;
Ok((
None,
Value::binary(buf),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
))
}
(mime::IMAGE, mime::SVG) => Ok((
Some("svg".to_string()),
Value::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load svg from remote url",
"could not load",
span,
)
})?),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
)),
(mime::IMAGE, image_ty) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
ShellError::labeled_error(
"Could not load image file",
"could not load",
span,
)
})?;
Ok((
Some(image_ty.to_string()),
Value::binary(buf),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
))
}
(mime::TEXT, mime::HTML) => Ok((
Some("html".to_string()),
Value::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
span,
)
})?),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
)),
(mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location)
.unwrap()
.path_segments()
.and_then(|segments| segments.last())
.and_then(|name| if name.is_empty() { None } else { Some(name) })
.and_then(|name| {
PathBuf::from(name)
.extension()
.map(|name| name.to_string_lossy().to_string())
});
Ok((
path_extension,
Value::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
span,
)
})?),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
))
}
(ty, sub_ty) => Ok((
None,
Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
)),
}
}
None => Ok((
None,
Value::string(format!("No content type found")),
Tag {
span,
origin: Uuid::new_v4(),
},
SpanSource::Url(location.to_string()),
)),
},
Err(_) => {
return Err(ShellError::labeled_error(
"URL could not be opened",
"url not found",
span,
));
}
}
}