hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
913ada1c91ce6db096749f518d97ca204e22f30b | 19,628 | #![allow(non_snake_case)]
use vulkan_bind::vk;
use std::ptr::{null, null_mut};
/// Tiny shorthand for `Default::default()`, used to fill struct fields
/// whose value we don't care about.
fn df<T: Default>() -> T {
    T::default()
}
/// `VkMemoryAllocateInfo` pre-tagged with its `sType`; allocation size and
/// memory type index start at zero and must be set by the caller.
pub fn memoryAllocateInfo() -> vk::MemoryAllocateInfo {
    vk::MemoryAllocateInfo {
        sType: vk::StructureType::MEMORY_ALLOCATE_INFO,
        allocationSize: 0,
        memoryTypeIndex: 0,
        pNext: null(),
    }
}
/// `VkCommandBufferAllocateInfo` requesting `bufferCount` command buffers of
/// the given `level` from `commandPool`.
pub fn commandBufferAllocateInfo(commandPool: vk::CommandPool, level: vk::CommandBufferLevel, bufferCount: u32) -> vk::CommandBufferAllocateInfo {
    vk::CommandBufferAllocateInfo {
        sType: vk::StructureType::COMMAND_BUFFER_ALLOCATE_INFO,
        pNext: null(),
        commandPool,
        level,
        commandBufferCount: bufferCount,
    }
}
/// `VkCommandBufferBeginInfo` with default flags and no inheritance info.
pub fn commandBufferBeginInfo() -> vk::CommandBufferBeginInfo {
    vk::CommandBufferBeginInfo {
        sType: vk::StructureType::COMMAND_BUFFER_BEGIN_INFO,
        flags: df(),
        pInheritanceInfo: null(),
        pNext: null(),
    }
}
/// Skeleton `VkRenderPassBeginInfo`: render pass, framebuffer, render area and
/// clear values are all left empty for the caller to fill in.
pub fn renderPassBeginInfo() -> vk::RenderPassBeginInfo {
    vk::RenderPassBeginInfo {
        sType: vk::StructureType::RENDER_PASS_BEGIN_INFO,
        pNext: null(),
        renderPass: null_mut(),
        framebuffer: null_mut(),
        renderArea: df(),
        pClearValues: null(),
        clearValueCount: 0,
    }
}
/// `VkImageMemoryBarrier` defaulted to "no queue-family ownership transfer"
/// (`QUEUE_FAMILY_IGNORED` on both sides); access masks, layouts and the image
/// handle are left at their zero/undefined values.
pub fn imageMemoryBarrier() -> vk::ImageMemoryBarrier {
    vk::ImageMemoryBarrier {
        sType: vk::StructureType::IMAGE_MEMORY_BARRIER,
        pNext: null(),
        srcAccessMask: df(),
        dstAccessMask: df(),
        oldLayout: vk::ImageLayout::UNDEFINED,
        newLayout: vk::ImageLayout::UNDEFINED,
        // Ownership is not transferred between queue families by default.
        srcQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
        dstQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
        image: null_mut(),
        subresourceRange: df(),
    }
}
/// `VkBufferMemoryBarrier` with everything zeroed except the `sType` tag;
/// note the queue family indices default to 0 here (unlike the image barrier).
pub fn bufferMemoryBarrier() -> vk::BufferMemoryBarrier {
    vk::BufferMemoryBarrier {
        sType: vk::StructureType::BUFFER_MEMORY_BARRIER,
        pNext: null(),
        buffer: null_mut(),
        offset: df(),
        size: df(),
        srcAccessMask: df(),
        dstAccessMask: df(),
        srcQueueFamilyIndex: 0,
        dstQueueFamilyIndex: 0,
    }
}
/// Minimal `VkMemoryBarrier`: `sType` set, access masks defaulted.
pub fn memoryBarrier() -> vk::MemoryBarrier {
    vk::MemoryBarrier {
        sType: vk::StructureType::MEMORY_BARRIER,
        srcAccessMask: df(),
        dstAccessMask: df(),
        pNext: null(),
    }
}
/// Skeleton `VkImageCreateInfo` with the `sType` tag set.
///
/// All descriptive fields (type, format, extent, mip levels, …) are zeroed or
/// set to the first enum value and must be overridden by the caller before
/// the struct is handed to `vkCreateImage`.
pub fn imageCreateInfo() -> vk::ImageCreateInfo {
    vk::ImageCreateInfo {
        sType: vk::StructureType::IMAGE_CREATE_INFO,
        pNext: null(),
        // Fields that were omitted in C++:
        flags: df(),
        imageType: vk::ImageType::E_1D,
        format: vk::Format::UNDEFINED,
        extent: df(),
        mipLevels: 0,
        arrayLayers: 0,
        samples: df(),
        tiling: vk::ImageTiling::OPTIMAL,
        usage: df(),
        sharingMode: vk::SharingMode::EXCLUSIVE,
        queueFamilyIndexCount: 0,
        pQueueFamilyIndices: null(),
        initialLayout: vk::ImageLayout::UNDEFINED,
    }
}
/// Skeleton `VkSamplerCreateInfo` with the `sType` tag set.
///
/// Filtering defaults to NEAREST, addressing to REPEAT, and anisotropy /
/// comparison / unnormalized coordinates are all disabled; callers override
/// whichever fields they need.
pub fn samplerCreateInfo() -> vk::SamplerCreateInfo {
    vk::SamplerCreateInfo {
        sType: vk::StructureType::SAMPLER_CREATE_INFO,
        pNext: null(),
        // Fields that were omitted in C++:
        flags: df(),
        magFilter: vk::Filter::NEAREST,
        minFilter: vk::Filter::NEAREST,
        mipmapMode: vk::SamplerMipmapMode::NEAREST,
        addressModeU: vk::SamplerAddressMode::REPEAT,
        addressModeV: vk::SamplerAddressMode::REPEAT,
        addressModeW: vk::SamplerAddressMode::REPEAT,
        mipLodBias: 0.0,
        anisotropyEnable: vk::Bool32::False,
        maxAnisotropy: 0.0,
        compareEnable: vk::Bool32::False,
        compareOp: vk::CompareOp::NEVER,
        minLod: 0.0,
        maxLod: 0.0,
        borderColor: vk::BorderColor::FLOAT_TRANSPARENT_BLACK,
        unnormalizedCoordinates: vk::Bool32::False,
    }
}
/// Skeleton `VkImageViewCreateInfo`; image handle, view type, format and
/// subresource range all need to be filled in by the caller.
pub fn imageViewCreateInfo() -> vk::ImageViewCreateInfo {
    vk::ImageViewCreateInfo {
        sType: vk::StructureType::IMAGE_VIEW_CREATE_INFO,
        pNext: null(),
        image: null_mut(),
        viewType: vk::ImageViewType::E_1D,
        format: vk::Format::UNDEFINED,
        components: df(),
        subresourceRange: df(),
        flags: df(),
    }
}
/// Skeleton `VkFramebufferCreateInfo`: render pass, attachments and
/// dimensions are all zero and must be supplied by the caller.
pub fn framebufferCreateInfo() -> vk::FramebufferCreateInfo {
    vk::FramebufferCreateInfo {
        sType: vk::StructureType::FRAMEBUFFER_CREATE_INFO,
        pNext: null(),
        renderPass: null_mut(),
        pAttachments: null(),
        attachmentCount: 0,
        width: 0,
        height: 0,
        layers: 0,
        flags: df(),
    }
}
/// `VkSemaphoreCreateInfo` carrying the given creation `flags`.
pub fn semaphoreCreateInfo(flags: vk::SemaphoreCreateFlags) -> vk::SemaphoreCreateInfo {
    vk::SemaphoreCreateInfo {
        sType: vk::StructureType::SEMAPHORE_CREATE_INFO,
        flags,
        pNext: null(),
    }
}
/// Empty `VkSubmitInfo`: no wait/signal semaphores and no command buffers;
/// the caller attaches whatever the submission needs.
pub fn submitInfo() -> vk::SubmitInfo {
    vk::SubmitInfo {
        sType: vk::StructureType::SUBMIT_INFO,
        pNext: null(),
        pWaitSemaphores: null(),
        waitSemaphoreCount: 0,
        pWaitDstStageMask: null(),
        pCommandBuffers: null(),
        commandBufferCount: 0,
        pSignalSemaphores: null(),
        signalSemaphoreCount: 0,
    }
}
/// Full-size `VkViewport` anchored at the origin with the given dimensions
/// and depth range.
pub fn viewport(width: f32, height: f32, minDepth: f32, maxDepth: f32) -> vk::Viewport {
    vk::Viewport {
        x: 0.0,
        y: 0.0,
        width,
        height,
        minDepth,
        maxDepth,
    }
}
/// `VkRect2D` built from an extent (`width` x `height`) and a signed offset.
pub fn rect2D(width: u32, height: u32, offsetX: i32, offsetY: i32) -> vk::Rect2D {
    let extent = vk::Extent2D { width, height };
    let offset = vk::Offset2D { x: offsetX, y: offsetY };
    vk::Rect2D { extent, offset }
}
/// `VkBufferCreateInfo` for an exclusively-owned buffer of `size` bytes with
/// the given `usage` flags.
pub fn bufferCreateInfo(usage: vk::BufferUsageFlags, size: vk::DeviceSize) -> vk::BufferCreateInfo {
    vk::BufferCreateInfo {
        sType: vk::StructureType::BUFFER_CREATE_INFO,
        pNext: null(),
        usage,
        size,
        flags: df(),
        // Exclusive sharing: no queue-family index list is needed.
        sharingMode: vk::SharingMode::EXCLUSIVE,
        pQueueFamilyIndices: null(),
        queueFamilyIndexCount: 0,
    }
}
/// `VkDescriptorPoolCreateInfo` sized from the `poolSizes` slice.
///
/// The returned struct borrows `poolSizes` via a raw pointer, so the slice
/// must outlive any use of the returned value.
pub fn descriptorPoolCreateInfo(poolSizes: &[vk::DescriptorPoolSize],
                                maxSets: u32) -> vk::DescriptorPoolCreateInfo {
    vk::DescriptorPoolCreateInfo {
        sType: vk::StructureType::DESCRIPTOR_POOL_CREATE_INFO,
        pNext: null(),
        pPoolSizes: poolSizes.as_ptr(),
        poolSizeCount: poolSizes.len() as u32,
        maxSets,
        flags: df(),
    }
}
/// A `VkDescriptorPoolSize` pairing a descriptor type with a count.
pub fn descriptorPoolSize(typ: vk::DescriptorType, descriptorCount: u32) -> vk::DescriptorPoolSize {
    vk::DescriptorPoolSize { typ, descriptorCount }
}
/// A single-descriptor `VkDescriptorSetLayoutBinding` (count fixed at 1, as in
/// all the examples) for the given type, stage flags and binding slot.
pub fn descriptorSetLayoutBinding(typ: vk::DescriptorType,
                                  stageFlags: vk::ShaderStageFlags,
                                  binding: u32) -> vk::DescriptorSetLayoutBinding {
    vk::DescriptorSetLayoutBinding {
        descriptorType: typ,
        stageFlags,
        binding,
        descriptorCount: 1,
        pImmutableSamplers: null(),
    }
}
/// `VkDescriptorSetLayoutCreateInfo` wrapping the given bindings slice.
///
/// Borrows `bindings` via a raw pointer; the slice must outlive any use of
/// the returned struct.
pub fn descriptorSetLayoutCreateInfo(bindings: &[vk::DescriptorSetLayoutBinding]
                                     ) -> vk::DescriptorSetLayoutCreateInfo {
    vk::DescriptorSetLayoutCreateInfo {
        sType: vk::StructureType::DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
        pNext: null(),
        bindingCount: bindings.len() as u32,
        pBindings: bindings.as_ptr(),
        flags: df(),
    }
}
/// `VkPipelineLayoutCreateInfo` from a slice of set layouts; no push-constant
/// ranges. Borrows `setLayouts` via a raw pointer.
pub fn pipelineLayoutCreateInfo(setLayouts: &[vk::DescriptorSetLayout]) -> vk::PipelineLayoutCreateInfo {
    vk::PipelineLayoutCreateInfo {
        sType: vk::StructureType::PIPELINE_LAYOUT_CREATE_INFO,
        pNext: null(),
        pSetLayouts: setLayouts.as_ptr(),
        setLayoutCount: setLayouts.len() as u32,
        pPushConstantRanges: null(),
        pushConstantRangeCount: 0,
        flags: df(),
    }
}
/// `VkDescriptorSetAllocateInfo` allocating `descriptorSetCount` sets from
/// `descriptorPool` using `setLayouts` (borrowed via raw pointer).
pub fn descriptorSetAllocateInfo(descriptorPool: vk::DescriptorPool,
                                 setLayouts: &[vk::DescriptorSetLayout],
                                 descriptorSetCount: u32) -> vk::DescriptorSetAllocateInfo {
    vk::DescriptorSetAllocateInfo {
        sType: vk::StructureType::DESCRIPTOR_SET_ALLOCATE_INFO,
        pNext: null(),
        descriptorPool,
        descriptorSetCount,
        pSetLayouts: setLayouts.as_ptr(),
    }
}
/// A `VkDescriptorImageInfo` bundling a sampler, image view and layout.
pub fn descriptorImageInfo(sampler: vk::Sampler, imageView: vk::ImageView, imageLayout: vk::ImageLayout) -> vk::DescriptorImageInfo {
    vk::DescriptorImageInfo { sampler, imageView, imageLayout }
}
/// `VkWriteDescriptorSet` updating one buffer descriptor at `binding` in
/// `dstSet`. `bufferInfo` is borrowed via a raw pointer and must outlive any
/// use of the returned struct.
pub fn writeDescriptorSetBuffer(dstSet: vk::DescriptorSet,
                                typ: vk::DescriptorType,
                                binding: u32,
                                bufferInfo: &vk::DescriptorBufferInfo) -> vk::WriteDescriptorSet {
    vk::WriteDescriptorSet {
        sType: vk::StructureType::WRITE_DESCRIPTOR_SET,
        pNext: null(),
        dstSet,
        descriptorType: typ,
        dstBinding: binding,
        dstArrayElement: 0,
        // A single descriptor, matching every example's usage.
        descriptorCount: 1,
        pBufferInfo: bufferInfo,
        pImageInfo: null(),
        pTexelBufferView: null(),
    }
}
/// `VkWriteDescriptorSet` updating one image descriptor at `binding` in
/// `dstSet`. `imageInfo` is borrowed via a raw pointer and must outlive any
/// use of the returned struct.
pub fn writeDescriptorSetImage(dstSet: vk::DescriptorSet,
                               typ: vk::DescriptorType,
                               binding: u32,
                               imageInfo: &vk::DescriptorImageInfo) -> vk::WriteDescriptorSet {
    vk::WriteDescriptorSet {
        sType: vk::StructureType::WRITE_DESCRIPTOR_SET,
        pNext: null(),
        dstSet,
        descriptorType: typ,
        dstBinding: binding,
        dstArrayElement: 0,
        // A single descriptor, matching every example's usage.
        descriptorCount: 1,
        pImageInfo: imageInfo,
        pBufferInfo: null(),
        pTexelBufferView: null(),
    }
}
/// A `VkVertexInputBindingDescription` for one vertex buffer binding.
pub fn vertexInputBindingDescription(binding: u32, stride: u32,
                                     inputRate: vk::VertexInputRate) -> vk::VertexInputBindingDescription {
    vk::VertexInputBindingDescription { binding, stride, inputRate }
}
/// A `VkVertexInputAttributeDescription` for one attribute within a binding.
pub fn vertexInputAttributeDescription(binding: u32,
                                       location: u32,
                                       format: vk::Format,
                                       offset: u32) -> vk::VertexInputAttributeDescription {
    vk::VertexInputAttributeDescription { location, binding, format, offset }
}
/// Empty `VkPipelineVertexInputStateCreateInfo`: no bindings and no
/// attributes; the caller wires in its own descriptions.
pub fn pipelineVertexInputStateCreateInfo() -> vk::PipelineVertexInputStateCreateInfo {
    vk::PipelineVertexInputStateCreateInfo {
        sType: vk::StructureType::PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
        pNext: null(),
        pVertexBindingDescriptions: null(),
        vertexBindingDescriptionCount: 0,
        pVertexAttributeDescriptions: null(),
        vertexAttributeDescriptionCount: 0,
        flags: df(),
    }
}
/// `VkPipelineInputAssemblyStateCreateInfo` from a topology, flags and the
/// primitive-restart toggle.
pub fn pipelineInputAssemblyStateCreateInfo(topology: vk::PrimitiveTopology,
                                            flags: vk::PipelineInputAssemblyStateCreateFlags,
                                            primitiveRestartEnable: vk::Bool32,
                                            ) -> vk::PipelineInputAssemblyStateCreateInfo {
    vk::PipelineInputAssemblyStateCreateInfo {
        sType: vk::StructureType::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
        pNext: null(),
        topology,
        flags,
        primitiveRestartEnable,
    }
}
/// `VkPipelineRasterizationStateCreateInfo` with the given mode/cull/winding;
/// depth bias disabled.
///
/// NOTE(review): `depthClampEnable` is hard-coded to `True` and `lineWidth`
/// to `0.0` here — the Vulkan spec normally expects `lineWidth` 1.0 unless it
/// is set dynamically; confirm these defaults are intentional before relying
/// on them.
pub fn pipelineRasterizationStateCreateInfo(polygonMode: vk::PolygonMode,
                                            cullMode: vk::CullModeFlags,
                                            frontFace: vk::FrontFace,
                                            flags: vk::PipelineRasterizationStateCreateFlags,
                                            ) -> vk::PipelineRasterizationStateCreateInfo {
    vk::PipelineRasterizationStateCreateInfo {
        sType: vk::StructureType::PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
        pNext: null(),
        polygonMode,
        cullMode,
        frontFace,
        flags,
        depthClampEnable: vk::Bool32::True,
        rasterizerDiscardEnable: vk::Bool32::False,
        depthBiasEnable: vk::Bool32::False,
        depthBiasConstantFactor: 0.0,
        depthBiasClamp: 0.0,
        depthBiasSlopeFactor: 0.0,
        lineWidth: 0.0,
    }
}
/// `VkPipelineColorBlendAttachmentState` with all blend factors at ZERO and
/// both blend ops at ADD; only the write mask and enable flag vary.
pub fn pipelineColorBlendAttachmentState(colorWriteMask: vk::ColorComponentFlags,
                                         blendEnable: vk::Bool32,
                                         ) -> vk::PipelineColorBlendAttachmentState {
    vk::PipelineColorBlendAttachmentState {
        colorWriteMask,
        blendEnable,
        srcColorBlendFactor: vk::BlendFactor::ZERO,
        dstColorBlendFactor: vk::BlendFactor::ZERO,
        colorBlendOp: vk::BlendOp::ADD,
        srcAlphaBlendFactor: vk::BlendFactor::ZERO,
        dstAlphaBlendFactor: vk::BlendFactor::ZERO,
        alphaBlendOp: vk::BlendOp::ADD,
    }
}
/// `VkPipelineColorBlendStateCreateInfo` over a slice of attachment states;
/// logic ops disabled. Borrows `attachments` via a raw pointer.
pub fn pipelineColorBlendStateCreateInfo(attachments: &[vk::PipelineColorBlendAttachmentState]) -> vk::PipelineColorBlendStateCreateInfo {
    vk::PipelineColorBlendStateCreateInfo {
        sType: vk::StructureType::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
        pNext: null(),
        pAttachments: attachments.as_ptr(),
        attachmentCount: attachments.len() as u32,
        logicOpEnable: vk::Bool32::False,
        logicOp: vk::LogicOp::CLEAR,
        blendConstants: [0.0; 4],
        flags: df(),
    }
}
/// `VkPipelineDepthStencilStateCreateInfo` with stencil testing disabled.
/// The back-face stencil state gets `compareOp` ALWAYS; the front face is
/// fully defaulted.
pub fn pipelineDepthStencilStateCreateInfo(depthTestEnable: vk::Bool32, depthWriteEnable: vk::Bool32,
                                           depthCompareOp: vk::CompareOp,
                                           ) -> vk::PipelineDepthStencilStateCreateInfo {
    vk::PipelineDepthStencilStateCreateInfo {
        sType: vk::StructureType::PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
        pNext: null(),
        depthTestEnable,
        depthWriteEnable,
        depthCompareOp,
        front: df(),
        back: vk::StencilOpState { compareOp: vk::CompareOp::ALWAYS, ..df() },
        depthBoundsTestEnable: vk::Bool32::False,
        stencilTestEnable: vk::Bool32::False,
        minDepthBounds: 0.0,
        maxDepthBounds: 0.0,
        flags: df(),
    }
}
/// `VkPipelineViewportStateCreateInfo` declaring counts only; the viewport
/// and scissor pointers are null (typically supplied via dynamic state).
pub fn pipelineViewportStateCreateInfo(viewportCount: u32, scissorCount: u32,
                                       flags: vk::PipelineViewportStateCreateFlags,
                                       ) -> vk::PipelineViewportStateCreateInfo {
    vk::PipelineViewportStateCreateInfo {
        sType: vk::StructureType::PIPELINE_VIEWPORT_STATE_CREATE_INFO,
        pNext: null(),
        viewportCount,
        pViewports: null(),
        scissorCount,
        pScissors: null(),
        flags,
    }
}
/// `VkPipelineMultisampleStateCreateInfo` for the given sample count, with
/// sample shading, alpha-to-coverage and alpha-to-one all disabled.
pub fn pipelineMultisampleStateCreateInfo(rasterizationSamples: vk::SampleCountFlag,
                                          flags: vk::PipelineMultisampleStateCreateFlags,
                                          ) -> vk::PipelineMultisampleStateCreateInfo {
    vk::PipelineMultisampleStateCreateInfo {
        sType: vk::StructureType::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
        pNext: null(),
        // Convert the single-flag enum into the field's flags type.
        rasterizationSamples: rasterizationSamples.into(),
        flags,
        sampleShadingEnable: vk::Bool32::False,
        minSampleShading: 0.0,
        pSampleMask: null(),
        alphaToCoverageEnable: vk::Bool32::False,
        alphaToOneEnable: vk::Bool32::False,
    }
}
/// `VkPipelineDynamicStateCreateInfo` over a slice of dynamic-state enums.
/// Borrows `dynamicStates` via a raw pointer.
pub fn pipelineDynamicStateCreateInfo(dynamicStates: &[vk::DynamicState],
                                      flags: vk::PipelineDynamicStateCreateFlags,
                                      ) -> vk::PipelineDynamicStateCreateInfo {
    vk::PipelineDynamicStateCreateInfo {
        sType: vk::StructureType::PIPELINE_DYNAMIC_STATE_CREATE_INFO,
        pNext: null(),
        dynamicStateCount: dynamicStates.len() as u32,
        pDynamicStates: dynamicStates.as_ptr(),
        flags,
    }
}
/// `VkPipelineTessellationStateCreateInfo` with the given patch size.
pub fn pipelineTessellationStateCreateInfo(patchControlPoints: u32) -> vk::PipelineTessellationStateCreateInfo {
    vk::PipelineTessellationStateCreateInfo {
        sType: vk::StructureType::PIPELINE_TESSELLATION_STATE_CREATE_INFO,
        pNext: null(),
        patchControlPoints,
        flags: df(),
    }
}
/// Skeleton `VkGraphicsPipelineCreateInfo`: layout, render pass and flags are
/// set, every per-stage/per-state pointer is null and must be attached by the
/// caller before pipeline creation.
pub fn pipelineCreateInfo(layout: vk::PipelineLayout,
                          renderPass: vk::RenderPass,
                          flags: vk::PipelineCreateFlags) -> vk::GraphicsPipelineCreateInfo {
    vk::GraphicsPipelineCreateInfo {
        sType: vk::StructureType::GRAPHICS_PIPELINE_CREATE_INFO,
        pNext: null(),
        layout,
        renderPass,
        flags,
        pStages: null(),
        stageCount: 0,
        pVertexInputState: null(),
        pInputAssemblyState: null(),
        pTessellationState: null(),
        pViewportState: null(),
        pRasterizationState: null(),
        pMultisampleState: null(),
        pDepthStencilState: null(),
        pColorBlendState: null(),
        pDynamicState: null(),
        subpass: 0,
        basePipelineHandle: null_mut(),
        basePipelineIndex: 0,
    }
}
/// Skeleton `VkComputePipelineCreateInfo`: layout and flags are set; the
/// embedded shader-stage struct is pre-tagged with its own `sType` but carries
/// no module/entry point yet.
pub fn computePipelineCreateInfo(layout: vk::PipelineLayout, flags: vk::PipelineCreateFlags) -> vk::ComputePipelineCreateInfo {
    // Empty shader stage; the caller fills in `module` and `pName`.
    let stage = vk::PipelineShaderStageCreateInfo {
        sType: vk::StructureType::PIPELINE_SHADER_STAGE_CREATE_INFO,
        pNext: null(),
        flags: df(),
        stage: df(),
        module: null_mut(),
        pName: null(),
        pSpecializationInfo: null(),
    };
    vk::ComputePipelineCreateInfo {
        sType: vk::StructureType::COMPUTE_PIPELINE_CREATE_INFO,
        pNext: null(),
        layout,
        flags,
        stage,
        basePipelineHandle: null_mut(),
        basePipelineIndex: 0,
    }
}
/// A `VkPushConstantRange` covering `size` bytes starting at `offset`,
/// visible to the given shader stages.
pub fn pushConstantRange(stageFlags: vk::ShaderStageFlags,
                         size: u32, offset: u32) -> vk::PushConstantRange {
    vk::PushConstantRange { stageFlags, offset, size }
}
| 33.958478 | 146 | 0.607907 |
dee8a9eb48d98d663c83275435d43932652c2df1 | 3,611 | #[doc = "Reader of register MB9_8B_WORD1"]
pub type R = crate::R<u32, super::MB9_8B_WORD1>;
#[doc = "Writer for register MB9_8B_WORD1"]
pub type W = crate::W<u32, super::MB9_8B_WORD1>;
#[doc = "Register MB9_8B_WORD1 `reset()`'s with value 0"]
impl crate::ResetValue for super::MB9_8B_WORD1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // The whole 32-bit message-buffer word resets to all-zeroes.
        0
    }
}
#[doc = "Reader of field `DATA_BYTE_7`"]
pub type DATA_BYTE_7_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DATA_BYTE_7`"]
pub struct DATA_BYTE_7_W<'a> {
    w: &'a mut W,
}
impl<'a> DATA_BYTE_7_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits [7:0]: clear them, then splice in the new value.
        let mask: u32 = 0xff;
        self.w.bits = (self.w.bits & !mask) | (u32::from(value) & mask);
        self.w
    }
}
#[doc = "Reader of field `DATA_BYTE_6`"]
pub type DATA_BYTE_6_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DATA_BYTE_6`"]
pub struct DATA_BYTE_6_W<'a> {
    w: &'a mut W,
}
impl<'a> DATA_BYTE_6_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits [15:8]: clear them, then splice in the new value.
        const OFFSET: u32 = 8;
        let mask: u32 = 0xff << OFFSET;
        self.w.bits = (self.w.bits & !mask) | ((u32::from(value) & 0xff) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `DATA_BYTE_5`"]
pub type DATA_BYTE_5_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DATA_BYTE_5`"]
pub struct DATA_BYTE_5_W<'a> {
    w: &'a mut W,
}
impl<'a> DATA_BYTE_5_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits [23:16]: clear them, then splice in the new value.
        const OFFSET: u32 = 16;
        let mask: u32 = 0xff << OFFSET;
        self.w.bits = (self.w.bits & !mask) | ((u32::from(value) & 0xff) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `DATA_BYTE_4`"]
pub type DATA_BYTE_4_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DATA_BYTE_4`"]
pub struct DATA_BYTE_4_W<'a> {
    w: &'a mut W,
}
impl<'a> DATA_BYTE_4_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits [31:24]: clear them, then splice in the new value.
        const OFFSET: u32 = 24;
        let mask: u32 = 0xff << OFFSET;
        self.w.bits = (self.w.bits & !mask) | ((u32::from(value) & 0xff) << OFFSET);
        self.w
    }
}
impl R {
    // NOTE(review): the generated `#[doc]` texts below say "Data byte 0..3"
    // while the accessors are named data_byte_7..4 — this mismatch comes from
    // the vendor SVD (WORD1 holds the upper half of the 8-byte payload);
    // confirm against the device reference manual.
    #[doc = "Bits 0:7 - Data byte 0 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_7(&self) -> DATA_BYTE_7_R {
        DATA_BYTE_7_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - Data byte 1 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_6(&self) -> DATA_BYTE_6_R {
        DATA_BYTE_6_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - Data byte 2 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_5(&self) -> DATA_BYTE_5_R {
        DATA_BYTE_5_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - Data byte 3 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_4(&self) -> DATA_BYTE_4_R {
        DATA_BYTE_4_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
impl W {
    // Each method returns a write proxy whose `bits()` call patches the
    // corresponding byte lane of this 32-bit register word.
    #[doc = "Bits 0:7 - Data byte 0 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_7(&mut self) -> DATA_BYTE_7_W {
        DATA_BYTE_7_W { w: self }
    }
    #[doc = "Bits 8:15 - Data byte 1 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_6(&mut self) -> DATA_BYTE_6_W {
        DATA_BYTE_6_W { w: self }
    }
    #[doc = "Bits 16:23 - Data byte 2 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_5(&mut self) -> DATA_BYTE_5_W {
        DATA_BYTE_5_W { w: self }
    }
    #[doc = "Bits 24:31 - Data byte 3 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_4(&mut self) -> DATA_BYTE_4_W {
        DATA_BYTE_4_W { w: self }
    }
}
| 31.955752 | 86 | 0.577402 |
1423f1db385fd6f6e9b678d70fe036926c6e3d22 | 14,986 | //! Checks the licenses of third-party dependencies.
use cargo_metadata::{Metadata, Package, PackageId, Resolve};
use std::collections::{BTreeSet, HashSet};
use std::path::Path;
/// These are licenses that are allowed for all crates, including the runtime,
/// rustc, tools, etc.
const LICENSES: &[&str] = &[
"MIT/Apache-2.0",
"MIT / Apache-2.0",
"Apache-2.0/MIT",
"Apache-2.0 / MIT",
"MIT OR Apache-2.0",
"Apache-2.0 OR MIT",
"Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT", // wasi license
"MIT",
"Unlicense/MIT",
"Unlicense OR MIT",
];
/// These are exceptions to Rust's permissive licensing policy, and
/// should be considered bugs. Exceptions are only allowed in Rust
/// tooling. It is _crucial_ that no exception crates be dependencies
/// of the Rust runtime (std/test).
const EXCEPTIONS: &[(&str, &str)] = &[
("mdbook", "MPL-2.0"), // mdbook
("openssl", "Apache-2.0"), // cargo, mdbook
("toml-query", "MPL-2.0"), // mdbook
("toml-query_derive", "MPL-2.0"), // mdbook
("is-match", "MPL-2.0"), // mdbook
("rdrand", "ISC"), // mdbook, rustfmt
("fuchsia-cprng", "BSD-3-Clause"), // mdbook, rustfmt
("fuchsia-zircon-sys", "BSD-3-Clause"), // rustdoc, rustc, cargo
("fuchsia-zircon", "BSD-3-Clause"), // rustdoc, rustc, cargo (jobserver & tempdir)
("colored", "MPL-2.0"), // rustfmt
("ordslice", "Apache-2.0"), // rls
("cloudabi", "BSD-2-Clause"), // (rls -> crossbeam-channel 0.2 -> rand 0.5)
("ryu", "Apache-2.0 OR BSL-1.0"), // rls/cargo/... (because of serde)
("bytesize", "Apache-2.0"), // cargo
("im-rc", "MPL-2.0+"), // cargo
("adler32", "BSD-3-Clause AND Zlib"), // cargo dep that isn't used
("constant_time_eq", "CC0-1.0"), // rustfmt
("sized-chunks", "MPL-2.0+"), // cargo via im-rc
("bitmaps", "MPL-2.0+"), // cargo via im-rc
// FIXME: this dependency violates the documentation comment above:
("fortanix-sgx-abi", "MPL-2.0"), // libstd but only for `sgx` target
("dunce", "CC0-1.0"), // mdbook-linkcheck
("codespan-reporting", "Apache-2.0"), // mdbook-linkcheck
("codespan", "Apache-2.0"), // mdbook-linkcheck
("crossbeam-channel", "MIT/Apache-2.0 AND BSD-2-Clause"), // cargo
];
/// These are the root crates that are part of the runtime. The licenses for
/// these and all their dependencies *must not* be in the exception list.
const RUNTIME_CRATES: &[&str] = &["std", "core", "alloc", "test", "panic_abort", "panic_unwind"];
/// Which crates to check against the whitelist?
const WHITELIST_CRATES: &[&str] = &["rustc_middle", "rustc_codegen_llvm"];
/// Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.
///
/// This list is here to provide a speed-bump to adding a new dependency to
/// rustc. Please check with the compiler team before adding an entry.
const WHITELIST: &[&str] = &[
"adler32",
"aho-corasick",
"annotate-snippets",
"ansi_term",
"arrayvec",
"atty",
"autocfg",
"backtrace",
"backtrace-sys",
"bitflags",
"block-buffer",
"block-padding",
"byte-tools",
"byteorder",
"c2-chacha",
"cc",
"cfg-if",
"cloudabi",
"cmake",
"compiler_builtins",
"crc32fast",
"crossbeam-deque",
"crossbeam-epoch",
"crossbeam-queue",
"crossbeam-utils",
"datafrog",
"digest",
"dlmalloc",
"either",
"ena",
"env_logger",
"fake-simd",
"filetime",
"flate2",
"fortanix-sgx-abi",
"fuchsia-zircon",
"fuchsia-zircon-sys",
"generic-array",
"getopts",
"getrandom",
"hashbrown",
"hermit-abi",
"humantime",
"indexmap",
"itertools",
"jobserver",
"kernel32-sys",
"lazy_static",
"libc",
"libz-sys",
"lock_api",
"log",
"log_settings",
"md-5",
"measureme",
"memchr",
"memmap",
"memoffset",
"miniz_oxide",
"nodrop",
"num_cpus",
"opaque-debug",
"parking_lot",
"parking_lot_core",
"pkg-config",
"polonius-engine",
"ppv-lite86",
"proc-macro2",
"psm",
"punycode",
"quick-error",
"quote",
"rand",
"rand_chacha",
"rand_core",
"rand_hc",
"rand_isaac",
"rand_pcg",
"rand_xorshift",
"redox_syscall",
"redox_termios",
"regex",
"regex-syntax",
"remove_dir_all",
"rustc-demangle",
"rustc-hash",
"rustc-rayon",
"rustc-rayon-core",
"rustc_version",
"scoped-tls",
"scopeguard",
"semver",
"semver-parser",
"serde",
"serde_derive",
"sha-1",
"smallvec",
"stable_deref_trait",
"stacker",
"syn",
"synstructure",
"tempfile",
"termcolor",
"termion",
"termize",
"thread_local",
"typenum",
"ucd-util",
"unicode-normalization",
"unicode-script",
"unicode-security",
"unicode-width",
"unicode-xid",
"utf8-ranges",
"vcpkg",
"version_check",
"wasi",
"winapi",
"winapi-build",
"winapi-i686-pc-windows-gnu",
"winapi-util",
"winapi-x86_64-pc-windows-gnu",
"wincolor",
];
/// Dependency checks.
///
/// `path` is path to the `src` directory, `cargo` is path to the cargo executable.
pub fn check(path: &Path, cargo: &Path, bad: &mut bool) {
    // Query the full workspace metadata (all features) so every possible
    // dependency edge is visible to the checks below.
    let mut cmd = cargo_metadata::MetadataCommand::new();
    cmd.cargo_path(cargo)
        .manifest_path(path.parent().unwrap().join("Cargo.toml"))
        .features(cargo_metadata::CargoOpt::AllFeatures);
    let metadata = t!(cmd.exec());
    // Each check flips `bad` to true on failure but never aborts, so all
    // diagnostics are reported in one run.
    check_exceptions(&metadata, bad);
    check_whitelist(&metadata, bad);
    check_crate_duplicate(&metadata, bad);
}
/// Check that all licenses are in the valid list in `LICENSES`.
///
/// Packages listed in `EXCEPTIONS` are allowed for tools.
fn check_exceptions(metadata: &Metadata, bad: &mut bool) {
    // Validate the EXCEPTIONS list hasn't changed.
    for (name, license) in EXCEPTIONS {
        // Check that the package actually exists.
        if !metadata.packages.iter().any(|p| p.name == *name) {
            println!(
                "could not find exception package `{}`\n\
                Remove from EXCEPTIONS list if it is no longer used.",
                name
            );
            *bad = true;
        }
        // Check that the license hasn't changed.
        for pkg in metadata.packages.iter().filter(|p| p.name == *name) {
            if pkg.name == "fuchsia-cprng" {
                // This package doesn't declare a license expression. Manual
                // inspection of the license file is necessary, which appears
                // to be BSD-3-Clause.
                assert!(pkg.license.is_none());
                continue;
            }
            match &pkg.license {
                None => {
                    println!(
                        "dependency exception `{}` does not declare a license expression",
                        pkg.id
                    );
                    *bad = true;
                }
                Some(pkg_license) => {
                    if pkg_license.as_str() != *license {
                        // The recorded license no longer matches reality;
                        // force a deliberate update of the EXCEPTIONS table.
                        println!("dependency exception `{}` license has changed", name);
                        println!("    previously `{}` now `{}`", license, pkg_license);
                        println!("    update EXCEPTIONS for the new license");
                        *bad = true;
                    }
                }
            }
        }
    }
    let exception_names: Vec<_> = EXCEPTIONS.iter().map(|(name, _license)| *name).collect();
    let runtime_ids = compute_runtime_crates(metadata);
    // Check if any package does not have a valid license.
    for pkg in &metadata.packages {
        if pkg.source.is_none() {
            // No need to check local packages.
            continue;
        }
        // Exceptions are tolerated only outside the runtime dependency set.
        if !runtime_ids.contains(&pkg.id) && exception_names.contains(&pkg.name.as_str()) {
            continue;
        }
        let license = match &pkg.license {
            Some(license) => license,
            None => {
                println!("dependency `{}` does not define a license expression", pkg.id,);
                *bad = true;
                continue;
            }
        };
        if !LICENSES.contains(&license.as_str()) {
            if pkg.name == "fortanix-sgx-abi" {
                // This is a specific exception because SGX is considered
                // "third party". See
                // https://github.com/rust-lang/rust/issues/62620 for more. In
                // general, these should never be added.
                continue;
            }
            println!("invalid license `{}` in `{}`", license, pkg.id);
            *bad = true;
        }
    }
}
/// Checks the dependency of `WHITELIST_CRATES` at the given path. Changes `bad` to `true` if a
/// check failed.
///
/// Specifically, this checks that the dependencies are on the `WHITELIST`.
fn check_whitelist(metadata: &Metadata, bad: &mut bool) {
    // Check that the WHITELIST does not have unused entries.
    for name in WHITELIST {
        if !metadata.packages.iter().any(|p| p.name == *name) {
            println!(
                "could not find whitelisted package `{}`\n\
                Remove from WHITELIST list if it is no longer used.",
                name
            );
            *bad = true;
        }
    }
    // Get the whitelist in a convenient form.
    let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();
    // Check dependencies.
    // `visited` is shared across root crates so each package is only
    // traversed once even when reachable from multiple roots.
    let mut visited = BTreeSet::new();
    let mut unapproved = BTreeSet::new();
    for &krate in WHITELIST_CRATES.iter() {
        let pkg = pkg_from_name(metadata, krate);
        let mut bad = check_crate_whitelist(&whitelist, metadata, &mut visited, pkg);
        unapproved.append(&mut bad);
    }
    if !unapproved.is_empty() {
        println!("Dependencies not on the whitelist:");
        for dep in unapproved {
            println!("* {}", dep);
        }
        *bad = true;
    }
}
/// Depth-first walk over `krate`'s dependency graph, collecting every
/// external (non in-tree) package that is missing from the whitelist.
/// Returns the set of offending package ids.
fn check_crate_whitelist<'a>(
    whitelist: &'a HashSet<&'static str>,
    metadata: &'a Metadata,
    visited: &mut BTreeSet<&'a PackageId>,
    krate: &'a Package,
) -> BTreeSet<&'a PackageId> {
    let mut unapproved = BTreeSet::new();
    // `insert` returning false means this crate was already handled on an
    // earlier path through the graph.
    if !visited.insert(&krate.id) {
        return unapproved;
    }
    // In-tree packages (no `source`) are exempt; external ones must appear
    // on the whitelist.
    if krate.source.is_some() && !whitelist.contains(krate.name.as_str()) {
        unapproved.insert(&krate.id);
    }
    // Recurse into each dependency and merge the violations it found.
    for dep in deps_of(metadata, &krate.id) {
        let mut nested = check_crate_whitelist(whitelist, metadata, visited, dep);
        unapproved.append(&mut nested);
    }
    unapproved
}
/// Prevents multiple versions of some expensive crates.
fn check_crate_duplicate(metadata: &Metadata, bad: &mut bool) {
    const FORBIDDEN_TO_HAVE_DUPLICATES: &[&str] = &[
        // These two crates take quite a long time to build, so don't allow two versions of them
        // to accidentally sneak into our dependency graph, in order to ensure we keep our CI times
        // under control.
        "cargo",
        "rustc-ap-rustc_ast",
    ];
    for &name in FORBIDDEN_TO_HAVE_DUPLICATES {
        // Count how many distinct packages share this name (i.e. versions).
        let matches: Vec<_> = metadata.packages.iter().filter(|pkg| pkg.name == name).collect();
        match matches.len() {
            0 => {
                // Stale entry in the table above — report so it gets cleaned up.
                println!(
                    "crate `{}` is missing, update `check_crate_duplicate` \
                    if it is no longer used",
                    name
                );
                *bad = true;
            }
            1 => {}
            _ => {
                println!(
                    "crate `{}` is duplicated in `Cargo.lock`, \
                    it is too expensive to build multiple times, \
                    so make sure only one version appears across all dependencies",
                    name
                );
                for pkg in matches {
                    println!("  * {}", pkg.id);
                }
                *bad = true;
            }
        }
    }
}
/// Returns a list of dependencies for the given package.
///
/// Panics if `pkg_id` is absent from the resolve graph or a dependency edge
/// points at an unknown package — both indicate corrupt metadata.
fn deps_of<'a>(metadata: &'a Metadata, pkg_id: &'a PackageId) -> Vec<&'a Package> {
    let resolve = metadata.resolve.as_ref().unwrap();
    let node = resolve
        .nodes
        .iter()
        .find(|candidate| &candidate.id == pkg_id)
        .unwrap_or_else(|| panic!("could not find `{}` in resolve", pkg_id));
    let mut packages = Vec::with_capacity(node.deps.len());
    for dep in &node.deps {
        let pkg = metadata.packages.iter().find(|pkg| pkg.id == dep.pkg).unwrap_or_else(|| {
            panic!("could not find dep `{}` for pkg `{}` in resolve", dep.pkg, pkg_id)
        });
        packages.push(pkg);
    }
    packages
}
/// Finds a package with the given name.
///
/// Panics if the name is missing or ambiguous (multiple versions present).
fn pkg_from_name<'a>(metadata: &'a Metadata, name: &'static str) -> &'a Package {
    let mut matches = metadata.packages.iter().filter(|p| p.name == name);
    let found = matches
        .next()
        .unwrap_or_else(|| panic!("could not find package `{}` in package list", name));
    // A second hit would mean two versions of the same crate — refuse to guess.
    assert!(matches.next().is_none(), "more than one package found for `{}`", name);
    found
}
/// Finds all the packages that are in the rust runtime.
///
/// Unions the normal-dependency closures of every crate in `RUNTIME_CRATES`.
fn compute_runtime_crates<'a>(metadata: &'a Metadata) -> HashSet<&'a PackageId> {
    let resolve = metadata.resolve.as_ref().unwrap();
    let mut runtime_ids = HashSet::new();
    for &name in RUNTIME_CRATES {
        let root = &pkg_from_name(metadata, name).id;
        normal_deps_of_r(resolve, root, &mut runtime_ids);
    }
    runtime_ids
}
/// Recursively find all normal dependencies.
///
/// Accumulates `pkg_id` and its transitive normal dependencies into `result`.
fn normal_deps_of_r<'a>(
    resolve: &'a Resolve,
    pkg_id: &'a PackageId,
    result: &mut HashSet<&'a PackageId>,
) {
    // `insert` returning false means this package was already visited.
    if !result.insert(pkg_id) {
        return;
    }
    let node = resolve
        .nodes
        .iter()
        .find(|candidate| &candidate.id == pkg_id)
        .unwrap_or_else(|| panic!("could not find `{}` in resolve", pkg_id));
    // Don't care about dev-dependencies.
    // Build dependencies *shouldn't* matter unless they do some kind of
    // codegen. For now we'll assume they don't.
    for node_dep in &node.deps {
        let is_normal = node_dep
            .dep_kinds
            .iter()
            .any(|kind_info| kind_info.kind == cargo_metadata::DependencyKind::Normal);
        if is_normal {
            normal_deps_of_r(resolve, &node_dep.pkg, result);
        }
    }
}
| 32.578261 | 99 | 0.555719 |
90b6ee5f5a6a670b39e27a4251343e578cdff971 | 87,568 | pub mod on_unimplemented;
pub mod suggestions;
use super::{
EvaluationResult, FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes,
Obligation, ObligationCause, ObligationCauseCode, OnUnimplementedDirective,
OnUnimplementedNote, OutputTypeParameterMismatch, Overflow, PredicateObligation,
SelectionContext, SelectionError, TraitNotObjectSafe,
};
use crate::infer::error_reporting::{TyCategory, TypeAnnotationNeeded as ErrorCode};
use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use crate::infer::{self, InferCtxt, TyCtxtInferExt};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder, ErrorReported};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::Visitor;
use rustc_hir::Node;
use rustc_middle::mir::abstract_const::NotConstEvaluatable;
use rustc_middle::ty::error::ExpectedFound;
use rustc_middle::ty::fold::TypeFolder;
use rustc_middle::ty::{
self, fast_reject, AdtKind, SubtypePredicate, ToPolyTraitRef, ToPredicate, Ty, TyCtxt,
TypeFoldable, WithConstness,
};
use rustc_session::DiagnosticMessageId;
use rustc_span::symbol::{kw, sym};
use rustc_span::{ExpnKind, MultiSpan, Span, DUMMY_SP};
use std::fmt;
use std::iter;
use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
use crate::traits::query::normalize::AtExt as _;
use on_unimplemented::InferCtxtExt as _;
use suggestions::InferCtxtExt as _;
pub use rustc_infer::traits::error_reporting::*;
pub trait InferCtxtExt<'tcx> {
    /// Reports all `errors`, suppressing those that are duplicates of, or
    /// implied by, other errors at the same span.
    fn report_fulfillment_errors(
        &self,
        errors: &[FulfillmentError<'tcx>],
        body_id: Option<hir::BodyId>,
        fallback_has_occurred: bool,
    );
    /// Reports an overflow while evaluating `obligation` and aborts
    /// compilation; never returns.
    fn report_overflow_error<T>(
        &self,
        obligation: &Obligation<'tcx, T>,
        suggest_increasing_limit: bool,
    ) -> !
    where
        T: fmt::Display + TypeFoldable<'tcx>;
    /// Reports that a cycle of obligations led to overflow and aborts
    /// compilation; never returns.
    fn report_overflow_error_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> !;
    /// The `root_obligation` parameter should be the `root_obligation` field
    /// from a `FulfillmentError`. If no `FulfillmentError` is available,
    /// then it should be the same as `obligation`.
    fn report_selection_error(
        &self,
        obligation: PredicateObligation<'tcx>,
        root_obligation: &PredicateObligation<'tcx>,
        error: &SelectionError<'tcx>,
        fallback_has_occurred: bool,
        points_at_arg: bool,
    );
    /// Given some node representing a fn-like thing in the HIR map,
    /// returns a span and `ArgKind` information that describes the
    /// arguments it expects. This can be supplied to
    /// `report_arg_count_mismatch`.
    fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Vec<ArgKind>)>;
    /// Reports an error when the number of arguments needed by a
    /// trait match doesn't match the number that the expression
    /// provides.
    fn report_arg_count_mismatch(
        &self,
        span: Span,
        found_span: Option<Span>,
        expected_args: Vec<ArgKind>,
        found_args: Vec<ArgKind>,
        is_closure: bool,
    ) -> DiagnosticBuilder<'tcx>;
}
impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
    /// Reports each error in `errors` that is not suppressed as a duplicate
    /// of, or implied by, another error at the same span.
    fn report_fulfillment_errors(
        &self,
        errors: &[FulfillmentError<'tcx>],
        body_id: Option<hir::BodyId>,
        fallback_has_occurred: bool,
    ) {
        #[derive(Debug)]
        struct ErrorDescriptor<'tcx> {
            predicate: ty::Predicate<'tcx>,
            index: Option<usize>, // None if this is an old error
        }
        // Seed the per-span map with errors already reported earlier, so
        // new errors duplicating them get suppressed below.
        let mut error_map: FxHashMap<_, Vec<_>> = self
            .reported_trait_errors
            .borrow()
            .iter()
            .map(|(&span, predicates)| {
                (
                    span,
                    predicates
                        .iter()
                        .map(|&predicate| ErrorDescriptor { predicate, index: None })
                        .collect(),
                )
            })
            .collect();
        for (index, error) in errors.iter().enumerate() {
            // We want to ignore desugarings here: spans are equivalent even
            // if one is the result of a desugaring and the other is not.
            let mut span = error.obligation.cause.span;
            let expn_data = span.ctxt().outer_expn_data();
            if let ExpnKind::Desugaring(_) = expn_data.kind {
                span = expn_data.call_site;
            }
            error_map.entry(span).or_default().push(ErrorDescriptor {
                predicate: error.obligation.predicate,
                index: Some(index),
            });
            self.reported_trait_errors
                .borrow_mut()
                .entry(span)
                .or_default()
                .push(error.obligation.predicate);
        }
        // We do this in 2 passes because we want to display errors in order, though
        // maybe it *is* better to sort errors by span or something.
        let mut is_suppressed = vec![false; errors.len()];
        for (_, error_set) in error_map.iter() {
            // We want to suppress "duplicate" errors with the same span.
            for error in error_set {
                if let Some(index) = error.index {
                    // Suppress errors that are either:
                    // 1) strictly implied by another error.
                    // 2) implied by an error with a smaller index.
                    for error2 in error_set {
                        if error2.index.map_or(false, |index2| is_suppressed[index2]) {
                            // Avoid errors being suppressed by already-suppressed
                            // errors, to prevent all errors from being suppressed
                            // at once.
                            continue;
                        }
                        if self.error_implies(error2.predicate, error.predicate)
                            && !(error2.index >= error.index
                                && self.error_implies(error.predicate, error2.predicate))
                        {
                            info!("skipping {:?} (implied by {:?})", error, error2);
                            is_suppressed[index] = true;
                            break;
                        }
                    }
                }
            }
        }
        // Second pass: report everything that survived suppression, in the
        // original order.
        for (error, suppressed) in iter::zip(errors, is_suppressed) {
            if !suppressed {
                self.report_fulfillment_error(error, body_id, fallback_has_occurred);
            }
        }
    }
    /// Reports that an overflow has occurred and halts compilation. We
    /// halt compilation unconditionally because it is important that
    /// overflows never be masked -- they basically represent computations
    /// whose result could not be truly determined and thus we can't say
    /// if the program type checks or not -- and they are unusual
    /// occurrences in any case.
    ///
    /// When `suggest_increasing_limit` is set, a suggestion to raise the
    /// overflow limit is attached to the diagnostic.
    fn report_overflow_error<T>(
        &self,
        obligation: &Obligation<'tcx, T>,
        suggest_increasing_limit: bool,
    ) -> !
    where
        T: fmt::Display + TypeFoldable<'tcx>,
    {
        let predicate = self.resolve_vars_if_possible(obligation.predicate.clone());
        let mut err = struct_span_err!(
            self.tcx.sess,
            obligation.cause.span,
            E0275,
            "overflow evaluating the requirement `{}`",
            predicate
        );
        if suggest_increasing_limit {
            self.suggest_new_overflow_limit(&mut err);
        }
        self.note_obligation_cause_code(
            &mut err,
            &obligation.predicate,
            &obligation.cause.code,
            &mut vec![],
            &mut Default::default(),
        );
        err.emit();
        self.tcx.sess.abort_if_errors();
        // `abort_if_errors` must have aborted above (we just emitted one);
        // reaching this point would be a compiler bug.
        bug!();
    }
/// Reports that a cycle was detected which led to overflow and halts
/// compilation. This is equivalent to `report_overflow_error` except
/// that we can give a more helpful error message (and, in particular,
/// we do not suggest increasing the overflow limit, which is not
/// going to help).
fn report_overflow_error_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> ! {
let cycle = self.resolve_vars_if_possible(cycle.to_owned());
assert!(!cycle.is_empty());
debug!("report_overflow_error_cycle: cycle={:?}", cycle);
self.report_overflow_error(&cycle[0], false);
}
    /// Builds and emits the diagnostic for a `SelectionError`.
    ///
    /// `root_obligation` is the top-level obligation this error derives
    /// from (used to retry HIR-based well-formedness checking for a better
    /// message); `points_at_arg` indicates the span points at a function
    /// argument and enables argument-specific suggestions.
    fn report_selection_error(
        &self,
        mut obligation: PredicateObligation<'tcx>,
        root_obligation: &PredicateObligation<'tcx>,
        error: &SelectionError<'tcx>,
        fallback_has_occurred: bool,
        points_at_arg: bool,
    ) {
        let tcx = self.tcx;
        let mut span = obligation.cause.span;
        // Build the diagnostic according to the kind of selection error;
        // several arms emit-and-return early instead of falling through.
        let mut err = match *error {
            SelectionError::Unimplemented => {
                // If this obligation was generated as a result of well-formedness checking, see if we
                // can get a better error message by performing HIR-based well-formedness checking.
                if let ObligationCauseCode::WellFormed(Some(wf_loc)) =
                    root_obligation.cause.code.peel_derives()
                {
                    if let Some(cause) = self.tcx.diagnostic_hir_wf_check((
                        tcx.erase_regions(obligation.predicate),
                        wf_loc.clone(),
                    )) {
                        obligation.cause = cause;
                        span = obligation.cause.span;
                    }
                }
                if let ObligationCauseCode::CompareImplMethodObligation {
                    item_name,
                    impl_item_def_id,
                    trait_item_def_id,
                }
                | ObligationCauseCode::CompareImplTypeObligation {
                    item_name,
                    impl_item_def_id,
                    trait_item_def_id,
                } = obligation.cause.code
                {
                    self.report_extra_impl_obligation(
                        span,
                        item_name,
                        impl_item_def_id,
                        trait_item_def_id,
                        &format!("`{}`", obligation.predicate),
                    )
                    .emit();
                    return;
                }
                let bound_predicate = obligation.predicate.kind();
                match bound_predicate.skip_binder() {
                    ty::PredicateKind::Trait(trait_predicate) => {
                        let trait_predicate = bound_predicate.rebind(trait_predicate);
                        let trait_predicate = self.resolve_vars_if_possible(trait_predicate);
                        if self.tcx.sess.has_errors() && trait_predicate.references_error() {
                            return;
                        }
                        let trait_ref = trait_predicate.to_poly_trait_ref();
                        let (post_message, pre_message, type_def) = self
                            .get_parent_trait_ref(&obligation.cause.code)
                            .map(|(t, s)| {
                                (
                                    format!(" in `{}`", t),
                                    format!("within `{}`, ", t),
                                    s.map(|s| (format!("within this `{}`", t), s)),
                                )
                            })
                            .unwrap_or_default();
                        let OnUnimplementedNote { message, label, note, enclosing_scope } =
                            self.on_unimplemented_note(trait_ref, &obligation);
                        let have_alt_message = message.is_some() || label.is_some();
                        let is_try_conversion = self.is_try_conversion(span, trait_ref.def_id());
                        let is_unsize =
                            { Some(trait_ref.def_id()) == self.tcx.lang_items().unsize_trait() };
                        // `?`-conversion failures get a dedicated message/note pair.
                        let (message, note) = if is_try_conversion {
                            (
                                Some(format!(
                                    "`?` couldn't convert the error to `{}`",
                                    trait_ref.skip_binder().self_ty(),
                                )),
                                Some(
                                    "the question mark operation (`?`) implicitly performs a \
                                 conversion on the error value using the `From` trait"
                                        .to_owned(),
                                ),
                            )
                        } else {
                            (message, note)
                        };
                        let mut err = struct_span_err!(
                            self.tcx.sess,
                            span,
                            E0277,
                            "{}",
                            message.unwrap_or_else(|| format!(
                                "the trait bound `{}` is not satisfied{}",
                                trait_ref.without_const().to_predicate(tcx),
                                post_message,
                            ))
                        );
                        if is_try_conversion {
                            let none_error = self
                                .tcx
                                .get_diagnostic_item(sym::none_error)
                                .map(|def_id| tcx.type_of(def_id));
                            let should_convert_option_to_result =
                                Some(trait_ref.skip_binder().substs.type_at(1)) == none_error;
                            let should_convert_result_to_option =
                                Some(trait_ref.self_ty().skip_binder()) == none_error;
                            if should_convert_option_to_result {
                                err.span_suggestion_verbose(
                                    span.shrink_to_lo(),
                                    "consider converting the `Option<T>` into a `Result<T, _>` \
                                 using `Option::ok_or` or `Option::ok_or_else`",
                                    ".ok_or_else(|| /* error value */)".to_string(),
                                    Applicability::HasPlaceholders,
                                );
                            } else if should_convert_result_to_option {
                                err.span_suggestion_verbose(
                                    span.shrink_to_lo(),
                                    "consider converting the `Result<T, _>` into an `Option<T>` \
                                 using `Result::ok`",
                                    ".ok()".to_string(),
                                    Applicability::MachineApplicable,
                                );
                            }
                            if let Some(ret_span) = self.return_type_span(&obligation) {
                                err.span_label(
                                    ret_span,
                                    &format!(
                                        "expected `{}` because of this",
                                        trait_ref.skip_binder().self_ty()
                                    ),
                                );
                            }
                        }
                        let explanation =
                            if obligation.cause.code == ObligationCauseCode::MainFunctionType {
                                "consider using `()`, or a `Result`".to_owned()
                            } else {
                                format!(
                                    "{}the trait `{}` is not implemented for `{}`",
                                    pre_message,
                                    trait_ref.print_only_trait_path(),
                                    trait_ref.skip_binder().self_ty(),
                                )
                            };
                        if self.suggest_add_reference_to_arg(
                            &obligation,
                            &mut err,
                            &trait_ref,
                            points_at_arg,
                            have_alt_message,
                        ) {
                            self.note_obligation_cause(&mut err, &obligation);
                            err.emit();
                            return;
                        }
                        if let Some(ref s) = label {
                            // If it has a custom `#[rustc_on_unimplemented]`
                            // error message, let's display it as the label!
                            err.span_label(span, s.as_str());
                            if !matches!(trait_ref.skip_binder().self_ty().kind(), ty::Param(_)) {
                                // When the self type is a type param We don't need to "the trait
                                // `std::marker::Sized` is not implemented for `T`" as we will point
                                // at the type param with a label to suggest constraining it.
                                err.help(&explanation);
                            }
                        } else {
                            err.span_label(span, explanation);
                        }
                        if let Some((msg, span)) = type_def {
                            err.span_label(span, &msg);
                        }
                        if let Some(ref s) = note {
                            // If it has a custom `#[rustc_on_unimplemented]` note, let's display it
                            err.note(s.as_str());
                        }
                        if let Some(ref s) = enclosing_scope {
                            let body = tcx
                                .hir()
                                .opt_local_def_id(obligation.cause.body_id)
                                .unwrap_or_else(|| {
                                    tcx.hir().body_owner_def_id(hir::BodyId {
                                        hir_id: obligation.cause.body_id,
                                    })
                                });
                            let enclosing_scope_span =
                                tcx.hir().span_with_body(tcx.hir().local_def_id_to_hir_id(body));
                            err.span_label(enclosing_scope_span, s.as_str());
                        }
                        // Fire the battery of structured suggestions; each one
                        // decides internally whether it applies.
                        self.suggest_dereferences(&obligation, &mut err, trait_ref, points_at_arg);
                        self.suggest_fn_call(&obligation, &mut err, trait_ref, points_at_arg);
                        self.suggest_remove_reference(&obligation, &mut err, trait_ref);
                        self.suggest_semicolon_removal(&obligation, &mut err, span, trait_ref);
                        self.note_version_mismatch(&mut err, &trait_ref);
                        if Some(trait_ref.def_id()) == tcx.lang_items().try_trait() {
                            self.suggest_await_before_try(&mut err, &obligation, trait_ref, span);
                        }
                        if self.suggest_impl_trait(&mut err, span, &obligation, trait_ref) {
                            err.emit();
                            return;
                        }
                        if is_unsize {
                            // If the obligation failed due to a missing implementation of the
                            // `Unsize` trait, give a pointer to why that might be the case
                            err.note(
                                "all implementations of `Unsize` are provided \
                                automatically by the compiler, see \
                                <https://doc.rust-lang.org/stable/std/marker/trait.Unsize.html> \
                                for more information",
                            );
                        }
                        let is_fn_trait = [
                            self.tcx.lang_items().fn_trait(),
                            self.tcx.lang_items().fn_mut_trait(),
                            self.tcx.lang_items().fn_once_trait(),
                        ]
                        .contains(&Some(trait_ref.def_id()));
                        let is_target_feature_fn = if let ty::FnDef(def_id, _) =
                            *trait_ref.skip_binder().self_ty().kind()
                        {
                            !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty()
                        } else {
                            false
                        };
                        if is_fn_trait && is_target_feature_fn {
                            err.note(
                                "`#[target_feature]` functions do not implement the `Fn` traits",
                            );
                        }
                        // Try to report a help message
                        if !trait_ref.has_infer_types_or_consts()
                            && self.predicate_can_apply(obligation.param_env, trait_ref)
                        {
                            // If a where-clause may be useful, remind the
                            // user that they can add it.
                            //
                            // don't display an on-unimplemented note, as
                            // these notes will often be of the form
                            //     "the type `T` can't be frobnicated"
                            // which is somewhat confusing.
                            self.suggest_restricting_param_bound(
                                &mut err,
                                trait_ref,
                                obligation.cause.body_id,
                            );
                        } else if !have_alt_message {
                            // Can't show anything else useful, try to find similar impls.
                            let impl_candidates = self.find_similar_impl_candidates(trait_ref);
                            self.report_similar_impl_candidates(impl_candidates, &mut err);
                        }
                        // Changing mutability doesn't make a difference to whether we have
                        // an `Unsize` impl (Fixes ICE in #71036)
                        if !is_unsize {
                            self.suggest_change_mut(
                                &obligation,
                                &mut err,
                                trait_ref,
                                points_at_arg,
                            );
                        }
                        // If this error is due to `!: Trait` not implemented but `(): Trait` is
                        // implemented, and fallback has occurred, then it could be due to a
                        // variable that used to fallback to `()` now falling back to `!`. Issue a
                        // note informing about the change in behaviour.
                        if trait_predicate.skip_binder().self_ty().is_never()
                            && fallback_has_occurred
                        {
                            let predicate = trait_predicate.map_bound(|mut trait_pred| {
                                trait_pred.trait_ref.substs = self.tcx.mk_substs_trait(
                                    self.tcx.mk_unit(),
                                    &trait_pred.trait_ref.substs[1..],
                                );
                                trait_pred
                            });
                            let unit_obligation = obligation.with(predicate.to_predicate(tcx));
                            if self.predicate_may_hold(&unit_obligation) {
                                err.note("this trait is implemented for `()`.");
                                err.note(
                                    "this error might have been caused by changes to \
                                    Rust's type-inference algorithm (see issue #48950 \
                                    <https://github.com/rust-lang/rust/issues/48950> \
                                    for more information).",
                                );
                                err.help("did you intend to use the type `()` here instead?");
                            }
                        }
                        // Return early if the trait is Debug or Display and the invocation
                        // originates within a standard library macro, because the output
                        // is otherwise overwhelming and unhelpful (see #85844 for an
                        // example).
                        let trait_is_debug =
                            self.tcx.is_diagnostic_item(sym::debug_trait, trait_ref.def_id());
                        let trait_is_display =
                            self.tcx.is_diagnostic_item(sym::display_trait, trait_ref.def_id());
                        let in_std_macro =
                            match obligation.cause.span.ctxt().outer_expn_data().macro_def_id {
                                Some(macro_def_id) => {
                                    let crate_name = tcx.crate_name(macro_def_id.krate);
                                    crate_name == sym::std || crate_name == sym::core
                                }
                                None => false,
                            };
                        if in_std_macro && (trait_is_debug || trait_is_display) {
                            err.emit();
                            return;
                        }
                        err
                    }
                    ty::PredicateKind::Subtype(predicate) => {
                        // Errors for Subtype predicates show up as
                        // `FulfillmentErrorCode::CodeSubtypeError`,
                        // not selection error.
                        span_bug!(span, "subtype requirement gave wrong error: `{:?}`", predicate)
                    }
                    ty::PredicateKind::Coerce(predicate) => {
                        // Errors for Coerce predicates show up as
                        // `FulfillmentErrorCode::CodeSubtypeError`,
                        // not selection error.
                        span_bug!(span, "coerce requirement gave wrong error: `{:?}`", predicate)
                    }
                    ty::PredicateKind::RegionOutlives(predicate) => {
                        let predicate = bound_predicate.rebind(predicate);
                        let predicate = self.resolve_vars_if_possible(predicate);
                        let err = self
                            .region_outlives_predicate(&obligation.cause, predicate)
                            .err()
                            .unwrap();
                        struct_span_err!(
                            self.tcx.sess,
                            span,
                            E0279,
                            "the requirement `{}` is not satisfied (`{}`)",
                            predicate,
                            err,
                        )
                    }
                    ty::PredicateKind::Projection(..) | ty::PredicateKind::TypeOutlives(..) => {
                        let predicate = self.resolve_vars_if_possible(obligation.predicate);
                        struct_span_err!(
                            self.tcx.sess,
                            span,
                            E0280,
                            "the requirement `{}` is not satisfied",
                            predicate
                        )
                    }
                    ty::PredicateKind::ObjectSafe(trait_def_id) => {
                        let violations = self.tcx.object_safety_violations(trait_def_id);
                        report_object_safety_error(self.tcx, span, trait_def_id, violations)
                    }
                    ty::PredicateKind::ClosureKind(closure_def_id, closure_substs, kind) => {
                        let found_kind = self.closure_kind(closure_substs).unwrap();
                        let closure_span =
                            self.tcx.sess.source_map().guess_head_span(
                                self.tcx.hir().span_if_local(closure_def_id).unwrap(),
                            );
                        let hir_id =
                            self.tcx.hir().local_def_id_to_hir_id(closure_def_id.expect_local());
                        let mut err = struct_span_err!(
                            self.tcx.sess,
                            closure_span,
                            E0525,
                            "expected a closure that implements the `{}` trait, \
                             but this closure only implements `{}`",
                            kind,
                            found_kind
                        );
                        err.span_label(
                            closure_span,
                            format!("this closure implements `{}`, not `{}`", found_kind, kind),
                        );
                        err.span_label(
                            obligation.cause.span,
                            format!("the requirement to implement `{}` derives from here", kind),
                        );
                        // Additional context information explaining why the closure only implements
                        // a particular trait.
                        if let Some(typeck_results) = self.in_progress_typeck_results {
                            let typeck_results = typeck_results.borrow();
                            match (found_kind, typeck_results.closure_kind_origins().get(hir_id)) {
                                (ty::ClosureKind::FnOnce, Some((span, place))) => {
                                    err.span_label(
                                        *span,
                                        format!(
                                            "closure is `FnOnce` because it moves the \
                                         variable `{}` out of its environment",
                                            ty::place_to_string_for_capture(tcx, place)
                                        ),
                                    );
                                }
                                (ty::ClosureKind::FnMut, Some((span, place))) => {
                                    err.span_label(
                                        *span,
                                        format!(
                                            "closure is `FnMut` because it mutates the \
                                         variable `{}` here",
                                            ty::place_to_string_for_capture(tcx, place)
                                        ),
                                    );
                                }
                                _ => {}
                            }
                        }
                        err.emit();
                        return;
                    }
                    ty::PredicateKind::WellFormed(ty) => {
                        if !self.tcx.sess.opts.debugging_opts.chalk {
                            // WF predicates cannot themselves make
                            // errors. They can only block due to
                            // ambiguity; otherwise, they always
                            // degenerate into other obligations
                            // (which may fail).
                            span_bug!(span, "WF predicate not satisfied for {:?}", ty);
                        } else {
                            // FIXME: we'll need a better message which takes into account
                            // which bounds actually failed to hold.
                            self.tcx.sess.struct_span_err(
                                span,
                                &format!("the type `{}` is not well-formed (chalk)", ty),
                            )
                        }
                    }
                    ty::PredicateKind::ConstEvaluatable(..) => {
                        // Errors for `ConstEvaluatable` predicates show up as
                        // `SelectionError::ConstEvalFailure`,
                        // not `Unimplemented`.
                        span_bug!(
                            span,
                            "const-evaluatable requirement gave wrong error: `{:?}`",
                            obligation
                        )
                    }
                    ty::PredicateKind::ConstEquate(..) => {
                        // Errors for `ConstEquate` predicates show up as
                        // `SelectionError::ConstEvalFailure`,
                        // not `Unimplemented`.
                        span_bug!(
                            span,
                            "const-equate requirement gave wrong error: `{:?}`",
                            obligation
                        )
                    }
                    ty::PredicateKind::TypeWellFormedFromEnv(..) => span_bug!(
                        span,
                        "TypeWellFormedFromEnv predicate should only exist in the environment"
                    ),
                }
            }
            OutputTypeParameterMismatch(found_trait_ref, expected_trait_ref, _) => {
                let found_trait_ref = self.resolve_vars_if_possible(found_trait_ref);
                let expected_trait_ref = self.resolve_vars_if_possible(expected_trait_ref);
                if expected_trait_ref.self_ty().references_error() {
                    return;
                }
                let found_trait_ty = match found_trait_ref.self_ty().no_bound_vars() {
                    Some(ty) => ty,
                    None => return,
                };
                let found_did = match *found_trait_ty.kind() {
                    ty::Closure(did, _) | ty::Foreign(did) | ty::FnDef(did, _) => Some(did),
                    ty::Adt(def, _) => Some(def.did),
                    _ => None,
                };
                let found_span = found_did
                    .and_then(|did| self.tcx.hir().span_if_local(did))
                    .map(|sp| self.tcx.sess.source_map().guess_head_span(sp)); // the sp could be an fn def
                if self.reported_closure_mismatch.borrow().contains(&(span, found_span)) {
                    // We check closures twice, with obligations flowing in different directions,
                    // but we want to complain about them only once.
                    return;
                }
                self.reported_closure_mismatch.borrow_mut().insert((span, found_span));
                let found = match found_trait_ref.skip_binder().substs.type_at(1).kind() {
                    ty::Tuple(ref tys) => vec![ArgKind::empty(); tys.len()],
                    _ => vec![ArgKind::empty()],
                };
                let expected_ty = expected_trait_ref.skip_binder().substs.type_at(1);
                let expected = match expected_ty.kind() {
                    ty::Tuple(ref tys) => tys
                        .iter()
                        .map(|t| ArgKind::from_expected_ty(t.expect_ty(), Some(span)))
                        .collect(),
                    _ => vec![ArgKind::Arg("_".to_owned(), expected_ty.to_string())],
                };
                // Same arity: the mismatch is in the argument types; different
                // arity: report a count mismatch instead.
                if found.len() == expected.len() {
                    self.report_closure_arg_mismatch(
                        span,
                        found_span,
                        found_trait_ref,
                        expected_trait_ref,
                    )
                } else {
                    let (closure_span, found) = found_did
                        .and_then(|did| {
                            let node = self.tcx.hir().get_if_local(did)?;
                            let (found_span, found) = self.get_fn_like_arguments(node)?;
                            Some((Some(found_span), found))
                        })
                        .unwrap_or((found_span, found));
                    self.report_arg_count_mismatch(
                        span,
                        closure_span,
                        expected,
                        found,
                        found_trait_ty.is_closure(),
                    )
                }
            }
            TraitNotObjectSafe(did) => {
                let violations = self.tcx.object_safety_violations(did);
                report_object_safety_error(self.tcx, span, did, violations)
            }
            SelectionError::NotConstEvaluatable(NotConstEvaluatable::MentionsInfer) => {
                bug!(
                    "MentionsInfer should have been handled in `traits/fulfill.rs` or `traits/select/mod.rs`"
                )
            }
            SelectionError::NotConstEvaluatable(NotConstEvaluatable::MentionsParam) => {
                if !self.tcx.features().const_evaluatable_checked {
                    let mut err = self.tcx.sess.struct_span_err(
                        span,
                        "constant expression depends on a generic parameter",
                    );
                    // FIXME(const_generics): we should suggest to the user how they can resolve this
                    // issue. However, this is currently not actually possible
                    // (see https://github.com/rust-lang/rust/issues/66962#issuecomment-575907083).
                    //
                    // Note that with `feature(const_evaluatable_checked)` this case should not
                    // be reachable.
                    err.note("this may fail depending on what value the parameter takes");
                    err.emit();
                    return;
                }
                match obligation.predicate.kind().skip_binder() {
                    ty::PredicateKind::ConstEvaluatable(def, _) => {
                        let mut err =
                            self.tcx.sess.struct_span_err(span, "unconstrained generic constant");
                        let const_span = self.tcx.def_span(def.did);
                        match self.tcx.sess.source_map().span_to_snippet(const_span) {
                            Ok(snippet) => err.help(&format!(
                                "try adding a `where` bound using this expression: `where [(); {}]:`",
                                snippet
                            )),
                            _ => err.help("consider adding a `where` bound using this expression"),
                        };
                        err
                    }
                    _ => {
                        span_bug!(
                            span,
                            "unexpected non-ConstEvaluatable predicate, this should not be reachable"
                        )
                    }
                }
            }
            // Already reported in the query.
            SelectionError::NotConstEvaluatable(NotConstEvaluatable::Error(ErrorReported)) => {
                // FIXME(eddyb) remove this once `ErrorReported` becomes a proof token.
                self.tcx.sess.delay_span_bug(span, "`ErrorReported` without an error");
                return;
            }
            Overflow => {
                bug!("overflow should be handled before the `report_selection_error` path");
            }
        };
        // Common tail: attach the obligation-cause notes and emit.
        self.note_obligation_cause(&mut err, &obligation);
        self.point_at_returns_when_relevant(&mut err, &obligation);
        err.emit();
    }
    /// Given some node representing a fn-like thing in the HIR map,
    /// returns a span and `ArgKind` information that describes the
    /// arguments it expects. This can be supplied to
    /// `report_arg_count_mismatch`.
    ///
    /// Returns `None` if any argument snippet cannot be recovered from the
    /// source map; panics on a node that is not fn-like.
    fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Vec<ArgKind>)> {
        let sm = self.tcx.sess.source_map();
        let hir = self.tcx.hir();
        Some(match node {
            // Closures: inspect the body's parameter patterns so tuple
            // arguments can be reported field-by-field.
            Node::Expr(&hir::Expr {
                kind: hir::ExprKind::Closure(_, ref _decl, id, span, _),
                ..
            }) => (
                sm.guess_head_span(span),
                hir.body(id)
                    .params
                    .iter()
                    .map(|arg| {
                        if let hir::Pat { kind: hir::PatKind::Tuple(ref args, _), span, .. } =
                            *arg.pat
                        {
                            Some(ArgKind::Tuple(
                                Some(span),
                                args.iter()
                                    .map(|pat| {
                                        sm.span_to_snippet(pat.span)
                                            .ok()
                                            .map(|snippet| (snippet, "_".to_owned()))
                                    })
                                    .collect::<Option<Vec<_>>>()?,
                            ))
                        } else {
                            let name = sm.span_to_snippet(arg.pat.span).ok()?;
                            Some(ArgKind::Arg(name, "_".to_owned()))
                        }
                    })
                    .collect::<Option<Vec<ArgKind>>>()?,
            ),
            // Free functions, impl methods, and trait methods: read the
            // declared input types from the signature.
            Node::Item(&hir::Item { span, kind: hir::ItemKind::Fn(ref sig, ..), .. })
            | Node::ImplItem(&hir::ImplItem {
                span,
                kind: hir::ImplItemKind::Fn(ref sig, _),
                ..
            })
            | Node::TraitItem(&hir::TraitItem {
                span,
                kind: hir::TraitItemKind::Fn(ref sig, _),
                ..
            }) => (
                sm.guess_head_span(span),
                sig.decl
                    .inputs
                    .iter()
                    .map(|arg| match arg.kind {
                        hir::TyKind::Tup(ref tys) => ArgKind::Tuple(
                            Some(arg.span),
                            vec![("_".to_owned(), "_".to_owned()); tys.len()],
                        ),
                        _ => ArgKind::empty(),
                    })
                    .collect::<Vec<ArgKind>>(),
            ),
            // Tuple-struct / tuple-variant constructors: one empty arg per field.
            Node::Ctor(ref variant_data) => {
                let span = variant_data.ctor_hir_id().map_or(DUMMY_SP, |id| hir.span(id));
                let span = sm.guess_head_span(span);
                (span, vec![ArgKind::empty(); variant_data.fields().len()])
            }
            _ => panic!("non-FnLike node found: {:?}", node),
        })
    }
    /// Reports an error when the number of arguments needed by a
    /// trait match doesn't match the number that the expression
    /// provides.
    ///
    /// Returns the (unemitted) diagnostic so the caller can attach more
    /// information before emitting it.
    fn report_arg_count_mismatch(
        &self,
        span: Span,
        found_span: Option<Span>,
        expected_args: Vec<ArgKind>,
        found_args: Vec<ArgKind>,
        is_closure: bool,
    ) -> DiagnosticBuilder<'tcx> {
        let kind = if is_closure { "closure" } else { "function" };
        // Renders an argument list as prose, e.g. "a single 2-tuple as
        // argument" or "3 distinct arguments"; `other` is the opposite side,
        // used to decide whether "distinct" clarification is needed.
        let args_str = |arguments: &[ArgKind], other: &[ArgKind]| {
            let arg_length = arguments.len();
            let distinct = matches!(other, &[ArgKind::Tuple(..)]);
            match (arg_length, arguments.get(0)) {
                (1, Some(&ArgKind::Tuple(_, ref fields))) => {
                    format!("a single {}-tuple as argument", fields.len())
                }
                _ => format!(
                    "{} {}argument{}",
                    arg_length,
                    if distinct && arg_length > 1 { "distinct " } else { "" },
                    pluralize!(arg_length)
                ),
            }
        };
        let expected_str = args_str(&expected_args, &found_args);
        let found_str = args_str(&found_args, &expected_args);
        let mut err = struct_span_err!(
            self.tcx.sess,
            span,
            E0593,
            "{} is expected to take {}, but it takes {}",
            kind,
            expected_str,
            found_str,
        );
        err.span_label(span, format!("expected {} that takes {}", kind, expected_str));
        if let Some(found_span) = found_span {
            err.span_label(found_span, format!("takes {}", found_str));
            // move |_| { ... }
            // ^^^^^^^^-- def_span
            //
            // move |_| { ... }
            // ^^^^^-- prefix
            let prefix_span = self.tcx.sess.source_map().span_until_non_whitespace(found_span);
            // move |_| { ... }
            //      ^^^-- pipe_span
            let pipe_span =
                if let Some(span) = found_span.trim_start(prefix_span) { span } else { found_span };
            // Suggest to take and ignore the arguments with expected_args_length `_`s if
            // found arguments is empty (assume the user just wants to ignore args in this case).
            // For example, if `expected_args_length` is 2, suggest `|_, _|`.
            if found_args.is_empty() && is_closure {
                let underscores = vec!["_"; expected_args.len()].join(", ");
                err.span_suggestion_verbose(
                    pipe_span,
                    &format!(
                        "consider changing the closure to take and ignore the expected argument{}",
                        pluralize!(expected_args.len())
                    ),
                    format!("|{}|", underscores),
                    Applicability::MachineApplicable,
                );
            }
            // Closure takes one tuple but a multi-arg signature was expected:
            // suggest splitting the tuple pattern into separate arguments.
            if let &[ArgKind::Tuple(_, ref fields)] = &found_args[..] {
                if fields.len() == expected_args.len() {
                    let sugg = fields
                        .iter()
                        .map(|(name, _)| name.to_owned())
                        .collect::<Vec<String>>()
                        .join(", ");
                    err.span_suggestion_verbose(
                        found_span,
                        "change the closure to take multiple arguments instead of a single tuple",
                        format!("|{}|", sugg),
                        Applicability::MachineApplicable,
                    );
                }
            }
            // The reverse case: a single tuple was expected but the closure
            // takes separate arguments; suggest bundling them into a tuple.
            if let &[ArgKind::Tuple(_, ref fields)] = &expected_args[..] {
                if fields.len() == found_args.len() && is_closure {
                    let sugg = format!(
                        "|({}){}|",
                        found_args
                            .iter()
                            .map(|arg| match arg {
                                ArgKind::Arg(name, _) => name.to_owned(),
                                _ => "_".to_owned(),
                            })
                            .collect::<Vec<String>>()
                            .join(", "),
                        // add type annotations if available
                        if found_args.iter().any(|arg| match arg {
                            ArgKind::Arg(_, ty) => ty != "_",
                            _ => false,
                        }) {
                            format!(
                                ": ({})",
                                fields
                                    .iter()
                                    .map(|(_, ty)| ty.to_owned())
                                    .collect::<Vec<String>>()
                                    .join(", ")
                            )
                        } else {
                            String::new()
                        },
                    );
                    err.span_suggestion_verbose(
                        found_span,
                        "change the closure to accept a tuple instead of individual arguments",
                        sugg,
                        Applicability::MachineApplicable,
                    );
                }
            }
        }
        err
    }
}
/// Private (non-`pub`) diagnostics helpers for `InferCtxt`, used by the
/// public `InferCtxtExt` methods above.
trait InferCtxtPrivExt<'tcx> {
    // returns if `cond` not occurring implies that `error` does not occur - i.e., that
    // `error` occurring implies that `cond` occurs.
    fn error_implies(&self, cond: ty::Predicate<'tcx>, error: ty::Predicate<'tcx>) -> bool;
    /// Dispatches a single `FulfillmentError` to the appropriate reporter
    /// based on its error code.
    fn report_fulfillment_error(
        &self,
        error: &FulfillmentError<'tcx>,
        body_id: Option<hir::BodyId>,
        fallback_has_occurred: bool,
    );
    /// Reports a projection (associated-type) mismatch for `obligation`.
    fn report_projection_error(
        &self,
        obligation: &PredicateObligation<'tcx>,
        error: &MismatchedProjectionTypes<'tcx>,
    );
    // NOTE(review): implementations of the following helpers are outside
    // this chunk; descriptions are inferred from their use sites above and
    // should be confirmed against the definitions.
    fn fuzzy_match_tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool;
    fn describe_generator(&self, body_id: hir::BodyId) -> Option<&'static str>;
    fn find_similar_impl_candidates(
        &self,
        trait_ref: ty::PolyTraitRef<'tcx>,
    ) -> Vec<ty::TraitRef<'tcx>>;
    fn report_similar_impl_candidates(
        &self,
        impl_candidates: Vec<ty::TraitRef<'tcx>>,
        err: &mut DiagnosticBuilder<'_>,
    );
    /// Gets the parent trait chain start
    fn get_parent_trait_ref(
        &self,
        code: &ObligationCauseCode<'tcx>,
    ) -> Option<(String, Option<Span>)>;
    /// If the `Self` type of the unsatisfied trait `trait_ref` implements a trait
    /// with the same path as `trait_ref`, a help message about
    /// a probable version mismatch is added to `err`
    fn note_version_mismatch(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        trait_ref: &ty::PolyTraitRef<'tcx>,
    );
    /// Creates a `PredicateObligation` with `new_self_ty` replacing the existing type in the
    /// `trait_ref`.
    ///
    /// For this to work, `new_self_ty` must have no escaping bound variables.
    fn mk_trait_obligation_with_new_self_ty(
        &self,
        param_env: ty::ParamEnv<'tcx>,
        trait_ref: ty::PolyTraitRef<'tcx>,
        new_self_ty: Ty<'tcx>,
    ) -> PredicateObligation<'tcx>;
    fn maybe_report_ambiguity(
        &self,
        obligation: &PredicateObligation<'tcx>,
        body_id: Option<hir::BodyId>,
    );
    fn predicate_can_apply(
        &self,
        param_env: ty::ParamEnv<'tcx>,
        pred: ty::PolyTraitRef<'tcx>,
    ) -> bool;
    fn note_obligation_cause(
        &self,
        err: &mut DiagnosticBuilder<'tcx>,
        obligation: &PredicateObligation<'tcx>,
    );
    fn suggest_unsized_bound_if_applicable(
        &self,
        err: &mut DiagnosticBuilder<'tcx>,
        obligation: &PredicateObligation<'tcx>,
    );
    fn is_recursive_obligation(
        &self,
        obligated_types: &mut Vec<&ty::TyS<'tcx>>,
        cause_code: &ObligationCauseCode<'tcx>,
    ) -> bool;
}
impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> {
    // returns if `cond` not occurring implies that `error` does not occur - i.e., that
    // `error` occurring implies that `cond` occurs.
    fn error_implies(&self, cond: ty::Predicate<'tcx>, error: ty::Predicate<'tcx>) -> bool {
        // Identical predicates trivially imply each other.
        if cond == error {
            return true;
        }
        // FIXME: It should be possible to deal with `ForAll` in a cleaner way.
        let bound_error = error.kind();
        // Only trait/trait predicate pairs are compared; anything else is
        // conservatively treated as "does not imply".
        let (cond, error) = match (cond.kind().skip_binder(), bound_error.skip_binder()) {
            (ty::PredicateKind::Trait(..), ty::PredicateKind::Trait(error)) => {
                (cond, bound_error.rebind(error))
            }
            _ => {
                // FIXME: make this work in other cases too.
                return false;
            }
        };
        // Check whether any predicate elaborated from `cond` subsumes `error`.
        for obligation in super::elaborate_predicates(self.tcx, std::iter::once(cond)) {
            let bound_predicate = obligation.predicate.kind();
            if let ty::PredicateKind::Trait(implication) = bound_predicate.skip_binder() {
                let error = error.to_poly_trait_ref();
                let implication = bound_predicate.rebind(implication.trait_ref);
                // FIXME: I'm just not taking associated types at all here.
                // Eventually I'll need to implement param-env-aware
                // `Γ₁ ⊦ φ₁ => Γ₂ ⊦ φ₂` logic.
                let param_env = ty::ParamEnv::empty();
                if self.can_sub(param_env, error, implication).is_ok() {
                    debug!("error_implies: {:?} -> {:?} -> {:?}", cond, error, implication);
                    return true;
                }
            }
        }
        false
    }
    /// Dispatches a single `FulfillmentError` to the reporter matching its
    /// error code (selection, projection, ambiguity, subtype, or const-equate).
    fn report_fulfillment_error(
        &self,
        error: &FulfillmentError<'tcx>,
        body_id: Option<hir::BodyId>,
        fallback_has_occurred: bool,
    ) {
        debug!("report_fulfillment_error({:?})", error);
        match error.code {
            FulfillmentErrorCode::CodeSelectionError(ref selection_error) => {
                self.report_selection_error(
                    error.obligation.clone(),
                    &error.root_obligation,
                    selection_error,
                    fallback_has_occurred,
                    error.points_at_arg_span,
                );
            }
            FulfillmentErrorCode::CodeProjectionError(ref e) => {
                self.report_projection_error(&error.obligation, e);
            }
            FulfillmentErrorCode::CodeAmbiguity => {
                self.maybe_report_ambiguity(&error.obligation, body_id);
            }
            FulfillmentErrorCode::CodeSubtypeError(ref expected_found, ref err) => {
                self.report_mismatched_types(
                    &error.obligation.cause,
                    expected_found.expected,
                    expected_found.found,
                    err.clone(),
                )
                .emit();
            }
            FulfillmentErrorCode::CodeConstEquateError(ref expected_found, ref err) => {
                self.report_mismatched_consts(
                    &error.obligation.cause,
                    expected_found.expected,
                    expected_found.found,
                    err.clone(),
                )
                .emit();
            }
        }
    }
fn report_projection_error(
&self,
obligation: &PredicateObligation<'tcx>,
error: &MismatchedProjectionTypes<'tcx>,
) {
let predicate = self.resolve_vars_if_possible(obligation.predicate);
if predicate.references_error() {
return;
}
self.probe(|_| {
let err_buf;
let mut err = &error.err;
let mut values = None;
// try to find the mismatched types to report the error with.
//
// this can fail if the problem was higher-ranked, in which
// cause I have no idea for a good error message.
let bound_predicate = predicate.kind();
if let ty::PredicateKind::Projection(data) = bound_predicate.skip_binder() {
let mut selcx = SelectionContext::new(self);
let (data, _) = self.replace_bound_vars_with_fresh_vars(
obligation.cause.span,
infer::LateBoundRegionConversionTime::HigherRankedType,
bound_predicate.rebind(data),
);
let mut obligations = vec![];
let normalized_ty = super::normalize_projection_type(
&mut selcx,
obligation.param_env,
data.projection_ty,
obligation.cause.clone(),
0,
&mut obligations,
);
debug!(
"report_projection_error obligation.cause={:?} obligation.param_env={:?}",
obligation.cause, obligation.param_env
);
debug!(
"report_projection_error normalized_ty={:?} data.ty={:?}",
normalized_ty, data.ty
);
let is_normalized_ty_expected = !matches!(
obligation.cause.code.peel_derives(),
ObligationCauseCode::ItemObligation(_)
| ObligationCauseCode::BindingObligation(_, _)
| ObligationCauseCode::ObjectCastObligation(_)
| ObligationCauseCode::OpaqueType
);
if let Err(error) = self.at(&obligation.cause, obligation.param_env).eq_exp(
is_normalized_ty_expected,
normalized_ty,
data.ty,
) {
values = Some(infer::ValuePairs::Types(ExpectedFound::new(
is_normalized_ty_expected,
normalized_ty,
data.ty,
)));
err_buf = error;
err = &err_buf;
}
}
let msg = format!("type mismatch resolving `{}`", predicate);
let error_id = (DiagnosticMessageId::ErrorId(271), Some(obligation.cause.span), msg);
let fresh = self.tcx.sess.one_time_diagnostics.borrow_mut().insert(error_id);
if fresh {
let mut diag = struct_span_err!(
self.tcx.sess,
obligation.cause.span,
E0271,
"type mismatch resolving `{}`",
predicate
);
self.note_type_err(&mut diag, &obligation.cause, None, values, err);
self.note_obligation_cause(&mut diag, obligation);
diag.emit();
}
});
}
fn fuzzy_match_tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
/// returns the fuzzy category of a given type, or None
/// if the type can be equated to any type.
fn type_category(t: Ty<'_>) -> Option<u32> {
match t.kind() {
ty::Bool => Some(0),
ty::Char => Some(1),
ty::Str => Some(2),
ty::Int(..) | ty::Uint(..) | ty::Infer(ty::IntVar(..)) => Some(3),
ty::Float(..) | ty::Infer(ty::FloatVar(..)) => Some(4),
ty::Ref(..) | ty::RawPtr(..) => Some(5),
ty::Array(..) | ty::Slice(..) => Some(6),
ty::FnDef(..) | ty::FnPtr(..) => Some(7),
ty::Dynamic(..) => Some(8),
ty::Closure(..) => Some(9),
ty::Tuple(..) => Some(10),
ty::Projection(..) => Some(11),
ty::Param(..) => Some(12),
ty::Opaque(..) => Some(13),
ty::Never => Some(14),
ty::Adt(adt, ..) => match adt.adt_kind() {
AdtKind::Struct => Some(15),
AdtKind::Union => Some(16),
AdtKind::Enum => Some(17),
},
ty::Generator(..) => Some(18),
ty::Foreign(..) => Some(19),
ty::GeneratorWitness(..) => Some(20),
ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error(_) => None,
}
}
match (type_category(a), type_category(b)) {
(Some(cat_a), Some(cat_b)) => match (a.kind(), b.kind()) {
(&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => def_a == def_b,
_ => cat_a == cat_b,
},
// infer and error can be equated to all types
_ => true,
}
}
fn describe_generator(&self, body_id: hir::BodyId) -> Option<&'static str> {
self.tcx.hir().body(body_id).generator_kind.map(|gen_kind| match gen_kind {
hir::GeneratorKind::Gen => "a generator",
hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block) => "an async block",
hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Fn) => "an async function",
hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Closure) => "an async closure",
})
}
fn find_similar_impl_candidates(
&self,
trait_ref: ty::PolyTraitRef<'tcx>,
) -> Vec<ty::TraitRef<'tcx>> {
let simp = fast_reject::simplify_type(self.tcx, trait_ref.skip_binder().self_ty(), true);
let all_impls = self.tcx.all_impls(trait_ref.def_id());
match simp {
Some(simp) => all_impls
.filter_map(|def_id| {
let imp = self.tcx.impl_trait_ref(def_id).unwrap();
let imp_simp = fast_reject::simplify_type(self.tcx, imp.self_ty(), true);
if let Some(imp_simp) = imp_simp {
if simp != imp_simp {
return None;
}
}
if self.tcx.impl_polarity(def_id) == ty::ImplPolarity::Negative {
return None;
}
Some(imp)
})
.collect(),
None => all_impls
.filter_map(|def_id| {
if self.tcx.impl_polarity(def_id) == ty::ImplPolarity::Negative {
return None;
}
self.tcx.impl_trait_ref(def_id)
})
.collect(),
}
}
fn report_similar_impl_candidates(
&self,
impl_candidates: Vec<ty::TraitRef<'tcx>>,
err: &mut DiagnosticBuilder<'_>,
) {
if impl_candidates.is_empty() {
return;
}
let len = impl_candidates.len();
let end = if impl_candidates.len() <= 5 { impl_candidates.len() } else { 4 };
let normalize = |candidate| {
self.tcx.infer_ctxt().enter(|ref infcx| {
let normalized = infcx
.at(&ObligationCause::dummy(), ty::ParamEnv::empty())
.normalize(candidate)
.ok();
match normalized {
Some(normalized) => format!("\n {}", normalized.value),
None => format!("\n {}", candidate),
}
})
};
// Sort impl candidates so that ordering is consistent for UI tests.
let mut normalized_impl_candidates =
impl_candidates.iter().copied().map(normalize).collect::<Vec<String>>();
// Sort before taking the `..end` range,
// because the ordering of `impl_candidates` may not be deterministic:
// https://github.com/rust-lang/rust/pull/57475#issuecomment-455519507
normalized_impl_candidates.sort();
err.help(&format!(
"the following implementations were found:{}{}",
normalized_impl_candidates[..end].join(""),
if len > 5 { format!("\nand {} others", len - 4) } else { String::new() }
));
}
/// Gets the parent trait chain start
fn get_parent_trait_ref(
&self,
code: &ObligationCauseCode<'tcx>,
) -> Option<(String, Option<Span>)> {
match code {
ObligationCauseCode::BuiltinDerivedObligation(data) => {
let parent_trait_ref = self.resolve_vars_if_possible(data.parent_trait_ref);
match self.get_parent_trait_ref(&data.parent_code) {
Some(t) => Some(t),
None => {
let ty = parent_trait_ref.skip_binder().self_ty();
let span = TyCategory::from_ty(self.tcx, ty)
.map(|(_, def_id)| self.tcx.def_span(def_id));
Some((ty.to_string(), span))
}
}
}
_ => None,
}
}
/// If the `Self` type of the unsatisfied trait `trait_ref` implements a trait
/// with the same path as `trait_ref`, a help message about
/// a probable version mismatch is added to `err`
fn note_version_mismatch(
&self,
err: &mut DiagnosticBuilder<'_>,
trait_ref: &ty::PolyTraitRef<'tcx>,
) {
let get_trait_impl = |trait_def_id| {
self.tcx.find_map_relevant_impl(trait_def_id, trait_ref.skip_binder().self_ty(), Some)
};
let required_trait_path = self.tcx.def_path_str(trait_ref.def_id());
let all_traits = self.tcx.all_traits(());
let traits_with_same_path: std::collections::BTreeSet<_> = all_traits
.iter()
.filter(|trait_def_id| **trait_def_id != trait_ref.def_id())
.filter(|trait_def_id| self.tcx.def_path_str(**trait_def_id) == required_trait_path)
.collect();
for trait_with_same_path in traits_with_same_path {
if let Some(impl_def_id) = get_trait_impl(*trait_with_same_path) {
let impl_span = self.tcx.def_span(impl_def_id);
err.span_help(impl_span, "trait impl with same name found");
let trait_crate = self.tcx.crate_name(trait_with_same_path.krate);
let crate_msg = format!(
"perhaps two different versions of crate `{}` are being used?",
trait_crate
);
err.note(&crate_msg);
}
}
}
fn mk_trait_obligation_with_new_self_ty(
&self,
param_env: ty::ParamEnv<'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>,
new_self_ty: Ty<'tcx>,
) -> PredicateObligation<'tcx> {
assert!(!new_self_ty.has_escaping_bound_vars());
let trait_ref = trait_ref.map_bound_ref(|tr| ty::TraitRef {
substs: self.tcx.mk_substs_trait(new_self_ty, &tr.substs[1..]),
..*tr
});
Obligation::new(
ObligationCause::dummy(),
param_env,
trait_ref.without_const().to_predicate(self.tcx),
)
}
fn maybe_report_ambiguity(
&self,
obligation: &PredicateObligation<'tcx>,
body_id: Option<hir::BodyId>,
) {
// Unable to successfully determine, probably means
// insufficient type information, but could mean
// ambiguous impls. The latter *ought* to be a
// coherence violation, so we don't report it here.
let predicate = self.resolve_vars_if_possible(obligation.predicate);
let span = obligation.cause.span;
debug!(
"maybe_report_ambiguity(predicate={:?}, obligation={:?} body_id={:?}, code={:?})",
predicate, obligation, body_id, obligation.cause.code,
);
// Ambiguity errors are often caused as fallout from earlier
// errors. So just ignore them if this infcx is tainted.
if self.is_tainted_by_errors() {
return;
}
let bound_predicate = predicate.kind();
let mut err = match bound_predicate.skip_binder() {
ty::PredicateKind::Trait(data) => {
let trait_ref = bound_predicate.rebind(data.trait_ref);
debug!("trait_ref {:?}", trait_ref);
if predicate.references_error() {
return;
}
// Typically, this ambiguity should only happen if
// there are unresolved type inference variables
// (otherwise it would suggest a coherence
// failure). But given #21974 that is not necessarily
// the case -- we can have multiple where clauses that
// are only distinguished by a region, which results
// in an ambiguity even when all types are fully
// known, since we don't dispatch based on region
// relationships.
// Pick the first substitution that still contains inference variables as the one
// we're going to emit an error for. If there are none (see above), fall back to
// the substitution for `Self`.
let subst = {
let substs = data.trait_ref.substs;
substs
.iter()
.find(|s| s.has_infer_types_or_consts())
.unwrap_or_else(|| substs[0])
};
// This is kind of a hack: it frequently happens that some earlier
// error prevents types from being fully inferred, and then we get
// a bunch of uninteresting errors saying something like "<generic
// #0> doesn't implement Sized". It may even be true that we
// could just skip over all checks where the self-ty is an
// inference variable, but I was afraid that there might be an
// inference variable created, registered as an obligation, and
// then never forced by writeback, and hence by skipping here we'd
// be ignoring the fact that we don't KNOW the type works
// out. Though even that would probably be harmless, given that
// we're only talking about builtin traits, which are known to be
// inhabited. We used to check for `self.tcx.sess.has_errors()` to
// avoid inundating the user with unnecessary errors, but we now
// check upstream for type errors and don't add the obligations to
// begin with in those cases.
if self.tcx.lang_items().sized_trait() == Some(trait_ref.def_id()) {
self.emit_inference_failure_err(body_id, span, subst, vec![], ErrorCode::E0282)
.emit();
return;
}
let impl_candidates = self.find_similar_impl_candidates(trait_ref);
let mut err = self.emit_inference_failure_err(
body_id,
span,
subst,
impl_candidates,
ErrorCode::E0283,
);
err.note(&format!("cannot satisfy `{}`", predicate));
if let ObligationCauseCode::ItemObligation(def_id) = obligation.cause.code {
self.suggest_fully_qualified_path(&mut err, def_id, span, trait_ref.def_id());
} else if let (
Ok(ref snippet),
ObligationCauseCode::BindingObligation(ref def_id, _),
) =
(self.tcx.sess.source_map().span_to_snippet(span), &obligation.cause.code)
{
let generics = self.tcx.generics_of(*def_id);
if generics.params.iter().any(|p| p.name != kw::SelfUpper)
&& !snippet.ends_with('>')
&& !generics.has_impl_trait()
&& !self.tcx.fn_trait_kind_from_lang_item(*def_id).is_some()
{
// FIXME: To avoid spurious suggestions in functions where type arguments
// where already supplied, we check the snippet to make sure it doesn't
// end with a turbofish. Ideally we would have access to a `PathSegment`
// instead. Otherwise we would produce the following output:
//
// error[E0283]: type annotations needed
// --> $DIR/issue-54954.rs:3:24
// |
// LL | const ARR_LEN: usize = Tt::const_val::<[i8; 123]>();
// | ^^^^^^^^^^^^^^^^^^^^^^^^^^
// | |
// | cannot infer type
// | help: consider specifying the type argument
// | in the function call:
// | `Tt::const_val::<[i8; 123]>::<T>`
// ...
// LL | const fn const_val<T: Sized>() -> usize {
// | - required by this bound in `Tt::const_val`
// |
// = note: cannot satisfy `_: Tt`
err.span_suggestion_verbose(
span.shrink_to_hi(),
&format!(
"consider specifying the type argument{} in the function call",
pluralize!(generics.params.len()),
),
format!(
"::<{}>",
generics
.params
.iter()
.map(|p| p.name.to_string())
.collect::<Vec<String>>()
.join(", ")
),
Applicability::HasPlaceholders,
);
}
}
err
}
ty::PredicateKind::WellFormed(arg) => {
// Same hacky approach as above to avoid deluging user
// with error messages.
if arg.references_error() || self.tcx.sess.has_errors() {
return;
}
self.emit_inference_failure_err(body_id, span, arg, vec![], ErrorCode::E0282)
}
ty::PredicateKind::Subtype(data) => {
if data.references_error() || self.tcx.sess.has_errors() {
// no need to overload user in such cases
return;
}
let SubtypePredicate { a_is_expected: _, a, b } = data;
// both must be type variables, or the other would've been instantiated
assert!(a.is_ty_var() && b.is_ty_var());
self.emit_inference_failure_err(body_id, span, a.into(), vec![], ErrorCode::E0282)
}
ty::PredicateKind::Projection(data) => {
let self_ty = data.projection_ty.self_ty();
let ty = data.ty;
if predicate.references_error() {
return;
}
if self_ty.needs_infer() && ty.needs_infer() {
// We do this for the `foo.collect()?` case to produce a suggestion.
let mut err = self.emit_inference_failure_err(
body_id,
span,
self_ty.into(),
vec![],
ErrorCode::E0284,
);
err.note(&format!("cannot satisfy `{}`", predicate));
err
} else {
let mut err = struct_span_err!(
self.tcx.sess,
span,
E0284,
"type annotations needed: cannot satisfy `{}`",
predicate,
);
err.span_label(span, &format!("cannot satisfy `{}`", predicate));
err
}
}
_ => {
if self.tcx.sess.has_errors() {
return;
}
let mut err = struct_span_err!(
self.tcx.sess,
span,
E0284,
"type annotations needed: cannot satisfy `{}`",
predicate,
);
err.span_label(span, &format!("cannot satisfy `{}`", predicate));
err
}
};
self.note_obligation_cause(&mut err, obligation);
err.emit();
}
/// Returns `true` if the trait predicate may apply for *some* assignment
/// to the type parameters.
fn predicate_can_apply(
&self,
param_env: ty::ParamEnv<'tcx>,
pred: ty::PolyTraitRef<'tcx>,
) -> bool {
struct ParamToVarFolder<'a, 'tcx> {
infcx: &'a InferCtxt<'a, 'tcx>,
var_map: FxHashMap<Ty<'tcx>, Ty<'tcx>>,
}
impl<'a, 'tcx> TypeFolder<'tcx> for ParamToVarFolder<'a, 'tcx> {
fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
if let ty::Param(ty::ParamTy { name, .. }) = *ty.kind() {
let infcx = self.infcx;
self.var_map.entry(ty).or_insert_with(|| {
infcx.next_ty_var(TypeVariableOrigin {
kind: TypeVariableOriginKind::TypeParameterDefinition(name, None),
span: DUMMY_SP,
})
})
} else {
ty.super_fold_with(self)
}
}
}
self.probe(|_| {
let mut selcx = SelectionContext::new(self);
let cleaned_pred =
pred.fold_with(&mut ParamToVarFolder { infcx: self, var_map: Default::default() });
let cleaned_pred = super::project::normalize(
&mut selcx,
param_env,
ObligationCause::dummy(),
cleaned_pred,
)
.value;
let obligation = Obligation::new(
ObligationCause::dummy(),
param_env,
cleaned_pred.without_const().to_predicate(selcx.tcx()),
);
self.predicate_may_hold(&obligation)
})
}
fn note_obligation_cause(
&self,
err: &mut DiagnosticBuilder<'tcx>,
obligation: &PredicateObligation<'tcx>,
) {
// First, attempt to add note to this error with an async-await-specific
// message, and fall back to regular note otherwise.
if !self.maybe_note_obligation_cause_for_async_await(err, obligation) {
self.note_obligation_cause_code(
err,
&obligation.predicate,
&obligation.cause.code,
&mut vec![],
&mut Default::default(),
);
self.suggest_unsized_bound_if_applicable(err, obligation);
}
}
fn suggest_unsized_bound_if_applicable(
&self,
err: &mut DiagnosticBuilder<'tcx>,
obligation: &PredicateObligation<'tcx>,
) {
let (pred, item_def_id, span) =
match (obligation.predicate.kind().skip_binder(), obligation.cause.code.peel_derives())
{
(
ty::PredicateKind::Trait(pred),
&ObligationCauseCode::BindingObligation(item_def_id, span),
) => (pred, item_def_id, span),
_ => return,
};
let node = match (
self.tcx.hir().get_if_local(item_def_id),
Some(pred.def_id()) == self.tcx.lang_items().sized_trait(),
) {
(Some(node), true) => node,
_ => return,
};
let generics = match node.generics() {
Some(generics) => generics,
None => return,
};
for param in generics.params {
if param.span != span
|| param.bounds.iter().any(|bound| {
bound.trait_ref().and_then(|trait_ref| trait_ref.trait_def_id())
== self.tcx.lang_items().sized_trait()
})
{
continue;
}
match node {
hir::Node::Item(
item
@
hir::Item {
kind:
hir::ItemKind::Enum(..)
| hir::ItemKind::Struct(..)
| hir::ItemKind::Union(..),
..
},
) => {
// Suggesting `T: ?Sized` is only valid in an ADT if `T` is only used in a
// borrow. `struct S<'a, T: ?Sized>(&'a T);` is valid, `struct S<T: ?Sized>(T);`
// is not.
let mut visitor = FindTypeParam {
param: param.name.ident().name,
invalid_spans: vec![],
nested: false,
};
visitor.visit_item(item);
if !visitor.invalid_spans.is_empty() {
let mut multispan: MultiSpan = param.span.into();
multispan.push_span_label(
param.span,
format!("this could be changed to `{}: ?Sized`...", param.name.ident()),
);
for sp in visitor.invalid_spans {
multispan.push_span_label(
sp,
format!(
"...if indirection were used here: `Box<{}>`",
param.name.ident(),
),
);
}
err.span_help(
multispan,
&format!(
"you could relax the implicit `Sized` bound on `{T}` if it were \
used through indirection like `&{T}` or `Box<{T}>`",
T = param.name.ident(),
),
);
return;
}
}
_ => {}
}
let (span, separator) = match param.bounds {
[] => (span.shrink_to_hi(), ":"),
[.., bound] => (bound.span().shrink_to_hi(), " +"),
};
err.span_suggestion_verbose(
span,
"consider relaxing the implicit `Sized` restriction",
format!("{} ?Sized", separator),
Applicability::MachineApplicable,
);
return;
}
}
fn is_recursive_obligation(
&self,
obligated_types: &mut Vec<&ty::TyS<'tcx>>,
cause_code: &ObligationCauseCode<'tcx>,
) -> bool {
if let ObligationCauseCode::BuiltinDerivedObligation(ref data) = cause_code {
let parent_trait_ref = self.resolve_vars_if_possible(data.parent_trait_ref);
if obligated_types.iter().any(|ot| ot == &parent_trait_ref.skip_binder().self_ty()) {
return true;
}
}
false
}
}
/// Look for type `param` in an ADT being used only through a reference to confirm that suggesting
/// `param: ?Sized` would be a valid constraint.
struct FindTypeParam {
param: rustc_span::Symbol,
invalid_spans: Vec<Span>,
nested: bool,
}
impl<'v> Visitor<'v> for FindTypeParam {
type Map = rustc_hir::intravisit::ErasedMap<'v>;
fn nested_visit_map(&mut self) -> hir::intravisit::NestedVisitorMap<Self::Map> {
hir::intravisit::NestedVisitorMap::None
}
fn visit_where_predicate(&mut self, _: &'v hir::WherePredicate<'v>) {
// Skip where-clauses, to avoid suggesting indirection for type parameters found there.
}
fn visit_ty(&mut self, ty: &hir::Ty<'_>) {
// We collect the spans of all uses of the "bare" type param, like in `field: T` or
// `field: (T, T)` where we could make `T: ?Sized` while skipping cases that are known to be
// valid like `field: &'a T` or `field: *mut T` and cases that *might* have further `Sized`
// obligations like `Box<T>` and `Vec<T>`, but we perform no extra analysis for those cases
// and suggest `T: ?Sized` regardless of their obligations. This is fine because the errors
// in that case should make what happened clear enough.
match ty.kind {
hir::TyKind::Ptr(_) | hir::TyKind::Rptr(..) | hir::TyKind::TraitObject(..) => {}
hir::TyKind::Path(hir::QPath::Resolved(None, path))
if path.segments.len() == 1 && path.segments[0].ident.name == self.param =>
{
if !self.nested {
self.invalid_spans.push(ty.span);
}
}
hir::TyKind::Path(_) => {
let prev = self.nested;
self.nested = true;
hir::intravisit::walk_ty(self, ty);
self.nested = prev;
}
_ => {
hir::intravisit::walk_ty(self, ty);
}
}
}
}
pub fn recursive_type_with_infinite_size_error(
tcx: TyCtxt<'tcx>,
type_def_id: DefId,
spans: Vec<Span>,
) {
assert!(type_def_id.is_local());
let span = tcx.hir().span_if_local(type_def_id).unwrap();
let span = tcx.sess.source_map().guess_head_span(span);
let path = tcx.def_path_str(type_def_id);
let mut err =
struct_span_err!(tcx.sess, span, E0072, "recursive type `{}` has infinite size", path);
err.span_label(span, "recursive type has infinite size");
for &span in &spans {
err.span_label(span, "recursive without indirection");
}
let msg = format!(
"insert some indirection (e.g., a `Box`, `Rc`, or `&`) to make `{}` representable",
path,
);
if spans.len() <= 4 {
err.multipart_suggestion(
&msg,
spans
.iter()
.flat_map(|&span| {
vec![
(span.shrink_to_lo(), "Box<".to_string()),
(span.shrink_to_hi(), ">".to_string()),
]
.into_iter()
})
.collect(),
Applicability::HasPlaceholders,
);
} else {
err.help(&msg);
}
err.emit();
}
/// Summarizes information
#[derive(Clone)]
pub enum ArgKind {
/// An argument of non-tuple type. Parameters are (name, ty)
Arg(String, String),
/// An argument of tuple type. For a "found" argument, the span is
/// the location in the source of the pattern. For an "expected"
/// argument, it will be None. The vector is a list of (name, ty)
/// strings for the components of the tuple.
Tuple(Option<Span>, Vec<(String, String)>),
}
impl ArgKind {
fn empty() -> ArgKind {
ArgKind::Arg("_".to_owned(), "_".to_owned())
}
/// Creates an `ArgKind` from the expected type of an
/// argument. It has no name (`_`) and an optional source span.
pub fn from_expected_ty(t: Ty<'_>, span: Option<Span>) -> ArgKind {
match t.kind() {
ty::Tuple(tys) => ArgKind::Tuple(
span,
tys.iter().map(|ty| ("_".to_owned(), ty.to_string())).collect::<Vec<_>>(),
),
_ => ArgKind::Arg("_".to_owned(), t.to_string()),
}
}
}
| 43.030958 | 109 | 0.459266 |
91c50610e7c0759193d53aaee0b32397fbb7ae3a | 1,447 |
pub struct IconError {
props: crate::Props,
}
impl yew::Component for IconError {
type Properties = crate::Props;
type Message = ();
fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
{
Self { props }
}
fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
{
true
}
fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
{
false
}
fn view(&self) -> yew::prelude::Html
{
yew::prelude::html! {
<svg
class=self.props.class.unwrap_or("")
width=self.props.size.unwrap_or(24).to_string()
height=self.props.size.unwrap_or(24).to_string()
viewBox="0 0 24 24"
fill=self.props.fill.unwrap_or("none")
stroke=self.props.color.unwrap_or("currentColor")
stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
>
<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 11c-.55 0-1-.45-1-1V8c0-.55.45-1 1-1s1 .45 1 1v4c0 .55-.45 1-1 1zm1 4h-2v-2h2v2z"/></svg>
</svg>
}
}
}
| 31.456522 | 262 | 0.568072 |
5618cabac460d47d0bdeadcdf3dfb446ce9a9868 | 7,866 | use crate::layout::Widget;
use crate::{
layout, text, EventCtx, GfxCtx, Line, MultiKey, ScreenDims, ScreenPt, ScreenRectangle, Text,
};
pub struct ModalMenu {
title: String,
info: Text,
chosen_action: Option<String>,
choices: Vec<Choice>,
// This can be inactive entries too.
hovering_idx: Option<usize>,
standalone_layout: Option<layout::ContainerOrientation>,
top_left: ScreenPt,
dims: ScreenDims,
}
struct Choice {
hotkey: Option<MultiKey>,
label: String,
active: bool,
}
impl ModalMenu {
pub fn new<S1: Into<String>, S2: Into<String>>(
title: S1,
raw_choices: Vec<(Option<MultiKey>, S2)>,
ctx: &EventCtx,
) -> ModalMenu {
let mut m = ModalMenu {
title: title.into(),
info: Text::new().with_bg(),
chosen_action: None,
choices: raw_choices
.into_iter()
.map(|(hotkey, label)| Choice {
hotkey,
label: label.into(),
active: false,
})
.collect(),
hovering_idx: None,
standalone_layout: Some(layout::ContainerOrientation::TopRight),
top_left: ScreenPt::new(0.0, 0.0),
dims: ScreenDims::new(0.0, 0.0),
};
m.recalculate_dims(ctx);
m
}
// It's part of something bigger
pub fn disable_standalone_layout(mut self) -> ModalMenu {
assert!(self.standalone_layout.is_some());
self.standalone_layout = None;
self
}
pub fn set_standalone_layout(mut self, layout: layout::ContainerOrientation) -> ModalMenu {
self.standalone_layout = Some(layout);
self
}
pub fn set_info(&mut self, ctx: &EventCtx, info: Text) {
self.info = info.with_bg();
self.recalculate_dims(ctx);
}
pub fn event(&mut self, ctx: &mut EventCtx) {
if let Some(ref action) = self.chosen_action {
panic!("Caller didn't consume modal action '{}'", action);
}
if let Some(o) = self.standalone_layout {
layout::stack_vertically(o, ctx, vec![self]);
self.recalculate_dims(ctx);
}
// Handle the mouse
if ctx.redo_mouseover() {
self.hovering_idx = None;
if let Some(cursor) = ctx.canvas.get_cursor_in_screen_space() {
let mut top_left = self.top_left;
top_left.y += ctx.text_dims(&self.info).height;
if !self.title.is_empty() {
top_left.y += ctx.default_line_height();
}
for idx in 0..self.choices.len() {
let rect = ScreenRectangle {
x1: top_left.x,
y1: top_left.y,
x2: top_left.x + self.dims.width,
y2: top_left.y + ctx.default_line_height(),
};
if rect.contains(cursor) {
self.hovering_idx = Some(idx);
break;
}
top_left.y += ctx.default_line_height();
}
}
}
if let Some(idx) = self.hovering_idx {
if ctx.normal_left_click() && self.choices[idx].active {
self.chosen_action = Some(self.choices[idx].label.clone());
}
}
// Handle hotkeys
for choice in &self.choices {
if !choice.active {
continue;
}
if let Some(hotkey) = choice.hotkey {
if ctx.input.new_was_pressed(hotkey) {
self.chosen_action = Some(choice.label.clone());
break;
}
}
}
// Reset for next round
for choice in self.choices.iter_mut() {
choice.active = false;
}
}
pub fn push_action(&mut self, hotkey: Option<MultiKey>, label: &str, ctx: &EventCtx) {
self.choices.push(Choice {
hotkey,
label: label.to_string(),
active: false,
});
self.recalculate_dims(ctx);
}
pub fn remove_action(&mut self, label: &str, ctx: &EventCtx) {
self.choices.retain(|c| c.label != label);
self.recalculate_dims(ctx);
}
pub fn change_action(&mut self, old_label: &str, new_label: &str, ctx: &EventCtx) {
for c in self.choices.iter_mut() {
if c.label == old_label {
c.label = new_label.to_string();
self.recalculate_dims(ctx);
return;
}
}
panic!("Menu doesn't have {}", old_label);
}
pub fn maybe_change_action(&mut self, old_label: &str, new_label: &str, ctx: &EventCtx) {
for c in self.choices.iter_mut() {
if c.label == old_label {
c.label = new_label.to_string();
self.recalculate_dims(ctx);
return;
}
}
// Don't panic
}
pub fn swap_action(&mut self, old_label: &str, new_label: &str, ctx: &EventCtx) -> bool {
if self.action(old_label) {
self.change_action(old_label, new_label, ctx);
true
} else {
false
}
}
pub fn consume_action(&mut self, name: &str, ctx: &EventCtx) -> bool {
if self.action(name) {
self.remove_action(name, ctx);
true
} else {
false
}
}
pub fn action(&mut self, label: &str) -> bool {
if let Some(ref action) = self.chosen_action {
if label == action {
self.chosen_action = None;
return true;
}
return false;
}
for c in self.choices.iter_mut() {
if c.label == label {
c.active = true;
return false;
}
}
panic!("Menu doesn't have action {}", label);
}
pub fn draw(&self, g: &mut GfxCtx) {
g.draw_blocking_text_at_screenspace_topleft(&self.calculate_txt(), self.top_left);
}
fn recalculate_dims(&mut self, ctx: &EventCtx) {
self.dims = ctx.text_dims(&self.calculate_txt());
}
fn calculate_txt(&self) -> Text {
let mut txt = if self.title.is_empty() {
Text::new().with_bg()
} else {
Text::prompt(&self.title)
};
txt.extend(&self.info);
for (idx, choice) in self.choices.iter().enumerate() {
if choice.active {
if let Some(key) = choice.hotkey {
txt.add_appended(vec![
Line(key.describe()).fg(text::HOTKEY_COLOR),
Line(format!(" - {}", choice.label)),
]);
} else {
txt.add(Line(&choice.label));
}
// TODO BG color should be on the TextSpan, so this isn't so terrible?
if Some(idx) == self.hovering_idx {
txt.highlight_last_line(text::SELECTED_COLOR);
}
} else {
if let Some(key) = choice.hotkey {
txt.add(
Line(format!("{} - {}", key.describe(), choice.label))
.fg(text::INACTIVE_CHOICE_COLOR),
);
} else {
txt.add(Line(&choice.label).fg(text::INACTIVE_CHOICE_COLOR));
}
}
}
txt
}
}
impl Widget for ModalMenu {
fn get_dims(&self) -> ScreenDims {
ScreenDims::new(self.dims.width, self.dims.height)
}
fn set_pos(&mut self, top_left: ScreenPt) {
self.top_left = top_left;
}
}
| 30.607004 | 96 | 0.496059 |
4b57ae81db08d9939e632ce1ad9351199a1a1841 | 9,513 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use common::{Location, WithLocation};
use graphql_ir::{
FragmentSpread, InlineFragment, LinkedField, Program, ScalarField, Selection, Transformed,
TransformedValue, Transformer,
};
use interner::{Intern, StringKey};
use schema::{FieldID, InterfaceID, ObjectID, Type};
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::sync::Arc;
/// A transform that adds an `id` field on any type that has an id field but
/// where there is no unaliased `id` selection.
pub fn generate_id_field<'s>(program: &Program<'s>) -> Program<'s> {
let mut transform = GenerateIDFieldTransform::new(program);
transform
.transform_program(program)
.replace_or_else(|| program.clone())
}
struct GenerateIDFieldTransform<'s> {
program: &'s Program<'s>,
id_name: StringKey,
node_interface: Option<NodeInterface>,
cache: HashMap<Type, Option<FieldID>>,
}
/// If the schema defines a `Node` interface, this contains information on that interface.
struct NodeInterface {
id: InterfaceID,
id_field: FieldID,
}
impl<'s> Transformer for GenerateIDFieldTransform<'s> {
    const NAME: &'static str = "GenerateIDFieldTransform";
    // This transform never rewrites arguments or directives, so skip
    // traversing them.
    const VISIT_ARGUMENTS: bool = false;
    const VISIT_DIRECTIVES: bool = false;
    /// Ensures linked fields fetch an `id` where possible:
    /// - objects and interfaces that expose a selectable `id` field get a
    ///   plain `id` selection appended;
    /// - interfaces without one, and unions, get per-concrete-type inline
    ///   fragments (see `get_selections_with_inline_id_fragments`).
    /// Fields already selecting an unaliased `id` are left untouched.
    fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed<Selection> {
        // Recurse into child selections first (depth-first traversal).
        let selections = self.transform_selections(&field.selections);
        // Note: the "already has id" check runs against the field's
        // *original* selection list.
        let next_selections = if self.has_unaliased_id_field(&field.selections) {
            selections
        } else {
            let schema = self.program.schema();
            // Inner (unwrapped) type of the field this selection targets.
            let type_ = self
                .program
                .schema()
                .field(field.definition.item)
                .type_
                .inner();
            match type_ {
                Type::Object(id) => {
                    let object = schema.object(id);
                    // Objects: append a direct `id` selection when the type
                    // has an id field; otherwise keep selections as-is.
                    if let Some(id_field_id) = self.get_id_field_id(type_, &object.fields) {
                        let mut next_selections =
                            selections.replace_or_else(|| field.selections.clone());
                        next_selections
                            .push(self.create_id_selection(field.definition.location, id_field_id));
                        TransformedValue::Replace(next_selections)
                    } else {
                        selections
                    }
                }
                Type::Interface(id) => {
                    let interface = schema.interface(id);
                    // Interfaces: prefer an `id` declared on the interface
                    // itself; otherwise fall back to inline fragments over
                    // the implementing concrete types.
                    if let Some(id_field_id) = self.get_id_field_id(type_, &interface.fields) {
                        let mut next_selections =
                            selections.replace_or_else(|| field.selections.clone());
                        next_selections
                            .push(self.create_id_selection(field.definition.location, id_field_id));
                        TransformedValue::Replace(next_selections)
                    } else {
                        self.get_selections_with_inline_id_fragments(
                            field,
                            selections,
                            &interface.implementors,
                        )
                    }
                }
                Type::Union(id) => {
                    // Unions have no fields of their own, so always go
                    // through the per-member inline-fragment path.
                    let union = schema.union(id);
                    self.get_selections_with_inline_id_fragments(field, selections, &union.members)
                }
                // Scalars/enums/etc.: nothing to add.
                _ => selections,
            }
        };
        // Rebuild the linked field only if anything actually changed.
        match next_selections {
            TransformedValue::Keep => Transformed::Keep,
            TransformedValue::Replace(selections) => {
                Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField {
                    alias: field.alias,
                    definition: field.definition,
                    arguments: field.arguments.clone(),
                    directives: field.directives.clone(),
                    selections,
                })))
            }
        }
    }
    // Fragment spreads and scalar fields are never modified by this pass.
    fn transform_fragment_spread(&mut self, _spread: &FragmentSpread) -> Transformed<Selection> {
        Transformed::Keep
    }
    fn transform_scalar_field(&mut self, _field: &ScalarField) -> Transformed<Selection> {
        Transformed::Keep
    }
}
impl<'s> GenerateIDFieldTransform<'s> {
fn new(program: &'s Program<'s>) -> Self {
let id_name = "id".intern();
let schema = program.schema();
let node_interface = match schema.get_type("Node".intern()) {
Some(Type::Interface(node_interface_id)) => {
let node_interface = schema.interface(node_interface_id);
let id_field = *node_interface
.fields
.iter()
.find(|&&id| schema.field(id).name == id_name)
.expect("Expected `Node` to contain a field named `id`.");
Some(NodeInterface {
id: node_interface_id,
id_field,
})
}
_ => None,
};
Self {
program,
id_name,
node_interface,
cache: Default::default(),
}
}
fn has_unaliased_id_field(&self, selections: &[Selection]) -> bool {
selections.iter().any(|x| match x {
Selection::ScalarField(child) => {
child.alias.is_none()
&& self.program.schema().field(child.definition.item).name == self.id_name
}
_ => false,
})
}
fn get_id_field_id(&mut self, type_: Type, fields: &[FieldID]) -> Option<FieldID> {
match self.cache.entry(type_) {
Entry::Occupied(e) => *e.get(),
Entry::Vacant(e) => {
for id in fields {
let field = self.program.schema().field(*id);
if field.name == self.id_name
&& self.program.schema().is_id(field.type_.inner())
{
let result = Some(*id);
e.insert(result);
return result;
}
}
e.insert(None);
None
}
}
}
/// For interfaces and unions: generate a `... on Node { id }`
/// fragment if *any* concrete type implements Node. Then generate a
/// `... on PossibleType { id }` for every concrete type that does *not*
/// implement `Node`
fn get_selections_with_inline_id_fragments(
&mut self,
field: &LinkedField,
selections: TransformedValue<Vec<Selection>>,
concrete_ids: &[ObjectID],
) -> TransformedValue<Vec<Selection>> {
let mut next_selections = vec![];
let mut should_generate_node = false;
for object_id in concrete_ids {
let object = self.program.schema().object(*object_id);
let implements_node = if let Some(ref node_interface) = self.node_interface {
object
.interfaces
.iter()
.any(|&interface_id| interface_id == node_interface.id)
} else {
false
};
if implements_node {
should_generate_node = true;
} else if let Some(id_field_id) =
self.get_id_field_id(Type::Object(*object_id), &object.fields)
{
next_selections.push(Selection::InlineFragment(self.create_inline_id_fragment(
field.definition.location,
Type::Object(*object_id),
id_field_id,
)));
}
}
if next_selections.is_empty() && !should_generate_node {
return selections;
}
let mut result = if let TransformedValue::Replace(selections) = selections {
selections
} else {
field.selections.clone()
};
if should_generate_node {
// This should not happen because we can only set
// `should_generate_node` to true, if this is Some.
let node_interface = self.node_interface.as_ref().unwrap();
result.push(Selection::InlineFragment(self.create_inline_id_fragment(
field.definition.location,
Type::Interface(node_interface.id),
node_interface.id_field,
)));
}
result.extend(next_selections.into_iter());
TransformedValue::Replace(result)
}
fn create_id_selection(&self, location: Location, id_field_id: FieldID) -> Selection {
Selection::ScalarField(Arc::new(ScalarField {
alias: None,
definition: WithLocation::new(location, id_field_id),
arguments: Default::default(),
directives: Default::default(),
}))
}
fn create_inline_id_fragment(
&self,
location: Location,
type_: Type,
id_field_id: FieldID,
) -> Arc<InlineFragment> {
Arc::new(InlineFragment {
type_condition: Some(type_),
directives: Default::default(),
selections: vec![self.create_id_selection(location, id_field_id)],
})
}
}
#![doc = "generated by AutoRust"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::models;
/// Service client: endpoint + credential + pipeline shared by all
/// operation-group sub-clients.
#[derive(Clone)]
pub struct Client {
    /// Base URL requests are issued against.
    endpoint: String,
    /// Credential used to obtain bearer tokens per request.
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    /// OAuth scopes requested when fetching tokens.
    scopes: Vec<String>,
    /// HTTP pipeline (policies + transport) used by `send`.
    pipeline: azure_core::Pipeline,
}
/// Builder for [`Client`]; endpoint and scopes are optional and defaulted
/// in `build`.
#[derive(Clone)]
pub struct ClientBuilder {
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    /// Overrides `DEFAULT_ENDPOINT` when set.
    endpoint: Option<String>,
    /// Overrides the default `{endpoint}/` scope when set.
    scopes: Option<Vec<String>>,
}
/// Endpoint used when none is configured: the Azure public-cloud Resource Manager.
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
    /// Starts a builder that authenticates with `credential`; endpoint and
    /// scopes remain unset until overridden or defaulted by [`Self::build`].
    pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
        Self {
            credential,
            endpoint: None,
            scopes: None,
        }
    }
    /// Overrides the service endpoint.
    pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.endpoint = Some(endpoint.into());
        self
    }
    /// Overrides the OAuth scopes requested for bearer tokens.
    pub fn scopes(mut self, scopes: &[&str]) -> Self {
        let owned: Vec<String> = scopes.iter().map(|&scope| scope.to_owned()).collect();
        self.scopes = Some(owned);
        self
    }
    /// Finalizes the builder into a [`Client`], falling back to
    /// [`DEFAULT_ENDPOINT`] and a single `{endpoint}/` scope when unset.
    pub fn build(self) -> Client {
        let endpoint = match self.endpoint {
            Some(endpoint) => endpoint,
            None => DEFAULT_ENDPOINT.to_owned(),
        };
        let scopes = match self.scopes {
            Some(scopes) => scopes,
            None => vec![format!("{}/", endpoint)],
        };
        Client::new(endpoint, self.credential, scopes)
    }
}
impl Client {
    /// Returns the configured service endpoint.
    pub(crate) fn endpoint(&self) -> &str {
        self.endpoint.as_str()
    }
    /// Returns the credential used to fetch bearer tokens.
    pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
        self.credential.as_ref()
    }
    /// Returns the OAuth scopes as borrowed string slices.
    pub(crate) fn scopes(&self) -> Vec<&str> {
        self.scopes.iter().map(String::as_str).collect()
    }
    /// Sends `request` through the client's pipeline using a fresh
    /// default context.
    pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> {
        let mut context = azure_core::Context::default();
        let mut request = request.into();
        self.pipeline.send(&mut context, &mut request).await
    }
    /// Creates a client with explicit endpoint, credential and scopes,
    /// building a pipeline with default client options and no extra
    /// per-call/per-retry policies.
    pub fn new(
        endpoint: impl Into<String>,
        credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
        scopes: Vec<String>,
    ) -> Self {
        let endpoint = endpoint.into();
        let pipeline = azure_core::Pipeline::new(
            option_env!("CARGO_PKG_NAME"),
            option_env!("CARGO_PKG_VERSION"),
            azure_core::ClientOptions::default(),
            Vec::new(),
            Vec::new(),
        );
        Self {
            endpoint,
            credential,
            scopes,
            pipeline,
        }
    }
    // Operation-group accessors: each returns a sub-client wrapping a
    // clone of this client.
    pub fn addons(&self) -> addons::Client {
        addons::Client(self.clone())
    }
    pub fn alerts(&self) -> alerts::Client {
        alerts::Client(self.clone())
    }
    pub fn available_skus(&self) -> available_skus::Client {
        available_skus::Client(self.clone())
    }
    pub fn bandwidth_schedules(&self) -> bandwidth_schedules::Client {
        bandwidth_schedules::Client(self.clone())
    }
    pub fn containers(&self) -> containers::Client {
        containers::Client(self.clone())
    }
    pub fn devices(&self) -> devices::Client {
        devices::Client(self.clone())
    }
    pub fn diagnostic_settings(&self) -> diagnostic_settings::Client {
        diagnostic_settings::Client(self.clone())
    }
    pub fn jobs(&self) -> jobs::Client {
        jobs::Client(self.clone())
    }
    pub fn monitoring_config(&self) -> monitoring_config::Client {
        monitoring_config::Client(self.clone())
    }
    pub fn nodes(&self) -> nodes::Client {
        nodes::Client(self.clone())
    }
    pub fn operations(&self) -> operations::Client {
        operations::Client(self.clone())
    }
    pub fn operations_status(&self) -> operations_status::Client {
        operations_status::Client(self.clone())
    }
    pub fn orders(&self) -> orders::Client {
        orders::Client(self.clone())
    }
    pub fn roles(&self) -> roles::Client {
        roles::Client(self.clone())
    }
    pub fn shares(&self) -> shares::Client {
        shares::Client(self.clone())
    }
    pub fn storage_account_credentials(&self) -> storage_account_credentials::Client {
        storage_account_credentials::Client(self.clone())
    }
    pub fn storage_accounts(&self) -> storage_accounts::Client {
        storage_accounts::Client(self.clone())
    }
    pub fn support_packages(&self) -> support_packages::Client {
        support_packages::Client(self.clone())
    }
    pub fn triggers(&self) -> triggers::Client {
        triggers::Client(self.clone())
    }
    pub fn users(&self) -> users::Client {
        users::Client(self.clone())
    }
}
/// Aggregate error type covering every operation exposed by this client.
/// Each variant transparently wraps the error type of one operation module
/// and converts from it via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    AvailableSkus_List(#[from] available_skus::list::Error),
    #[error(transparent)]
    Devices_ListBySubscription(#[from] devices::list_by_subscription::Error),
    #[error(transparent)]
    Devices_ListByResourceGroup(#[from] devices::list_by_resource_group::Error),
    #[error(transparent)]
    Devices_Get(#[from] devices::get::Error),
    #[error(transparent)]
    Devices_CreateOrUpdate(#[from] devices::create_or_update::Error),
    #[error(transparent)]
    Devices_Update(#[from] devices::update::Error),
    #[error(transparent)]
    Devices_Delete(#[from] devices::delete::Error),
    #[error(transparent)]
    Alerts_ListByDataBoxEdgeDevice(#[from] alerts::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    Alerts_Get(#[from] alerts::get::Error),
    #[error(transparent)]
    BandwidthSchedules_ListByDataBoxEdgeDevice(#[from] bandwidth_schedules::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    BandwidthSchedules_Get(#[from] bandwidth_schedules::get::Error),
    #[error(transparent)]
    BandwidthSchedules_CreateOrUpdate(#[from] bandwidth_schedules::create_or_update::Error),
    #[error(transparent)]
    BandwidthSchedules_Delete(#[from] bandwidth_schedules::delete::Error),
    #[error(transparent)]
    DiagnosticSettings_GetDiagnosticProactiveLogCollectionSettings(
        #[from] diagnostic_settings::get_diagnostic_proactive_log_collection_settings::Error,
    ),
    #[error(transparent)]
    DiagnosticSettings_UpdateDiagnosticProactiveLogCollectionSettings(
        #[from] diagnostic_settings::update_diagnostic_proactive_log_collection_settings::Error,
    ),
    #[error(transparent)]
    DiagnosticSettings_GetDiagnosticRemoteSupportSettings(#[from] diagnostic_settings::get_diagnostic_remote_support_settings::Error),
    #[error(transparent)]
    DiagnosticSettings_UpdateDiagnosticRemoteSupportSettings(#[from] diagnostic_settings::update_diagnostic_remote_support_settings::Error),
    #[error(transparent)]
    Devices_DownloadUpdates(#[from] devices::download_updates::Error),
    #[error(transparent)]
    Devices_GenerateCertificate(#[from] devices::generate_certificate::Error),
    #[error(transparent)]
    Devices_GetExtendedInformation(#[from] devices::get_extended_information::Error),
    #[error(transparent)]
    Devices_InstallUpdates(#[from] devices::install_updates::Error),
    #[error(transparent)]
    Jobs_Get(#[from] jobs::get::Error),
    #[error(transparent)]
    Devices_GetNetworkSettings(#[from] devices::get_network_settings::Error),
    #[error(transparent)]
    Nodes_ListByDataBoxEdgeDevice(#[from] nodes::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    OperationsStatus_Get(#[from] operations_status::get::Error),
    #[error(transparent)]
    Orders_ListByDataBoxEdgeDevice(#[from] orders::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    Orders_Get(#[from] orders::get::Error),
    #[error(transparent)]
    Orders_CreateOrUpdate(#[from] orders::create_or_update::Error),
    #[error(transparent)]
    Orders_Delete(#[from] orders::delete::Error),
    #[error(transparent)]
    Orders_ListDcAccessCode(#[from] orders::list_dc_access_code::Error),
    #[error(transparent)]
    Roles_ListByDataBoxEdgeDevice(#[from] roles::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    Roles_Get(#[from] roles::get::Error),
    #[error(transparent)]
    Roles_CreateOrUpdate(#[from] roles::create_or_update::Error),
    #[error(transparent)]
    Roles_Delete(#[from] roles::delete::Error),
    #[error(transparent)]
    Addons_ListByRole(#[from] addons::list_by_role::Error),
    #[error(transparent)]
    Addons_Get(#[from] addons::get::Error),
    #[error(transparent)]
    Addons_CreateOrUpdate(#[from] addons::create_or_update::Error),
    #[error(transparent)]
    Addons_Delete(#[from] addons::delete::Error),
    #[error(transparent)]
    MonitoringConfig_List(#[from] monitoring_config::list::Error),
    #[error(transparent)]
    MonitoringConfig_Get(#[from] monitoring_config::get::Error),
    #[error(transparent)]
    MonitoringConfig_CreateOrUpdate(#[from] monitoring_config::create_or_update::Error),
    #[error(transparent)]
    MonitoringConfig_Delete(#[from] monitoring_config::delete::Error),
    #[error(transparent)]
    Devices_ScanForUpdates(#[from] devices::scan_for_updates::Error),
    #[error(transparent)]
    Devices_CreateOrUpdateSecuritySettings(#[from] devices::create_or_update_security_settings::Error),
    #[error(transparent)]
    Shares_ListByDataBoxEdgeDevice(#[from] shares::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    Shares_Get(#[from] shares::get::Error),
    #[error(transparent)]
    Shares_CreateOrUpdate(#[from] shares::create_or_update::Error),
    #[error(transparent)]
    Shares_Delete(#[from] shares::delete::Error),
    #[error(transparent)]
    Shares_Refresh(#[from] shares::refresh::Error),
    #[error(transparent)]
    StorageAccountCredentials_ListByDataBoxEdgeDevice(#[from] storage_account_credentials::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    StorageAccountCredentials_Get(#[from] storage_account_credentials::get::Error),
    #[error(transparent)]
    StorageAccountCredentials_CreateOrUpdate(#[from] storage_account_credentials::create_or_update::Error),
    #[error(transparent)]
    StorageAccountCredentials_Delete(#[from] storage_account_credentials::delete::Error),
    #[error(transparent)]
    StorageAccounts_ListByDataBoxEdgeDevice(#[from] storage_accounts::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    StorageAccounts_Get(#[from] storage_accounts::get::Error),
    #[error(transparent)]
    StorageAccounts_CreateOrUpdate(#[from] storage_accounts::create_or_update::Error),
    #[error(transparent)]
    StorageAccounts_Delete(#[from] storage_accounts::delete::Error),
    #[error(transparent)]
    Containers_ListByStorageAccount(#[from] containers::list_by_storage_account::Error),
    #[error(transparent)]
    Containers_Get(#[from] containers::get::Error),
    #[error(transparent)]
    Containers_CreateOrUpdate(#[from] containers::create_or_update::Error),
    #[error(transparent)]
    Containers_Delete(#[from] containers::delete::Error),
    #[error(transparent)]
    Containers_Refresh(#[from] containers::refresh::Error),
    #[error(transparent)]
    Triggers_ListByDataBoxEdgeDevice(#[from] triggers::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    Triggers_Get(#[from] triggers::get::Error),
    #[error(transparent)]
    Triggers_CreateOrUpdate(#[from] triggers::create_or_update::Error),
    #[error(transparent)]
    Triggers_Delete(#[from] triggers::delete::Error),
    #[error(transparent)]
    SupportPackages_TriggerSupportPackage(#[from] support_packages::trigger_support_package::Error),
    #[error(transparent)]
    Devices_UpdateExtendedInformation(#[from] devices::update_extended_information::Error),
    #[error(transparent)]
    Devices_GetUpdateSummary(#[from] devices::get_update_summary::Error),
    #[error(transparent)]
    Devices_UploadCertificate(#[from] devices::upload_certificate::Error),
    #[error(transparent)]
    Users_ListByDataBoxEdgeDevice(#[from] users::list_by_data_box_edge_device::Error),
    #[error(transparent)]
    Users_Get(#[from] users::get::Error),
    #[error(transparent)]
    Users_CreateOrUpdate(#[from] users::create_or_update::Error),
    #[error(transparent)]
    Users_Delete(#[from] users::delete::Error),
}
pub mod operations {
    use super::models;
    /// Sub-client for the `operations` operation group.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "List all the supported operations."]
        pub fn list(&self) -> list::Builder {
            list::Builder { client: self.0.clone() }
        }
    }
    pub mod list {
        use super::models;
        /// Errors produced while building, sending, or decoding the `list`
        /// request.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Lazily-executed request; call `into_future` to send it.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
        }
        impl Builder {
            /// Sends the GET request and deserializes the response into
            /// `models::OperationsList` on 200 OK, or a `CloudError`
            /// wrapped in `Error::DefaultResponse` otherwise.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationsList, Error>> {
                Box::pin(async move {
                    // Build the request URL.
                    let url_str = &format!("{}/providers/Microsoft.DataBoxEdge/operations", self.client.endpoint(),);
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire a bearer token for the client's scopes and
                    // attach it as the Authorization header.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2021-06-01");
                    // GET request: empty body.
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::OperationsList =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Non-200: the service returns a CloudError body.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
}
pub mod available_skus {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
#[doc = "List all the available Skus and information related to them."]
pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
}
pub mod list {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataBoxEdgeSkuList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.DataBoxEdge/availableSkus",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataBoxEdgeSkuList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod devices {
use super::models;
    /// Sub-client for the `devices` operation group.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Creates a builder listing all devices in the subscription.
        pub fn list_by_subscription(&self, subscription_id: impl Into<String>) -> list_by_subscription::Builder {
            list_by_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                expand: None,
            }
        }
        /// Creates a builder listing all devices in a resource group.
        pub fn list_by_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> list_by_resource_group::Builder {
            list_by_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                expand: None,
            }
        }
        /// Creates a builder for the Get-device operation.
        pub fn get(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder that creates or updates a device resource.
        pub fn create_or_update(
            &self,
            device_name: impl Into<String>,
            data_box_edge_device: impl Into<models::DataBoxEdgeDevice>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                data_box_edge_device: data_box_edge_device.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder applying a patch to a device resource.
        pub fn update(
            &self,
            device_name: impl Into<String>,
            parameters: impl Into<models::DataBoxEdgeDevicePatch>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> update::Builder {
            update::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                parameters: parameters.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder deleting a device resource.
        pub fn delete(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Downloads the updates on a Data Box Edge/Data Box Gateway device."]
        pub fn download_updates(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> download_updates::Builder {
            download_updates::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder for the GenerateCertificate operation.
        pub fn generate_certificate(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> generate_certificate::Builder {
            generate_certificate::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder fetching a device's extended information.
        pub fn get_extended_information(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> get_extended_information::Builder {
            get_extended_information::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Installs the updates on the Data Box Edge/Data Box Gateway device."]
        pub fn install_updates(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> install_updates::Builder {
            install_updates::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder fetching a device's network settings.
        pub fn get_network_settings(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> get_network_settings::Builder {
            get_network_settings::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Scans for updates on a Data Box Edge/Data Box Gateway device."]
        pub fn scan_for_updates(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> scan_for_updates::Builder {
            scan_for_updates::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder writing a device's security settings.
        pub fn create_or_update_security_settings(
            &self,
            device_name: impl Into<String>,
            security_settings: impl Into<models::SecuritySettings>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> create_or_update_security_settings::Builder {
            create_or_update_security_settings::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                security_settings: security_settings.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder patching a device's extended information.
        pub fn update_extended_information(
            &self,
            device_name: impl Into<String>,
            parameters: impl Into<models::DataBoxEdgeDeviceExtendedInfoPatch>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> update_extended_information::Builder {
            update_extended_information::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                parameters: parameters.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Gets information about the availability of updates based on the last scan of the device. It also gets information about any ongoing download or install jobs on the device."]
        pub fn get_update_summary(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> get_update_summary::Builder {
            get_update_summary::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        /// Creates a builder uploading a certificate to the device resource.
        pub fn upload_certificate(
            &self,
            device_name: impl Into<String>,
            parameters: impl Into<models::UploadCertificateRequest>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> upload_certificate::Builder {
            upload_certificate::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                parameters: parameters.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
    }
    pub mod list_by_subscription {
        use super::models;
        /// Errors produced while building, sending, or decoding the request.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Lazily-executed request; optional `$expand` is set via `expand`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) expand: Option<String>,
        }
        impl Builder {
            /// Sets the `$expand` query parameter.
            pub fn expand(mut self, expand: impl Into<String>) -> Self {
                self.expand = Some(expand.into());
                self
            }
            /// Sends the GET request and deserializes a 200 OK response into
            /// `models::DataBoxEdgeDeviceList`; other statuses yield
            /// `Error::DefaultResponse` with the service's `CloudError`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataBoxEdgeDeviceList, Error>> {
                Box::pin(async move {
                    // Build the request URL.
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices",
                        self.client.endpoint(),
                        &self.subscription_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire and attach a bearer token.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2021-06-01");
                    // `$expand` is only appended when explicitly configured.
                    if let Some(expand) = &self.expand {
                        url.query_pairs_mut().append_pair("$expand", expand);
                    }
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::DataBoxEdgeDeviceList =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    pub mod list_by_resource_group {
        use super::models;
        /// Errors produced while building, sending, or decoding the request.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Lazily-executed request; optional `$expand` is set via `expand`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) expand: Option<String>,
        }
        impl Builder {
            /// Sets the `$expand` query parameter.
            pub fn expand(mut self, expand: impl Into<String>) -> Self {
                self.expand = Some(expand.into());
                self
            }
            /// Sends the GET request and deserializes a 200 OK response into
            /// `models::DataBoxEdgeDeviceList`; other statuses yield
            /// `Error::DefaultResponse` with the service's `CloudError`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataBoxEdgeDeviceList, Error>> {
                Box::pin(async move {
                    // Build the request URL scoped to the resource group.
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire and attach a bearer token.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2021-06-01");
                    // `$expand` is only appended when explicitly configured.
                    if let Some(expand) = &self.expand {
                        url.query_pairs_mut().append_pair("$expand", expand);
                    }
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::DataBoxEdgeDeviceList =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataBoxEdgeDevice, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataBoxEdgeDevice =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) data_box_edge_device: models::DataBoxEdgeDevice,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataBoxEdgeDevice, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.data_box_edge_device).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataBoxEdgeDevice =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) parameters: models::DataBoxEdgeDevicePatch,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataBoxEdgeDevice, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataBoxEdgeDevice =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod download_updates {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/downloadUpdates",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod generate_certificate {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::GenerateCertResponse, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/generateCertificate",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::GenerateCertResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_extended_information {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::DataBoxEdgeDeviceExtendedInfo, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/getExtendedInformation" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataBoxEdgeDeviceExtendedInfo =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod install_updates {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/installUpdates",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_network_settings {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::NetworkSettings, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/networkSettings/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::NetworkSettings =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod scan_for_updates {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/scanForUpdates",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update_security_settings {
use super::models;
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) security_settings: models::SecuritySettings,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/securitySettings/default/update" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.security_settings).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update_extended_information {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) parameters: models::DataBoxEdgeDeviceExtendedInfoPatch,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::DataBoxEdgeDeviceExtendedInfo, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/updateExtendedInformation" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataBoxEdgeDeviceExtendedInfo =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_update_summary {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::UpdateSummary, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/updateSummary/default",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::UpdateSummary =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod upload_certificate {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) parameters: models::UploadCertificateRequest,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::UploadCertificateResponse, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/uploadCertificate",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::UploadCertificateResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod alerts {
use super::models;
    /// Operation-group client for alert operations; wraps the service client.
    pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list_by_data_box_edge_device(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_data_box_edge_device::Builder {
list_by_data_box_edge_device::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Gets an alert by name."]
pub fn get(
&self,
device_name: impl Into<String>,
name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
device_name: device_name.into(),
name: name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AlertList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/alerts",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AlertList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Alert, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/alerts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Alert =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod bandwidth_schedules {
use super::models;
    /// Operation-group client for bandwidth-schedule operations; wraps the service client.
    pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list_by_data_box_edge_device(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_data_box_edge_device::Builder {
list_by_data_box_edge_device::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn get(
&self,
device_name: impl Into<String>,
name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
device_name: device_name.into(),
name: name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn create_or_update(
&self,
device_name: impl Into<String>,
name: impl Into<String>,
parameters: impl Into<models::BandwidthSchedule>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
device_name: device_name.into(),
name: name.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn delete(
&self,
device_name: impl Into<String>,
name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
device_name: device_name.into(),
name: name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BandwidthSchedulesList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/bandwidthSchedules",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BandwidthSchedulesList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BandwidthSchedule, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/bandwidthSchedules/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BandwidthSchedule =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::BandwidthSchedule),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) parameters: models::BandwidthSchedule,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/bandwidthSchedules/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BandwidthSchedule =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/bandwidthSchedules/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod diagnostic_settings {
use super::models;
    /// Operation-group client for diagnostic-settings operations; wraps the service client.
    pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get_diagnostic_proactive_log_collection_settings(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get_diagnostic_proactive_log_collection_settings::Builder {
get_diagnostic_proactive_log_collection_settings::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn update_diagnostic_proactive_log_collection_settings(
&self,
device_name: impl Into<String>,
proactive_log_collection_settings: impl Into<models::DiagnosticProactiveLogCollectionSettings>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> update_diagnostic_proactive_log_collection_settings::Builder {
update_diagnostic_proactive_log_collection_settings::Builder {
client: self.0.clone(),
device_name: device_name.into(),
proactive_log_collection_settings: proactive_log_collection_settings.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn get_diagnostic_remote_support_settings(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get_diagnostic_remote_support_settings::Builder {
get_diagnostic_remote_support_settings::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn update_diagnostic_remote_support_settings(
&self,
device_name: impl Into<String>,
diagnostic_remote_support_settings: impl Into<models::DiagnosticRemoteSupportSettings>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> update_diagnostic_remote_support_settings::Builder {
update_diagnostic_remote_support_settings::Builder {
client: self.0.clone(),
device_name: device_name.into(),
diagnostic_remote_support_settings: diagnostic_remote_support_settings.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod get_diagnostic_proactive_log_collection_settings {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::DiagnosticProactiveLogCollectionSettings, Error>>
{
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/diagnosticProactiveLogCollectionSettings/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DiagnosticProactiveLogCollectionSettings =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update_diagnostic_proactive_log_collection_settings {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::DiagnosticProactiveLogCollectionSettings),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) proactive_log_collection_settings: models::DiagnosticProactiveLogCollectionSettings,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/diagnosticProactiveLogCollectionSettings/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.proactive_log_collection_settings).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DiagnosticProactiveLogCollectionSettings =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_diagnostic_remote_support_settings {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::DiagnosticRemoteSupportSettings, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/diagnosticRemoteSupportSettings/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DiagnosticRemoteSupportSettings =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update_diagnostic_remote_support_settings {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::DiagnosticRemoteSupportSettings),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) diagnostic_remote_support_settings: models::DiagnosticRemoteSupportSettings,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/diagnosticRemoteSupportSettings/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.diagnostic_remote_support_settings).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DiagnosticRemoteSupportSettings =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod jobs {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
#[doc = "Gets the details of a specified job on a Data Box Edge/Data Box Gateway device."]
pub fn get(
&self,
device_name: impl Into<String>,
name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
device_name: device_name.into(),
name: name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Job, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/jobs/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Job =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod nodes {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list_by_data_box_edge_device(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_data_box_edge_device::Builder {
list_by_data_box_edge_device::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::NodeList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/nodes",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::NodeList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod operations_status {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
#[doc = "Gets the details of a specified job on a Data Box Edge/Data Box Gateway device."]
pub fn get(
&self,
device_name: impl Into<String>,
name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
device_name: device_name.into(),
name: name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Job, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/operationsStatus/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Job =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod orders {
use super::models;
    /// Client for order operations; a thin wrapper around the service-level client.
    pub struct Client(pub(crate) super::Client);
impl Client {
#[doc = "Lists all the orders related to a Data Box Edge/Data Box Gateway device."]
pub fn list_by_data_box_edge_device(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_data_box_edge_device::Builder {
list_by_data_box_edge_device::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Gets a specific order by name."]
pub fn get(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Creates or updates an order."]
pub fn create_or_update(
&self,
device_name: impl Into<String>,
order: impl Into<models::Order>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
device_name: device_name.into(),
order: order.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Deletes the order related to the device."]
pub fn delete(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Gets the DCAccess Code"]
pub fn list_dc_access_code(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_dc_access_code::Builder {
list_dc_access_code::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OrderList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/orders",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OrderList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Order, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/orders/default",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Order =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Order),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) order: models::Order,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/orders/default",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.order).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Order =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/orders/default",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_dc_access_code {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DcAccessCode, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/orders/default/listDCAccessCode" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DcAccessCode =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod roles {
use super::models;
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "Creates a builder for the operation that lists the roles of a device."]
        pub fn list_by_data_box_edge_device(
            &self,
            device_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> list_by_data_box_edge_device::Builder {
            list_by_data_box_edge_device::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Creates a builder for the operation that fetches a single role by name."]
        pub fn get(
            &self,
            device_name: impl Into<String>,
            name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                name: name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Creates a builder for the operation that creates or updates a role."]
        pub fn create_or_update(
            &self,
            device_name: impl Into<String>,
            name: impl Into<String>,
            role: impl Into<models::Role>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                name: name.into(),
                role: role.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Creates a builder for the operation that deletes a role by name."]
        pub fn delete(
            &self,
            device_name: impl Into<String>,
            name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                name: name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
    }
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RoleList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RoleList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Role, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Role =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Role),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) role: models::Role,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.role).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Role =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod addons {
use super::models;
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "Creates a builder for the operation that lists the addons of a role."]
        pub fn list_by_role(
            &self,
            device_name: impl Into<String>,
            role_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> list_by_role::Builder {
            list_by_role::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                role_name: role_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Creates a builder for the operation that fetches a single addon by name."]
        pub fn get(
            &self,
            device_name: impl Into<String>,
            role_name: impl Into<String>,
            addon_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                role_name: role_name.into(),
                addon_name: addon_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Creates a builder for the operation that creates or updates an addon."]
        pub fn create_or_update(
            &self,
            device_name: impl Into<String>,
            role_name: impl Into<String>,
            addon_name: impl Into<String>,
            addon: impl Into<models::Addon>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                role_name: role_name.into(),
                addon_name: addon_name.into(),
                addon: addon.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Creates a builder for the operation that deletes an addon by name."]
        pub fn delete(
            &self,
            device_name: impl Into<String>,
            role_name: impl Into<String>,
            addon_name: impl Into<String>,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                device_name: device_name.into(),
                role_name: role_name.into(),
                addon_name: addon_name.into(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
    }
pub mod list_by_role {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) role_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AddonList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}/addons",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.role_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AddonList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) role_name: String,
pub(crate) addon_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Addon, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}/addons/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.role_name,
&self.addon_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Addon =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Addon),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) role_name: String,
pub(crate) addon_name: String,
pub(crate) addon: models::Addon,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}/addons/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.role_name,
&self.addon_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.addon).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Addon =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) role_name: String,
pub(crate) addon_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}/addons/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.role_name,
&self.addon_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod monitoring_config {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
    /// Lists metric configurations in a role.
    pub fn list(
        &self,
        device_name: impl Into<String>,
        role_name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> list::Builder {
        let client = self.0.clone();
        list::Builder {
            client,
            device_name: device_name.into(),
            role_name: role_name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Gets a metric configuration of a role.
    pub fn get(
        &self,
        device_name: impl Into<String>,
        role_name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> get::Builder {
        let client = self.0.clone();
        get::Builder {
            client,
            device_name: device_name.into(),
            role_name: role_name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Creates a new metric configuration or updates an existing one for a role.
    pub fn create_or_update(
        &self,
        device_name: impl Into<String>,
        role_name: impl Into<String>,
        monitoring_metric_configuration: impl Into<models::MonitoringMetricConfiguration>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> create_or_update::Builder {
        let client = self.0.clone();
        create_or_update::Builder {
            client,
            device_name: device_name.into(),
            role_name: role_name.into(),
            monitoring_metric_configuration: monitoring_metric_configuration.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// deletes a new metric configuration for a role.
    pub fn delete(
        &self,
        device_name: impl Into<String>,
        role_name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> delete::Builder {
        let client = self.0.clone();
        delete::Builder {
            client,
            device_name: device_name.into(),
            role_name: role_name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
}
pub mod list {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) role_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::MonitoringMetricConfigurationList, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}/monitoringConfig" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . role_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::MonitoringMetricConfigurationList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) role_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::MonitoringMetricConfiguration, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}/monitoringConfig/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . role_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::MonitoringMetricConfiguration =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::MonitoringMetricConfiguration),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) role_name: String,
pub(crate) monitoring_metric_configuration: models::MonitoringMetricConfiguration,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}/monitoringConfig/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . role_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.monitoring_metric_configuration).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::MonitoringMetricConfiguration =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) role_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/roles/{}/monitoringConfig/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . role_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod shares {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
    /// Lists all the shares in a Data Box Edge/Data Box Gateway device.
    pub fn list_by_data_box_edge_device(
        &self,
        device_name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> list_by_data_box_edge_device::Builder {
        let client = self.0.clone();
        list_by_data_box_edge_device::Builder {
            client,
            device_name: device_name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Gets a share by name.
    pub fn get(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> get::Builder {
        let client = self.0.clone();
        get::Builder {
            client,
            device_name: device_name.into(),
            name: name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Creates a new share or updates an existing share on the device.
    pub fn create_or_update(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        share: impl Into<models::Share>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> create_or_update::Builder {
        let client = self.0.clone();
        create_or_update::Builder {
            client,
            device_name: device_name.into(),
            name: name.into(),
            share: share.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Deletes a share on the Data Box Edge/Data Box Gateway device.
    pub fn delete(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> delete::Builder {
        let client = self.0.clone();
        delete::Builder {
            client,
            device_name: device_name.into(),
            name: name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Refreshes the share metadata with the data from the cloud.
    pub fn refresh(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> refresh::Builder {
        let client = self.0.clone();
        refresh::Builder {
            client,
            device_name: device_name.into(),
            name: name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ShareList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/shares",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ShareList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Share, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/shares/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Share =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Share),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) share: models::Share,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/shares/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.share).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Share =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/shares/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod refresh {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/shares/{}/refresh",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod storage_account_credentials {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
    /// Gets all the storage account credentials in a Data Box Edge/Data Box Gateway device.
    pub fn list_by_data_box_edge_device(
        &self,
        device_name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> list_by_data_box_edge_device::Builder {
        let client = self.0.clone();
        list_by_data_box_edge_device::Builder {
            client,
            device_name: device_name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Gets a storage account credential by name.
    pub fn get(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> get::Builder {
        let client = self.0.clone();
        get::Builder {
            client,
            device_name: device_name.into(),
            name: name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Creates or updates a storage account credential on the device.
    pub fn create_or_update(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        storage_account_credential: impl Into<models::StorageAccountCredential>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> create_or_update::Builder {
        let client = self.0.clone();
        create_or_update::Builder {
            client,
            device_name: device_name.into(),
            name: name.into(),
            storage_account_credential: storage_account_credential.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
    /// Deletes a storage account credential from the device.
    pub fn delete(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> delete::Builder {
        let client = self.0.clone();
        delete::Builder {
            client,
            device_name: device_name.into(),
            name: name.into(),
            subscription_id: subscription_id.into(),
            resource_group_name: resource_group_name.into(),
        }
    }
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::StorageAccountCredentialList, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccountCredentials" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::StorageAccountCredentialList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::StorageAccountCredential, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccountCredentials/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::StorageAccountCredential =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::StorageAccountCredential),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) storage_account_credential: models::StorageAccountCredential,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccountCredentials/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.storage_account_credential).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::StorageAccountCredential =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccountCredentials/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod storage_accounts {
use super::models;
    /// Thin wrapper around the service client, scoped to StorageAccount
    /// operations on a Data Box Edge/Gateway device.
    pub struct Client(pub(crate) super::Client);
impl Client {
#[doc = "Lists all the StorageAccounts in a Data Box Edge/Data Box Gateway device."]
pub fn list_by_data_box_edge_device(
&self,
device_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_data_box_edge_device::Builder {
list_by_data_box_edge_device::Builder {
client: self.0.clone(),
device_name: device_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Gets a StorageAccount by name."]
pub fn get(
&self,
device_name: impl Into<String>,
storage_account_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
device_name: device_name.into(),
storage_account_name: storage_account_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Creates a new StorageAccount or updates an existing StorageAccount on the device."]
pub fn create_or_update(
&self,
device_name: impl Into<String>,
storage_account_name: impl Into<String>,
storage_account: impl Into<models::StorageAccount>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
device_name: device_name.into(),
storage_account_name: storage_account_name.into(),
storage_account: storage_account.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn delete(
&self,
device_name: impl Into<String>,
storage_account_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
device_name: device_name.into(),
storage_account_name: storage_account_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::StorageAccountList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::StorageAccountList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) storage_account_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::StorageAccount, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.storage_account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::StorageAccount =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::StorageAccount),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) storage_account_name: String,
pub(crate) storage_account: models::StorageAccount,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.storage_account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.storage_account).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::StorageAccount =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) storage_account_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.storage_account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod containers {
use super::models;
    /// Thin wrapper around the service client, scoped to container operations
    /// on a storage account of a Data Box Edge/Gateway device.
    pub struct Client(pub(crate) super::Client);
impl Client {
#[doc = "Lists all the containers of a storage Account in a Data Box Edge/Data Box Gateway device."]
pub fn list_by_storage_account(
&self,
device_name: impl Into<String>,
storage_account_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_storage_account::Builder {
list_by_storage_account::Builder {
client: self.0.clone(),
device_name: device_name.into(),
storage_account_name: storage_account_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Gets a container by name."]
pub fn get(
&self,
device_name: impl Into<String>,
storage_account_name: impl Into<String>,
container_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
device_name: device_name.into(),
storage_account_name: storage_account_name.into(),
container_name: container_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Creates a new container or updates an existing container on the device."]
pub fn create_or_update(
&self,
device_name: impl Into<String>,
storage_account_name: impl Into<String>,
container_name: impl Into<String>,
container: impl Into<models::Container>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
device_name: device_name.into(),
storage_account_name: storage_account_name.into(),
container_name: container_name.into(),
container: container.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn delete(
&self,
device_name: impl Into<String>,
storage_account_name: impl Into<String>,
container_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
device_name: device_name.into(),
storage_account_name: storage_account_name.into(),
container_name: container_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Refreshes the container metadata with the data from the cloud."]
pub fn refresh(
&self,
device_name: impl Into<String>,
storage_account_name: impl Into<String>,
container_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> refresh::Builder {
refresh::Builder {
client: self.0.clone(),
device_name: device_name.into(),
storage_account_name: storage_account_name.into(),
container_name: container_name.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod list_by_storage_account {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) storage_account_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ContainerList, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts/{}/containers" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . storage_account_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ContainerList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) storage_account_name: String,
pub(crate) container_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Container, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts/{}/containers/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . storage_account_name , & self . container_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Container =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Container),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) storage_account_name: String,
pub(crate) container_name: String,
pub(crate) container: models::Container,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts/{}/containers/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . storage_account_name , & self . container_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.container).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Container =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) storage_account_name: String,
pub(crate) container_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts/{}/containers/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . storage_account_name , & self . container_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod refresh {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) storage_account_name: String,
pub(crate) container_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/storageAccounts/{}/containers/{}/refresh" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . device_name , & self . storage_account_name , & self . container_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod triggers {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
    /// Builds a request that lists all triggers configured on a device,
    /// optionally narrowed later via `Builder::filter`.
    pub fn list_by_data_box_edge_device(
        &self,
        device_name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> list_by_data_box_edge_device::Builder {
        let client = self.0.clone();
        list_by_data_box_edge_device::Builder {
            client,
            filter: None,
            resource_group_name: resource_group_name.into(),
            subscription_id: subscription_id.into(),
            device_name: device_name.into(),
        }
    }
    /// Builds a request that fetches a single trigger by name.
    pub fn get(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> get::Builder {
        let client = self.0.clone();
        get::Builder {
            client,
            resource_group_name: resource_group_name.into(),
            subscription_id: subscription_id.into(),
            name: name.into(),
            device_name: device_name.into(),
        }
    }
    /// Builds a request that creates or replaces the named trigger.
    pub fn create_or_update(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        trigger: impl Into<models::Trigger>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> create_or_update::Builder {
        let client = self.0.clone();
        create_or_update::Builder {
            client,
            resource_group_name: resource_group_name.into(),
            subscription_id: subscription_id.into(),
            trigger: trigger.into(),
            name: name.into(),
            device_name: device_name.into(),
        }
    }
    /// Builds a request that deletes the named trigger.
    pub fn delete(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> delete::Builder {
        let client = self.0.clone();
        delete::Builder {
            client,
            resource_group_name: resource_group_name.into(),
            subscription_id: subscription_id.into(),
            name: name.into(),
            device_name: device_name.into(),
        }
    }
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::TriggerList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/triggers",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::TriggerList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Trigger, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/triggers/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Trigger =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Trigger),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) trigger: models::Trigger,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/triggers/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.trigger).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Trigger =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/triggers/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod support_packages {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
#[doc = "Triggers support package on the device"]
pub fn trigger_support_package(
&self,
device_name: impl Into<String>,
trigger_support_package_request: impl Into<models::TriggerSupportPackageRequest>,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> trigger_support_package::Builder {
trigger_support_package::Builder {
client: self.0.clone(),
device_name: device_name.into(),
trigger_support_package_request: trigger_support_package_request.into(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
}
pub mod trigger_support_package {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) trigger_support_package_request: models::TriggerSupportPackageRequest,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/triggerSupportPackage",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.trigger_support_package_request).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod users {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
    /// Builds a request that lists all users on a device, optionally narrowed
    /// later via `Builder::filter`.
    pub fn list_by_data_box_edge_device(
        &self,
        device_name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> list_by_data_box_edge_device::Builder {
        let client = self.0.clone();
        list_by_data_box_edge_device::Builder {
            client,
            filter: None,
            resource_group_name: resource_group_name.into(),
            subscription_id: subscription_id.into(),
            device_name: device_name.into(),
        }
    }
    /// Builds a request that fetches a single user by name.
    pub fn get(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> get::Builder {
        let client = self.0.clone();
        get::Builder {
            client,
            resource_group_name: resource_group_name.into(),
            subscription_id: subscription_id.into(),
            name: name.into(),
            device_name: device_name.into(),
        }
    }
    /// Builds a request that creates or replaces the named user.
    pub fn create_or_update(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        user: impl Into<models::User>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> create_or_update::Builder {
        let client = self.0.clone();
        create_or_update::Builder {
            client,
            resource_group_name: resource_group_name.into(),
            subscription_id: subscription_id.into(),
            user: user.into(),
            name: name.into(),
            device_name: device_name.into(),
        }
    }
    /// Builds a request that deletes the named user.
    pub fn delete(
        &self,
        device_name: impl Into<String>,
        name: impl Into<String>,
        subscription_id: impl Into<String>,
        resource_group_name: impl Into<String>,
    ) -> delete::Builder {
        let client = self.0.clone();
        delete::Builder {
            client,
            resource_group_name: resource_group_name.into(),
            subscription_id: subscription_id.into(),
            name: name.into(),
            device_name: device_name.into(),
        }
    }
}
pub mod list_by_data_box_edge_device {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::UserList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/users",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::UserList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::User, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/users/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::User =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::User),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) user: models::User,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/users/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.user).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::User =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) device_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{}/users/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.device_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
| 51.098965 | 352 | 0.521753 |
9101865d6631a407fcf816f03305844097f0476f | 33,253 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::any::Any;
use std::borrow::Borrow;
use std::convert::From;
use std::fmt;
use std::iter::{FromIterator, IntoIterator};
use std::mem;
use chrono::{prelude::*, Duration};
use super::array::print_long_array;
use super::raw_pointer::RawPtrBox;
use super::*;
use crate::temporal_conversions;
use crate::util::bit_util;
use crate::{
buffer::{Buffer, MutableBuffer},
util::trusted_len_unzip,
};
// Conversion factors used by the temporal helpers in this module
// (all expressed relative to one second / one day).
/// Number of seconds in a day
const SECONDS_IN_DAY: i64 = 86_400;
/// Number of milliseconds in a second
const MILLISECONDS: i64 = 1_000;
/// Number of microseconds in a second
const MICROSECONDS: i64 = 1_000_000;
/// Number of nanoseconds in a second
const NANOSECONDS: i64 = 1_000_000_000;
/// Array whose elements are of primitive types.
///
/// The element type is fixed by the `ArrowPrimitiveType` parameter `T`
/// (e.g. `Int32Type`); values live contiguously in a single buffer, with an
/// optional validity bitmap tracked by the underlying `ArrayData`.
pub struct PrimitiveArray<T: ArrowPrimitiveType> {
    /// Underlying ArrayData
    /// # Safety
    /// must have exactly one buffer, aligned to type T
    data: ArrayData,
    /// Pointer to the value array. The lifetime of this must be <= to the value buffer
    /// stored in `data`, so it's safe to store.
    /// # Safety
    /// raw_values must have a value equivalent to `data.buffers()[0].raw_data()`
    /// raw_values must have alignment for type T::NativeType
    raw_values: RawPtrBox<T::Native>,
}
impl<T: ArrowPrimitiveType> PrimitiveArray<T> {
    /// Returns the length of this array.
    #[inline]
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Returns whether this array is empty.
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    /// Returns a slice of the values of this array
    #[inline]
    pub fn values(&self) -> &[T::Native] {
        // Soundness
        // raw_values alignment & location is ensured by fn from(ArrayDataRef)
        // buffer bounds/offset is ensured by the ArrayData instance.
        unsafe {
            std::slice::from_raw_parts(
                self.raw_values.as_ptr().add(self.data.offset()),
                self.len(),
            )
        }
    }

    /// Returns a new primitive array builder with room for `capacity` values.
    pub fn builder(capacity: usize) -> PrimitiveBuilder<T> {
        PrimitiveBuilder::<T>::new(capacity)
    }

    /// Returns the primitive value at index `i`, without bounds checking.
    ///
    /// # Safety
    ///
    /// caller must ensure that the passed in offset is less than the array len()
    #[inline]
    pub unsafe fn value_unchecked(&self, i: usize) -> T::Native {
        let offset = i + self.offset();
        *self.raw_values.as_ptr().add(offset)
    }

    /// Returns the primitive value at index `i`.
    ///
    /// # Panics
    ///
    /// Panics if `i` is out of bounds. The check must also run in release
    /// builds: this is a safe function, so an unchecked out-of-bounds read
    /// here would be undefined behavior reachable from safe code.
    #[inline]
    pub fn value(&self, i: usize) -> T::Native {
        assert!(
            i < self.len(),
            "PrimitiveArray index out of bounds: the len is {} but the index is {}",
            self.len(),
            i
        );
        // SAFETY: `i` was checked against `self.len()` above.
        unsafe { self.value_unchecked(i) }
    }

    /// Builds an array from a buffer holding only values (every slot valid,
    /// no null bitmap). Shared by the all-valid constructors below.
    fn from_values_buffer(val_buf: Buffer) -> Self {
        let len = val_buf.len() / mem::size_of::<<T as ArrowPrimitiveType>::Native>();
        let data = ArrayData::new(T::DATA_TYPE, len, None, None, 0, vec![val_buf], vec![]);
        PrimitiveArray::from(data)
    }

    /// Creates a PrimitiveArray based on an iterator of values without nulls
    pub fn from_iter_values<I: IntoIterator<Item = T::Native>>(iter: I) -> Self {
        let val_buf: Buffer = iter.into_iter().collect();
        Self::from_values_buffer(val_buf)
    }

    /// Creates a PrimitiveArray based on a constant value with `count` elements
    pub fn from_value(value: T::Native, count: usize) -> Self {
        // SAFETY: the iterator's length (`count`) is known exactly.
        let val_buf = unsafe { Buffer::from_trusted_len_iter((0..count).map(|_| value)) };
        Self::from_values_buffer(val_buf)
    }
}
impl<T: ArrowPrimitiveType> Array for PrimitiveArray<T> {
    /// Returns the array as a type-erased [`Any`] so callers can downcast it
    /// back to the concrete `PrimitiveArray<T>`. (`&dyn Any` is the modern
    /// spelling of the bare-trait-object `&Any`; the type is identical.)
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn data(&self) -> &ArrayData {
        &self.data
    }

    /// Returns the total number of bytes of memory occupied by the buffers owned by this [PrimitiveArray].
    fn get_buffer_memory_size(&self) -> usize {
        self.data.get_buffer_memory_size()
    }

    /// Returns the total number of bytes of memory occupied physically by this [PrimitiveArray].
    fn get_array_memory_size(&self) -> usize {
        // Count the cached `raw_values` pointer on this wrapper in addition
        // to the memory already tracked by `ArrayData`.
        self.data.get_array_memory_size() + mem::size_of::<RawPtrBox<T::Native>>()
    }
}
/// Converts a raw temporal value into a `NaiveDateTime` according to `T`'s
/// Arrow data type. Types without a date component (times, intervals) and
/// non-temporal types yield `None`.
fn as_datetime<T: ArrowPrimitiveType>(v: i64) -> Option<NaiveDateTime> {
    match T::DATA_TYPE {
        DataType::Date32 => Some(temporal_conversions::date32_to_datetime(v as i32)),
        DataType::Date64 => Some(temporal_conversions::date64_to_datetime(v)),
        DataType::Timestamp(TimeUnit::Second, _) => {
            Some(temporal_conversions::timestamp_s_to_datetime(v))
        }
        DataType::Timestamp(TimeUnit::Millisecond, _) => {
            Some(temporal_conversions::timestamp_ms_to_datetime(v))
        }
        DataType::Timestamp(TimeUnit::Microsecond, _) => {
            Some(temporal_conversions::timestamp_us_to_datetime(v))
        }
        DataType::Timestamp(TimeUnit::Nanosecond, _) => {
            Some(temporal_conversions::timestamp_ns_to_datetime(v))
        }
        // Time32/Time64 carry no date component; interval is not yet fully
        // documented [ARROW-3097]; everything else has no datetime mapping.
        _ => None,
    }
}
/// Converts a raw temporal value into a `NaiveDate` by dropping the
/// time-of-day component of the corresponding datetime, if one exists.
fn as_date<T: ArrowPrimitiveType>(v: i64) -> Option<NaiveDate> {
    let datetime = as_datetime::<T>(v)?;
    Some(datetime.date())
}
/// Converts a raw temporal value into a `NaiveTime` according to `T`'s Arrow
/// data type. Dates map to UTC midnight (no time resolution); intervals and
/// non-temporal types yield `None`.
fn as_time<T: ArrowPrimitiveType>(v: i64) -> Option<NaiveTime> {
    match T::DATA_TYPE {
        // Time32 values come from a non-negative i32, so truncating the i64
        // back down is lossless.
        DataType::Time32(TimeUnit::Second) => {
            Some(temporal_conversions::time32s_to_time(v as i32))
        }
        DataType::Time32(TimeUnit::Millisecond) => {
            Some(temporal_conversions::time32ms_to_time(v as i32))
        }
        DataType::Time64(TimeUnit::Microsecond) => {
            Some(temporal_conversions::time64us_to_time(v))
        }
        DataType::Time64(TimeUnit::Nanosecond) => {
            Some(temporal_conversions::time64ns_to_time(v))
        }
        // Timestamps have a time component: extract it from the datetime.
        DataType::Timestamp(_, _) => as_datetime::<T>(v).map(|datetime| datetime.time()),
        // Dates have no time resolution: report midnight.
        DataType::Date32 | DataType::Date64 => Some(NaiveTime::from_hms(0, 0, 0)),
        // Unsupported time units, intervals, and all other types.
        _ => None,
    }
}
/// Converts a raw value into a chrono `Duration` when `T` is an Arrow
/// duration type; any other data type yields `None`.
fn as_duration<T: ArrowPrimitiveType>(v: i64) -> Option<Duration> {
    let unit = match T::DATA_TYPE {
        DataType::Duration(unit) => unit,
        _ => return None,
    };
    let duration = match unit {
        TimeUnit::Second => temporal_conversions::duration_s_to_duration(v),
        TimeUnit::Millisecond => temporal_conversions::duration_ms_to_duration(v),
        TimeUnit::Microsecond => temporal_conversions::duration_us_to_duration(v),
        TimeUnit::Nanosecond => temporal_conversions::duration_ns_to_duration(v),
    };
    Some(duration)
}
impl<T: ArrowTemporalType + ArrowNumericType> PrimitiveArray<T>
where
    i64: std::convert::From<T::Native>,
{
    /// Returns value as a chrono `NaiveDateTime`, handling time resolution
    ///
    /// If a data type cannot be converted to `NaiveDateTime`, a `None` is returned.
    /// A valid value is expected, thus the user should first check for validity.
    pub fn value_as_datetime(&self, i: usize) -> Option<NaiveDateTime> {
        let raw = i64::from(self.value(i));
        as_datetime::<T>(raw)
    }

    /// Returns value as a chrono `NaiveDate` by using `Self::datetime()`
    ///
    /// If a data type cannot be converted to `NaiveDate`, a `None` is returned
    pub fn value_as_date(&self, i: usize) -> Option<NaiveDate> {
        Some(self.value_as_datetime(i)?.date())
    }

    /// Returns a value as a chrono `NaiveTime`
    ///
    /// `Date32` and `Date64` return UTC midnight as they do not have time resolution
    pub fn value_as_time(&self, i: usize) -> Option<NaiveTime> {
        let raw = i64::from(self.value(i));
        as_time::<T>(raw)
    }

    /// Returns a value as a chrono `Duration`
    ///
    /// If a data type cannot be converted to `Duration`, a `None` is returned
    pub fn value_as_duration(&self, i: usize) -> Option<Duration> {
        let raw = i64::from(self.value(i));
        as_duration::<T>(raw)
    }
}
impl<T: ArrowPrimitiveType> fmt::Debug for PrimitiveArray<T> {
    /// Formats as `PrimitiveArray<TYPE>` followed by a bracketed value list.
    /// Temporal types (dates, times, timestamps) are rendered in chrono's
    /// human-readable form; a temporal value that cannot be converted prints
    /// as "null".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "PrimitiveArray<{:?}>\n[\n", T::DATA_TYPE)?;
        print_long_array(self, f, |array, index, f| match T::DATA_TYPE {
            DataType::Date32 | DataType::Date64 => {
                // Widen the native value to i64 before temporal conversion.
                let v = self.value(index).to_isize().unwrap() as i64;
                match as_date::<T>(v) {
                    Some(date) => write!(f, "{:?}", date),
                    None => write!(f, "null"),
                }
            }
            DataType::Time32(_) | DataType::Time64(_) => {
                let v = self.value(index).to_isize().unwrap() as i64;
                match as_time::<T>(v) {
                    Some(time) => write!(f, "{:?}", time),
                    None => write!(f, "null"),
                }
            }
            DataType::Timestamp(_, _) => {
                let v = self.value(index).to_isize().unwrap() as i64;
                match as_datetime::<T>(v) {
                    Some(datetime) => write!(f, "{:?}", datetime),
                    None => write!(f, "null"),
                }
            }
            // Non-temporal values use their native Debug representation.
            _ => fmt::Debug::fmt(&array.value(index), f),
        })?;
        write!(f, "]")
    }
}
impl<'a, T: ArrowPrimitiveType> IntoIterator for &'a PrimitiveArray<T> {
    type Item = Option<<T as ArrowPrimitiveType>::Native>;
    type IntoIter = PrimitiveIter<'a, T>;

    /// Turns a borrowed array into a null-aware iterator over its slots.
    fn into_iter(self) -> Self::IntoIter {
        // `Self::IntoIter` fixes the generic parameters, so no turbofish needed.
        PrimitiveIter::new(self)
    }
}
impl<'a, T: ArrowPrimitiveType> PrimitiveArray<T> {
    /// Constructs a new null-aware iterator over this array's slots.
    pub fn iter(&'a self) -> PrimitiveIter<'a, T> {
        // `self` is already `&'a Self`; the extra `&self` borrow in the
        // original was redundant (clippy::needless_borrow).
        PrimitiveIter::<'a, T>::new(self)
    }
}
impl<T: ArrowPrimitiveType, Ptr: Borrow<Option<<T as ArrowPrimitiveType>::Native>>>
    FromIterator<Ptr> for PrimitiveArray<T>
{
    /// Collects an iterator of optional values into a `PrimitiveArray`,
    /// building the validity bitmap and the values buffer in a single pass.
    fn from_iter<I: IntoIterator<Item = Ptr>>(iter: I) -> Self {
        let iter = iter.into_iter();
        // Pre-size the bitmap from the iterator's lower size bound.
        let (lower, _) = iter.size_hint();
        let mut null_buf = BooleanBufferBuilder::new(lower);
        let buffer: Buffer = iter
            .map(|item| {
                if let Some(a) = item.borrow() {
                    null_buf.append(true);
                    *a
                } else {
                    null_buf.append(false);
                    // this ensures that null items on the buffer are not arbitrary.
                    // This is important because fallible operations can use null values (e.g. a vectorized "add")
                    // which may panic (e.g. overflow if the number on the slots happen to be very large).
                    T::Native::default()
                }
            })
            .collect();
        let data = ArrayData::new(
            T::DATA_TYPE,
            null_buf.len(),
            None,
            Some(null_buf.into()),
            0,
            vec![buffer],
            vec![],
        );
        PrimitiveArray::from(data)
    }
}
impl<T: ArrowPrimitiveType> PrimitiveArray<T> {
    /// Creates a [`PrimitiveArray`] from an iterator of trusted length.
    /// # Safety
    /// The iterator must be [`TrustedLen`](https://doc.rust-lang.org/std/iter/trait.TrustedLen.html).
    /// I.e. that `size_hint().1` correctly reports its length.
    #[inline]
    pub unsafe fn from_trusted_len_iter<I, P>(iter: I) -> Self
    where
        P: std::borrow::Borrow<Option<<T as ArrowPrimitiveType>::Native>>,
        I: IntoIterator<Item = P>,
    {
        let iterator = iter.into_iter();
        let (_, upper) = iterator.size_hint();
        // Panics if the iterator reports no upper bound at all.
        let len = upper.expect("trusted_len_unzip requires an upper limit");
        // Splits the optional values into a null bitmap and a values buffer.
        let (null, buffer) = trusted_len_unzip(iterator);
        let data =
            ArrayData::new(T::DATA_TYPE, len, None, Some(null), 0, vec![buffer], vec![]);
        PrimitiveArray::from(data)
    }
}
// TODO: the macro is needed here because we'd get "conflicting implementations" error
// otherwise with both `From<Vec<T::Native>>` and `From<Vec<Option<T::Native>>>`.
// We should revisit this in future.
macro_rules! def_numeric_from_vec {
    ( $ty:ident ) => {
        // All-valid conversion: every slot set, no null bitmap allocated.
        impl From<Vec<<$ty as ArrowPrimitiveType>::Native>> for PrimitiveArray<$ty> {
            fn from(data: Vec<<$ty as ArrowPrimitiveType>::Native>) -> Self {
                let array_data = ArrayData::builder($ty::DATA_TYPE)
                    .len(data.len())
                    .add_buffer(Buffer::from_slice_ref(&data))
                    .build();
                PrimitiveArray::from(array_data)
            }
        }
        // Constructs a primitive array from a vector. Should only be used for testing.
        impl From<Vec<Option<<$ty as ArrowPrimitiveType>::Native>>>
            for PrimitiveArray<$ty>
        {
            fn from(data: Vec<Option<<$ty as ArrowPrimitiveType>::Native>>) -> Self {
                // Delegates to the FromIterator impl, which builds the bitmap.
                PrimitiveArray::from_iter(data.iter())
            }
        }
    };
}
// Concrete `From<Vec<_>>` / `From<Vec<Option<_>>>` impls for every numeric
// and temporal primitive type.
def_numeric_from_vec!(Int8Type);
def_numeric_from_vec!(Int16Type);
def_numeric_from_vec!(Int32Type);
def_numeric_from_vec!(Int64Type);
def_numeric_from_vec!(UInt8Type);
def_numeric_from_vec!(UInt16Type);
def_numeric_from_vec!(UInt32Type);
def_numeric_from_vec!(UInt64Type);
def_numeric_from_vec!(Float32Type);
def_numeric_from_vec!(Float64Type);
def_numeric_from_vec!(Date32Type);
def_numeric_from_vec!(Date64Type);
def_numeric_from_vec!(Time32SecondType);
def_numeric_from_vec!(Time32MillisecondType);
def_numeric_from_vec!(Time64MicrosecondType);
def_numeric_from_vec!(Time64NanosecondType);
def_numeric_from_vec!(IntervalYearMonthType);
def_numeric_from_vec!(IntervalDayTimeType);
def_numeric_from_vec!(DurationSecondType);
def_numeric_from_vec!(DurationMillisecondType);
def_numeric_from_vec!(DurationMicrosecondType);
def_numeric_from_vec!(DurationNanosecondType);
def_numeric_from_vec!(TimestampSecondType);
def_numeric_from_vec!(TimestampMillisecondType);
def_numeric_from_vec!(TimestampMicrosecondType);
def_numeric_from_vec!(TimestampNanosecondType);
impl<T: ArrowTimestampType> PrimitiveArray<T> {
    /// Construct a timestamp array from a vec of i64 values and an optional timezone
    pub fn from_vec(data: Vec<i64>, timezone: Option<String>) -> Self {
        // The time unit comes from the concrete timestamp type parameter.
        let data_type = DataType::Timestamp(T::get_time_unit(), timezone);
        let array_data = ArrayData::builder(data_type)
            .len(data.len())
            .add_buffer(Buffer::from_slice_ref(&data))
            .build();
        Self::from(array_data)
    }
}
impl<T: ArrowTimestampType> PrimitiveArray<T> {
    /// Construct a timestamp array from a vec of Option<i64> values and an optional timezone
    pub fn from_opt_vec(data: Vec<Option<i64>>, timezone: Option<String>) -> Self {
        // TODO: duplicated from def_numeric_from_vec! macro, it looks possible to convert to generic
        let data_len = data.len();
        // Bitmap starts all-zero (all null); bits are set only for valid slots.
        let mut null_buf = MutableBuffer::new_null(data_len);
        let mut val_buf = MutableBuffer::new(data_len * mem::size_of::<i64>());
        {
            let null_slice = null_buf.as_slice_mut();
            for (i, v) in data.iter().enumerate() {
                if let Some(n) = v {
                    bit_util::set_bit(null_slice, i);
                    val_buf.push(*n);
                } else {
                    // Null slots still need a placeholder to keep the values
                    // buffer densely packed.
                    val_buf.push(0i64);
                }
            }
        }
        let array_data =
            ArrayData::builder(DataType::Timestamp(T::get_time_unit(), timezone))
                .len(data_len)
                .add_buffer(val_buf.into())
                .null_bit_buffer(null_buf.into())
                .build();
        PrimitiveArray::from(array_data)
    }
}
/// Constructs a `PrimitiveArray` from an array data reference.
impl<T: ArrowPrimitiveType> From<ArrayData> for PrimitiveArray<T> {
    fn from(data: ArrayData) -> Self {
        // A primitive array owns exactly one buffer: the values. The validity
        // bitmap, if any, is tracked separately by `ArrayData`.
        assert_eq!(
            data.buffers().len(),
            1,
            "PrimitiveArray data should contain a single buffer only (values buffer)"
        );
        let ptr = data.buffers()[0].as_ptr();
        Self {
            data,
            // NOTE(review): `RawPtrBox::new` presumably requires `ptr` to be
            // aligned for `T::Native`; this relies on buffer allocations being
            // sufficiently aligned — confirm the upstream guarantee.
            raw_values: unsafe { RawPtrBox::new(ptr) },
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::thread;
use crate::buffer::Buffer;
use crate::datatypes::DataType;
#[test]
fn test_primitive_array_from_vec() {
let buf = Buffer::from_slice_ref(&[0, 1, 2, 3, 4]);
let arr = Int32Array::from(vec![0, 1, 2, 3, 4]);
assert_eq!(buf, arr.data.buffers()[0]);
assert_eq!(5, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(0, arr.null_count());
for i in 0..5 {
assert!(!arr.is_null(i));
assert!(arr.is_valid(i));
assert_eq!(i as i32, arr.value(i));
}
assert_eq!(64, arr.get_buffer_memory_size());
assert_eq!(136, arr.get_array_memory_size());
}
#[test]
fn test_primitive_array_from_vec_option() {
// Test building a primitive array with null values
let arr = Int32Array::from(vec![Some(0), None, Some(2), None, Some(4)]);
assert_eq!(5, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(2, arr.null_count());
for i in 0..5 {
if i % 2 == 0 {
assert!(!arr.is_null(i));
assert!(arr.is_valid(i));
assert_eq!(i as i32, arr.value(i));
} else {
assert!(arr.is_null(i));
assert!(!arr.is_valid(i));
}
}
assert_eq!(128, arr.get_buffer_memory_size());
assert_eq!(216, arr.get_array_memory_size());
}
#[test]
fn test_date64_array_from_vec_option() {
// Test building a primitive array with null values
// we use Int32 and Int64 as a backing array, so all Int32 and Int64 conventions
// work
let arr: PrimitiveArray<Date64Type> =
vec![Some(1550902545147), None, Some(1550902545147)].into();
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(1, arr.null_count());
for i in 0..3 {
if i % 2 == 0 {
assert!(!arr.is_null(i));
assert!(arr.is_valid(i));
assert_eq!(1550902545147, arr.value(i));
// roundtrip to and from datetime
assert_eq!(
1550902545147,
arr.value_as_datetime(i).unwrap().timestamp_millis()
);
} else {
assert!(arr.is_null(i));
assert!(!arr.is_valid(i));
}
}
}
#[test]
fn test_time32_millisecond_array_from_vec() {
// 1: 00:00:00.001
// 37800005: 10:30:00.005
// 86399210: 23:59:59.210
let arr: PrimitiveArray<Time32MillisecondType> =
vec![1, 37_800_005, 86_399_210].into();
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(0, arr.null_count());
let formatted = vec!["00:00:00.001", "10:30:00.005", "23:59:59.210"];
for (i, formatted) in formatted.iter().enumerate().take(3) {
// check that we can't create dates or datetimes from time instances
assert_eq!(None, arr.value_as_datetime(i));
assert_eq!(None, arr.value_as_date(i));
let time = arr.value_as_time(i).unwrap();
assert_eq!(*formatted, time.format("%H:%M:%S%.3f").to_string());
}
}
#[test]
fn test_time64_nanosecond_array_from_vec() {
// Test building a primitive array with null values
// we use Int32 and Int64 as a backing array, so all Int32 and Int64 conventions
// work
// 1e6: 00:00:00.001
// 37800005e6: 10:30:00.005
// 86399210e6: 23:59:59.210
let arr: PrimitiveArray<Time64NanosecondType> =
vec![1_000_000, 37_800_005_000_000, 86_399_210_000_000].into();
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(0, arr.null_count());
let formatted = vec!["00:00:00.001", "10:30:00.005", "23:59:59.210"];
for (i, item) in formatted.iter().enumerate().take(3) {
// check that we can't create dates or datetimes from time instances
assert_eq!(None, arr.value_as_datetime(i));
assert_eq!(None, arr.value_as_date(i));
let time = arr.value_as_time(i).unwrap();
assert_eq!(*item, time.format("%H:%M:%S%.3f").to_string());
}
}
#[test]
fn test_interval_array_from_vec() {
// intervals are currently not treated specially, but are Int32 and Int64 arrays
let arr = IntervalYearMonthArray::from(vec![Some(1), None, Some(-5)]);
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(1, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(1, arr.values()[0]);
assert!(arr.is_null(1));
assert_eq!(-5, arr.value(2));
assert_eq!(-5, arr.values()[2]);
// a day_time interval contains days and milliseconds, but we do not yet have accessors for the values
let arr = IntervalDayTimeArray::from(vec![Some(1), None, Some(-5)]);
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(1, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(1, arr.values()[0]);
assert!(arr.is_null(1));
assert_eq!(-5, arr.value(2));
assert_eq!(-5, arr.values()[2]);
}
#[test]
fn test_duration_array_from_vec() {
let arr = DurationSecondArray::from(vec![Some(1), None, Some(-5)]);
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(1, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(1, arr.values()[0]);
assert!(arr.is_null(1));
assert_eq!(-5, arr.value(2));
assert_eq!(-5, arr.values()[2]);
let arr = DurationMillisecondArray::from(vec![Some(1), None, Some(-5)]);
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(1, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(1, arr.values()[0]);
assert!(arr.is_null(1));
assert_eq!(-5, arr.value(2));
assert_eq!(-5, arr.values()[2]);
let arr = DurationMicrosecondArray::from(vec![Some(1), None, Some(-5)]);
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(1, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(1, arr.values()[0]);
assert!(arr.is_null(1));
assert_eq!(-5, arr.value(2));
assert_eq!(-5, arr.values()[2]);
let arr = DurationNanosecondArray::from(vec![Some(1), None, Some(-5)]);
assert_eq!(3, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(1, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(1, arr.values()[0]);
assert!(arr.is_null(1));
assert_eq!(-5, arr.value(2));
assert_eq!(-5, arr.values()[2]);
}
#[test]
fn test_timestamp_array_from_vec() {
let arr = TimestampSecondArray::from_vec(vec![1, -5], None);
assert_eq!(2, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(0, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(-5, arr.value(1));
assert_eq!(&[1, -5], arr.values());
let arr = TimestampMillisecondArray::from_vec(vec![1, -5], None);
assert_eq!(2, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(0, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(-5, arr.value(1));
assert_eq!(&[1, -5], arr.values());
let arr = TimestampMicrosecondArray::from_vec(vec![1, -5], None);
assert_eq!(2, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(0, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(-5, arr.value(1));
assert_eq!(&[1, -5], arr.values());
let arr = TimestampNanosecondArray::from_vec(vec![1, -5], None);
assert_eq!(2, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(0, arr.null_count());
assert_eq!(1, arr.value(0));
assert_eq!(-5, arr.value(1));
assert_eq!(&[1, -5], arr.values());
}
#[test]
fn test_primitive_array_slice() {
let arr = Int32Array::from(vec![
Some(0),
None,
Some(2),
None,
Some(4),
Some(5),
Some(6),
None,
None,
]);
assert_eq!(9, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(4, arr.null_count());
let arr2 = arr.slice(2, 5);
assert_eq!(5, arr2.len());
assert_eq!(2, arr2.offset());
assert_eq!(1, arr2.null_count());
for i in 0..arr2.len() {
assert_eq!(i == 1, arr2.is_null(i));
assert_eq!(i != 1, arr2.is_valid(i));
}
let int_arr2 = arr2.as_any().downcast_ref::<Int32Array>().unwrap();
assert_eq!(2, int_arr2.values()[0]);
assert_eq!(&[4, 5, 6], &int_arr2.values()[2..5]);
let arr3 = arr2.slice(2, 3);
assert_eq!(3, arr3.len());
assert_eq!(4, arr3.offset());
assert_eq!(0, arr3.null_count());
let int_arr3 = arr3.as_any().downcast_ref::<Int32Array>().unwrap();
assert_eq!(&[4, 5, 6], int_arr3.values());
assert_eq!(4, int_arr3.value(0));
assert_eq!(5, int_arr3.value(1));
assert_eq!(6, int_arr3.value(2));
}
#[test]
fn test_boolean_array_slice() {
let arr = BooleanArray::from(vec![
Some(true),
None,
Some(false),
None,
Some(true),
Some(false),
Some(true),
Some(false),
None,
Some(true),
]);
assert_eq!(10, arr.len());
assert_eq!(0, arr.offset());
assert_eq!(3, arr.null_count());
let arr2 = arr.slice(3, 5);
assert_eq!(5, arr2.len());
assert_eq!(3, arr2.offset());
assert_eq!(1, arr2.null_count());
let bool_arr = arr2.as_any().downcast_ref::<BooleanArray>().unwrap();
assert_eq!(false, bool_arr.is_valid(0));
assert_eq!(true, bool_arr.is_valid(1));
assert_eq!(true, bool_arr.value(1));
assert_eq!(true, bool_arr.is_valid(2));
assert_eq!(false, bool_arr.value(2));
assert_eq!(true, bool_arr.is_valid(3));
assert_eq!(true, bool_arr.value(3));
assert_eq!(true, bool_arr.is_valid(4));
assert_eq!(false, bool_arr.value(4));
}
#[test]
fn test_int32_fmt_debug() {
let arr = Int32Array::from(vec![0, 1, 2, 3, 4]);
assert_eq!(
"PrimitiveArray<Int32>\n[\n 0,\n 1,\n 2,\n 3,\n 4,\n]",
format!("{:?}", arr)
);
}
#[test]
fn test_fmt_debug_up_to_20_elements() {
(1..=20).for_each(|i| {
let values = (0..i).collect::<Vec<i16>>();
let array_expected = format!(
"PrimitiveArray<Int16>\n[\n{}\n]",
values
.iter()
.map(|v| { format!(" {},", v) })
.collect::<Vec<String>>()
.join("\n")
);
let array = Int16Array::from(values);
assert_eq!(array_expected, format!("{:?}", array));
})
}
#[test]
fn test_int32_with_null_fmt_debug() {
let mut builder = Int32Array::builder(3);
builder.append_slice(&[0, 1]).unwrap();
builder.append_null().unwrap();
builder.append_slice(&[3, 4]).unwrap();
let arr = builder.finish();
assert_eq!(
"PrimitiveArray<Int32>\n[\n 0,\n 1,\n null,\n 3,\n 4,\n]",
format!("{:?}", arr)
);
}
#[test]
fn test_timestamp_fmt_debug() {
let arr: PrimitiveArray<TimestampMillisecondType> =
TimestampMillisecondArray::from_vec(
vec![1546214400000, 1546214400000, -1546214400000],
None,
);
assert_eq!(
"PrimitiveArray<Timestamp(Millisecond, None)>\n[\n 2018-12-31T00:00:00,\n 2018-12-31T00:00:00,\n 1921-01-02T00:00:00,\n]",
format!("{:?}", arr)
);
}
#[test]
fn test_date32_fmt_debug() {
let arr: PrimitiveArray<Date32Type> = vec![12356, 13548, -365].into();
assert_eq!(
"PrimitiveArray<Date32>\n[\n 2003-10-31,\n 2007-02-04,\n 1969-01-01,\n]",
format!("{:?}", arr)
);
}
#[test]
fn test_time32second_fmt_debug() {
let arr: PrimitiveArray<Time32SecondType> = vec![7201, 60054].into();
assert_eq!(
"PrimitiveArray<Time32(Second)>\n[\n 02:00:01,\n 16:40:54,\n]",
format!("{:?}", arr)
);
}
#[test]
#[should_panic(expected = "invalid time")]
fn test_time32second_invalid_neg() {
// The panic should come from chrono, not from arrow
let arr: PrimitiveArray<Time32SecondType> = vec![-7201, -60054].into();
println!("{:?}", arr);
}
#[test]
fn test_primitive_array_builder() {
// Test building a primitive array with ArrayData builder and offset
let buf = Buffer::from_slice_ref(&[0, 1, 2, 3, 4]);
let buf2 = buf.clone();
let data = ArrayData::builder(DataType::Int32)
.len(5)
.offset(2)
.add_buffer(buf)
.build();
let arr = Int32Array::from(data);
assert_eq!(buf2, arr.data.buffers()[0]);
assert_eq!(5, arr.len());
assert_eq!(0, arr.null_count());
for i in 0..3 {
assert_eq!((i + 2) as i32, arr.value(i));
}
}
#[test]
fn test_primitive_from_iter_values() {
// Test building a primitive array with from_iter_values
let arr: PrimitiveArray<Int32Type> = PrimitiveArray::from_iter_values(0..10);
assert_eq!(10, arr.len());
assert_eq!(0, arr.null_count());
for i in 0..10i32 {
assert_eq!(i, arr.value(i as usize));
}
}
#[test]
fn test_primitive_array_from_unbound_iter() {
// iterator that doesn't declare (upper) size bound
let value_iter = (0..)
.scan(0usize, |pos, i| {
if *pos < 10 {
*pos += 1;
Some(Some(i))
} else {
// actually returns up to 10 values
None
}
})
// limited using take()
.take(100);
let (_, upper_size_bound) = value_iter.size_hint();
// the upper bound, defined by take above, is 100
assert_eq!(upper_size_bound, Some(100));
let primitive_array: PrimitiveArray<Int32Type> = value_iter.collect();
// but the actual number of items in the array should be 10
assert_eq!(primitive_array.len(), 10);
}
#[test]
#[should_panic(expected = "PrimitiveArray data should contain a single buffer only \
(values buffer)")]
fn test_primitive_array_invalid_buffer_len() {
let data = ArrayData::builder(DataType::Int32).len(5).build();
Int32Array::from(data);
}
#[test]
fn test_access_array_concurrently() {
let a = Int32Array::from(vec![5, 6, 7, 8, 9]);
let ret = thread::spawn(move || a.value(3)).join();
assert!(ret.is_ok());
assert_eq!(8, ret.ok().unwrap());
}
}
| 35.18836 | 137 | 0.564731 |
69f166342f219f706edf8d3db364bfa0b7c57b4b | 204 | mod error;
mod parser;
mod assembler;
pub use error::{Diagnostics, DiagnosticsType, ParserError, LabelErrorType, LabelError, AssemblerError};
pub use assembler::{Label, Symbol, ObjectCode, ProgramRepr};
| 29.142857 | 103 | 0.794118 |
7571a09d8e8d08a9af525c75258880d55d6e405a | 6,535 | //! Adapted from [`proc-macro2`](https://github.com/alexcrichton/proc-macro2).
use std::{
cell::RefCell,
fmt::{self, Debug},
path::PathBuf,
};
use crate::strnom::{skip_ws, Cursor, PResult};
thread_local! {
static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(Default::default());
}
/// Add file to source map and return lower bound
///
/// Use in the same thread
pub fn get_cursor<'a>(p: &PathBuf, rest: &'a str) -> Cursor<'a> {
SOURCE_MAP.with(|x| Cursor {
rest,
off: x.borrow_mut().add_file(p, rest).lo,
})
}
/// Reinitialize source map instance when run multiple times in the same thread
///
/// Use in the same thread
pub fn clean() {
SOURCE_MAP.with(|x| *x.borrow_mut() = Default::default());
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct LineColumn {
pub line: usize,
pub column: usize,
}
struct FileInfo {
name: PathBuf,
span: Span,
lines: Vec<usize>,
}
impl FileInfo {
    /// Converts an absolute source-map offset into a (line, column) pair
    /// within this file. Panics if the offset is outside the file's span.
    fn offset_line_column(&self, offset: usize) -> LineColumn {
        assert!(self.span_within(Span {
            lo: offset as u32,
            hi: offset as u32,
        }));
        // Work with the file-relative offset from here on.
        let offset = offset - self.span.lo as usize;
        match self.lines.binary_search(&offset) {
            // Exact hit on a line start: column 0 of that line (1-based).
            Ok(found) => LineColumn {
                line: found + 1,
                column: 0,
            },
            // Otherwise `idx` is the insertion point, so the offset belongs
            // to line `idx` and the column is measured from that line's start.
            Err(idx) => LineColumn {
                line: idx,
                column: offset - self.lines[idx - 1],
            },
        }
    }
    /// Returns ((start-of-lo-line, start-of-hi-line), (lo, hi) relative to
    /// those line starts) for a span inside this file.
    fn get_ranges(&self, span: Span) -> ((usize, usize), (usize, usize)) {
        assert!(self.span_within(span));
        // File-relative span bounds.
        let lo = (span.lo - self.span.lo) as usize;
        let hi = (span.hi - self.span.lo) as usize;
        // Offset of the start of the line containing `lo`.
        let lo_line = match self.lines.binary_search(&lo) {
            Ok(_) => lo,
            Err(idx) => self.lines[idx - 1],
        };
        // Offset of the start of the line containing `hi`; falls back to the
        // end of the file when `hi` sits past the last recorded line start.
        let hi_line = match self.lines.binary_search(&hi) {
            Ok(_) => hi,
            Err(idx) => self
                .lines
                .get(idx)
                .copied()
                .unwrap_or(self.span.hi as usize),
        };
        ((lo_line, hi_line), (lo - lo_line, hi - lo_line))
    }
    /// Whether `span` lies entirely within this file's span.
    fn span_within(&self, span: Span) -> bool {
        span.lo >= self.span.lo && span.hi <= self.span.hi
    }
}
/// Computes the offsets of each line in the given source string.
fn lines_offsets(s: &str) -> Vec<usize> {
    // Offset 0 always starts the first line; every subsequent line starts
    // one byte past a newline ('\n' is ASCII, so byte math is safe).
    let mut offsets = vec![0];
    offsets.extend(s.match_indices('\n').map(|(i, _)| i + 1));
    offsets
}
#[derive(Default)]
struct SourceMap {
files: Vec<FileInfo>,
}
impl SourceMap {
    /// Offset at which the next registered file should begin.
    fn next_start_pos(&self) -> u32 {
        // Leave a one-byte gap between consecutive files.
        self.files.last().map_or(0, |f| f.span.hi + 1)
    }
    /// Registers a source file and returns the span covering it.
    fn add_file(&mut self, name: &PathBuf, src: &str) -> Span {
        let lo = self.next_start_pos();
        let span = Span {
            lo,
            hi: lo + (src.len() as u32),
        };
        self.files.push(FileInfo {
            name: name.to_owned(),
            span,
            lines: lines_offsets(src),
        });
        span
    }
    /// Looks up the file containing `span`.
    ///
    /// Panics if the span does not belong to any registered file.
    fn fileinfo(&self, span: Span) -> &FileInfo {
        self.files
            .iter()
            .find(|file| file.span_within(span))
            .unwrap_or_else(|| panic!("Invalid span with no related FileInfo!"))
    }
}
#[derive(Clone, Copy, PartialEq)]
pub struct Span {
    /// Inclusive start offset into the thread-local source map.
    pub lo: u32,
    /// Exclusive end offset into the thread-local source map.
    pub hi: u32,
}
// Don't allow `Span` to transfer between thread
// impl !Send for Span {}
// impl !Sync for Span {}
impl Span {
    /// Assume a <= b
    #[inline]
    pub fn from_cursor(a: Cursor, b: Cursor) -> Self {
        debug_assert!(a.off <= b.off);
        Self {
            lo: a.off,
            hi: b.off,
        }
    }
    /// Span starting at cursor `i` and extending `len` bytes.
    pub fn from_len(i: Cursor, len: usize) -> Self {
        Self {
            lo: i.off,
            hi: i.off + (len as u32),
        }
    }
    /// Span for a `(lo, hi)` byte range relative to cursor `i`.
    pub fn from_range(i: Cursor, (lo, hi): (usize, usize)) -> Self {
        Self {
            lo: i.off + (lo as u32),
            hi: i.off + (hi as u32),
        }
    }
    /// Translates a `proc_macro2::Span` (line/column based, relative to the
    /// text this span covers) into absolute source-map offsets.
    pub fn join_proc(self, proc: proc_macro2::Span) -> Self {
        let start = self.start();
        let p_start = proc.start();
        let p_end = proc.end();
        // On the first line, the column is simply an offset from `self.lo`;
        // on later lines, resolve via the file's recorded line starts.
        let lo = if p_start.line == 1 {
            self.lo + p_start.column as u32
        } else {
            SOURCE_MAP.with(|cm| {
                let cm = cm.borrow();
                let fi = cm.fileinfo(self);
                // `- 2` converts both 1-based line numbers to a 0-based index.
                fi.lines[start.line + p_start.line - 2] as u32 + p_start.column as u32
            })
        };
        let hi = if p_end.line == 1 {
            self.lo + p_end.column as u32
        } else {
            SOURCE_MAP.with(|cm| {
                let cm = cm.borrow();
                let fi = cm.fileinfo(self);
                fi.lines[start.line + p_end.line - 2] as u32 + p_end.column as u32
            })
        };
        Self { lo, hi }
    }
    /// Returns line bounds and range in bounds
    pub fn range_in_file(self) -> ((usize, usize), (usize, usize)) {
        SOURCE_MAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(self);
            fi.get_ranges(self)
        })
    }
    /// Path of the file this span was registered under.
    pub fn file_path(self) -> PathBuf {
        SOURCE_MAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(self);
            fi.name.clone()
        })
    }
    /// (line, column) of the span's start within its file.
    pub fn start(self) -> LineColumn {
        SOURCE_MAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(self);
            fi.offset_line_column(self.lo as usize)
        })
    }
}
impl<'a> From<Cursor<'a>> for Span {
fn from(c: Cursor) -> Self {
Self::from_cursor(c, c)
}
}
impl fmt::Debug for Span {
    // Mirrors proc-macro2's span formatting: `bytes(lo..hi)`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "bytes({}..{})", self.lo, self.hi)
    }
}
/// A parsed value of type `T` paired with the source span it came from.
#[derive(Clone, Debug, PartialEq)]
pub struct S<T: Debug + PartialEq + Clone>(pub(super) T, pub(super) Span);
impl<T: Debug + PartialEq + Clone> S<T> {
    /// Returns the wrapped value.
    pub fn t(&self) -> &T {
        &self.0
    }
    /// Returns the span covering the wrapped value.
    pub fn span(&self) -> Span {
        self.1
    }
}
/// Runs parser `f` after skipping leading whitespace and wraps its output
/// in an `S` carrying the span of the consumed (non-whitespace) input.
pub(crate) fn spanned<'a, T: Debug + PartialEq + Clone>(
    input: Cursor<'a>,
    f: fn(Cursor<'a>) -> PResult<'a, T>,
) -> PResult<'a, S<T>> {
    let trimmed = skip_ws(input);
    let lo = trimmed.off;
    let (rest, value) = f(trimmed)?;
    // The span runs from the first non-whitespace byte to where `f` stopped.
    Ok((rest, S(value, Span { lo, hi: rest.off })))
}
| 25.428016 | 86 | 0.504208 |
1a7e327ed89143bde9d6255ae0709b70fca4acc7 | 18,101 | use rand::{
distributions::{Distribution, Standard},
Rng,
};
use std::{
cmp::{Ord, Ordering, PartialOrd},
fmt,
io::{Read, Result as IoResult, Write},
marker::PhantomData,
ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign},
};
use crate::{
bits::{FromBits, ToBits},
bytes::{FromBytes, ToBytes},
fields::{Field, FpParameters, PrimeField},
CanonicalDeserialize, CanonicalDeserializeWithFlags, CanonicalSerialize,
CanonicalSerializeWithFlags, EmptyFlags, Error, Flags, SemanticallyValid, SerializationError,
UniformRand,
};
use serde::{Deserialize, Serialize};
/// Model for cubic extension field of a prime field F=BasePrimeField
/// F3 = F[X]/(X^3-alpha),
/// with alpha being a (quadratic) "non-residue" (for which X^3-alpha is irreducible).
///
/// We implement inversion according to
/// Beuchat, et al., High-Speed Software Implementation of the Optimal Ate Pairing over Barreto–Naehrig Curves
/// https://eprint.iacr.org/2010/354.pdf,
/// and square and Karatsuba multiplication according to
/// Devegili, et al., Multiplication and Squaring on Abstract Pairing-Friendly Fields
/// https://eprint.iacr.org/2006/471.pdf
pub trait CubicExtParameters: 'static + Send + Sync {
    /// The prime field that this cubic extension is eventually an extension of.
    type BasePrimeField: PrimeField;
    /// The base field that this field is a cubic extension of.
    type BaseField: Field;
    /// The type of the coefficients for an efficient implementation of the
    /// Frobenius endomorphism.
    type FrobCoeff: Field;
    /// The degree of the extension over the base prime field.
    const DEGREE_OVER_BASE_PRIME_FIELD: usize;
    /// The cubic non-residue used to construct the extension.
    const NONRESIDUE: Self::BaseField;
    /// Coefficients for the Frobenius automorphism.
    const FROBENIUS_COEFF_C1: &'static [Self::FrobCoeff];
    const FROBENIUS_COEFF_C2: &'static [Self::FrobCoeff];
    /// A specializable method for multiplying an element of the base field by
    /// the quadratic non-residue. This is used in multiplication and squaring.
    // Default implementation is a plain field multiplication; implementors
    // may override it with a cheaper specialized routine.
    #[inline(always)]
    fn mul_base_field_by_nonresidue(fe: &Self::BaseField) -> Self::BaseField {
        Self::NONRESIDUE * fe
    }
    /// A specializable method for multiplying an element of the base field by
    /// the appropriate Frobenius coefficient.
    // Applies the power-th Frobenius coefficients to `c1` and `c2` in place.
    fn mul_base_field_by_frob_coeff(
        c1: &mut Self::BaseField,
        c2: &mut Self::BaseField,
        power: usize,
    );
}
/// An element `c0 + c1 * X + c2 * X^2` of the cubic extension F[X]/(X^3 - alpha).
#[derive(Derivative)]
#[derivative(
    Default(bound = "P: CubicExtParameters"),
    Hash(bound = "P: CubicExtParameters"),
    Clone(bound = "P: CubicExtParameters"),
    Copy(bound = "P: CubicExtParameters"),
    Debug(bound = "P: CubicExtParameters"),
    PartialEq(bound = "P: CubicExtParameters"),
    Eq(bound = "P: CubicExtParameters")
)]
#[derive(Serialize, Deserialize)]
pub struct CubicExtField<P: CubicExtParameters> {
    /// Coefficient of 1.
    pub c0: P::BaseField,
    /// Coefficient of X.
    pub c1: P::BaseField,
    /// Coefficient of X^2.
    pub c2: P::BaseField,
    // Zero-sized marker tying the element to its parameter set; excluded
    // from Debug output and (de)serialization.
    #[derivative(Debug = "ignore")]
    #[serde(skip)]
    #[doc(hidden)]
    pub _parameters: PhantomData<P>,
}
impl<P: CubicExtParameters> CubicExtField<P> {
    /// Constructs `c0 + c1 * X + c2 * X^2`.
    pub fn new(c0: P::BaseField, c1: P::BaseField, c2: P::BaseField) -> Self {
        CubicExtField {
            c0,
            c1,
            c2,
            _parameters: PhantomData,
        }
    }
    /// Multiplies every coefficient by a base-field scalar in place.
    pub fn mul_assign_by_basefield(&mut self, value: &P::BaseField) {
        self.c0.mul_assign(value);
        self.c1.mul_assign(value);
        self.c2.mul_assign(value);
    }
    /// Calculate the norm of an element with respect to the base field `P::BaseField`.
    // Computed as the product of the Galois conjugates:
    // self * self^p * self^{p^2}, which lands in the base field (c1 = c2 = 0).
    pub fn norm(&self) -> P::BaseField {
        let mut self_to_p = *self;
        self_to_p.frobenius_map(1);
        let mut self_to_p2 = *self;
        self_to_p2.frobenius_map(2);
        self_to_p *= &(self_to_p2 * self);
        debug_assert!(self_to_p.c1.is_zero() && self_to_p.c2.is_zero());
        self_to_p.c0
    }
}
impl<P: CubicExtParameters> Field for CubicExtField<P> {
    type BasePrimeField = P::BasePrimeField;
    fn zero() -> Self {
        CubicExtField {
            c0: P::BaseField::zero(),
            c1: P::BaseField::zero(),
            c2: P::BaseField::zero(),
            _parameters: PhantomData,
        }
    }
    fn is_zero(&self) -> bool {
        self.c0.is_zero() && self.c1.is_zero() && self.c2.is_zero()
    }
    // The multiplicative identity: 1 + 0*X + 0*X^2.
    fn one() -> Self {
        CubicExtField {
            c0: P::BaseField::one(),
            c1: P::BaseField::zero(),
            c2: P::BaseField::zero(),
            _parameters: PhantomData,
        }
    }
    fn is_one(&self) -> bool {
        self.c0.is_one() && self.c1.is_zero() && self.c2.is_zero()
    }
    // Parity of the most significant non-zero coefficient (c2 first, then
    // c1, then c0).
    fn is_odd(&self) -> bool {
        self.c2.is_odd()
            || (self.c2.is_zero() && self.c1.is_odd())
            || (self.c2.is_zero() && self.c1.is_zero() && self.c0.is_odd())
    }
    #[inline]
    fn characteristic<'a>() -> &'a [u64] {
        P::BaseField::characteristic()
    }
    fn double(&self) -> Self {
        let mut result = self.clone();
        result.double_in_place();
        result
    }
    // Doubling is coefficient-wise.
    fn double_in_place(&mut self) -> &mut Self {
        self.c0.double_in_place();
        self.c1.double_in_place();
        self.c2.double_in_place();
        self
    }
    fn square(&self) -> Self {
        let mut result = self.clone();
        result.square_in_place();
        result
    }
    fn square_in_place(&mut self) -> &mut Self {
        // Devegili OhEig Scott Dahab --- Multiplication and Squaring on
        // AbstractPairing-Friendly
        // Fields.pdf; Section 4 (CH-SQR2)
        let a = self.c0.clone();
        let b = self.c1.clone();
        let c = self.c2.clone();
        let s0 = a.square();
        let ab = a * &b;
        let s1 = ab.double();
        let s2 = (a - &b + &c).square();
        let bc = b * &c;
        let s3 = bc.double();
        let s4 = c.square();
        // Reduction by X^3 = alpha folds the X^3 and X^4 terms back down
        // via mul_base_field_by_nonresidue.
        self.c0 = s0 + &P::mul_base_field_by_nonresidue(&s3);
        self.c1 = s1 + &P::mul_base_field_by_nonresidue(&s4);
        self.c2 = s1 + &s2 + &s3 - &s0 - &s4;
        self
    }
    fn inverse(&self) -> Option<Self> {
        if self.is_zero() {
            None
        } else {
            // From "High-Speed Software Implementation of the Optimal Ate AbstractPairing
            // over
            // Barreto-Naehrig Curves"; Algorithm 17
            let t0 = self.c0.square();
            let t1 = self.c1.square();
            let t2 = self.c2.square();
            let mut t3 = self.c0.clone();
            t3.mul_assign(&self.c1);
            let mut t4 = self.c0.clone();
            t4.mul_assign(&self.c2);
            let mut t5 = self.c1.clone();
            t5.mul_assign(&self.c2);
            let n5 = P::mul_base_field_by_nonresidue(&t5);
            let mut s0 = t0.clone();
            s0.sub_assign(&n5);
            let mut s1 = P::mul_base_field_by_nonresidue(&t2);
            s1.sub_assign(&t3);
            let mut s2 = t1.clone();
            s2.sub_assign(&t4); // typo in paper referenced above. should be "-" as per Scott, but is "*"
            let mut a1 = self.c2.clone();
            a1.mul_assign(&s1);
            let mut a2 = self.c1.clone();
            a2.mul_assign(&s2);
            let mut a3 = a1.clone();
            a3.add_assign(&a2);
            a3 = P::mul_base_field_by_nonresidue(&a3);
            let mut t6 = self.c0.clone();
            t6.mul_assign(&s0);
            t6.add_assign(&a3);
            // NOTE(review): the Option returned by inverse_in_place is
            // discarded; the algorithm's denominator is non-zero for a
            // non-zero element, but confirm no debug assertion is wanted here.
            t6.inverse_in_place();
            let mut c0 = t6.clone();
            c0.mul_assign(&s0);
            let mut c1 = t6.clone();
            c1.mul_assign(&s1);
            let mut c2 = t6.clone();
            c2.mul_assign(&s2);
            Some(Self::new(c0, c1, c2))
        }
    }
    fn inverse_in_place(&mut self) -> Option<&mut Self> {
        if let Some(inverse) = self.inverse() {
            *self = inverse;
            Some(self)
        } else {
            None
        }
    }
    // Coefficient-wise Frobenius, then the parameter-supplied coefficient
    // multiplication on c1 and c2.
    fn frobenius_map(&mut self, power: usize) {
        self.c0.frobenius_map(power);
        self.c1.frobenius_map(power);
        self.c2.frobenius_map(power);
        P::mul_base_field_by_frob_coeff(&mut self.c1, &mut self.c2, power);
    }
    // Splits the byte slice into thirds: one chunk per coefficient, with the
    // flags read alongside the final chunk.
    #[inline]
    fn from_random_bytes_with_flags<F: Flags>(bytes: &[u8]) -> Option<(Self, F)> {
        let split_at = bytes.len() / 3;
        if let Some(c0) = P::BaseField::from_random_bytes(&bytes[..split_at]) {
            if let Some(c1) = P::BaseField::from_random_bytes(&bytes[split_at..2 * split_at]) {
                if let Some((c2, flags)) =
                    P::BaseField::from_random_bytes_with_flags(&bytes[2 * split_at..])
                {
                    return Some((CubicExtField::new(c0, c1, c2), flags));
                }
            }
        }
        None
    }
    #[inline]
    fn from_random_bytes(bytes: &[u8]) -> Option<Self> {
        Self::from_random_bytes_with_flags::<EmptyFlags>(bytes).map(|f| f.0)
    }
}
/// `CubicExtField` elements are ordered lexicographically.
impl<P: CubicExtParameters> Ord for CubicExtField<P> {
    /// Compares coefficients most-significant first: `c2`, then `c1`, then `c0`.
    #[inline(always)]
    fn cmp(&self, other: &Self) -> Ordering {
        self.c2
            .cmp(&other.c2)
            .then_with(|| self.c1.cmp(&other.c1))
            .then_with(|| self.c0.cmp(&other.c0))
    }
}
impl<P: CubicExtParameters> PartialOrd for CubicExtField<P> {
    /// Delegates to the total order defined by `Ord`.
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl<P: CubicExtParameters> From<u128> for CubicExtField<P>
where
P::BaseField: From<u128>,
{
fn from(other: u128) -> Self {
let fe: P::BaseField = other.into();
Self::new(fe, P::BaseField::zero(), P::BaseField::zero())
}
}
impl<P: CubicExtParameters> From<u64> for CubicExtField<P>
where
P::BaseField: From<u64>,
{
fn from(other: u64) -> Self {
let fe: P::BaseField = other.into();
Self::new(fe, P::BaseField::zero(), P::BaseField::zero())
}
}
impl<P: CubicExtParameters> From<u32> for CubicExtField<P>
where
P::BaseField: From<u32>,
{
fn from(other: u32) -> Self {
let fe: P::BaseField = other.into();
Self::new(fe, P::BaseField::zero(), P::BaseField::zero())
}
}
impl<P: CubicExtParameters> From<u16> for CubicExtField<P>
where
P::BaseField: From<u16>,
{
fn from(other: u16) -> Self {
let fe: P::BaseField = other.into();
Self::new(fe, P::BaseField::zero(), P::BaseField::zero())
}
}
impl<P: CubicExtParameters> From<u8> for CubicExtField<P>
where
P::BaseField: From<u8>,
{
fn from(other: u8) -> Self {
let fe: P::BaseField = other.into();
Self::new(fe, P::BaseField::zero(), P::BaseField::zero())
}
}
impl<P: CubicExtParameters> ToBytes for CubicExtField<P> {
    /// Writes the coefficients in `c0, c1, c2` order.
    #[inline]
    fn write<W: Write>(&self, mut writer: W) -> IoResult<()> {
        self.c0.write(&mut writer)?;
        self.c1.write(&mut writer)?;
        self.c2.write(writer)
    }
}
impl<P: CubicExtParameters> FromBytes for CubicExtField<P> {
    /// Reads the coefficients in `c0, c1, c2` order (inverse of `ToBytes`).
    #[inline]
    fn read<R: Read>(mut reader: R) -> IoResult<Self> {
        let c0 = P::BaseField::read(&mut reader)?;
        let c1 = P::BaseField::read(&mut reader)?;
        let c2 = P::BaseField::read(reader)?;
        Ok(CubicExtField::new(c0, c1, c2))
    }
}
impl<P: CubicExtParameters> ToBits for CubicExtField<P> {
    /// Serializes the coefficients to bits in `c0 || c1 || c2` order.
    fn write_bits(&self) -> Vec<bool> {
        let mut out = self.c0.write_bits();
        out.append(&mut self.c1.write_bits());
        out.append(&mut self.c2.write_bits());
        out
    }
}
impl<P: CubicExtParameters> FromBits for CubicExtField<P> {
    /// Reads three equally sized coefficient chunks in `c0, c1, c2` order.
    fn read_bits(bits: Vec<bool>) -> Result<Self, Error> {
        // Number of bits used to encode one base-field coefficient.
        let coeff_bits = (P::DEGREE_OVER_BASE_PRIME_FIELD / 3)
            * <P::BasePrimeField as PrimeField>::Params::MODULUS_BITS as usize;
        Ok(CubicExtField::new(
            P::BaseField::read_bits(bits[..coeff_bits].to_vec())?,
            P::BaseField::read_bits(bits[coeff_bits..(2 * coeff_bits)].to_vec())?,
            P::BaseField::read_bits(bits[(2 * coeff_bits)..].to_vec())?,
        ))
    }
}
impl<P: CubicExtParameters> CanonicalSerializeWithFlags for CubicExtField<P> {
    /// Serializes `c0`, `c1` plainly and packs `flags` alongside `c2`.
    #[inline]
    fn serialize_with_flags<W: Write, F: Flags>(
        &self,
        mut writer: W,
        flags: F,
    ) -> Result<(), SerializationError> {
        CanonicalSerialize::serialize(&self.c0, &mut writer)?;
        CanonicalSerialize::serialize(&self.c1, &mut writer)?;
        self.c2.serialize_with_flags(&mut writer, flags)?;
        Ok(())
    }
    #[inline]
    fn serialized_size_with_flags<F: Flags>(&self) -> usize {
        self.c0.serialized_size()
            + self.c1.serialized_size()
            + self.c2.serialized_size_with_flags::<F>()
    }
}
impl<P: CubicExtParameters> CanonicalSerialize for CubicExtField<P> {
    /// Plain serialization is flag-carrying serialization with empty flags.
    #[inline]
    fn serialize<W: Write>(&self, writer: W) -> Result<(), SerializationError> {
        self.serialize_with_flags(writer, EmptyFlags)
    }
    #[inline]
    fn serialized_size(&self) -> usize {
        self.serialized_size_with_flags::<EmptyFlags>()
    }
}
impl<P: CubicExtParameters> CanonicalDeserializeWithFlags for CubicExtField<P> {
    /// Inverse of `serialize_with_flags`: flags are recovered from `c2`.
    #[inline]
    fn deserialize_with_flags<R: Read, F: Flags>(
        mut reader: R,
    ) -> Result<(Self, F), SerializationError> {
        let c0 = CanonicalDeserialize::deserialize(&mut reader)?;
        let c1 = CanonicalDeserialize::deserialize(&mut reader)?;
        let (c2, flags) = CanonicalDeserializeWithFlags::deserialize_with_flags(&mut reader)?;
        Ok((CubicExtField::new(c0, c1, c2), flags))
    }
}
impl<P: CubicExtParameters> CanonicalDeserialize for CubicExtField<P> {
    /// Reads the three coefficients in `c0, c1, c2` order, ignoring flags.
    #[inline]
    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
        let c0: P::BaseField = CanonicalDeserialize::deserialize(&mut reader)?;
        let c1: P::BaseField = CanonicalDeserialize::deserialize(&mut reader)?;
        let c2: P::BaseField = CanonicalDeserialize::deserialize(&mut reader)?;
        Ok(CubicExtField::new(c0, c1, c2))
    }
}
impl<P: CubicExtParameters> SemanticallyValid for CubicExtField<P> {
    /// An extension element is valid iff every coefficient is valid.
    #[inline]
    fn is_valid(&self) -> bool {
        [&self.c0, &self.c1, &self.c2].iter().all(|c| c.is_valid())
    }
}
impl<P: CubicExtParameters> Neg for CubicExtField<P> {
    type Output = Self;
    /// Negates every coefficient.
    #[inline]
    fn neg(mut self) -> Self {
        self.c0 = self.c0.neg();
        self.c1 = self.c1.neg();
        self.c2 = self.c2.neg();
        self
    }
}
impl<P: CubicExtParameters> Distribution<CubicExtField<P>> for Standard {
    /// Samples the three coefficients independently, in `c0, c1, c2` order.
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> CubicExtField<P> {
        let c0 = UniformRand::rand(rng);
        let c1 = UniformRand::rand(rng);
        let c2 = UniformRand::rand(rng);
        CubicExtField::new(c0, c1, c2)
    }
}
impl<'a, P: CubicExtParameters> Add<&'a CubicExtField<P>> for CubicExtField<P> {
type Output = Self;
#[inline]
fn add(self, other: &Self) -> Self {
let mut result = self;
result.add_assign(other);
result
}
}
impl<'a, P: CubicExtParameters> Sub<&'a CubicExtField<P>> for CubicExtField<P> {
type Output = Self;
#[inline]
fn sub(self, other: &Self) -> Self {
let mut result = self;
result.sub_assign(other);
result
}
}
impl<'a, P: CubicExtParameters> Mul<&'a CubicExtField<P>> for CubicExtField<P> {
type Output = Self;
#[inline]
fn mul(self, other: &Self) -> Self {
let mut result = self;
result.mul_assign(other);
result
}
}
impl<'a, P: CubicExtParameters> Div<&'a CubicExtField<P>> for CubicExtField<P> {
type Output = Self;
#[inline]
fn div(self, other: &Self) -> Self {
let mut result = self;
result.mul_assign(&other.inverse().unwrap());
result
}
}
impl<'a, P: CubicExtParameters> AddAssign<&'a Self> for CubicExtField<P> {
    /// Coefficient-wise addition.
    #[inline]
    fn add_assign(&mut self, other: &Self) {
        self.c0 += &other.c0;
        self.c1 += &other.c1;
        self.c2 += &other.c2;
    }
}
impl<'a, P: CubicExtParameters> SubAssign<&'a Self> for CubicExtField<P> {
    /// Coefficient-wise subtraction.
    #[inline]
    fn sub_assign(&mut self, other: &Self) {
        self.c0 -= &other.c0;
        self.c1 -= &other.c1;
        self.c2 -= &other.c2;
    }
}
impl_additive_ops_from_ref!(CubicExtField, CubicExtParameters);
impl_multiplicative_ops_from_ref!(CubicExtField, CubicExtParameters);
impl<'a, P: CubicExtParameters> MulAssign<&'a Self> for CubicExtField<P> {
    #[inline]
    fn mul_assign(&mut self, other: &Self) {
        // Devegili OhEig Scott Dahab --- Multiplication and Squaring on
        // AbstractPairing-Friendly
        // Fields.pdf; Section 4 (Karatsuba)
        //
        // Multiplying (d + eX + fX^2) by (a + bX + cX^2) with only 6 base
        // multiplications; the X^3/X^4 terms are reduced via X^3 = alpha.
        let a = other.c0;
        let b = other.c1;
        let c = other.c2;
        let d = self.c0;
        let e = self.c1;
        let f = self.c2;
        let ad = d * &a;
        let be = e * &b;
        let cf = f * &c;
        // x = ec + fb: the X^3 coefficient before reduction.
        let x = (e + &f) * &(b + &c) - &be - &cf;
        // y = db + ea: the X^1 cross terms.
        let y = (d + &e) * &(a + &b) - &ad - &be;
        // z = dc + fa + be: the X^2 coefficient.
        let z = (d + &f) * &(a + &c) - &ad + &be - &cf;
        // cf is the X^4 coefficient; it folds into c1 after reduction.
        self.c0 = ad + &P::mul_base_field_by_nonresidue(&x);
        self.c1 = y + &P::mul_base_field_by_nonresidue(&cf);
        self.c2 = z;
    }
}
impl<'a, P: CubicExtParameters> DivAssign<&'a Self> for CubicExtField<P> {
    /// Division as multiplication by the inverse.
    ///
    /// Panics (via `unwrap`) if `other` is zero.
    #[inline]
    fn div_assign(&mut self, other: &Self) {
        self.mul_assign(&other.inverse().unwrap());
    }
}
impl<P: CubicExtParameters> fmt::Display for CubicExtField<P> {
    // Renders the three coefficients in `c0, c1, c2` order.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "CubicExtField({}, {}, {})", self.c0, self.c1, self.c2)
    }
}
| 30.679661 | 110 | 0.584277 |
5b6f4dd4e8aba80a8b1e8ea4076ded89aade1e49 | 2,521 | #![feature(test)]
extern crate test;
use log::*;
use solana_program_runtime::{ExecuteDetailsTimings, PreAccount};
use solana_sdk::{account::AccountSharedData, pubkey, rent::Rent};
use test::Bencher;
#[bench]
fn bench_verify_account_changes_data(bencher: &mut Bencher) {
    solana_logger::setup();
    let owner = pubkey::new_rand();
    let non_owner = pubkey::new_rand();
    let pre = PreAccount::new(
        &pubkey::new_rand(),
        &AccountSharedData::new(0, BUFSIZE, &owner),
    );
    let post = AccountSharedData::new(0, BUFSIZE, &owner);
    // Sanity check: an unchanged account verifies cleanly before we bench it.
    assert_eq!(
        pre.verify(
            &owner,
            false,
            &Rent::default(),
            &post,
            &mut ExecuteDetailsTimings::default(),
            false,
        ),
        Ok(())
    );
    // this one should be faster
    bencher.iter(|| {
        pre.verify(
            &owner,
            false,
            &Rent::default(),
            &post,
            &mut ExecuteDetailsTimings::default(),
            false,
        )
        .unwrap();
    });
    let summary = bencher.bench(|_bencher| {}).unwrap();
    info!("data no change by owner: {} ns/iter", summary.median);
    // Baseline: raw comparison of two BUFSIZE-length byte buffers.
    // Fix: this previously used `vec![BUFSIZE]`, which builds a ONE-element
    // vector containing the value BUFSIZE — so the "data compare" number
    // measured a single-element comparison instead of a full buffer compare.
    let pre_data = vec![0u8; BUFSIZE];
    let post_data = vec![0u8; BUFSIZE];
    bencher.iter(|| pre_data == post_data);
    let summary = bencher.bench(|_bencher| {}).unwrap();
    info!("data compare {} ns/iter", summary.median);
    let pre = PreAccount::new(
        &pubkey::new_rand(),
        &AccountSharedData::new(0, BUFSIZE, &owner),
    );
    // Verification by a non-owner exercises the slower ownership checks.
    bencher.iter(|| {
        pre.verify(
            &non_owner,
            false,
            &Rent::default(),
            &post,
            &mut ExecuteDetailsTimings::default(),
            false,
        )
        .unwrap();
    });
    let summary = bencher.bench(|_bencher| {}).unwrap();
    info!("data no change by non owner: {} ns/iter", summary.median);
}
// 1 MiB + 127: the odd tail presumably exercises any non-aligned/chunked
// path in the zero-check — TODO confirm the intent of the +127.
const BUFSIZE: usize = 1024 * 1024 + 127;
// All-zero buffer: the positive case for is_zeroed.
static BUF0: [u8; BUFSIZE] = [0; BUFSIZE];
// All-ones buffer: the negative case for is_zeroed.
static BUF1: [u8; BUFSIZE] = [1; BUFSIZE];
// Benchmarks PreAccount::is_zeroed on an all-zero buffer (fast-accept path).
#[bench]
fn bench_is_zeroed(bencher: &mut Bencher) {
    bencher.iter(|| {
        PreAccount::is_zeroed(&BUF0);
    });
}
// Benchmarks PreAccount::is_zeroed on a non-zero buffer (early-reject path).
#[bench]
fn bench_is_zeroed_not(bencher: &mut Bencher) {
    bencher.iter(|| {
        PreAccount::is_zeroed(&BUF1);
    });
}
// Naive iterator-based baseline for comparison against is_zeroed (all zero).
#[bench]
fn bench_is_zeroed_by_iter(bencher: &mut Bencher) {
    bencher.iter(|| BUF0.iter().all(|item| *item == 0));
}
// Naive iterator-based baseline for comparison against is_zeroed (non-zero).
#[bench]
fn bench_is_zeroed_not_by_iter(bencher: &mut Bencher) {
    bencher.iter(|| BUF1.iter().all(|item| *item == 0));
}
| 25.21 | 69 | 0.563269 |
11bfcaeddf2bb20d66f904c02a9b25a4c2109207 | 1,535 | //! Low-level types used throughout the Darwinia.
#![warn(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use sp_runtime::{
generic,
traits::{BlakeTwo256, IdentifyAccount, Verify},
MultiSignature, OpaqueExtrinsic,
};
/// An index to a block.
pub type BlockNumber = u32;
/// Alias to 512-bit hash when used in the context of a transaction signature on the chain.
pub type Signature = MultiSignature;
/// Some way of identifying an account on the chain. We intentionally make it equivalent
/// to the public key of our transaction signing scheme.
pub type AccountId = <<Signature as Verify>::Signer as IdentifyAccount>::AccountId;
/// The type for looking up accounts. We don't expect more than 4 billion of them.
pub type AccountIndex = u32;
/// Balance of an account.
pub type Balance = u128;
/// Power of an account.
// NOTE(review): presumably the staking/voting weight unit of this runtime —
// confirm against the staking module before relying on this description.
pub type Power = u32;
/// Type used for expressing timestamp.
pub type Moment = u64;
/// Index of a transaction in the chain.
pub type Index = u32;
/// A hash of some data used by the chain.
pub type Hash = sp_core::H256;
/// A timestamp: milliseconds since the unix epoch.
/// `u64` is enough to represent a duration of half a billion years, when the
/// time scale is milliseconds.
pub type Timestamp = u64;
/// Digest item type.
pub type DigestItem = generic::DigestItem<Hash>;
/// Header type.
pub type Header = generic::Header<BlockNumber, BlakeTwo256>;
/// Block type.
pub type Block = generic::Block<Header, OpaqueExtrinsic>;
/// Block ID.
pub type BlockId = generic::BlockId<Block>;
| 28.962264 | 91 | 0.726384 |
e6a63686f7ff751763e9dee15661578f455c5d88 | 1,139 | use std::error::Error as StdError;
use std::fmt::Debug;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt::Result as FmtResult;
use std::io::Error as IoError;
use std::io::IntoInnerError;
use structopt::clap::Error as ClapError;
#[derive(Debug)]
pub enum Error {
    /// Required options were not provided (rendered as "Missing options.").
    Missing,
    /// Wrapped I/O failure.
    Io(IoError),
    /// Wrapped command-line parsing failure from clap.
    Clap(ClapError),
}
impl StdError for Error {
    /// No deeper source chain is exposed; the wrapped errors are surfaced
    /// through `Display` instead. (Identical to the previous explicit
    /// `return None;` implementation, written as an expression.)
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        None
    }
}
impl Display for Error {
    /// Human-readable rendering: a fixed message for `Missing`, and the
    /// wrapped error's own `Display` output otherwise.
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        // Idiomatic expression form (previously `return match ...;`).
        match self {
            Error::Missing => write!(fmt, "Missing options."),
            Error::Io(err) => write!(fmt, "{}", err),
            Error::Clap(err) => write!(fmt, "{}", err),
        }
    }
}
impl From<IoError> for Error {
fn from(err: IoError) -> Self {
return Error::Io(err.into());
}
}
impl<W: Debug + Send + 'static> From<IntoInnerError<W>> for Error {
fn from(err: IntoInnerError<W>) -> Self {
return Error::Io(err.into());
}
}
impl From<ClapError> for Error {
fn from(err: ClapError) -> Self {
return Error::Clap(err.into());
}
}
| 22.78 | 67 | 0.583845 |
61c84ce532c8b94bce80fa5880444f18fac3865a | 1,428 | //! This module contains traits to inject custom logic into the window shell.
use crate::{event::*, utils::Point};
/// The window adapter is used to work with the window shell.
/// It handles updates from the shell and provides method to update and render its content.
pub trait WindowAdapter {
    // /// Renders the content.
    // fn render(&mut self, _canvas: &mut Canvas) {}
    // /// Updates the content.
    // fn update(&mut self) {}
    // All event hooks below default to no-ops, so implementors only override
    // the events they care about; only `mouse_position` and `active` are
    // required.
    /// Is called after the window is resized.
    fn resize(&mut self, _width: f64, _height: f64) {}
    /// Is called after the mouse was moved.
    fn mouse(&mut self, _x: f64, _y: f64) {}
    /// Is called after the state of a mouse button is changed.
    fn mouse_event(&mut self, _event: MouseEvent) {}
    /// Is called if mouse wheel or trackpad detect scroll event.
    fn scroll(&mut self, _delta_x: f64, _delta_y: f64) {}
    /// Is called after the state of a keyboard key is changed.
    fn key_event(&mut self, _event: KeyEvent) {}
    /// Is called after the quit event of the window is called.
    fn quit_event(&mut self) {}
    /// Gets the current mouse position.
    fn mouse_position(&self) -> Point;
    /// Is called if active state of the window is changed.
    fn active(&mut self, active: bool);
}
/// Used to define an additional updater for the window shell.
///
/// Implementors are invoked by the shell's main loop to refresh content.
pub trait Updater {
    /// Called to update the content.
    fn update(&mut self);
}
// ---- end of fragment (dataset metadata row: avg_line_length 32.454545, max_line_length 91, alphanum_fraction 0.661064) ----
// ---- begin fragment f8d6f64e89e1cc2963965f08bb6f90d23149e387 (size 17,650) ----
use crate::errors::Context;
use crate::journal::Change;
use crate::journal::ChangeFilter;
use crate::journal::Journal;
use log::info;
use rand::Rng;
use std::collections::HashSet;
use std::fs;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::process::ExitStatus;
use structopt::StructOpt;
use tempfile::tempdir;
/// Locations of the on-disk journal: a read-only "base" image plus a
/// "changes" file recording write/sync operations.
#[derive(Debug, Clone, StructOpt)]
struct PathOpt {
    /// Path to the "base" image
    #[structopt(short, long)]
    #[structopt(default_value = "./base")]
    base: PathBuf,
    /// Path to the "changes" file
    #[structopt(short, long)]
    #[structopt(default_value = "./changes")]
    changes: PathBuf,
}
/// Selection of a subset of recorded changes (see `ChangeFilter`).
#[derive(Debug, Default, StructOpt)]
struct FilterOpt {
    /// Filter out certain changes
    ///
    /// For example, "24:01011" means take the first 24 writes,
    /// then skip the 25th (0), take 26th (1), skip 27th (0),
    /// take 28th and 29th write operations.
    #[structopt(short, long)]
    #[structopt(default_value = "")]
    filter: String,
}
/// Flags controlling how `mutate_journal` rewrites the change log.
#[derive(Debug, StructOpt)]
struct MutateOpt {
    /// Discard Sync operations
    #[structopt(long)]
    drop_sync: bool,
    /// Split large writes into 2048-byte ones
    #[structopt(long)]
    split_write: bool,
    /// Insert Write operations with zeros
    #[structopt(long)]
    zero_fill: bool,
}
/// Options for how child commands are executed.
#[derive(Debug, Clone, StructOpt)]
struct RunOpt {
    /// Whether to use 'sudo' to run the command
    #[structopt(long)]
    sudo: bool,
}
/// Parameters for test-case generation (see `gen_tests`).
#[derive(Debug, Clone, StructOpt)]
struct GenTestsOpt {
    /// Log(Maximum test cases generated between 2 Syncs) / Log(2)
    #[structopt(short, long)]
    #[structopt(default_value = "8")]
    max_cases_log2: usize,
}
/// Options for the `mount` subcommand.
#[derive(Debug, StructOpt)]
struct MountOpt {
    #[structopt(flatten)]
    paths: PathOpt,
    #[structopt(flatten)]
    filter: FilterOpt,
    #[structopt(flatten)]
    run: RunOpt,
    /// FUSE mount options
    #[structopt(long)]
    fuse_args: Vec<String>,
    /// Whether to record changes back to disk.
    #[structopt(short, long)]
    record: bool,
    /// Shell command to run with the mount path as $1
    #[structopt(short, long)]
    exec: Option<String>,
    /// Mount destination
    #[structopt(short, long)]
    #[structopt(default_value = "./mountpoint")]
    dest: PathBuf,
}
/// Top-level subcommands of the outage-testing CLI.
#[derive(Debug, StructOpt)]
enum Opt {
    /// Mounts image and record changes
    ///
    /// Without --exec, the process will wait for ENTER in stdin before unmounting.
    /// With --exec, the process will unmount after executing the command.
    Mount {
        #[structopt(flatten)]
        opts: MountOpt,
    },
    /// Merges changes into base image
    Merge {
        #[structopt(flatten)]
        paths: PathOpt,
        #[structopt(flatten)]
        filter: FilterOpt,
    },
    /// Mutate the changes
    Mutate {
        #[structopt(flatten)]
        paths: PathOpt,
        #[structopt(flatten)]
        mutate: MutateOpt,
    },
    /// Shows details of a "changes" file
    Show {
        #[structopt(flatten)]
        paths: PathOpt,
        /// Show detailed bytes
        #[structopt(short, long)]
        verbose: bool,
    },
    /// Generate "filter"s for testing
    GenTests {
        #[structopt(flatten)]
        paths: PathOpt,
        #[structopt(flatten)]
        test: GenTestsOpt,
    },
    /// Run a test suite script
    ///
    /// The script will receive `argv[1]` telling it what to do:
    ///
    /// * prepare: Prepare the initial filesystem. Output to `argv[2]`.
    ///
    /// * changes: Make changes that will be recorded. Input is `argv[2]`.
    ///
    /// * verify: Check properties. Input is `argv[2]`. Return value in 10..20
    ///   are considered as "successful", and are used to "bisect" test cases.
    ///
    /// If the script returns non-zero exit code, and is not in the 10..20
    /// range, then verification stops and prints the test case.
    ///
    /// The input and output files are created in a temporary directory
    /// that will be deleted unless `--keep` is set.
    RunSuite {
        /// Script to run
        script_path: PathBuf,
        /// Whether to keep the temporary directory
        #[structopt(short, long)]
        keep: bool,
        #[structopt(flatten)]
        run: RunOpt,
        #[structopt(flatten)]
        test: GenTestsOpt,
    },
}
/// Read the journal described by `opt` (base image + changes file) from disk.
fn load_journal(opt: &PathOpt) -> io::Result<Journal> {
    let base = &opt.base;
    let changes = &opt.changes;
    info!(
        "reading journal at {} with changes {}",
        base.display(),
        changes.display()
    );
    Journal::load(base, changes)
}
/// Persist `journal` to the base image and changes file named in `opt`.
fn save_journal(journal: &Journal, opt: &PathOpt) -> io::Result<()> {
    info!(
        "writing journal to {} with changes {}",
        opt.base.display(),
        opt.changes.display()
    );
    journal.dump(&opt.base, &opt.changes).map(|_| ())
}
/// Rewrite `journal.changes` in place according to the mutation flags in `opt`.
///
/// * `drop_sync` — discard `Sync` markers entirely.
/// * `zero_fill` — before each `Write` that contains any non-zero byte, emit
///   a same-sized, same-offset all-zeros `Write`.
/// * `split_write` — replace a `Write` larger than 2048 bytes with a run of
///   consecutive writes of at most 2048 bytes each.
fn mutate_journal(journal: &mut Journal, opt: &MutateOpt) {
    let mut new_changes = Vec::new();
    for change in &journal.changes {
        match change {
            Change::Sync => {
                if !opt.drop_sync {
                    new_changes.push(Change::Sync);
                }
            }
            Change::Write { offset, data } => {
                // NOTE(review): the zeroed write is emitted *in addition to*
                // the original write handled below — presumably to simulate a
                // "zeroed, then written" intermediate state. Confirm intended.
                if opt.zero_fill && data.iter().any(|b| *b != 0) {
                    new_changes.push(Change::Write {
                        offset: *offset,
                        data: vec![0; data.len()],
                    });
                }
                if opt.split_write && data.len() > 2048 {
                    // Walk the payload in 2048-byte windows; `get` yields
                    // `None` once `data_offset` passes the end of `data`.
                    let mut data_offset = 0;
                    while let Some(sub) =
                        data.get(data_offset..(data_offset + 2048).min(data.len()))
                    {
                        if sub.is_empty() {
                            break;
                        }
                        new_changes.push(Change::Write {
                            offset: offset + data_offset,
                            data: sub.to_vec(),
                        });
                        data_offset += sub.len();
                    }
                } else {
                    new_changes.push(change.clone());
                }
            }
        }
    }
    journal.changes = new_changes;
}
/// Parse the `--filter` string into a `ChangeFilter`; an empty string means
/// "no filter" (`None`).
fn parse_filter(opt: &FilterOpt) -> io::Result<Option<ChangeFilter>> {
    match opt.filter.as_str() {
        "" => Ok(None),
        spec => spec.parse().map(Some),
    }
}
/// Print a numbered, human-readable listing of `changes` to stdout.
/// With `verbose`, write payload bytes in full; otherwise only sizes.
fn show_changes(changes: &[Change], verbose: bool) {
    if changes.is_empty() {
        info!("No changes");
    }
    for (index, change) in changes.iter().enumerate() {
        print!("{:6} ", index);
        match change {
            Change::Sync => println!("Sync"),
            Change::Write { offset, data } if verbose => {
                println!("Write at {} with {:?}", offset, data)
            }
            Change::Write { offset, data } => {
                // Flag all-zero payloads, which are common after zero_fill.
                let suffix = if data.iter().all(|b| *b == 0) {
                    " of zeros"
                } else {
                    ""
                };
                println!("Write at {} with {} bytes{}", offset, data.len(), suffix);
            }
        }
    }
}
/// Generate `ChangeFilter` strings ("start:bits") describing subsets of the
/// writes preceding each `Sync`.
///
/// For a run of `width` writes before a Sync: if `width <= max_cases_log2`,
/// enumerate all 2^width bit patterns exhaustively; otherwise sample
/// 2^max_cases_log2 distinct patterns by random bit flips.
fn gen_tests(mut changes: Vec<Change>, opt: &GenTestsOpt) -> Vec<String> {
    let max_width: usize = opt.max_cases_log2;
    let mut result = Vec::new();
    // Ensure the last change is Sync.
    if let Some(Change::Write { .. }) = changes.last() {
        changes.push(Change::Sync);
    }
    // Figure out locations of "Sync"s.
    let mut sync_indexes = Vec::new();
    for (i, change) in changes.iter().enumerate() {
        if let Change::Sync = change {
            sync_indexes.push(i);
        }
    }
    // For each "Sync", generate test cases.
    for (i, sync_index) in sync_indexes.iter().enumerate() {
        // start_index .. sync_index
        let start_index = if i == 0 { 0 } else { sync_indexes[i - 1] + 1 };
        let width = sync_index - start_index;
        if width == 0 {
            // Ignore - no writes.
        } else if width <= max_width {
            info!(
                "# All cases for {} writes before #{} Sync",
                width, sync_index,
            );
            for bits in 0..(1 << width) {
                result.push(format!("{}:{:0width$b}", start_index, bits, width = width));
            }
        } else {
            let n = 1 << max_width;
            info!(
                "# Random {} cases for {} writes before #{} Sync",
                n, width, sync_index,
            );
            let mut bits = vec![false; width];
            let mut rng = rand::thread_rng();
            let mut visited: HashSet<String> = HashSet::new();
            while visited.len() < n {
                // Do a few bit flips.
                // In this branch width > max_width, so width * 2 / max_width
                // is at least 2 and the (low, high) range is non-empty.
                // NOTE(review): two-argument `gen_range` is the rand 0.6/0.7
                // API — confirm the crate version before upgrading.
                let bit_flip_count = rng.gen_range(1, width * 2 / max_width);
                for _ in 0..bit_flip_count {
                    let idx = rng.gen_range(0, width);
                    bits[idx] = !bits[idx];
                }
                let bits_str: String = bits
                    .iter()
                    .map(|&b| if b { "1" } else { "0" })
                    .collect::<Vec<&str>>()
                    .concat();
                if visited.insert(bits_str.clone()) {
                    result.push(format!("{}:{}", start_index, bits_str));
                }
            }
        }
    }
    result
}
/// Block until the user sends a line (ENTER) on stdin; the content and any
/// read error are deliberately ignored.
fn wait_stdin() {
    let mut line = String::new();
    let _ = io::stdin().read_line(&mut line);
}
/// Run `args` as a child process and return its exit status.
///
/// When `run.sudo` is set and the current user is not already root, a `sudo`
/// binary is looked up in well-known locations and prepended to `args`;
/// failure to find one is an error.
fn execute(mut args: Vec<String>, run: &RunOpt) -> io::Result<ExitStatus> {
    // getuid() never fails; the unsafe block is only for the FFI call.
    if run.sudo && unsafe { libc::getuid() } != 0 {
        let mut sudo_path = None;
        // If several candidates exist, the last one in this list wins.
        for path in &["/usr/bin/sudo", "/run/wrappers/bin/sudo"] {
            if Path::new(path).exists() {
                sudo_path = Some(path.to_string());
            }
        }
        match sudo_path {
            Some(path) => args.insert(0, path),
            None => {
                return Err(io::Error::new(
                    io::ErrorKind::NotFound,
                    "can not find sudo in common locations",
                ))
            }
        }
    }
    info!("running: {}", shell_words::join(&args[..]));
    Command::new(&args[0])
        .args(&args[1..])
        .status()
        .context("run script")
}
/// Mount the journal as a FUSE filesystem at `opts.dest`, optionally run a
/// shell command against the mountpoint, then unmount.
///
/// Returns the child command's exit code (0 when no `--exec` was given).
/// With `--record`, the changes observed while mounted are written back.
fn mount(opts: MountOpt) -> io::Result<i32> {
    let MountOpt {
        paths,
        fuse_args,
        dest,
        filter,
        exec,
        run,
        record,
    } = opts;
    let mut result = 0;
    let mut journal = load_journal(&paths)?;
    let filter = parse_filter(&filter)?;
    // Create the file if it does not exist.
    let _ = fs::OpenOptions::new().write(true).create(true).open(&dest);
    let session = journal
        .mount(&dest, &fuse_args, filter.as_ref())
        .context(format!("mounting outagefs to {}", dest.display()))?;
    info!("mounted: {}", dest.display());
    match exec {
        Some(cmd) => {
            // `sh -c CMD -- DEST` makes the mountpoint available as $1.
            let sh_args = vec![
                "/bin/sh".to_string(),
                "-c".to_string(),
                cmd.clone(),
                "--".to_string(),
                dest.display().to_string(),
            ];
            let status = execute(sh_args, &run)?;
            if let Some(code) = status.code() {
                result = code;
                info!("child exited with {}", code);
            }
        }
        None => {
            info!("press ENTER to write changes and unmount");
            wait_stdin();
        }
    }
    // Dropping the FUSE session unmounts the filesystem.
    drop(session);
    info!("unmounted: {}", dest.display());
    if record {
        save_journal(&journal, &paths)?;
        info!("changes written: {}", paths.changes.display());
    }
    Ok(result)
}
/// Drive a test-suite script through prepare / changes / verify phases.
///
/// The script is run with "prepare" to build the base image, then under a
/// recording mount with "changes", and finally with "verify" once per
/// generated filter. Verify exit codes 0 and 10..20 count as success (the
/// 10..20 band encodes a "variant" used to bisect interesting cases);
/// anything else aborts and is returned to the caller.
///
/// Fix: the error-context strings previously read "runing" — corrected to
/// "running". No logic changes.
fn run_script(script_path: &str, run: &RunOpt, test: &GenTestsOpt) -> io::Result<i32> {
    // Prepare
    let paths = PathOpt {
        base: "base".into(),
        changes: "changes".into(),
    };
    execute(
        vec![
            script_path.to_string(),
            "prepare".into(),
            paths.base.display().to_string(),
        ],
        &run,
    )
    .context("executing prepare script")?;
    // Record changes
    let dest = Path::new("mountpoint").to_path_buf();
    mount(MountOpt {
        paths: paths.clone(),
        filter: FilterOpt::default(),
        fuse_args: Vec::new(),
        run: run.clone(),
        record: true,
        exec: Some(shell_words::join(vec![
            script_path.to_string(),
            "changes".to_string(),
            dest.display().to_string(),
        ])),
        dest: dest.clone(),
    })
    .context("running mount subcommand to record changes")?;
    // Tests
    let journal = load_journal(&paths)?;
    let tests = gen_tests(journal.changes, test);
    let total = tests.len();
    /// Per-case verification outcome; `Pass(v)` carries the verify script's
    /// variant (exit code minus 10, or 0 for a plain success).
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    #[repr(u8)]
    enum Tested {
        Unknown,
        Pass(usize),
    }
    let mut tested = vec![Tested::Unknown; tests.len()];
    let mut tested_count = 0;
    let mut next_test_index = 0;
    while tested_count < tests.len() {
        let i = next_test_index;
        tested_count += 1;
        assert_eq!(tested[i], Tested::Unknown);
        eprintln!("[{} of {}] Test Case #{}", tested_count, total, i);
        let code = mount(MountOpt {
            paths: paths.clone(),
            filter: FilterOpt {
                filter: tests[i].clone(),
            },
            fuse_args: Vec::new(),
            run: run.clone(),
            record: false,
            exec: Some(shell_words::join(vec![
                script_path.to_string(),
                "verify".into(),
                dest.display().to_string(),
            ])),
            dest: dest.clone(),
        })
        .context(format!("running mount subcommand to verify {}", &tests[i]))?;
        info!("verify script returned {}", code);
        if code >= 10 && code < 20 {
            tested[i] = Tested::Pass((code - 10) as _);
        } else if code == 0 {
            tested[i] = Tested::Pass(0);
        } else {
            eprintln!("verify script returned {} for filter {}", code, &tests[i]);
            return Ok(code);
        }
        if tested_count >= tests.len() {
            break;
        }
        // Find the next "interesting" test.
        next_test_index = if i == 0 {
            tests.len() - 1
        } else {
            // Find a bisect range: the widest gap between already-passed
            // cases whose variants differ, then probe its midpoint.
            let mut best_range_start = 0;
            let mut best_range_distance = 0;
            let mut last_pass_start = 0;
            let mut last_pass_variant = 0;
            for j in 0..tests.len() {
                match tested[j] {
                    Tested::Unknown => continue,
                    Tested::Pass(v) => {
                        if v != last_pass_variant && j - last_pass_start > best_range_distance {
                            best_range_distance = j - last_pass_start;
                            best_range_start = last_pass_start;
                        }
                        last_pass_start = j;
                        last_pass_variant = v;
                    }
                }
            }
            let best_range_end = best_range_start + best_range_distance;
            let best_range_mid = (best_range_end + best_range_start) / 2;
            if best_range_distance > 1 {
                info!(
                    "bisect {}..{}: {}",
                    best_range_start, best_range_end, best_range_mid
                );
                best_range_mid
            } else {
                // Nothing left to bisect: scan forward (wrapping) for the
                // next untested case.
                let mut j = (i + 1) % tests.len();
                let mut count = 0;
                while tested[j] != Tested::Unknown {
                    j += 1;
                    count += 1;
                    assert!(count <= tests.len());
                    if j >= tests.len() {
                        j = 0;
                    }
                }
                info!("picking next untested case: {}", j);
                j
            }
        };
    }
    eprintln!("{} test cases verified", tested_count);
    Ok(0)
}
/// CLI entry point: parse arguments and dispatch to the chosen subcommand.
pub(crate) fn main() -> io::Result<()> {
    let opt = Opt::from_args();
    match opt {
        Opt::Mount { opts } => {
            mount(opts)?;
        }
        Opt::Merge { paths, filter } => {
            // Materialize the filtered view of the journal, then save it as
            // a fresh journal (new base, empty changes).
            let journal = load_journal(&paths)?;
            let filter = parse_filter(&filter)?;
            let data = journal.data(filter.as_ref());
            let journal = Journal::new(data);
            save_journal(&journal, &paths)?;
        }
        Opt::Mutate { paths, mutate } => {
            let mut journal = load_journal(&paths)?;
            mutate_journal(&mut journal, &mutate);
            save_journal(&journal, &paths)?;
        }
        Opt::Show { paths, verbose } => {
            let journal = load_journal(&paths)?;
            show_changes(&journal.changes, verbose);
        }
        Opt::GenTests { paths, test } => {
            let journal = load_journal(&paths)?;
            for s in gen_tests(journal.changes, &test) {
                println!("{}", s);
            }
        }
        Opt::RunSuite {
            script_path,
            keep,
            run,
            test,
        } => {
            let script_path = script_path.canonicalize()?.display().to_string();
            let tmpdir = tempdir()?;
            let dir = &tmpdir.path();
            info!("chdir: {}", dir.display());
            std::env::set_current_dir(dir)?;
            // NOTE(review): the suite's exit code is deliberately discarded
            // here, so the process exits 0 even when verification failed —
            // confirm this is intended.
            let _code = run_script(&script_path, &run, &test)?;
            if keep {
                eprintln!("keep tmpdir: {}", tmpdir.into_path().display());
            }
        }
    }
    Ok(())
}
// ---- end of fragment (dataset metadata row: avg_line_length 29.864636, max_line_length 96, alphanum_fraction 0.490708) ----
// ---- begin fragment 39ea8f4b98903244f90f3a1f5f8b91c901598223 (size 22,396) ----
// Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//! A controller that provides an interface to a compute instance, and the storage layer below it.
//!
//! The compute controller curates the creation of indexes and sinks, the progress of readers through
//! these collections, and their eventual dropping and resource reclamation.
//!
//! The compute controller can be viewed as a partial map from `GlobalId` to collection. It is an error to
//! use an identifier before it has been "created" with `create_dataflows()`. Once created, the controller holds
//! a read capability for each output collection of a dataflow, which is manipulated with `allow_compaction()`.
//! Eventually, a collection is dropped with either `drop_sources()` or by allowing compaction to the empty frontier.
//!
//! Created dataflows will prevent the compaction of their inputs, including other compute collections but also
//! collections managed by the storage layer. Each dataflow input is prevented from compacting beyond the allowed
//! compaction of each of its outputs, ensuring that we can recover each dataflow to its current state in case of
//! failure or other reconfiguration.
use std::collections::{BTreeMap, BTreeSet};
use differential_dataflow::lattice::Lattice;
use timely::progress::frontier::MutableAntichain;
use timely::progress::{Antichain, ChangeBatch, Timestamp};
use uuid::Uuid;
use crate::client::{Client, Command, ComputeCommand, ComputeInstanceId, StorageCommand};
use crate::logging::LoggingConfig;
use crate::DataflowDescription;
use mz_expr::GlobalId;
use mz_expr::RowSetFinishing;
use mz_repr::Row;
use super::ReadPolicy;
/// Controller state maintained for each compute instance.
pub(super) struct ComputeControllerState<T> {
    /// Tracks expressed `since` and received `upper` frontiers for indexes and sinks.
    // Keyed by the collection's `GlobalId`; entries are created by
    // `create_dataflows` (and for log collections in `new`).
    pub(super) collections: BTreeMap<GlobalId, CollectionState<T>>,
}
/// A controller for a compute instance.
///
/// Borrows both the compute and storage controller state so that compute
/// operations can propagate read-capability changes down to storage.
pub struct ComputeController<'a, C, T> {
    /// The compute instance commands are addressed to.
    pub(super) instance: ComputeInstanceId,
    /// Per-collection frontier state for this instance.
    pub(super) compute: &'a mut ComputeControllerState<T>,
    /// Shared storage-side state (for input read capabilities).
    pub(super) storage: &'a mut super::StorageControllerState<T>,
    /// Transport used to send commands to the instance.
    pub(super) client: &'a mut C,
}
/// Errors arising from compute commands.
#[derive(Debug)]
pub enum ComputeError {
    /// Command referenced an instance that was not present.
    InstanceMissing(ComputeInstanceId),
    /// Command referenced an identifier that was not present.
    IdentifierMissing(GlobalId),
    /// Dataflow was malformed (e.g. missing `as_of`).
    DataflowMalformed,
    /// The dataflow `as_of` was not greater than the `since` of the identifier.
    DataflowSinceViolation(GlobalId),
    /// The peek `timestamp` was not greater than the `since` of the identifier.
    PeekSinceViolation(GlobalId),
    /// An error from the underlying client.
    // Wraps transport/client failures via `From<anyhow::Error>` below.
    ClientError(anyhow::Error),
}
impl From<anyhow::Error> for ComputeError {
fn from(error: anyhow::Error) -> Self {
Self::ClientError(error)
}
}
impl<T: Timestamp + Lattice> ComputeControllerState<T> {
    /// Build fresh state; when logging is enabled, pre-register a collection
    /// for every log source, readable from the minimum timestamp.
    pub(super) fn new(logging: &Option<LoggingConfig>) -> Self {
        let mut collections = BTreeMap::default();
        if let Some(config) = logging.as_ref() {
            for id in config.log_identifiers() {
                let since = Antichain::from_elem(T::minimum());
                let state = CollectionState::new(since, Vec::new(), Vec::new());
                collections.insert(id, state);
            }
        }
        Self { collections }
    }
}
// Public interface
impl<'a, C: Client<T>, T: Timestamp + Lattice> ComputeController<'a, C, T> {
    /// Acquires a handle to a controller for the storage instance.
    ///
    /// Reborrows the shared storage state and client so compute-side
    /// operations can forward read-capability updates to storage.
    #[inline]
    pub fn storage(&mut self) -> crate::client::controller::StorageController<C, T> {
        crate::client::controller::StorageController {
            storage: &mut self.storage,
            client: &mut self.client,
        }
    }
/// Acquire a handle to the collection state associated with `id`.
pub fn collection(&self, id: GlobalId) -> Result<&CollectionState<T>, ComputeError> {
self.compute
.collections
.get(&id)
.ok_or(ComputeError::IdentifierMissing(id))
}
    /// Creates and maintains the described dataflows, and initializes state for their output.
    ///
    /// This method creates dataflows whose inputs are still readable at the dataflow `as_of`
    /// frontier, and initializes the outputs as readable from that frontier onward.
    /// It installs read dependencies from the outputs to the inputs, so that the input read
    /// capabilities will be held back to the output read capabilities, ensuring that we are
    /// always able to return to a state that can serve the output read capabilities.
    pub async fn create_dataflows(
        &mut self,
        dataflows: Vec<DataflowDescription<crate::plan::Plan<T>, T>>,
    ) -> Result<(), ComputeError> {
        // Validate dataflows as having inputs whose `since` is less or equal to the dataflow's `as_of`.
        // Start tracking frontiers for each dataflow, using its `as_of` for each index and sink.
        for dataflow in dataflows.iter() {
            let as_of = dataflow
                .as_of
                .as_ref()
                .ok_or(ComputeError::DataflowMalformed)?;
            // Record all transitive dependencies of the outputs.
            let mut storage_dependencies = Vec::new();
            let mut compute_dependencies = Vec::new();
            // Validate sources have `since.less_equal(as_of)`.
            for (source_id, _) in dataflow.source_imports.iter() {
                let since = &self
                    .storage
                    .collections
                    .get(source_id)
                    .ok_or(ComputeError::IdentifierMissing(*source_id))?
                    .read_capabilities
                    .frontier();
                if !(<_ as timely::order::PartialOrder>::less_equal(since, &as_of.borrow())) {
                    Err(ComputeError::DataflowSinceViolation(*source_id))?;
                }
                storage_dependencies.push(*source_id);
            }
            // Validate indexes have `since.less_equal(as_of)`.
            // TODO(mcsherry): Instead, return an error from the constructing method.
            for (index_id, _) in dataflow.index_imports.iter() {
                let collection = self.collection(*index_id)?;
                let since = collection.read_capabilities.frontier();
                if !(<_ as timely::order::PartialOrder>::less_equal(&since, &as_of.borrow())) {
                    Err(ComputeError::DataflowSinceViolation(*index_id))?;
                } else {
                    compute_dependencies.push(*index_id);
                }
            }
            // Canonicalize dependencies.
            // Probably redundant based on key structure, but doing for sanity.
            storage_dependencies.sort();
            storage_dependencies.dedup();
            compute_dependencies.sort();
            compute_dependencies.dedup();
            // We will bump the internals of each input by the number of dependents (outputs).
            let outputs = dataflow.sink_exports.len() + dataflow.index_exports.len();
            let mut changes = ChangeBatch::new();
            for time in as_of.iter() {
                changes.update(time.clone(), outputs as i64);
            }
            // Update storage read capabilities for inputs.
            let mut storage_read_updates = storage_dependencies
                .iter()
                .map(|id| (*id, changes.clone()))
                .collect();
            self.storage()
                .update_read_capabilities(&mut storage_read_updates)
                .await;
            // Update compute read capabilities for inputs.
            let mut compute_read_updates = compute_dependencies
                .iter()
                .map(|id| (*id, changes.clone()))
                .collect();
            self.update_read_capabilities(&mut compute_read_updates)
                .await;
            // Install collection state for each of the exports.
            for (sink_id, _) in dataflow.sink_exports.iter() {
                self.compute.collections.insert(
                    *sink_id,
                    CollectionState::new(
                        as_of.clone(),
                        storage_dependencies.clone(),
                        compute_dependencies.clone(),
                    ),
                );
            }
            for (index_id, _, _) in dataflow.index_exports.iter() {
                self.compute.collections.insert(
                    *index_id,
                    CollectionState::new(
                        as_of.clone(),
                        storage_dependencies.clone(),
                        compute_dependencies.clone(),
                    ),
                );
            }
        }
        // Ask storage to render the imported sources, then ship the dataflows
        // to the compute instance.
        let sources = dataflows
            .iter()
            .map(|dataflow| {
                (
                    dataflow.debug_name.clone(),
                    dataflow.id,
                    dataflow.as_of.clone(),
                    dataflow.source_imports.clone(),
                )
            })
            .collect();
        self.client
            .send(Command::Storage(StorageCommand::RenderSources(sources)))
            .await
            .expect("Storage command failed; unrecoverable");
        self.client
            .send(Command::Compute(
                ComputeCommand::CreateDataflows(dataflows),
                self.instance,
            ))
            .await
            .expect("Compute command failed; unrecoverable");
        Ok(())
    }
/// Drops the read capability for the sinks and allows their resources to be reclaimed.
pub async fn drop_sinks(&mut self, identifiers: Vec<GlobalId>) -> Result<(), ComputeError> {
// Validate that the ids exist.
self.validate_ids(identifiers.iter().cloned())?;
let compaction_commands = identifiers
.into_iter()
.map(|id| (id, Antichain::new()))
.collect();
self.allow_compaction(compaction_commands).await;
Ok(())
}
/// Drops the read capability for the indexes and allows their resources to be reclaimed.
pub async fn drop_indexes(&mut self, identifiers: Vec<GlobalId>) -> Result<(), ComputeError> {
// Validate that the ids exist.
self.validate_ids(identifiers.iter().cloned())?;
let compaction_commands = identifiers
.into_iter()
.map(|id| (id, Antichain::new()))
.collect();
self.allow_compaction(compaction_commands).await;
Ok(())
}
/// Initiate a peek request for the contents of `id` at `timestamp`.
pub async fn peek(
&mut self,
id: GlobalId,
key: Option<Row>,
uuid: Uuid,
timestamp: T,
finishing: RowSetFinishing,
map_filter_project: mz_expr::SafeMfpPlan,
) -> Result<(), ComputeError> {
let since = self.collection(id)?.read_capabilities.frontier();
if !since.less_equal(×tamp) {
Err(ComputeError::PeekSinceViolation(id))?;
}
self.client
.send(Command::Compute(
ComputeCommand::Peek {
id,
key,
uuid,
timestamp,
finishing,
map_filter_project,
},
self.instance,
))
.await
.map_err(ComputeError::from)
}
/// Cancels existing peek requests.
pub async fn cancel_peeks(&mut self, uuids: &BTreeSet<Uuid>) -> Result<(), ComputeError> {
let uuids = uuids.clone();
self.client
.send(Command::Compute(
ComputeCommand::CancelPeeks { uuids },
self.instance,
))
.await
.map_err(ComputeError::from)
}
/// Downgrade the read capabilities of specific identifiers to specific frontiers.
///
/// Downgrading any read capability to the empty frontier will drop the item and eventually reclaim its resources.
pub async fn allow_compaction(&mut self, frontiers: Vec<(GlobalId, Antichain<T>)>) {
// The coordinator currently sends compaction commands for identifiers that do not exist.
// Until that changes, we need to be oblivious to errors, or risk not compacting anything.
// // Validate that the ids exist.
// self.validate_ids(frontiers.iter().map(|(id, _)| *id))?;
let policies = frontiers
.into_iter()
.map(|(id, frontier)| (id, ReadPolicy::ValidFrom(frontier)));
self.set_read_policy(policies.collect()).await;
}
    /// Assigns a read policy to specific identifiers.
    ///
    /// The policies are assigned in the order presented, and repeated identifiers should
    /// conclude with the last policy. Changing a policy will immediately downgrade the read
    /// capability if appropriate, but it will not "recover" the read capability if the prior
    /// capability is already ahead of it.
    ///
    /// Identifiers not present in `policies` retain their existing read policies.
    pub async fn set_read_policy(&mut self, policies: Vec<(GlobalId, ReadPolicy<T>)>) {
        let mut read_capability_changes = BTreeMap::default();
        for (id, policy) in policies.into_iter() {
            if let Ok(collection) = self.collection_mut(id) {
                let mut new_read_capability = match &policy {
                    ReadPolicy::ValidFrom(frontier) => frontier.clone(),
                    ReadPolicy::LagWriteFrontier(logic) => {
                        logic(collection.write_frontier.frontier())
                    }
                };
                // Only move forward: downgrade the implied capability when the
                // new one is ahead of (or equal to) the current one.
                if <_ as timely::order::PartialOrder>::less_equal(
                    &collection.implied_capability,
                    &new_read_capability,
                ) {
                    // Record the net change (+new, -old) for propagation.
                    let mut update = ChangeBatch::new();
                    update.extend(new_read_capability.iter().map(|time| (time.clone(), 1)));
                    std::mem::swap(&mut collection.implied_capability, &mut new_read_capability);
                    update.extend(new_read_capability.iter().map(|time| (time.clone(), -1)));
                    if !update.is_empty() {
                        read_capability_changes.insert(id, update);
                    }
                }
                collection.read_policy = policy;
            } else {
                tracing::error!("Reference to unregistered id: {:?}", id);
            }
        }
        if !read_capability_changes.is_empty() {
            self.update_read_capabilities(&mut read_capability_changes)
                .await;
        }
    }
}
// Internal interface
impl<'a, C: Client<T>, T: Timestamp + Lattice> ComputeController<'a, C, T> {
/// Acquire a mutable reference to the collection state, should it exist.
pub(super) fn collection_mut(
&mut self,
id: GlobalId,
) -> Result<&mut CollectionState<T>, ComputeError> {
self.compute
.collections
.get_mut(&id)
.ok_or(ComputeError::IdentifierMissing(id))
}
/// Validate that a collection exists for all identifiers, and error if any do not.
pub fn validate_ids(&self, ids: impl Iterator<Item = GlobalId>) -> Result<(), ComputeError> {
for id in ids {
self.collection(id)?;
}
Ok(())
}
    /// Accept write frontier updates from the compute layer.
    ///
    /// For collections governed by a `LagWriteFrontier` policy, advancing the
    /// write frontier may also advance the implied read capability; any such
    /// changes are propagated via `update_read_capabilities`.
    pub(super) async fn update_write_frontiers(&mut self, updates: &[(GlobalId, ChangeBatch<T>)]) {
        let mut read_capability_changes = BTreeMap::default();
        for (id, changes) in updates.iter() {
            let collection = self
                .collection_mut(*id)
                .expect("Reference to absent collection");
            collection
                .write_frontier
                .update_iter(changes.clone().drain());
            if let super::ReadPolicy::LagWriteFrontier(logic) = &collection.read_policy {
                let mut new_read_capability = logic(collection.write_frontier.frontier());
                // Only move forward: never regress the implied capability.
                if <_ as timely::order::PartialOrder>::less_equal(
                    &collection.implied_capability,
                    &new_read_capability,
                ) {
                    // TODO: reuse change batch above?
                    let mut update = ChangeBatch::new();
                    update.extend(new_read_capability.iter().map(|time| (time.clone(), 1)));
                    std::mem::swap(&mut collection.implied_capability, &mut new_read_capability);
                    update.extend(new_read_capability.iter().map(|time| (time.clone(), -1)));
                    if !update.is_empty() {
                        read_capability_changes.insert(*id, update);
                    }
                }
            }
        }
        if !read_capability_changes.is_empty() {
            self.update_read_capabilities(&mut read_capability_changes)
                .await;
        }
    }
    /// Applies `updates`, propagates consequences through other read capabilities, and sends an appropriate compaction command.
    pub(super) async fn update_read_capabilities(
        &mut self,
        updates: &mut BTreeMap<GlobalId, ChangeBatch<T>>,
    ) {
        // Locations to record consequences that we need to act on.
        let mut storage_todo = BTreeMap::default();
        let mut compute_net = Vec::default();
        // Repeatedly extract the maximum id, and updates for it.
        // Processing in descending id order lets changes cascade to
        // dependencies that are queued back into `updates`.
        while let Some(key) = updates.keys().rev().next().cloned() {
            let mut update = updates.remove(&key).unwrap();
            if let Ok(collection) = self.collection_mut(key) {
                let changes = collection.read_capabilities.update_iter(update.drain());
                update.extend(changes);
                for id in collection.storage_dependencies.iter() {
                    storage_todo
                        .entry(*id)
                        .or_insert_with(ChangeBatch::new)
                        .extend(update.iter().cloned());
                }
                for id in collection.compute_dependencies.iter() {
                    updates
                        .entry(*id)
                        .or_insert_with(ChangeBatch::new)
                        .extend(update.iter().cloned());
                }
                compute_net.push((key, update));
            } else {
                // Storage presumably, but verify.
                storage_todo
                    .entry(key)
                    .or_insert_with(ChangeBatch::new)
                    .extend(update.drain())
            }
        }
        // Translate our net compute actions into `AllowCompaction` commands.
        let mut compaction_commands = Vec::new();
        for (id, change) in compute_net.iter_mut() {
            if !change.is_empty() {
                let frontier = self
                    .collection(*id)
                    .unwrap()
                    .read_capabilities
                    .frontier()
                    .to_owned();
                compaction_commands.push((*id, frontier));
            }
        }
        if !compaction_commands.is_empty() {
            self.client
                .send(Command::Compute(
                    ComputeCommand::AllowCompaction(compaction_commands),
                    self.instance,
                ))
                .await
                .expect("Compute instance command failed; unrecoverable");
        }
        // We may have storage consequences to process.
        if !storage_todo.is_empty() {
            self.storage()
                .update_read_capabilities(&mut storage_todo)
                .await;
        }
    }
}
/// State maintained about individual collections.
pub struct CollectionState<T> {
    /// Accumulation of read capabilities for the collection.
    ///
    /// This accumulation will always contain `self.implied_capability`, but may also contain
    /// capabilities held by others who have read dependencies on this collection.
    pub read_capabilities: MutableAntichain<T>,
    /// The implicit capability associated with collection creation.
    pub implied_capability: Antichain<T>,
    /// The policy to use to downgrade `self.implied_capability`.
    pub read_policy: ReadPolicy<T>,
    /// Storage identifiers on which this collection depends.
    pub storage_dependencies: Vec<GlobalId>,
    /// Compute identifiers on which this collection depends.
    pub compute_dependencies: Vec<GlobalId>,
    /// Reported progress in the write capabilities.
    ///
    /// Importantly, this is not a write capability, but what we have heard about the
    /// write capabilities of others. All future writes will have times greater than or
    /// equal to `upper_frontier.frontier()`.
    pub write_frontier: MutableAntichain<T>,
}
impl<T: Timestamp> CollectionState<T> {
    /// Creates a new collection state, with an initial read policy valid from `since`.
    pub fn new(
        since: Antichain<T>,
        storage_dependencies: Vec<GlobalId>,
        compute_dependencies: Vec<GlobalId>,
    ) -> Self {
        // Seed the capability accumulation with one capability at `since`.
        let mut read_capabilities = MutableAntichain::new();
        let initial = since.iter().map(|time| (time.clone(), 1));
        read_capabilities.update_iter(initial);
        let implied_capability = since.clone();
        Self {
            read_capabilities,
            implied_capability,
            read_policy: ReadPolicy::ValidFrom(since),
            storage_dependencies,
            compute_dependencies,
            write_frontier: MutableAntichain::new_bottom(Timestamp::minimum()),
        }
    }
    /// Reports the current read capability.
    pub fn read_capability(&self) -> &Antichain<T> {
        &self.implied_capability
    }
}
// ---- end of fragment (dataset metadata row: avg_line_length 41.397412, max_line_length 128, alphanum_fraction 0.585149) ----
// ---- begin fragment d548958aca3fc2db6bf244318ea742137c3f7756 (size 31,823) ----
use std::ops::{Add, Index, IndexMut, Mul, Sub};
use crate::field_elem::{FieldElement, FieldElementVector};
use core::cmp::max;
#[cfg(feature = "rayon")]
use crate::rayon::iter::IntoParallelRefIterator;
#[cfg(feature = "rayon")]
use rayon::prelude::*;
/// Univariate polynomial represented with coefficients in a vector. The ith element of the vector is the coefficient of the ith degree term.
// Invariant: a polynomial of degree d stores d + 1 coefficients.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct UnivarPolynomial(pub FieldElementVector);
impl UnivarPolynomial {
/// Return a zero polynomial of degree `degree`
pub fn new(degree: usize) -> Self {
let coeffs = FieldElementVector::new(degree + 1);
UnivarPolynomial(coeffs)
}
/// Return a constant polynomial
pub fn new_constant(constant: FieldElement) -> Self {
let mut coeffs = FieldElementVector::new(1);
coeffs[0] = constant;
UnivarPolynomial(coeffs)
}
    /// Return a randomly chosen polynomial (each coefficient is randomly chosen) of degree `degree`.
    ///
    /// NOTE(review): a randomly drawn leading coefficient is presumably non-zero with
    /// overwhelming probability, so the result almost surely has exact degree `degree`.
    pub fn random(degree: usize) -> Self {
        Self(FieldElementVector::random(degree + 1)) // +1 for constant term
    }
/// Create a polynomial with given roots in `roots`
/// i.e. (x-roots[0])*(x-roots[1])*(x-roots[2])...(x-roots[last]) given `roots`
#[cfg(feature = "rayon")]
pub fn new_with_roots(roots: &[FieldElement]) -> Self {
// vector of [(x-roots[0]), (x-roots[1]), (x-roots[2]), ...]
let x_i = roots
.par_iter()
.map(|i| {
let mut v = FieldElementVector::with_capacity(2);
v.push(-i);
v.push(FieldElement::one());
UnivarPolynomial(v)
})
.collect::<Vec<UnivarPolynomial>>();
// Polynomial (x-roots[0])*(x-roots[1])*(x-roots[2])...(x-roots[last])
x_i.par_iter().cloned().reduce(
|| Self::new_constant(FieldElement::one()),
|a, b| UnivarPolynomial::multiply(&a, &b),
)
}
#[cfg(not(feature = "rayon"))]
pub fn new_with_roots(roots: &[FieldElement]) -> Self {
// vector of [(x-roots[0]), (x-roots[1]), (x-roots[2]), ...]
let x_i = roots
.iter()
.map(|i| {
let mut v = FieldElementVector::with_capacity(2);
v.push(-i);
v.push(FieldElement::one());
UnivarPolynomial(v)
})
.collect::<Vec<UnivarPolynomial>>();
// Polynomial (x-roots[0])*(x-roots[1])*(x-roots[2])...(x-roots[last])
x_i.iter()
.cloned()
.fold(Self::new_constant(FieldElement::one()), |a, b| {
UnivarPolynomial::multiply(&a, &b)
})
}
pub fn coefficients(&self) -> &FieldElementVector {
&self.0
}
pub fn degree(&self) -> usize {
// TODO: This makes fetching the coefficient ambiguous as a 0 degree polynomial might
// have a coefficient for the 0th degree or it might not. Should probably adapt Index and IndexMut trait.
let l = self.0.len();
if l == 0 {
l
} else {
l - 1
}
}
/// Polynomial is zero if all coefficients are 0
pub fn is_zero(&self) -> bool {
self.0.iter().all(|coeff| coeff.is_zero())
}
// Evaluate polynomial at given `x`
pub fn eval(&self, x: &FieldElement) -> FieldElement {
if x.is_zero() {
self[0].clone()
} else {
// Use Horner's method https://en.wikipedia.org/wiki/Horner%27s_method
// p(x) = a_0 + a_1*x + a_2*x^2 + a_3*x^3 + a_4*x^4 + ...
// p(x) = a_0 + x*(a_1 + x*(a_2 + x*(a_3 + x*(a_4 + ... x*(a_{n-1} + x*a_n))))..
// Reading coefficients from higher to lower degrees.
let mut res = self.0[self.0.len() - 1].clone(); // a_n
for i in (0..=self.0.len() - 2).rev() {
// in each iteration, multiply `res` with `x` and add the coefficient for ith degree, a_i
res = &self.0[i] + &(&res * x);
}
res
}
}
    /// Divides 2 polynomials i.e. `dividend` / `divisor` using long division.
    /// Returns (quotient, remainder)
    ///
    /// # Panics
    /// Panics if `divisor` is the zero polynomial or its leading coefficient is zero
    /// (the leading coefficient must be invertible).
    pub fn long_division(dividend: &Self, divisor: &Self) -> (Self, Self) {
        assert!(!divisor.is_zero());
        assert!(!divisor[divisor.degree()].is_zero());
        // `remainder` is mutated in place; it starts as the dividend and shrinks each round.
        let mut remainder: UnivarPolynomial = dividend.clone();
        let mut quotient = vec![];
        // Inverse of coefficient of highest degree of the divisor polynomial. This will be multiplied
        // with the coefficient of highest degree of the remainder.
        let highest_degree_coeff_inv = divisor[divisor.degree()].inverse();
        let rem_degree = dividend.degree();
        let div_degree = divisor.degree();
        for i in (div_degree..=rem_degree).rev() {
            if remainder[i].is_zero() {
                // Leading term already eliminated: the quotient coefficient is zero.
                quotient.push(FieldElement::zero());
                continue;
            }
            // `q` is the quotient coefficient that cancels remainder's current leading term.
            let q = &highest_degree_coeff_inv * &remainder[i];
            for j in 0..div_degree {
                // Subtract q * divisor, shifted so its leading term lines up with remainder[i].
                // remainder[i] itself is not zeroed; it is discarded by the pops below.
                remainder[i - div_degree + j] -= &(&divisor[j] * &q);
            }
            quotient.push(q);
        }
        // The coefficients of the quotient polynomial were computed from highest to lowest degree.
        quotient.reverse();
        // Remainder's degree will be less than divisor's degree.
        for _ in div_degree..=rem_degree {
            remainder.0.pop();
        }
        (
            UnivarPolynomial(FieldElementVector::from(quotient)),
            remainder,
        )
    }
/// Return product of 2 polynomials. `left` * `right`
pub fn multiply(left: &Self, right: &Self) -> Self {
let mut product = Self::new(left.degree() + right.degree());
for i in 0..=left.degree() {
for j in 0..=right.degree() {
product[i + j] += &left[i] * &right[j];
}
}
product
}
/// Return sum of 2 polynomials. `left` + `right`
#[cfg(feature = "rayon")]
pub fn sum(left: &Self, right: &Self) -> Self {
// The resulting sum polynomial is initialized with the input polynomial of larger degree
let (mut sum_poly, smaller_poly, smaller_poly_degree) = if left.degree() > right.degree() {
(left.clone(), right, right.degree())
} else {
(right.clone(), left, left.degree())
};
// The following unobvious code is to use rayon for parallelization. A simpler (non-parallel)
// version would be `for i in 0..=smaller_poly_degree { sum_poly[i] += &smaller_poly[i]; }`
// Add small degree ([0, smaller_poly_degree]) terms in parallel
let small_degree_terms = (0..=smaller_poly_degree)
.into_par_iter()
.map(|i| &sum_poly[i] + &smaller_poly[i])
.collect::<Vec<FieldElement>>();
// Replace small degree ([0, smaller_poly_degree]) terms in the sum_poly
sum_poly.replace_small_degree_terms(smaller_poly_degree, small_degree_terms.into_iter());
sum_poly
}
#[cfg(not(feature = "rayon"))]
pub fn sum(left: &Self, right: &Self) -> Self {
// The resulting sum polynomial is initialized with the input polynomial of larger degree
let (mut sum_poly, smaller_poly, smaller_poly_degree) = if left.degree() > right.degree() {
(left.clone(), right, right.degree())
} else {
(right.clone(), left, left.degree())
};
// The following unobvious code is to use rayon for parallelization. A simpler (non-parallel)
// version would be `for i in 0..=smaller_poly_degree { sum_poly[i] += &smaller_poly[i]; }`
// Add small degree ([0, smaller_poly_degree]) terms in parallel
let small_degree_terms = (0..=smaller_poly_degree)
.into_iter()
.map(|i| &sum_poly[i] + &smaller_poly[i])
.collect::<Vec<FieldElement>>();
// Replace small degree ([0, smaller_poly_degree]) terms in the sum_poly
sum_poly.replace_small_degree_terms(smaller_poly_degree, small_degree_terms.into_iter());
sum_poly
}
/// Return difference of 2 polynomials. `left` - `right`
pub fn difference(left: &Self, right: &Self) -> Self {
let left_degree = left.degree();
let right_degree = right.degree();
let diff_poly_degree = max(left_degree, right_degree);
let mut diff = Self::new(diff_poly_degree);
for i in 0..=diff_poly_degree {
if i <= left_degree {
diff[i] = left[i].clone();
}
if i <= right_degree {
diff[i] -= &right[i];
}
}
diff
}
pub fn multiply_by_constant(&self, constant: &FieldElement) -> UnivarPolynomial {
let mut new_poly = self.clone();
for i in 0..=self.degree() {
new_poly[i] = constant * &self[i];
}
new_poly
}
pub fn multiply_by_monic_monomial(&self, monomial_degree: u64) -> UnivarPolynomial {
let mut new_poly = self.clone();
let new_poly_beginning = FieldElementVector::new(monomial_degree as usize);
new_poly.0.splice(0..0, new_poly_beginning);
new_poly
}
    /// Replace terms of `self` from degree 0 to `till_degree` with coefficients in `replace_with`.
    /// Assumes `replace_with` will yield at least `till_degree` + 1 coefficients
    /// (otherwise `splice` shortens the coefficient vector).
    fn replace_small_degree_terms<I: IntoIterator<Item = FieldElement>>(
        &mut self,
        till_degree: usize,
        replace_with: I,
    ) {
        self.0.splice(0..=till_degree, replace_with)
    }
}
// Indexing a polynomial reads/writes the coefficient of the given degree.
// Out-of-range indices panic (delegates to the inner vector's indexing).
impl Index<usize> for UnivarPolynomial {
    type Output = FieldElement;
    fn index(&self, idx: usize) -> &FieldElement {
        &self.0[idx]
    }
}
impl IndexMut<usize> for UnivarPolynomial {
    fn index_mut(&mut self, idx: usize) -> &mut FieldElement {
        &mut self.0[idx]
    }
}
// NOTE(review): marker impl asserting that the derived `PartialEq` on the
// coefficient vector is a total equivalence relation.
impl Eq for UnivarPolynomial {}
// Operator sugar: `&a + &b`, `&a - &b`, `&a * &b` delegate to the associated functions.
impl<'a> Add<&'a UnivarPolynomial> for &UnivarPolynomial {
    type Output = UnivarPolynomial;
    fn add(self, other: &'a UnivarPolynomial) -> UnivarPolynomial {
        UnivarPolynomial::sum(self, other)
    }
}
impl<'a> Sub<&'a UnivarPolynomial> for &UnivarPolynomial {
    type Output = UnivarPolynomial;
    fn sub(self, other: &'a UnivarPolynomial) -> UnivarPolynomial {
        UnivarPolynomial::difference(self, other)
    }
}
impl<'a> Mul<&'a UnivarPolynomial> for &UnivarPolynomial {
    type Output = UnivarPolynomial;
    fn mul(self, other: &'a UnivarPolynomial) -> UnivarPolynomial {
        UnivarPolynomial::multiply(self, other)
    }
}
/// Creates a new univariate polynomial from given coefficients from lower to higher degree terms
/// (the first argument is the constant term). Relies on `From<Vec<FieldElement>>`
/// for `FieldElementVector` via the `.into()` call.
#[macro_export]
macro_rules! univar_polynomial {
    ( $( $elem:expr ),* ) => {
        {
            let mut coeffs = vec![];
            $(
                coeffs.push($elem);
            )*
            UnivarPolynomial(coeffs.into())
        }
    };
}
#[cfg(test)]
mod tests {
use super::*;
use rand::Rng;
use std::time::{Duration, Instant};
#[test]
fn test_poly() {
let degree = 10;
let poly1 = UnivarPolynomial(FieldElementVector::random(degree + 1));
assert!(!poly1.is_zero());
let poly2 = UnivarPolynomial(FieldElementVector::new(degree + 1));
assert!(poly2.is_zero());
let poly3 = UnivarPolynomial::new(degree);
assert!(poly3.is_zero());
let poly4 = UnivarPolynomial::new_constant(FieldElement::from(100u64));
assert!(!poly4.is_zero());
assert_eq!(poly4.degree(), 0);
assert_eq!(poly4[0], FieldElement::from(100u64));
}
#[test]
fn test_create_poly_from_macro() {
let poly = univar_polynomial!(
FieldElement::one(),
FieldElement::zero(),
FieldElement::from(87u64),
-FieldElement::one(),
FieldElement::from(300u64)
);
assert_eq!(poly.degree(), 4);
assert_eq!(poly[0], FieldElement::one());
assert_eq!(poly[1], FieldElement::zero());
assert_eq!(poly[2], FieldElement::from(87u64));
assert_eq!(poly[3], FieldElement::minus_one());
assert_eq!(poly[4], FieldElement::from(300u64));
}
#[test]
fn test_poly_long_div() {
// x^2 - 1 / x + 1 = x - 1
// dividend = -1 + x^2
let c1 = vec![
FieldElement::minus_one(),
FieldElement::zero(),
FieldElement::one(),
];
let dividend = UnivarPolynomial(FieldElementVector::from(c1));
// divisor = 1 + x
let c2 = vec![FieldElement::one(), FieldElement::one()];
let divisor = UnivarPolynomial(FieldElementVector::from(c2));
let (quotient, rem) = UnivarPolynomial::long_division(÷nd, &divisor);
println!("Quotient={:?}", "ient);
// quotient = -1 + x
assert_eq!(quotient.degree(), 1);
assert_eq!(quotient[0], FieldElement::minus_one());
assert_eq!(quotient[1], FieldElement::one());
assert_eq!(rem.degree(), 0);
let quotient = UnivarPolynomial::long_division(÷nd, "ient).0;
println!("Quotient={:?}", "ient);
// quotient = 1 + x
assert_eq!(quotient.degree(), 1);
assert_eq!(quotient[0], FieldElement::one());
assert_eq!(quotient[1], FieldElement::one());
// 2x^2 + 3x + 1 / x + 1 = 2x + 1
// dividend = 1 + 3x + 2x^2
let c1 = vec![
FieldElement::one(),
FieldElement::from(3u64),
FieldElement::from(2u64),
];
let dividend = UnivarPolynomial(FieldElementVector::from(c1));
// divisor = 1 + x
let c2 = vec![FieldElement::one(), FieldElement::one()];
let divisor = UnivarPolynomial(FieldElementVector::from(c2));
let (quotient, rem) = UnivarPolynomial::long_division(÷nd, &divisor);
println!("Quotient={:?}", "ient);
// quotient = 1 + 2x
assert_eq!(quotient.degree(), 1);
assert_eq!(quotient[0], FieldElement::one());
assert_eq!(quotient[1], FieldElement::from(2u64));
assert_eq!(rem.degree(), 0);
// 4x - 4 / x - 1 = 4
// dividend = -4 + 4x
let c1 = vec![-FieldElement::from(4u64), FieldElement::from(4u64)];
let dividend = UnivarPolynomial(FieldElementVector::from(c1));
// divisor = -1 + x
let c2 = vec![FieldElement::minus_one(), FieldElement::one()];
let divisor = UnivarPolynomial(FieldElementVector::from(c2));
let (quotient, rem) = UnivarPolynomial::long_division(÷nd, &divisor);
println!("Quotient={:?}", "ient);
// quotient = 4
assert_eq!(quotient.degree(), 0);
assert_eq!(quotient[0], FieldElement::from(4u64));
assert_eq!(rem.degree(), 0);
// x^5 + x^3 + 4x^2 + 4 / x^2 + 1 = x^3 + 4
// dividend = 4 + 4x^2 + x^3 + x^5
let c1 = vec![
FieldElement::from(4u64),
FieldElement::zero(),
FieldElement::from(4u64),
FieldElement::one(),
FieldElement::zero(),
FieldElement::one(),
];
let dividend = UnivarPolynomial(FieldElementVector::from(c1));
// divisor = 1 + x^2
let c2 = vec![
FieldElement::one(),
FieldElement::zero(),
FieldElement::one(),
];
let divisor = UnivarPolynomial(FieldElementVector::from(c2));
let (quotient, rem) = UnivarPolynomial::long_division(÷nd, &divisor);
println!("Quotient={:?}", "ient);
// quotient = 4 + x^3
assert_eq!(quotient.degree(), 3);
assert_eq!(quotient[0], FieldElement::from(4u64));
assert_eq!(quotient[1], FieldElement::zero());
assert_eq!(quotient[2], FieldElement::zero());
assert_eq!(quotient[3], FieldElement::one());
assert_eq!(rem.degree(), 1);
// 2x^4 - 40x^3 + 3x^2 - 56x - 80 / x - 20 = 2x^3 + 3x + 4
// dividend = -80 - 56x + 3x^2 - 40x^3 + 2x^4
let c1 = vec![
-FieldElement::from(80u64),
-FieldElement::from(56u64),
FieldElement::from(3u64),
-FieldElement::from(40u64),
FieldElement::from(2u64),
];
let dividend = UnivarPolynomial(FieldElementVector::from(c1));
// divisor = -20 + x
let c2 = vec![-FieldElement::from(20), FieldElement::one()];
let divisor = UnivarPolynomial(FieldElementVector::from(c2));
let (quotient, rem) = UnivarPolynomial::long_division(÷nd, &divisor);
println!("Quotient={:?}", "ient);
// quotient = 4 + 3x + 2x^3
assert_eq!(quotient.degree(), 3);
assert_eq!(quotient[0], FieldElement::from(4u64));
assert_eq!(quotient[1], FieldElement::from(3u64));
assert_eq!(quotient[2], FieldElement::zero());
assert_eq!(quotient[3], FieldElement::from(2u64));
assert_eq!(rem.degree(), 0);
}
#[test]
fn test_poly_multiply() {
// (x + 1) * (x - 1) = x^2 - 1
// x + 1
let left = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::one(),
FieldElement::one(),
]));
// -1 + x
let right = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::minus_one(),
FieldElement::one(),
]));
let product = UnivarPolynomial::multiply(&left, &right);
// product = -1 + x^2
assert_eq!(product.degree(), 2);
assert_eq!(product[0], FieldElement::minus_one());
assert_eq!(product[1], FieldElement::zero());
assert_eq!(product[2], FieldElement::one());
// Test overloaded operator
assert_eq!(product, &left * &right);
// (x + 1) * (2x + 1) = 2x^2 + 3x + 1
// 1 + x
let left = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::one(),
FieldElement::one(),
]));
// 1 + 2x
let right = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::one(),
FieldElement::from(2u64),
]));
let product = UnivarPolynomial::multiply(&left, &right);
// product = 2x^2 + 3x + 1
assert_eq!(product.degree(), 2);
assert_eq!(product[0], FieldElement::one());
assert_eq!(product[1], FieldElement::from(3u64));
assert_eq!(product[2], FieldElement::from(2u64));
// Test overloaded operator
assert_eq!(product, &left * &right);
// (x^2 + 1) * (x^3 + 4) = x^5 + x^3 + 4x^2 + 4
// 1 + x^2
let left = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::one(),
FieldElement::zero(),
FieldElement::one(),
]));
// 4 + x^3
let right = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::from(4u64),
FieldElement::zero(),
FieldElement::zero(),
FieldElement::one(),
]));
let product = UnivarPolynomial::multiply(&left, &right);
// 4 + 4x^2 + x^3 + x^5
assert_eq!(product.degree(), 5);
assert_eq!(product[0], FieldElement::from(4u64));
assert_eq!(product[1], FieldElement::zero());
assert_eq!(product[2], FieldElement::from(4u64));
assert_eq!(product[3], FieldElement::one());
assert_eq!(product[4], FieldElement::zero());
assert_eq!(product[5], FieldElement::one());
// Test overloaded operator
assert_eq!(product, &left * &right);
}
#[test]
fn test_poly_rem() {
// x^2 - 5 / x + 1 => q = x - 1, r = -4
// dividend = -5 + x^2
let c1 = vec![
-FieldElement::from(5u64),
FieldElement::zero(),
FieldElement::one(),
];
let dividend = UnivarPolynomial(FieldElementVector::from(c1));
// divisor = 1 + x
let c2 = vec![FieldElement::one(), FieldElement::one()];
let divisor = UnivarPolynomial(FieldElementVector::from(c2));
let (quotient, remainder) = UnivarPolynomial::long_division(÷nd, &divisor);
// quotient = -1 + x
assert_eq!(quotient.degree(), 1);
assert_eq!(quotient[0], FieldElement::minus_one());
assert_eq!(quotient[1], FieldElement::one());
// remainder = -4
assert_eq!(remainder.degree(), 0);
assert_eq!(remainder[0], -FieldElement::from(4u64));
// x^5 + 2x^3 + 4x^2 + 4 / x^2 + 1 = q = x^3 + x + 4, r = -x
// dividend = 4 + 4x^2 + 2x^3 + x^5
let c1 = vec![
FieldElement::from(4u64),
FieldElement::zero(),
FieldElement::from(4u64),
FieldElement::from(2u64),
FieldElement::zero(),
FieldElement::one(),
];
let dividend = UnivarPolynomial(FieldElementVector::from(c1));
// divisor = 1 + x^2
let c2 = vec![
FieldElement::one(),
FieldElement::zero(),
FieldElement::one(),
];
let divisor = UnivarPolynomial(FieldElementVector::from(c2));
let (quotient, remainder) = UnivarPolynomial::long_division(÷nd, &divisor);
// quotient = 4 + x^3
assert_eq!(quotient.degree(), 3);
assert_eq!(quotient[0], FieldElement::from(4u64));
assert_eq!(quotient[1], FieldElement::one());
assert_eq!(quotient[2], FieldElement::zero());
assert_eq!(quotient[3], FieldElement::one());
assert_eq!(remainder.degree(), 1);
assert_eq!(remainder[0], FieldElement::zero());
assert_eq!(remainder[1], FieldElement::minus_one());
}
#[test]
fn test_random_poly_sum_difference() {
// Test sum and difference of randomly generated polynomials.
let num_test_cases = 100;
let mut rng = rand::thread_rng();
let start = Instant::now();
for _ in 0..num_test_cases {
let left = UnivarPolynomial::random(rng.gen_range(1, 100));
let right = UnivarPolynomial::random(rng.gen_range(1, 100));
let sum = UnivarPolynomial::sum(&left, &right);
// sum is commutative
assert_eq!(sum, UnivarPolynomial::sum(&right, &left));
// Test overloaded operator
assert_eq!(sum, &left + &right);
// sum - left == right
let mut diff_1 = UnivarPolynomial::difference(&sum, &right);
// Test overloaded operator
assert_eq!(diff_1, &sum - &right);
// Since degree of difference is same as degree of `sum` but the higher degree coeffs
// of difference will be 0. Remove those 0s (after checking that they really are 0) and
// then do equality comparison with `left`
while diff_1.degree() > left.degree() {
let c = diff_1.0.pop().unwrap();
assert!(c.is_zero());
}
assert_eq!(diff_1, left);
// sum - right == left
let mut diff_2 = UnivarPolynomial::difference(&sum, &left);
// Test overloaded operator
assert_eq!(diff_2, &sum - &left);
// Since degree of difference is same as degree of `sum` but the higher degree coeffs
// of difference will be 0. Remove those 0s (after checking that they really are 0) and
// then do equality comparison with `right`
while diff_2.degree() > right.degree() {
let c = diff_2.0.pop().unwrap();
assert!(c.is_zero());
}
assert_eq!(diff_2, right);
}
println!(
"Sum diff time for {} elems = {:?}",
num_test_cases,
start.elapsed()
);
}
#[test]
fn test_random_poly_long_div() {
// Multiply 2 random polynomials and then use the result to check long division
let num_test_cases = 100;
let mut rng = rand::thread_rng();
for _ in 0..num_test_cases {
let left = UnivarPolynomial::random(rng.gen_range(1, 100));
let right = UnivarPolynomial::random(rng.gen_range(1, 100));
let product = UnivarPolynomial::multiply(&left, &right);
// product / left == right
let quotient_1 = UnivarPolynomial::long_division(&product, &left).0;
assert_eq!(quotient_1, right);
// product / right == left
let quotient_2 = UnivarPolynomial::long_division(&product, &right).0;
assert_eq!(quotient_2, left);
// Test overloaded operator
assert_eq!(product, &left * &right);
}
}
#[test]
fn test_random_poly_long_div_remainder() {
// Divide 2 random polynomials and check that the quotient and remainder are correct using
// the relation dividend = divisor * quotient + remainder
let num_test_cases = 100;
let mut rng = rand::thread_rng();
for _ in 0..num_test_cases {
let d_1: usize = rng.gen_range(1, 100);
let d_2: usize = rng.gen_range(1, 100);
let (dividend, divisor) = if d_1 > d_2 {
(UnivarPolynomial::random(d_1), UnivarPolynomial::random(d_2))
} else {
(UnivarPolynomial::random(d_2), UnivarPolynomial::random(d_1))
};
// dividend / divisor => quotient and remainder
let (quotient, remainder) = UnivarPolynomial::long_division(÷nd, &divisor);
// dividend = divisor * quotient + remainder
// div_quo = divisor * quotient
let div_quo = UnivarPolynomial::multiply(&divisor, "ient);
// expected_dividend = div_quo + remainder
let expected_dividend = UnivarPolynomial::sum(&div_quo, &remainder);
assert_eq!(expected_dividend, dividend);
}
}
#[test]
fn test_poly_from_given_roots() {
// Check resulting polynomial is of correct degree and polynomial becomes 0 at each root
let num_test_cases = 100;
let mut rng = rand::thread_rng();
let mut start = Instant::now();
for _ in 0..num_test_cases {
let num_roots = rng.gen_range(2, 30);
let roots = FieldElementVector::random(num_roots);
let poly = UnivarPolynomial::new_with_roots(roots.as_slice());
assert_eq!(poly.degree(), num_roots);
for r in roots {
assert_eq!(poly.eval(&r), FieldElement::zero())
}
}
println!("Time for {} elems = {:?}", num_test_cases, start.elapsed());
}
#[test]
fn test_multiply_with_constant() {
// 9 + 2x + 75x^2 + 128x^3
let orig = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::from(9u64),
FieldElement::from(2u64),
FieldElement::from(75u64),
FieldElement::from(128u64),
]));
let c = FieldElement::from(3u64);
let new = orig.multiply_by_constant(&c);
assert_eq!(new.degree(), 3);
assert_eq!(new[0], FieldElement::from(27));
assert_eq!(new[1], FieldElement::from(6));
assert_eq!(new[2], FieldElement::from(225));
assert_eq!(new[3], FieldElement::from(384));
// 1 + 4x^2 + 5x^3 + 18x^6
let orig = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::one(),
FieldElement::zero(),
FieldElement::from(4u64),
FieldElement::from(5u64),
FieldElement::zero(),
FieldElement::zero(),
FieldElement::from(18u64),
]));
let c = FieldElement::from(10u64);
let new = orig.multiply_by_constant(&c);
assert_eq!(new.degree(), 6);
assert_eq!(new[0], FieldElement::from(10));
assert_eq!(new[1], FieldElement::zero());
assert_eq!(new[2], FieldElement::from(40));
assert_eq!(new[3], FieldElement::from(50));
assert_eq!(new[4], FieldElement::zero());
assert_eq!(new[5], FieldElement::zero());
assert_eq!(new[6], FieldElement::from(180));
// take a random polynomial, multiply it with a constant, then multiply it with inverse of
// the same constant. result should be same as original
let random_poly = UnivarPolynomial::random(10);
let c = FieldElement::random();
let c_inv = c.inverse();
assert_eq!(
random_poly,
random_poly
.multiply_by_constant(&c)
.multiply_by_constant(&c_inv)
);
}
#[test]
fn test_multiply_with_monic_monomial() {
// 9 + 2x + 75x^2 + 128x^3
let orig = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::from(9u64),
FieldElement::from(2u64),
FieldElement::from(75u64),
FieldElement::from(128u64),
]));
let monomial_degree = 0;
let new = orig.multiply_by_monic_monomial(monomial_degree);
assert_eq!(new, orig);
let monomial_degree = 1;
let new = orig.multiply_by_monic_monomial(monomial_degree);
assert_eq!(new.degree(), 4);
assert_eq!(new[0], FieldElement::zero());
assert_eq!(new[1], FieldElement::from(9u64));
assert_eq!(new[2], FieldElement::from(2u64));
assert_eq!(new[3], FieldElement::from(75u64));
assert_eq!(new[4], FieldElement::from(128u64));
let monomial_degree = 2;
let new = orig.multiply_by_monic_monomial(monomial_degree);
assert_eq!(new.degree(), 5);
assert_eq!(new[0], FieldElement::zero());
assert_eq!(new[1], FieldElement::zero());
assert_eq!(new[2], FieldElement::from(9u64));
assert_eq!(new[3], FieldElement::from(2u64));
assert_eq!(new[4], FieldElement::from(75u64));
assert_eq!(new[5], FieldElement::from(128u64));
// 1 + 4x^2 + 5x^3 + 18x^6
let orig = UnivarPolynomial(FieldElementVector::from(vec![
FieldElement::one(),
FieldElement::zero(),
FieldElement::from(4u64),
FieldElement::from(5u64),
FieldElement::zero(),
FieldElement::zero(),
FieldElement::from(18u64),
]));
let monomial_degree = 0;
let new = orig.multiply_by_monic_monomial(monomial_degree);
assert_eq!(new, orig);
let monomial_degree = 1;
let new = orig.multiply_by_monic_monomial(monomial_degree);
assert_eq!(new.degree(), 7);
assert_eq!(new[0], FieldElement::zero());
assert_eq!(new[1], FieldElement::one());
assert_eq!(new[2], FieldElement::zero());
assert_eq!(new[3], FieldElement::from(4));
assert_eq!(new[4], FieldElement::from(5));
assert_eq!(new[5], FieldElement::zero());
assert_eq!(new[6], FieldElement::zero());
assert_eq!(new[7], FieldElement::from(18));
let monomial_degree = 2;
let new = orig.multiply_by_monic_monomial(monomial_degree);
assert_eq!(new.degree(), 8);
assert_eq!(new[0], FieldElement::zero());
assert_eq!(new[1], FieldElement::zero());
assert_eq!(new[2], FieldElement::one());
assert_eq!(new[3], FieldElement::zero());
assert_eq!(new[4], FieldElement::from(4));
assert_eq!(new[5], FieldElement::from(5));
assert_eq!(new[6], FieldElement::zero());
assert_eq!(new[7], FieldElement::zero());
assert_eq!(new[8], FieldElement::from(18));
}
}
| 37.884524 | 141 | 0.565597 |
4a66fe3e3b905b3378dc85983bc8bac89228fcaf | 1,694 | //! Possibly-infinite values.
use std::ops::{Add, AddAssign};
/// A possibly (negatively) infinite value of type `T`.
///
/// NOTE: the derived `PartialOrd`/`Ord` depend on the variant declaration order,
/// giving `NegInf < Finite(_) < PosInf` (finite values compare via `T`'s ordering);
/// the `partial_ord` test below relies on exactly this — do not reorder variants.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Infinitable<T> {
    /// Negative infinity.
    NegInf,
    /// A finite value `T`.
    Finite(T),
    /// Positive infinity.
    PosInf,
}
impl<T> Infinitable<T> {
    /// Applies `f` to the finite payload, leaving either infinity untouched
    /// (though the value's type parameter still changes from `T` to `U`).
    pub fn map<F, U>(self, f: F) -> Infinitable<U>
    where
        F: FnOnce(T) -> U,
    {
        match self {
            Infinitable::Finite(value) => Infinitable::Finite(f(value)),
            Infinitable::NegInf => Infinitable::NegInf,
            Infinitable::PosInf => Infinitable::PosInf,
        }
    }
}
/// `x + rhs` adds to a finite payload; infinities absorb the addition
/// (via `map`, which leaves them unchanged).
impl<T, U> Add<U> for Infinitable<T>
where
    T: Add<U>,
{
    type Output = Infinitable<T::Output>;
    fn add(self, rhs: U) -> Self::Output {
        self.map(|x| x + rhs)
    }
}
/// In-place addition, mirroring the `Add` impl above.
impl<T, U> AddAssign<U> for Infinitable<T>
where
    T: AddAssign<U>,
{
    fn add_assign(&mut self, rhs: U) {
        match self {
            // Only a finite payload is actually updated.
            Infinitable::Finite(x) => x.add_assign(rhs),
            // Adding a finite amount to either infinity leaves it unchanged.
            Infinitable::NegInf | Infinitable::PosInf => {}
        }
    }
}
// Ordering follows variant declaration order: NegInf < Finite(_) < PosInf.
#[test]
fn partial_ord() {
    assert_eq!(Infinitable::Finite(3), Infinitable::Finite(3));
    assert!(Infinitable::Finite(3) < Infinitable::Finite(4));
    assert!(Infinitable::Finite(5) > Infinitable::Finite(4));
    assert!(Infinitable::PosInf > Infinitable::Finite(i32::MAX));
    assert!(Infinitable::NegInf < Infinitable::Finite(i32::MIN));
    assert!(Infinitable::<i32>::NegInf < Infinitable::PosInf);
}
// Addition only affects the finite payload; infinities absorb it.
#[test]
fn add() {
    assert_eq!(Infinitable::Finite(3) + 4, Infinitable::Finite(7));
    assert_eq!(Infinitable::<i32>::PosInf + 4, Infinitable::PosInf);
    assert_eq!(Infinitable::<i32>::NegInf + 4, Infinitable::NegInf);
}
| 24.550725 | 67 | 0.637544 |
f4c805add76fe2f3723424c602ad3f53b79d4e61 | 7,703 | use crate::errors::{InternalErrorSource, SicCliOpsError};
use crate::TResult;
use sic_image_engine::engine::{EnvItem, Instr};
use sic_image_engine::wrapper::filter_type::FilterTypeWrap;
use sic_image_engine::wrapper::image_path::ImageFromPath;
use sic_image_engine::wrapper::overlay::OverlayInputs;
use sic_image_engine::ImgOp;
use sic_parser::errors::SicParserError;
use sic_parser::value_parser::{Describable, ParseInputsFromIter};
use std::fmt::Debug;
use std::str::FromStr;
/// The enumeration of all supported operations.
///
/// `strum` derives map each variant to/from its kebab-case name (e.g.
/// `FlipHorizontal` <-> "flip-horizontal"), which `try_from_name` and `as_str` use.
#[derive(
    Debug, Copy, Clone, Hash, Eq, PartialEq, AsStaticStr, EnumIter, EnumString, EnumVariantNames,
)]
#[strum(serialize_all = "kebab_case")]
pub enum OperationId {
    // image operations
    Blur,
    Brighten,
    Contrast,
    Crop,
    Diff,
    #[cfg(feature = "imageproc-ops")]
    DrawText,
    Filter3x3,
    FlipHorizontal,
    FlipVertical,
    Grayscale,
    HueRotate,
    HorizontalGradient,
    Invert,
    Overlay,
    Resize,
    Rotate90,
    Rotate180,
    Rotate270,
    #[cfg(feature = "imageproc-ops")]
    Threshold,
    Unsharpen,
    VerticalGradient,
    // modifiers
    PreserveAspectRatio,
    SamplingFilter,
}
impl OperationId {
    /// A string representation for each operation.
    pub fn as_str(self) -> &'static str {
        use strum::AsStaticRef;
        self.as_static()
    }
    /// Looks an operation up by its kebab-case name (via the strum `FromStr` derive),
    /// mapping an unknown name to an internal "no matching operator" error.
    pub fn try_from_name(input: &str) -> TResult<Self> {
        OperationId::from_str(input)
            .map_err(|_err| SicCliOpsError::InternalError(InternalErrorSource::NoMatchingOperator))
    }
    /// Provides the number of arguments an operation takes.
    /// Used to unify arguments together.
    /// E.g. (without accounting for the requirement of having incremental indices as well),
    /// say we receive for resize the values 10, 20, 100 and 100. With the number of values we know
    /// that each resize operation takes two arguments, not four. So it could be that there are
    /// two operations, namely `resize 10 20` and `resize 100 100`. We do need to take some other
    /// conditions into account, but they are not relevant for this particular method =).
    pub fn takes_number_of_arguments(self) -> usize {
        match self {
            // image operations
            OperationId::Blur => 1,
            OperationId::Brighten => 1,
            OperationId::Contrast => 1,
            OperationId::Crop => 4,
            OperationId::Diff => 1,
            #[cfg(feature = "imageproc-ops")]
            OperationId::DrawText => 5,
            OperationId::Filter3x3 => 9,
            OperationId::FlipHorizontal => 0,
            OperationId::FlipVertical => 0,
            OperationId::Grayscale => 0,
            OperationId::HueRotate => 1,
            OperationId::HorizontalGradient => 2,
            OperationId::Invert => 0,
            OperationId::Overlay => 3,
            OperationId::Resize => 2,
            OperationId::Rotate90 => 0,
            OperationId::Rotate180 => 0,
            OperationId::Rotate270 => 0,
            #[cfg(feature = "imageproc-ops")]
            OperationId::Threshold => 0,
            OperationId::Unsharpen => 2,
            OperationId::VerticalGradient => 2,
            // image operation modifiers
            OperationId::PreserveAspectRatio => 1,
            OperationId::SamplingFilter => 1,
        }
    }
}
// Parses `$iterable` into a value of `$ty` via `ParseInputsFromIter`, converting a
// parse failure into `SicCliOpsError::UnableToParseValueOfType` with the stringified
// type name attached for a readable error message.
macro_rules! parse_inputs_by_type {
    ($iterable:expr, $ty:ty) => {{
        let input: Result<$ty, SicCliOpsError> =
            ParseInputsFromIter::parse($iterable).map_err(|err| {
                SicCliOpsError::UnableToParseValueOfType {
                    err,
                    typ: stringify!($ty).to_string(),
                }
            });
        input
    }};
}
impl OperationId {
    /// Constructs instructions for image operations which are taken as input by the image engine.
    ///
    /// Each arm parses this operation's raw CLI inputs into its typed argument(s) via
    /// `parse_inputs_by_type!`, propagating parse failures with `?`. Plain image
    /// operations become `Instr::Operation`; the modifier variants at the bottom
    /// become `Instr::EnvAdd` entries instead.
    pub fn create_instruction<'a, T>(self, inputs: T) -> Result<Instr, SicCliOpsError>
    where
        T: IntoIterator,
        T::Item: Into<Describable<'a>> + std::fmt::Debug,
    {
        let stmt = match self {
            // image operations
            OperationId::Blur => Instr::Operation(ImgOp::Blur(parse_inputs_by_type!(inputs, f32)?)),
            OperationId::Brighten => {
                Instr::Operation(ImgOp::Brighten(parse_inputs_by_type!(inputs, i32)?))
            }
            OperationId::Contrast => {
                Instr::Operation(ImgOp::Contrast(parse_inputs_by_type!(inputs, f32)?))
            }
            OperationId::Crop => Instr::Operation(ImgOp::Crop(parse_inputs_by_type!(
                inputs,
                (u32, u32, u32, u32)
            )?)),
            OperationId::Diff => {
                Instr::Operation(ImgOp::Diff(parse_inputs_by_type!(inputs, ImageFromPath)?))
            }
            #[cfg(feature = "imageproc-ops")]
            OperationId::DrawText => {
                use sic_image_engine::wrapper::draw_text_inner::DrawTextInner;
                Instr::Operation(ImgOp::DrawText(parse_inputs_by_type!(
                    inputs,
                    DrawTextInner
                )?))
            }
            OperationId::Filter3x3 => {
                Instr::Operation(ImgOp::Filter3x3(parse_inputs_by_type!(inputs, [f32; 9])?))
            }
            OperationId::FlipHorizontal => Instr::Operation(ImgOp::FlipHorizontal),
            OperationId::FlipVertical => Instr::Operation(ImgOp::FlipVertical),
            OperationId::Grayscale => Instr::Operation(ImgOp::Grayscale),
            OperationId::HueRotate => {
                Instr::Operation(ImgOp::HueRotate(parse_inputs_by_type!(inputs, i32)?))
            }
            OperationId::HorizontalGradient => {
                use sic_image_engine::wrapper::gradient_input::GradientInput;
                Instr::Operation(ImgOp::HorizontalGradient(parse_inputs_by_type!(
                    inputs,
                    GradientInput
                )?))
            }
            OperationId::Invert => Instr::Operation(ImgOp::Invert),
            OperationId::Overlay => Instr::Operation(ImgOp::Overlay(parse_inputs_by_type!(
                inputs,
                OverlayInputs
            )?)),
            OperationId::Resize => {
                Instr::Operation(ImgOp::Resize(parse_inputs_by_type!(inputs, (u32, u32))?))
            }
            OperationId::Rotate90 => Instr::Operation(ImgOp::Rotate90),
            OperationId::Rotate180 => Instr::Operation(ImgOp::Rotate180),
            OperationId::Rotate270 => Instr::Operation(ImgOp::Rotate270),
            #[cfg(feature = "imageproc-ops")]
            OperationId::Threshold => Instr::Operation(ImgOp::Threshold),
            OperationId::Unsharpen => {
                Instr::Operation(ImgOp::Unsharpen(parse_inputs_by_type!(inputs, (f32, i32))?))
            }
            OperationId::VerticalGradient => {
                use sic_image_engine::wrapper::gradient_input::GradientInput;
                Instr::Operation(ImgOp::VerticalGradient(parse_inputs_by_type!(
                    inputs,
                    GradientInput
                )?))
            }
            // image operation modifiers
            OperationId::PreserveAspectRatio => Instr::EnvAdd(EnvItem::PreserveAspectRatio(
                parse_inputs_by_type!(inputs, bool)?,
            )),
            OperationId::SamplingFilter => {
                // Parsed in two steps: the raw string, then the named filter it denotes.
                let input = parse_inputs_by_type!(inputs, String)?;
                let filter = FilterTypeWrap::try_from_str(&input)
                    .map_err(SicParserError::FilterTypeError)?;
                Instr::EnvAdd(EnvItem::CustomSamplingFilter(filter))
            }
        };
        Ok(stmt)
    }
}
| 38.515 | 103 | 0.584188 |
01b6ca0b2e74305f7a994b8fa0f16ab7c31468aa | 281 | use actix_web::{HttpResponse, web, get};
use actix_web::client::HttpError;
/// Registers this module's routes (currently just `hello`) on the
/// actix-web service configuration. Intended to be passed to
/// `App::configure`.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(hello);
}
/// Handler for `GET /hello`: always responds `200 OK` with a static
/// greeting body.
#[get("/hello")]
pub async fn hello() -> Result<HttpResponse, HttpError> {
    Ok(HttpResponse::Ok().body("hello from actix-web"))
} | 25.545455 | 57 | 0.683274 |
72b6645d67bc4d3c0a15ec1d4be7f37507d51f52 | 1,264 | // Copyright 2016 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The block chain itself, validates and accepts new blocks, handles reorgs.
#![deny(non_upper_case_globals)]
#![deny(non_camel_case_types)]
#![deny(non_snake_case)]
#![deny(unused_mut)]
#![warn(missing_docs)]
#[macro_use]
extern crate bitflags;
extern crate byteorder;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate slog;
extern crate time;
extern crate grin_core as core;
extern crate grin_store;
extern crate grin_util as util;
mod chain;
pub mod pipe;
pub mod store;
pub mod sumtree;
pub mod types;
// Re-export the base interface
pub use chain::Chain;
pub use types::{ChainAdapter, ChainStore, Error, Options, Tip};
| 26.893617 | 77 | 0.753956 |
6aa684b1c3d014acde3efebc0174c173f710ddb1 | 1,623 | #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(box_patterns)]
#![feature(core_intrinsics)]
#![feature(crate_visibility_modifier)]
#![feature(drain_filter)]
#![feature(in_band_lifetimes)]
#![feature(libc)]
#![feature(nll)]
#![feature(proc_macro_internals)]
#![feature(proc_macro_quote)]
#![feature(rustc_private)]
#![feature(slice_patterns)]
#![feature(specialization)]
#![feature(stmt_expr_attributes)]
#![recursion_limit="256"]
extern crate libc;
extern crate proc_macro;
#[macro_use]
extern crate rustc;
#[macro_use]
extern crate rustc_data_structures;
pub mod error_codes;
mod index;
mod encoder;
mod decoder;
mod cstore_impl;
mod schema;
mod native_libs;
mod link_args;
mod foreign_modules;
mod dependency_format;
pub mod creader;
pub mod cstore;
pub mod dynamic_lib;
pub mod locator;
/// Validates that `s` is a legal crate name (non-empty, alphanumerics and
/// underscores only), reporting one diagnostic per violation.
///
/// Diagnostics go to `sess` (span-attached when `sp` is given); with no
/// session available a violation is an internal compiler error (`bug!`).
/// If any errors were reported, compilation is aborted.
pub fn validate_crate_name(
    sess: Option<&rustc::session::Session>,
    s: &str,
    sp: Option<syntax_pos::Span>
) {
    let mut err_count = 0;
    {
        // Route a diagnostic through the session (with or without a span),
        // or treat it as an internal error when no session exists.
        let mut say = |msg: &str| {
            match (sp, sess) {
                (_, None) => bug!("{}", msg),
                (Some(span), Some(sess)) => sess.span_err(span, msg),
                (None, Some(sess)) => sess.err(msg),
            }
            err_count += 1;
        };
        if s.is_empty() {
            say("crate name must not be empty");
        }
        // Report every character that is neither alphanumeric nor '_'.
        for c in s.chars().filter(|&c| !c.is_alphanumeric() && c != '_') {
            say(&format!("invalid character `{}` in crate name: `{}`", c, s));
        }
    }
    if err_count > 0 {
        sess.unwrap().abort_if_errors();
    }
}
| 22.232877 | 78 | 0.600123 |
d924a3b75dde3809d4daf732b2895fdbfa6fb5dd | 652 | // compile-flags: -C opt-level=0
// ignore-emscripten compiled with panic=abort by default
#![crate_type = "lib"]
#![feature(unwind_attributes)]
// Make sure these all do *not* get the attribute.
// We disable optimizations to prevent LLVM from infering the attribute.
// CHECK-NOT: nounwind
// "C" ABI
// pub extern fn foo() {} // FIXME right now we don't abort-on-panic but add `nounwind` nevertheless
// `#[unwind(allowed)]` explicitly opts this "C"-ABI function into
// unwinding; per the CHECK-NOT above, it must not get `nounwind`.
#[unwind(allowed)]
pub extern fn foo_allowed() {}
// "Rust"
// (`extern "Rust"` could be removed as all `fn` get it implicitly; we leave it in for clarity.)
// A plain Rust-ABI function; also expected to stay free of `nounwind`.
pub extern "Rust" fn bar() {}
// Same as `bar`, but with the unwind permission spelled out explicitly.
#[unwind(allowed)]
pub extern "Rust" fn bar_allowed() {}
| 31.047619 | 100 | 0.697853 |
f89108eae6f6a0b903ed26448c415aa2cc1d9c42 | 6,465 | use std::marker::PhantomData;
use hibitset::BitSetLike;
use storage::{DenseVecStorage, TrackChannels, Tracked, UnprotectedStorage};
use storage::TryDefault;
use world::{Component, Index};
/// Wrapper storage that tracks modifications, insertions, and removals of components
/// through an `EventChannel`.
///
/// **Note:** Joining over all components of a `FlaggedStorage`
/// mutably will flag all components.
///
/// What you want to instead is to use `restrict_mut()` to first
/// get the entities which contain the component and then conditionally
/// modify the component after a call to `get_mut_unchecked()` or `get_mut()`.
///
/// # Examples
///
/// ```
/// extern crate specs;
///
/// use specs::prelude::*;
///
/// pub struct Comp(u32);
/// impl Component for Comp {
/// // `FlaggedStorage` acts as a wrapper around another storage.
/// // You can put any store inside of here (e.g. HashMapStorage, VecStorage, etc.)
/// //
/// // It also works as `FlaggedStorage<Self>` and defaults to `DenseVecStorage<Self>`
/// // for the inner storage.
/// type Storage = FlaggedStorage<Self, VecStorage<Self>>;
/// }
///
/// pub struct CompSystem {
/// // These keep track of where you left off in the event channel.
/// modified_id: ReaderId<ModifiedFlag>,
/// inserted_id: ReaderId<InsertedFlag>,
///
/// // The bitsets you want to populate with modification/insertion events.
/// modified: BitSet,
/// inserted: BitSet,
/// }
///
/// impl<'a> System<'a> for CompSystem {
/// type SystemData = (Entities<'a>, WriteStorage<'a, Comp>);
/// fn run(&mut self, (entities, mut comps): Self::SystemData) {
/// // We want to clear the bitset first so we don't have left over events
/// // from the last frame.
/// //
/// // However, if you want to accumulate changes over a couple frames then you
/// // can only clear it when necessary. (This might be useful if you have some
/// // sort of "tick" system in your game and only want to do operations every
/// // 1/4th of a second or something)
/// //
/// // It is not okay to only read the events in an interval though as that could
/// // leave behind events which would end up growing the event ring buffer to
/// // extreme sizes.
/// self.modified.clear();
/// self.inserted.clear();
///
/// // This allows us to use the modification events in a `Join`. Otherwise we
/// // would have to iterate through the events which may not be in order.
/// //
/// // This does not populate the bitset with inserted components, only pre-existing
/// // components that were changed by a `get_mut` call to the storage.
/// comps.populate_modified(&mut self.modified_id, &mut self.modified);
///
/// // This will only include inserted components from last read, note that this
/// // will not include `insert` calls if there already was a pre-existing component.
/// comps.populate_inserted(&mut self.inserted_id, &mut self.inserted);
///
/// // Iterates over all components like normal.
/// for comp in (&comps).join() {
/// // ...
/// }
///
/// // **Never do this**
/// // This will flag all components as modified regardless of whether the inner loop
/// // actually modified the component.
/// //
/// // Only do this if you have other filters, like some other components to filter
/// // out the ones you want to modify.
/// for comp in (&mut comps).join() {
/// // ...
/// }
///
/// // Instead do something like:
///# let condition = true;
/// for (entity, mut comps) in (&*entities, &mut comps.restrict_mut()).join() {
/// if condition { // check whether this component should be modified.
/// let mut comp = comps.get_mut_unchecked();
/// // ...
/// }
/// }
///
/// // To iterate over the modified components:
/// for comp in (&comps, &self.modified).join() {
/// // ...
/// }
///
/// // To iterate over all inserted/modified components;
/// for comp in (&comps, &self.modified & &self.inserted).join() {
/// // ...
/// }
/// }
/// }
///
/// fn main() {
/// let mut world = World::new();
/// world.register::<Comp>();
///
/// // You will want to register the system `ReaderId`s
/// // before adding/modifying/removing any entities and components.
/// //
/// // Otherwise you won't receive any of the modifications until
/// // you start tracking them.
/// let mut comps = world.write_storage::<Comp>();
/// let comp_system = CompSystem {
/// modified_id: comps.track_modified(),
/// inserted_id: comps.track_inserted(),
/// modified: BitSet::new(),
/// inserted: BitSet::new(),
/// };
/// }
/// ```
///
pub struct FlaggedStorage<C, T = DenseVecStorage<C>> {
    // Event channels recording modifications, insertions, and removals.
    trackers: TrackChannels,
    // The wrapped storage that actually holds the component data.
    storage: T,
    // Ties the component type `C` to this storage without storing a `C`.
    phantom: PhantomData<C>,
}
impl<C, T> Default for FlaggedStorage<C, T>
where
    T: TryDefault,
{
    /// Builds a `FlaggedStorage` with fresh (empty) tracking channels and a
    /// default-constructed inner storage.
    fn default() -> Self {
        Self {
            trackers: Default::default(),
            storage: T::unwrap_default(),
            phantom: PhantomData,
        }
    }
}
// Wraps the inner storage, publishing an event on each mutating access so
// `Tracked` readers can observe modifications/insertions/removals.
impl<C: Component, T: UnprotectedStorage<C>> UnprotectedStorage<C> for FlaggedStorage<C, T> {
    unsafe fn clean<B>(&mut self, has: B)
    where
        B: BitSetLike,
    {
        // Cleanup is delegated as-is; no tracking event is published here.
        self.storage.clean(has);
    }

    unsafe fn get(&self, id: Index) -> &C {
        // Read-only access does not produce a modification event.
        self.storage.get(id)
    }

    unsafe fn get_mut(&mut self, id: Index) -> &mut C {
        // calling `.iter()` on an unconstrained mutable storage will flag everything
        // The modification event is published before handing out the mutable
        // reference, even if the caller never actually mutates the component.
        self.trackers.modify.single_write(id.into());
        self.storage.get_mut(id)
    }

    unsafe fn insert(&mut self, id: Index, comp: C) {
        // Publish the insertion event, then store the component.
        self.trackers.insert.single_write(id.into());
        self.storage.insert(id, comp);
    }

    unsafe fn remove(&mut self, id: Index) -> C {
        // Publish the removal event, then take the component out.
        self.trackers.remove.single_write(id.into());
        self.storage.remove(id)
    }
}
// Exposes the event channels so `Tracked`-based readers can subscribe to
// modification/insertion/removal events.
impl<C, T> Tracked for FlaggedStorage<C, T> {
    fn channels(&self) -> &TrackChannels {
        &self.trackers
    }

    fn channels_mut(&mut self) -> &mut TrackChannels {
        &mut self.trackers
    }
}
| 34.758065 | 93 | 0.584841 |
759e8d4ae9e08cbe6afdb9e99f975ff2a32dac1a | 2,394 | use std::fmt;
use std::fmt::Display;
use std::fmt::Formatter;
use super::error::*;
use super::split;
use super::Position;
use super::Token;
use super::TokenVariant;
/// Single-character punctuation tokens recognised by the lexer.
#[derive(Eq, PartialEq, Copy, Clone, Debug)]
pub enum Symbol {
    Equal,      // `=`
    LeftParen,  // `(`
    RightParen, // `)`
    LeftBrace,  // `{`
    RightBrace, // `}`
    Comma,      // `,`
    Colon,      // `:`
    SemiColon,  // `;`
}
impl Symbol {
pub(super) fn lex<'i>(input: &'i str, pos: &mut Position) -> Result<(&'i str, Token<'i>)> {
let tpos = *pos;
if input.starts_with('=') {
pos.col += 1;
Ok((split(input, 1), Symbol::Equal.token(tpos)))
} else if input.starts_with('(') {
pos.col += 1;
Ok((split(input, 1), Symbol::LeftParen.token(tpos)))
} else if input.starts_with(')') {
pos.col += 1;
Ok((split(input, 1), Symbol::RightParen.token(tpos)))
} else if input.starts_with('{') {
pos.col += 1;
Ok((split(input, 1), Symbol::LeftBrace.token(tpos)))
} else if input.starts_with('}') {
pos.col += 1;
Ok((split(input, 1), Symbol::RightBrace.token(tpos)))
} else if input.starts_with(',') {
pos.col += 1;
Ok((split(input, 1), Symbol::Comma.token(tpos)))
} else if input.starts_with(':') {
pos.col += 1;
Ok((split(input, 1), Symbol::Colon.token(tpos)))
} else if input.starts_with(';') {
pos.col += 1;
Ok((split(input, 1), Symbol::SemiColon.token(tpos)))
} else {
Err(Error::not_handled(tpos))
}
}
fn token<'t>(self, pos: Position) -> Token<'t> {
Token {
token: TokenVariant::Symbol(self),
pos,
}
}
}
impl Display for Symbol {
    /// Renders the symbol as `symbol("<char>")`, matching the diagnostic
    /// formatting used by the other token variants.
    fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
        let glyph = match self {
            Symbol::Equal => "=",
            Symbol::LeftParen => "(",
            Symbol::RightParen => ")",
            Symbol::LeftBrace => "{",
            Symbol::RightBrace => "}",
            Symbol::Comma => ",",
            Symbol::Colon => ":",
            Symbol::SemiColon => ";",
        };
        write!(fmt, "symbol(\"{}\")", glyph)
    }
}
| 28.164706 | 95 | 0.498747 |
691911c0225aa22a11ec4cdaaf13a7c2cdf9ae21 | 1,892 | use std::collections::hash_map::Iter;
use fxhash::FxHashMap;
use crate::{expr::Expr, intern::StrRef, lexer::SourceLoc};
/// A table entry: either a not-yet-evaluated expression or a resolved value.
#[derive(Clone, Debug)]
pub enum Symbol {
    Expr(Expr),
    Value(i32),
}
/// Symbol table mapping names to symbols, additionally remembering the
/// first source location at which each name was referenced.
pub struct Symtab {
    // Name -> definition mapping.
    inner: FxHashMap<StrRef, Symbol>,
    // Name -> first reference location recorded via `touch`.
    hits: FxHashMap<StrRef, SourceLoc>,
}
impl Symtab {
    /// Creates an empty symbol table.
    #[inline]
    pub fn new() -> Self {
        Self {
            inner: FxHashMap::default(),
            hits: FxHashMap::default(),
        }
    }

    /// Inserts or replaces the symbol stored under `key`, returning the
    /// previously stored symbol, if any.
    #[inline]
    pub fn insert(&mut self, key: StrRef, value: Symbol) -> Option<Symbol> {
        self.inner.insert(key, value)
    }

    /// Records `loc` as the first reference site of `key`.
    ///
    /// Only the first call per key has an effect; later calls are no-ops.
    #[inline]
    pub fn touch(&mut self, key: StrRef, loc: SourceLoc) {
        // `entry` needs a single hash lookup, unlike the previous
        // `contains_key` + `insert` pair which hashed the key twice.
        self.hits.entry(key).or_insert(loc);
    }

    /// Returns the location at which `key` was first referenced, if any.
    #[inline]
    pub fn first_reference(&self, key: StrRef) -> Option<&SourceLoc> {
        self.hits.get(&key)
    }

    /// Looks up the symbol stored under `key`.
    #[inline]
    pub fn get(&self, key: StrRef) -> Option<&Symbol> {
        self.inner.get(&key)
    }

    /// Iterates over all `(name, first reference location)` pairs.
    #[inline]
    pub fn references(&self) -> SymtabRefIter<'_> {
        SymtabRefIter {
            inner: self.hits.iter(),
        }
    }
}
// Allows `for (name, sym) in &symtab { ... }` over all definitions.
impl<'a> IntoIterator for &'a Symtab {
    type IntoIter = SymtabIter<'a>;
    type Item = (&'a StrRef, &'a Symbol);

    fn into_iter(self) -> Self::IntoIter {
        SymtabIter {
            inner: self.inner.iter(),
        }
    }
}
/// Iterator over `(name, symbol)` pairs; thin wrapper around the map's iterator.
pub struct SymtabIter<'a> {
    inner: Iter<'a, StrRef, Symbol>,
}
impl<'a> Iterator for SymtabIter<'a> {
    type Item = (&'a StrRef, &'a Symbol);

    // Delegates directly to the underlying hash-map iterator.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}
/// Iterator over `(name, first reference location)` pairs, as returned by
/// `Symtab::references`.
pub struct SymtabRefIter<'a> {
    inner: Iter<'a, StrRef, SourceLoc>,
}
impl<'a> Iterator for SymtabRefIter<'a> {
    type Item = (&'a StrRef, &'a SourceLoc);

    // Delegates directly to the underlying hash-map iterator.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}
| 20.344086 | 76 | 0.553911 |
67a5ab891f72ca2b2ba840929bd3fe5806f2be04 | 30,990 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Error handling with the `Result` type
//!
//! `Result<T, E>` is the type used for returning and propagating
//! errors. It is an enum with the variants, `Ok(T)`, representing
//! success and containing a value, and `Err(E)`, representing error
//! and containing an error value.
//!
//! ```
//! enum Result<T, E> {
//! Ok(T),
//! Err(E)
//! }
//! ```
//!
//! Functions return `Result` whenever errors are expected and
//! recoverable. In the `std` crate `Result` is most prominently used
//! for [I/O](../../std/io/index.html).
//!
//! A simple function returning `Result` might be
//! defined and used like so:
//!
//! ```
//! #[derive(Debug)]
//! enum Version { Version1, Version2 }
//!
//! fn parse_version(header: &[u8]) -> Result<Version, &'static str> {
//! match header.get(0) {
//! None => Err("invalid header length"),
//! Some(&1) => Ok(Version::Version1),
//! Some(&2) => Ok(Version::Version2),
//! Some(_) => Err("invalid version")
//! }
//! }
//!
//! let version = parse_version(&[1, 2, 3, 4]);
//! match version {
//! Ok(v) => println!("working with version: {:?}", v),
//! Err(e) => println!("error parsing header: {:?}", e),
//! }
//! ```
//!
//! Pattern matching on `Result`s is clear and straightforward for
//! simple cases, but `Result` comes with some convenience methods
//! that make working with it more succinct.
//!
//! ```
//! let good_result: Result<i32, i32> = Ok(10);
//! let bad_result: Result<i32, i32> = Err(10);
//!
//! // The `is_ok` and `is_err` methods do what they say.
//! assert!(good_result.is_ok() && !good_result.is_err());
//! assert!(bad_result.is_err() && !bad_result.is_ok());
//!
//! // `map` consumes the `Result` and produces another.
//! let good_result: Result<i32, i32> = good_result.map(|i| i + 1);
//! let bad_result: Result<i32, i32> = bad_result.map(|i| i - 1);
//!
//! // Use `and_then` to continue the computation.
//! let good_result: Result<bool, i32> = good_result.and_then(|i| Ok(i == 11));
//!
//! // Use `or_else` to handle the error.
//! let bad_result: Result<i32, i32> = bad_result.or_else(|i| Ok(11));
//!
//! // Consume the result and return the contents with `unwrap`.
//! let final_awesome_result = good_result.unwrap();
//! ```
//!
//! # Results must be used
//!
//! A common problem with using return values to indicate errors is
//! that it is easy to ignore the return value, thus failing to handle
//! the error. Result is annotated with the #[must_use] attribute,
//! which will cause the compiler to issue a warning when a Result
//! value is ignored. This makes `Result` especially useful with
//! functions that may encounter errors but don't otherwise return a
//! useful value.
//!
//! Consider the `write_line` method defined for I/O types
//! by the [`Writer`](../old_io/trait.Writer.html) trait:
//!
//! ```
//! # #![feature(old_io)]
//! use std::old_io::IoError;
//!
//! trait Writer {
//! fn write_line(&mut self, s: &str) -> Result<(), IoError>;
//! }
//! ```
//!
//! *Note: The actual definition of `Writer` uses `IoResult`, which
//! is just a synonym for `Result<T, IoError>`.*
//!
//! This method doesn't produce a value, but the write may
//! fail. It's crucial to handle the error case, and *not* write
//! something like this:
//!
//! ```{.ignore}
//! # #![feature(old_io)]
//! use std::old_io::*;
//! use std::old_path::Path;
//!
//! let mut file = File::open_mode(&Path::new("valuable_data.txt"), Open, Write);
//! // If `write_line` errors, then we'll never know, because the return
//! // value is ignored.
//! file.write_line("important message");
//! drop(file);
//! ```
//!
//! If you *do* write that in Rust, the compiler will give you a
//! warning (by default, controlled by the `unused_must_use` lint).
//!
//! You might instead, if you don't want to handle the error, simply
//! panic, by converting to an `Option` with `ok`, then asserting
//! success with `expect`. This will panic if the write fails, providing
//! a marginally useful message indicating why:
//!
//! ```{.no_run}
//! # #![feature(old_io, old_path)]
//! use std::old_io::*;
//! use std::old_path::Path;
//!
//! let mut file = File::open_mode(&Path::new("valuable_data.txt"), Open, Write);
//! file.write_line("important message").ok().expect("failed to write message");
//! drop(file);
//! ```
//!
//! You might also simply assert success:
//!
//! ```{.no_run}
//! # #![feature(old_io, old_path)]
//! # use std::old_io::*;
//! # use std::old_path::Path;
//!
//! # let mut file = File::open_mode(&Path::new("valuable_data.txt"), Open, Write);
//! assert!(file.write_line("important message").is_ok());
//! # drop(file);
//! ```
//!
//! Or propagate the error up the call stack with `try!`:
//!
//! ```
//! # #![feature(old_io, old_path)]
//! # use std::old_io::*;
//! # use std::old_path::Path;
//! fn write_message() -> Result<(), IoError> {
//! let mut file = File::open_mode(&Path::new("valuable_data.txt"), Open, Write);
//! try!(file.write_line("important message"));
//! drop(file);
//! Ok(())
//! }
//! ```
//!
//! # The `try!` macro
//!
//! When writing code that calls many functions that return the
//! `Result` type, the error handling can be tedious. The `try!`
//! macro hides some of the boilerplate of propagating errors up the
//! call stack.
//!
//! It replaces this:
//!
//! ```
//! # #![feature(old_io, old_path)]
//! use std::old_io::*;
//! use std::old_path::Path;
//!
//! struct Info {
//! name: String,
//! age: i32,
//! rating: i32,
//! }
//!
//! fn write_info(info: &Info) -> Result<(), IoError> {
//! let mut file = File::open_mode(&Path::new("my_best_friends.txt"), Open, Write);
//! // Early return on error
//! if let Err(e) = file.write_line(&format!("name: {}", info.name)) {
//! return Err(e)
//! }
//! if let Err(e) = file.write_line(&format!("age: {}", info.age)) {
//! return Err(e)
//! }
//! file.write_line(&format!("rating: {}", info.rating))
//! }
//! ```
//!
//! With this:
//!
//! ```
//! # #![feature(old_io, old_path)]
//! use std::old_io::*;
//! use std::old_path::Path;
//!
//! struct Info {
//! name: String,
//! age: i32,
//! rating: i32,
//! }
//!
//! fn write_info(info: &Info) -> Result<(), IoError> {
//! let mut file = File::open_mode(&Path::new("my_best_friends.txt"), Open, Write);
//! // Early return on error
//! try!(file.write_line(&format!("name: {}", info.name)));
//! try!(file.write_line(&format!("age: {}", info.age)));
//! try!(file.write_line(&format!("rating: {}", info.rating)));
//! Ok(())
//! }
//! ```
//!
//! *It's much nicer!*
//!
//! Wrapping an expression in `try!` will result in the unwrapped
//! success (`Ok`) value, unless the result is `Err`, in which case
//! `Err` is returned early from the enclosing function. Its simple definition
//! makes it clear:
//!
//! ```
//! macro_rules! try {
//! ($e:expr) => (match $e { Ok(e) => e, Err(e) => return Err(e) })
//! }
//! ```
//!
//! `try!` is imported by the prelude, and is available everywhere.
#![stable(feature = "rust1", since = "1.0.0")]
use self::Result::{Ok, Err};
use clone::Clone;
use fmt;
use iter::{Iterator, DoubleEndedIterator, FromIterator, ExactSizeIterator, IntoIterator};
use ops::{FnMut, FnOnce};
use option::Option::{self, None, Some};
#[allow(deprecated)]
use slice::AsSlice;
use slice;
/// `Result` is a type that represents either success (`Ok`) or failure (`Err`).
///
/// See the [`std::result`](index.html) module documentation for details.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
// `#[must_use]` makes the compiler warn whenever a `Result` value is
// ignored, so errors cannot be silently dropped.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Result<T, E> {
    /// Contains the success value
    #[stable(feature = "rust1", since = "1.0.0")]
    Ok(T),
    /// Contains the error value
    #[stable(feature = "rust1", since = "1.0.0")]
    Err(E)
}
/////////////////////////////////////////////////////////////////////////////
// Type implementation
/////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, E> Result<T, E> {
/////////////////////////////////////////////////////////////////////////
// Querying the contained values
/////////////////////////////////////////////////////////////////////////
/// Returns true if the result is `Ok`
///
/// # Examples
///
/// ```
/// let x: Result<i32, &str> = Ok(-3);
/// assert_eq!(x.is_ok(), true);
///
/// let x: Result<i32, &str> = Err("Some error message");
/// assert_eq!(x.is_ok(), false);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_ok(&self) -> bool {
match *self {
Ok(_) => true,
Err(_) => false
}
}
/// Returns true if the result is `Err`
///
/// # Examples
///
/// ```
/// let x: Result<i32, &str> = Ok(-3);
/// assert_eq!(x.is_err(), false);
///
/// let x: Result<i32, &str> = Err("Some error message");
/// assert_eq!(x.is_err(), true);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_err(&self) -> bool {
!self.is_ok()
}
/////////////////////////////////////////////////////////////////////////
// Adapter for each variant
/////////////////////////////////////////////////////////////////////////
/// Convert from `Result<T, E>` to `Option<T>`
///
/// Converts `self` into an `Option<T>`, consuming `self`,
/// and discarding the error, if any.
///
/// # Examples
///
/// ```
/// let x: Result<u32, &str> = Ok(2);
/// assert_eq!(x.ok(), Some(2));
///
/// let x: Result<u32, &str> = Err("Nothing here");
/// assert_eq!(x.ok(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn ok(self) -> Option<T> {
match self {
Ok(x) => Some(x),
Err(_) => None,
}
}
/// Convert from `Result<T, E>` to `Option<E>`
///
/// Converts `self` into an `Option<E>`, consuming `self`,
/// and discarding the success value, if any.
///
/// # Examples
///
/// ```
/// let x: Result<u32, &str> = Ok(2);
/// assert_eq!(x.err(), None);
///
/// let x: Result<u32, &str> = Err("Nothing here");
/// assert_eq!(x.err(), Some("Nothing here"));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn err(self) -> Option<E> {
match self {
Ok(_) => None,
Err(x) => Some(x),
}
}
/////////////////////////////////////////////////////////////////////////
// Adapter for working with references
/////////////////////////////////////////////////////////////////////////
/// Convert from `Result<T, E>` to `Result<&T, &E>`
///
/// Produces a new `Result`, containing a reference
/// into the original, leaving the original in place.
///
/// ```
/// let x: Result<u32, &str> = Ok(2);
/// assert_eq!(x.as_ref(), Ok(&2));
///
/// let x: Result<u32, &str> = Err("Error");
/// assert_eq!(x.as_ref(), Err(&"Error"));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_ref(&self) -> Result<&T, &E> {
match *self {
Ok(ref x) => Ok(x),
Err(ref x) => Err(x),
}
}
/// Convert from `Result<T, E>` to `Result<&mut T, &mut E>`
///
/// ```
/// fn mutate(r: &mut Result<i32, i32>) {
/// match r.as_mut() {
/// Ok(&mut ref mut v) => *v = 42,
/// Err(&mut ref mut e) => *e = 0,
/// }
/// }
///
/// let mut x: Result<i32, i32> = Ok(2);
/// mutate(&mut x);
/// assert_eq!(x.unwrap(), 42);
///
/// let mut x: Result<i32, i32> = Err(13);
/// mutate(&mut x);
/// assert_eq!(x.unwrap_err(), 0);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_mut(&mut self) -> Result<&mut T, &mut E> {
match *self {
Ok(ref mut x) => Ok(x),
Err(ref mut x) => Err(x),
}
}
/// Convert from `Result<T, E>` to `&[T]` (without copying)
#[inline]
#[unstable(feature = "as_slice", since = "unsure of the utility here")]
pub fn as_slice(&self) -> &[T] {
match *self {
Ok(ref x) => slice::ref_slice(x),
Err(_) => {
// work around lack of implicit coercion from fixed-size array to slice
let emp: &[_] = &[];
emp
}
}
}
/// Convert from `Result<T, E>` to `&mut [T]` (without copying)
///
/// ```
/// # #![feature(core)]
/// let mut x: Result<&str, u32> = Ok("Gold");
/// {
/// let v = x.as_mut_slice();
/// assert!(v == ["Gold"]);
/// v[0] = "Silver";
/// assert!(v == ["Silver"]);
/// }
/// assert_eq!(x, Ok("Silver"));
///
/// let mut x: Result<&str, u32> = Err(45);
/// assert!(x.as_mut_slice().is_empty());
/// ```
#[inline]
#[unstable(feature = "core",
reason = "waiting for mut conventions")]
pub fn as_mut_slice(&mut self) -> &mut [T] {
match *self {
Ok(ref mut x) => slice::mut_ref_slice(x),
Err(_) => {
// work around lack of implicit coercion from fixed-size array to slice
let emp: &mut [_] = &mut [];
emp
}
}
}
/////////////////////////////////////////////////////////////////////////
// Transforming contained values
/////////////////////////////////////////////////////////////////////////
    /// Maps a `Result<T, E>` to `Result<U, E>` by applying a function to a
    /// contained `Ok` value, leaving an `Err` value untouched.
///
/// This function can be used to compose the results of two functions.
///
/// # Examples
///
/// Sum the lines of a buffer by mapping strings to numbers,
/// ignoring I/O and parse errors:
///
/// ```
/// # #![feature(old_io)]
/// use std::old_io::*;
///
/// let mut buffer: &[u8] = b"1\n2\n3\n4\n";
/// let mut buffer = &mut buffer;
///
/// let mut sum = 0;
///
/// while !buffer.is_empty() {
/// let line: IoResult<String> = buffer.read_line();
/// // Convert the string line to a number using `map` and `from_str`
/// let val: IoResult<i32> = line.map(|line| {
/// line.trim_right().parse::<i32>().unwrap_or(0)
/// });
/// // Add the value if there were no errors, otherwise add 0
/// sum += val.unwrap_or(0);
/// }
///
/// assert!(sum == 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn map<U, F: FnOnce(T) -> U>(self, op: F) -> Result<U,E> {
match self {
Ok(t) => Ok(op(t)),
Err(e) => Err(e)
}
}
    /// Maps a `Result<T, E>` to `Result<T, F>` by applying a function to a
    /// contained `Err` value, leaving an `Ok` value untouched.
///
/// This function can be used to pass through a successful result while handling
/// an error.
///
/// # Examples
///
/// ```
/// fn stringify(x: u32) -> String { format!("error code: {}", x) }
///
/// let x: Result<u32, u32> = Ok(2);
/// assert_eq!(x.map_err(stringify), Ok(2));
///
/// let x: Result<u32, u32> = Err(13);
/// assert_eq!(x.map_err(stringify), Err("error code: 13".to_string()));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn map_err<F, O: FnOnce(E) -> F>(self, op: O) -> Result<T,F> {
match self {
Ok(t) => Ok(t),
Err(e) => Err(op(e))
}
}
/////////////////////////////////////////////////////////////////////////
// Iterator constructors
/////////////////////////////////////////////////////////////////////////
/// Returns an iterator over the possibly contained value.
///
/// # Examples
///
/// ```
/// let x: Result<u32, &str> = Ok(7);
/// assert_eq!(x.iter().next(), Some(&7));
///
/// let x: Result<u32, &str> = Err("nothing!");
/// assert_eq!(x.iter().next(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> {
Iter { inner: self.as_ref().ok() }
}
/// Returns a mutable iterator over the possibly contained value.
///
/// # Examples
///
/// ```
/// let mut x: Result<u32, &str> = Ok(7);
/// match x.iter_mut().next() {
/// Some(&mut ref mut x) => *x = 40,
/// None => {},
/// }
/// assert_eq!(x, Ok(40));
///
/// let mut x: Result<u32, &str> = Err("nothing!");
/// assert_eq!(x.iter_mut().next(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut(&mut self) -> IterMut<T> {
IterMut { inner: self.as_mut().ok() }
}
/// Returns a consuming iterator over the possibly contained value.
///
/// # Examples
///
/// ```
/// let x: Result<u32, &str> = Ok(5);
/// let v: Vec<u32> = x.into_iter().collect();
/// assert_eq!(v, [5]);
///
/// let x: Result<u32, &str> = Err("nothing!");
/// let v: Vec<u32> = x.into_iter().collect();
/// assert_eq!(v, []);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_iter(self) -> IntoIter<T> {
IntoIter { inner: self.ok() }
}
////////////////////////////////////////////////////////////////////////
// Boolean operations on the values, eager and lazy
/////////////////////////////////////////////////////////////////////////
/// Returns `res` if the result is `Ok`, otherwise returns the `Err` value of `self`.
///
/// # Examples
///
/// ```
/// let x: Result<u32, &str> = Ok(2);
/// let y: Result<&str, &str> = Err("late error");
/// assert_eq!(x.and(y), Err("late error"));
///
/// let x: Result<u32, &str> = Err("early error");
/// let y: Result<&str, &str> = Ok("foo");
/// assert_eq!(x.and(y), Err("early error"));
///
/// let x: Result<u32, &str> = Err("not a 2");
/// let y: Result<&str, &str> = Err("late error");
/// assert_eq!(x.and(y), Err("not a 2"));
///
/// let x: Result<u32, &str> = Ok(2);
/// let y: Result<&str, &str> = Ok("different result type");
/// assert_eq!(x.and(y), Ok("different result type"));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn and<U>(self, res: Result<U, E>) -> Result<U, E> {
match self {
Ok(_) => res,
Err(e) => Err(e),
}
}
/// Calls `op` if the result is `Ok`, otherwise returns the `Err` value of `self`.
///
/// This function can be used for control flow based on result values.
///
/// # Examples
///
/// ```
/// fn sq(x: u32) -> Result<u32, u32> { Ok(x * x) }
/// fn err(x: u32) -> Result<u32, u32> { Err(x) }
///
/// assert_eq!(Ok(2).and_then(sq).and_then(sq), Ok(16));
/// assert_eq!(Ok(2).and_then(sq).and_then(err), Err(4));
/// assert_eq!(Ok(2).and_then(err).and_then(sq), Err(2));
/// assert_eq!(Err(3).and_then(sq).and_then(sq), Err(3));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn and_then<U, F: FnOnce(T) -> Result<U, E>>(self, op: F) -> Result<U, E> {
match self {
Ok(t) => op(t),
Err(e) => Err(e),
}
}
/// Returns `res` if the result is `Err`, otherwise returns the `Ok` value of `self`.
///
/// # Examples
///
/// ```
/// let x: Result<u32, &str> = Ok(2);
/// let y: Result<u32, &str> = Err("late error");
/// assert_eq!(x.or(y), Ok(2));
///
/// let x: Result<u32, &str> = Err("early error");
/// let y: Result<u32, &str> = Ok(2);
/// assert_eq!(x.or(y), Ok(2));
///
/// let x: Result<u32, &str> = Err("not a 2");
/// let y: Result<u32, &str> = Err("late error");
/// assert_eq!(x.or(y), Err("late error"));
///
/// let x: Result<u32, &str> = Ok(2);
/// let y: Result<u32, &str> = Ok(100);
/// assert_eq!(x.or(y), Ok(2));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or<F>(self, res: Result<T, F>) -> Result<T, F> {
match self {
Ok(v) => Ok(v),
Err(_) => res,
}
}
/// Calls `op` if the result is `Err`, otherwise returns the `Ok` value of `self`.
///
/// This function can be used for control flow based on result values.
///
/// # Examples
///
/// ```
/// fn sq(x: u32) -> Result<u32, u32> { Ok(x * x) }
/// fn err(x: u32) -> Result<u32, u32> { Err(x) }
///
/// assert_eq!(Ok(2).or_else(sq).or_else(sq), Ok(2));
/// assert_eq!(Ok(2).or_else(err).or_else(sq), Ok(2));
/// assert_eq!(Err(3).or_else(sq).or_else(err), Ok(9));
/// assert_eq!(Err(3).or_else(err).or_else(err), Err(3));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or_else<F, O: FnOnce(E) -> Result<T, F>>(self, op: O) -> Result<T, F> {
match self {
Ok(t) => Ok(t),
Err(e) => op(e),
}
}
/// Unwraps a result, yielding the content of an `Ok`.
/// Else it returns `optb`.
///
/// # Examples
///
/// ```
/// let optb = 2;
/// let x: Result<u32, &str> = Ok(9);
/// assert_eq!(x.unwrap_or(optb), 9);
///
/// let x: Result<u32, &str> = Err("error");
/// assert_eq!(x.unwrap_or(optb), optb);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn unwrap_or(self, optb: T) -> T {
match self {
Ok(t) => t,
Err(_) => optb
}
}
/// Unwraps a result, yielding the content of an `Ok`.
/// If the value is an `Err` then it calls `op` with its value.
///
/// # Examples
///
/// ```
/// fn count(x: &str) -> usize { x.len() }
///
/// assert_eq!(Ok(2).unwrap_or_else(count), 2);
/// assert_eq!(Err("foo").unwrap_or_else(count), 3);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn unwrap_or_else<F: FnOnce(E) -> T>(self, op: F) -> T {
match self {
Ok(t) => t,
Err(e) => op(e)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, E: fmt::Debug> Result<T, E> {
    /// Unwraps a result, yielding the content of an `Ok`.
    ///
    /// # Panics
    ///
    /// Panics if the value is an `Err`, with a custom panic message provided
    /// by the `Err`'s value.
    ///
    /// # Examples
    ///
    /// ```
    /// let x: Result<u32, &str> = Ok(2);
    /// assert_eq!(x.unwrap(), 2);
    /// ```
    ///
    /// ```{.should_panic}
    /// let x: Result<u32, &str> = Err("emergency failure");
    /// x.unwrap(); // panics with `emergency failure`
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn unwrap(self) -> T {
        // `E: fmt::Debug` (the impl bound) is what allows `{:?}` in the message.
        match self {
            Err(e) => panic!("called `Result::unwrap()` on an `Err` value: {:?}", e),
            Ok(t) => t,
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug, E> Result<T, E> {
    /// Unwraps a result, yielding the content of an `Err`.
    ///
    /// # Panics
    ///
    /// Panics if the value is an `Ok`, with a custom panic message provided
    /// by the `Ok`'s value.
    ///
    /// # Examples
    ///
    /// ```{.should_panic}
    /// let x: Result<u32, &str> = Ok(2);
    /// x.unwrap_err(); // panics with `2`
    /// ```
    ///
    /// ```
    /// let x: Result<u32, &str> = Err("emergency failure");
    /// assert_eq!(x.unwrap_err(), "emergency failure");
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn unwrap_err(self) -> E {
        // Mirror image of `unwrap`: here the `Ok` side is the panic case.
        match self {
            Err(e) => e,
            Ok(t) => panic!("called `Result::unwrap_err()` on an `Ok` value: {:?}", t),
        }
    }
}
/////////////////////////////////////////////////////////////////////////////
// Trait implementations
/////////////////////////////////////////////////////////////////////////////
#[unstable(feature = "core",
           reason = "waiting on the stability of the trait itself")]
#[deprecated(since = "1.0.0",
             reason = "use inherent method instead")]
#[allow(deprecated)]
// Deprecated trait impl kept only for backwards compatibility; callers are
// directed to the inherent `as_slice` method instead (see attribute above).
impl<T, E> AsSlice<T> for Result<T, E> {
    /// Convert from `Result<T, E>` to `&[T]` (without copying)
    #[inline]
    fn as_slice<'a>(&'a self) -> &'a [T] {
        match *self {
            // `slice::ref_slice` views the single `Ok` value as a one-element slice.
            Ok(ref x) => slice::ref_slice(x),
            Err(_) => {
                // work around lack of implicit coercion from fixed-size array to slice
                let emp: &[_] = &[];
                emp
            }
        }
    }
}
/////////////////////////////////////////////////////////////////////////////
// The Result Iterators
/////////////////////////////////////////////////////////////////////////////
/// An iterator over a reference to the `Ok` variant of a `Result`.
// `inner` holds the not-yet-yielded element, if any; the impls below `take` it.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> { inner: Option<&'a T> }
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;
    #[inline]
    // `take` yields the element once and leaves `None` behind.
    fn next(&mut self) -> Option<&'a T> { self.inner.take() }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exactly one element remains while `inner` is `Some`, zero afterwards.
        match self.inner {
            Some(_) => (1, Some(1)),
            None => (0, Some(0)),
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    // At most one element, so the back of the iterator is the same as the front.
    fn next_back(&mut self) -> Option<&'a T> { self.inner.take() }
}
#[stable(feature = "rust1", since = "1.0.0")]
// The length reported by `size_hint` is always exact (0 or 1).
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
// Written by hand rather than derived so that `Iter` is cloneable regardless
// of whether `T` is `Clone`: only the shared reference inside is copied.
impl<'a, T> Clone for Iter<'a, T> {
    fn clone(&self) -> Iter<'a, T> { Iter { inner: self.inner } }
}
/// An iterator over a mutable reference to the `Ok` variant of a `Result`.
// As with `Iter`, `inner` holds the single pending element until it is taken.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> { inner: Option<&'a mut T> }
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;
    #[inline]
    // Moving the `&mut T` out via `take` preserves unique ownership of it.
    fn next(&mut self) -> Option<&'a mut T> { self.inner.take() }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // One element left while `inner` is populated; none once taken.
        match self.inner {
            Some(_) => (1, Some(1)),
            None => (0, Some(0)),
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
    #[inline]
    // Single-element iterator: consuming from the back equals consuming from the front.
    fn next_back(&mut self) -> Option<&'a mut T> { self.inner.take() }
}
#[stable(feature = "rust1", since = "1.0.0")]
// `size_hint` is always exact here (0 or 1 elements).
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
/// An iterator over the value in a `Ok` variant of a `Result`.
// Owns the `Ok` payload (see `Result::into_iter`, which builds it from `self.ok()`).
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T> { inner: Option<T> }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Iterator for IntoIter<T> {
    type Item = T;
    #[inline]
    // Hands out the owned value exactly once.
    fn next(&mut self) -> Option<T> { self.inner.take() }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Report an exact size: 1 before the value is taken, 0 after.
        match self.inner {
            Some(_) => (1, Some(1)),
            None => (0, Some(0)),
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    // Front and back coincide for a one-element iterator.
    fn next_back(&mut self) -> Option<T> { self.inner.take() }
}
#[stable(feature = "rust1", since = "1.0.0")]
// Exact length (0 or 1) is already guaranteed by `size_hint`.
impl<T> ExactSizeIterator for IntoIter<T> {}
/////////////////////////////////////////////////////////////////////////////
// FromIterator
/////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, E, V: FromIterator<A>> FromIterator<Result<A, E>> for Result<V, E> {
    /// Takes each element in the `Iterator`: if it is an `Err`, no further
    /// elements are taken, and the `Err` is returned. Should no `Err` occur, a
    /// container with the values of each `Result` is returned.
    ///
    /// Here is an example which increments every integer in a vector,
    /// checking for overflow:
    ///
    /// ```
    /// use std::u32;
    ///
    /// let v = vec!(1, 2);
    /// let res: Result<Vec<u32>, &'static str> = v.iter().map(|&x: &u32|
    ///     if x == u32::MAX { Err("Overflow!") }
    ///     else { Ok(x + 1) }
    /// ).collect();
    /// assert!(res == Ok(vec!(2, 3)));
    /// ```
    #[inline]
    fn from_iter<I: IntoIterator<Item=Result<A, E>>>(iter: I) -> Result<V, E> {
        // FIXME(#11084): This could be replaced with Iterator::scan when this
        // performance bug is closed.
        // Wraps the underlying iterator: yields the payload of each `Ok`, and on
        // the first `Err` stashes the error in `err` and stops (returns `None`),
        // which makes the inner `from_iter` below terminate early.
        struct Adapter<Iter, E> {
            iter: Iter,
            err: Option<E>,
        }
        impl<T, E, Iter: Iterator<Item=Result<T, E>>> Iterator for Adapter<Iter, E> {
            type Item = T;
            #[inline]
            fn next(&mut self) -> Option<T> {
                match self.iter.next() {
                    Some(Ok(value)) => Some(value),
                    Some(Err(err)) => {
                        self.err = Some(err);
                        None
                    }
                    None => None,
                }
            }
        }
        let mut adapter = Adapter { iter: iter.into_iter(), err: None };
        // `by_ref` keeps ownership of `adapter` so its `err` field can be
        // inspected after the collection finishes.
        let v: V = FromIterator::from_iter(adapter.by_ref());
        match adapter.err {
            Some(err) => Err(err),
            None => Ok(v),
        }
    }
}
/////////////////////////////////////////////////////////////////////////////
// Free functions
/////////////////////////////////////////////////////////////////////////////
/// Perform a fold operation over the result values from an iterator.
///
/// If an `Err` is encountered, it is immediately returned.
/// Otherwise, the folded value is returned.
#[inline]
#[unstable(feature = "core")]
pub fn fold<T,
            V,
            E,
            F: FnMut(V, T) -> V,
            Iter: Iterator<Item=Result<T, E>>>(
            iterator: Iter,
            mut init: V,
            mut f: F)
            -> Result<V, E> {
    // Thread the accumulator through each `Ok`; bail out on the first `Err`.
    for item in iterator {
        init = match item {
            Ok(value) => f(init, value),
            Err(error) => return Err(error),
        };
    }
    Ok(init)
}
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Paginated list of customers returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CustomerListResult {
    /// The customers on this page. Read-only (never serialized); `default`
    /// lets deserialization succeed when the service omits the field, which
    /// would otherwise be a hard "missing field" error for a `Vec`.
    #[serde(default, skip_serializing)]
    pub value: Vec<Customer>,
    /// URL to fetch the next page of results, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A customer resource: ARM resource fields plus optional customer properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Customer {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CustomerProperties>,
}
/// Properties of a customer: display name, enabled Azure plans, and resellers.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CustomerProperties {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "enabledAzurePlans", default, skip_serializing_if = "Vec::is_empty")]
    pub enabled_azure_plans: Vec<AzurePlan>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub resellers: Vec<Reseller>,
}
/// Request payload for initiating a transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InitiateTransferRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<InitiateTransferProperties>,
}
/// Result of an address validation call: status plus suggested corrections.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateAddressResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<AddressValidationStatus>,
    #[serde(rename = "suggestedAddresses", default, skip_serializing_if = "Vec::is_empty")]
    pub suggested_addresses: Vec<AddressDetails>,
    #[serde(rename = "validationMessage", default, skip_serializing_if = "Option::is_none")]
    pub validation_message: Option<String>,
}
/// Outcome of address validation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AddressValidationStatus {
    Valid,
    Invalid,
}
/// Parameters used to initiate a transfer: recipient email and optional reseller.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InitiateTransferProperties {
    #[serde(rename = "recipientEmailId", default, skip_serializing_if = "Option::is_none")]
    pub recipient_email_id: Option<String>,
    #[serde(rename = "resellerId", default, skip_serializing_if = "Option::is_none")]
    pub reseller_id: Option<String>,
}
/// Request payload for accepting a transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AcceptTransferRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AcceptTransferProperties>,
}
/// Details of the products to accept as part of a transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AcceptTransferProperties {
    #[serde(rename = "productDetails", default, skip_serializing_if = "Vec::is_empty")]
    pub product_details: Vec<ProductDetails>,
}
/// Identifies a single product (by type and id) within a transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProductDetails {
    #[serde(rename = "productType", default, skip_serializing_if = "Option::is_none")]
    pub product_type: Option<ProductType>,
    #[serde(rename = "productId", default, skip_serializing_if = "Option::is_none")]
    pub product_id: Option<String>,
}
/// Kinds of products that can take part in a transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProductType {
    AzureSubscription,
    AzureReservation,
}
/// Details of a transfer as seen by its initiator.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransferDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TransferProperties>,
}
/// Details of a transfer as seen by its recipient.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecipientTransferDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<RecipientTransferProperties>,
}
/// Per-product transfer status, including any error details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DetailedTransferStatus {
    #[serde(rename = "productType", default, skip_serializing_if = "Option::is_none")]
    pub product_type: Option<ProductType>,
    #[serde(rename = "productId", skip_serializing)]
    pub product_id: Option<String>,
    #[serde(rename = "transferStatus", default, skip_serializing_if = "Option::is_none")]
    pub transfer_status: Option<ProductTransferStatus>,
    #[serde(rename = "errorDetails", default, skip_serializing_if = "Option::is_none")]
    pub error_details: Option<Error>,
}
/// Error code/message pair reported by the service (read-only).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    #[serde(rename = "errorCode", skip_serializing)]
    pub error_code: Option<String>,
    #[serde(rename = "errorMessage", skip_serializing)]
    pub error_message: Option<String>,
}
/// Product types eligible for transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EligibleProductType {
    DevTestAzureSubscription,
    StandardAzureSubscription,
    AzureReservation,
}
/// Properties of an initiated transfer. All fields are populated by the
/// service (read-only, never serialized by the client).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransferProperties {
    #[serde(rename = "creationTime", skip_serializing)]
    pub creation_time: Option<String>,
    #[serde(rename = "expirationTime", skip_serializing)]
    pub expiration_time: Option<String>,
    #[serde(rename = "invoiceSectionId", skip_serializing)]
    pub invoice_section_id: Option<String>,
    #[serde(rename = "billingAccountId", skip_serializing)]
    pub billing_account_id: Option<String>,
    #[serde(rename = "resellerId", skip_serializing)]
    pub reseller_id: Option<String>,
    #[serde(rename = "resellerName", skip_serializing)]
    pub reseller_name: Option<String>,
    #[serde(rename = "initiatorCustomerType", skip_serializing)]
    pub initiator_customer_type: Option<String>,
    #[serde(rename = "billingProfileId", skip_serializing)]
    pub billing_profile_id: Option<String>,
    #[serde(rename = "transferStatus", default, skip_serializing_if = "Option::is_none")]
    pub transfer_status: Option<TransferStatus>,
    #[serde(rename = "recipientEmailId", skip_serializing)]
    pub recipient_email_id: Option<String>,
    #[serde(rename = "initiatorEmailId", skip_serializing)]
    pub initiator_email_id: Option<String>,
    #[serde(rename = "canceledBy", skip_serializing)]
    pub canceled_by: Option<String>,
    #[serde(rename = "lastModifiedTime", skip_serializing)]
    pub last_modified_time: Option<String>,
    /// Per-product status. `default` added: unlike `Option` fields (which
    /// serde fills with `None` when absent), a missing `Vec` field is a
    /// deserialization error without it.
    #[serde(rename = "detailedTransferStatus", default, skip_serializing)]
    pub detailed_transfer_status: Vec<DetailedTransferStatus>,
}
/// Properties of a received transfer. All fields are populated by the
/// service (read-only, never serialized by the client).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecipientTransferProperties {
    #[serde(rename = "creationTime", skip_serializing)]
    pub creation_time: Option<String>,
    #[serde(rename = "expirationTime", skip_serializing)]
    pub expiration_time: Option<String>,
    /// Product types that may be transferred. `default` added: a missing
    /// `Vec` field fails to deserialize without it (read-only responses may
    /// legitimately omit it).
    #[serde(rename = "allowedProductType", default, skip_serializing)]
    pub allowed_product_type: Vec<EligibleProductType>,
    #[serde(rename = "transferStatus", default, skip_serializing_if = "Option::is_none")]
    pub transfer_status: Option<TransferStatus>,
    #[serde(rename = "recipientEmailId", skip_serializing)]
    pub recipient_email_id: Option<String>,
    #[serde(rename = "initiatorEmailId", skip_serializing)]
    pub initiator_email_id: Option<String>,
    #[serde(rename = "resellerId", skip_serializing)]
    pub reseller_id: Option<String>,
    #[serde(rename = "resellerName", skip_serializing)]
    pub reseller_name: Option<String>,
    #[serde(rename = "initiatorCustomerType", skip_serializing)]
    pub initiator_customer_type: Option<String>,
    #[serde(rename = "canceledBy", skip_serializing)]
    pub canceled_by: Option<String>,
    #[serde(rename = "lastModifiedTime", skip_serializing)]
    pub last_modified_time: Option<String>,
    /// Per-product status detail; `default` added for the same reason as above.
    #[serde(rename = "detailedTransferStatus", default, skip_serializing)]
    pub detailed_transfer_status: Vec<DetailedTransferStatus>,
}
/// Overall lifecycle status of a transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum TransferStatus {
    Pending,
    InProgress,
    Completed,
    CompletedWithErrors,
    Failed,
    Canceled,
    Declined,
}
/// Status of a single product within a transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProductTransferStatus {
    NotStarted,
    InProgress,
    Completed,
    Failed,
}
/// Paginated list of received transfers.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecipientTransferDetailsListResult {
    /// Items on this page; `default` added so a missing read-only `Vec`
    /// field deserializes as empty instead of erroring.
    #[serde(default, skip_serializing)]
    pub value: Vec<RecipientTransferDetails>,
    /// URL of the next page, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// Paginated list of initiated transfers.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransferDetailsListResult {
    /// Items on this page; `default` added for the same reason as above.
    #[serde(default, skip_serializing)]
    pub value: Vec<TransferDetails>,
    /// URL of the next page, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// Destination (invoice section / billing profile) for a product transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransferProductRequestProperties {
    #[serde(rename = "destinationInvoiceSectionId", default, skip_serializing_if = "Option::is_none")]
    pub destination_invoice_section_id: Option<String>,
    #[serde(rename = "destinationBillingProfileId", default, skip_serializing_if = "Option::is_none")]
    pub destination_billing_profile_id: Option<String>,
}
/// Result of transferring a billing subscription.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransferBillingSubscriptionResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TransferBillingSubscriptionResultProperties>,
}
/// Properties of a billing-subscription transfer result.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransferBillingSubscriptionResultProperties {
    #[serde(rename = "billingSubscriptionName", default, skip_serializing_if = "Option::is_none")]
    pub billing_subscription_name: Option<String>,
}
/// Request payload for transferring a billing subscription.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransferBillingSubscriptionRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TransferBillingSubscriptionRequestProperties>,
}
/// Destination for a billing-subscription transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransferBillingSubscriptionRequestProperties {
    #[serde(rename = "destinationInvoiceSectionId", default, skip_serializing_if = "Option::is_none")]
    pub destination_invoice_section_id: Option<String>,
    #[serde(rename = "destinationBillingProfileId", default, skip_serializing_if = "Option::is_none")]
    pub destination_billing_profile_id: Option<String>,
}
/// Whether a payment method can be detached, with blocking reasons if not.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DetachPaymentMethodEligibilityResult {
    #[serde(rename = "isEligible", skip_serializing)]
    pub is_eligible: Option<bool>,
    /// Reasons blocking the detach. `default` added: without it a missing
    /// `Vec` field (common for read-only fields) fails to deserialize.
    #[serde(rename = "errorDetails", default, skip_serializing)]
    pub error_details: Vec<DetachPaymentMethodErrorDetails>,
}
/// A single reason blocking a payment-method detach.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DetachPaymentMethodErrorDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<DetachPaymentMethodEligibilityErrorCode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Error codes for payment-method detach eligibility.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DetachPaymentMethodEligibilityErrorCode {
    AzureSubscriptions,
    RecurringCharges,
    ReservedInstances,
    OutstandingCharges,
    PendingCharges,
}
/// Whether a subscription can be transferred, with error details if not.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateSubscriptionTransferEligibilityResult {
    #[serde(rename = "isTransferEligible", skip_serializing)]
    pub is_transfer_eligible: Option<bool>,
    #[serde(rename = "errorDetails", default, skip_serializing_if = "Option::is_none")]
    pub error_details: Option<ValidateSubscriptionTransferEligibilityError>,
}
/// Error detail for a failed subscription-transfer validation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateSubscriptionTransferEligibilityError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<SubscriptionTransferValidationErrorCode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<String>,
}
/// Error codes for subscription-transfer validation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SubscriptionTransferValidationErrorCode {
    BillingAccountInactive,
    CrossBillingAccountNotAllowed,
    DestinationBillingProfileInactive,
    DestinationBillingProfileNotFound,
    DestinationBillingProfilePastDue,
    DestinationInvoiceSectionInactive,
    DestinationInvoiceSectionNotFound,
    InsufficientPermissionOnDestination,
    InsufficientPermissionOnSource,
    InvalidDestination,
    InvalidSource,
    MarketplaceNotEnabledOnDestination,
    NotAvailableForDestinationMarket,
    ProductInactive,
    ProductNotFound,
    ProductTypeNotSupported,
    SourceBillingProfilePastDue,
    SourceInvoiceSectionInactive,
    SubscriptionNotActive,
    SubscriptionTypeNotSupported,
}
/// Request payload for updating auto-renew.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateAutoRenewOperation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<UpdateAutoRenewOperationProperties>,
}
/// Properties for the auto-renew update operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateAutoRenewOperationProperties {
    #[serde(rename = "endDate", default, skip_serializing_if = "Option::is_none")]
    pub end_date: Option<String>,
}
/// Paginated list of billing accounts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingAccountListResult {
    /// Items on this page; `default` added so a missing read-only `Vec`
    /// field deserializes as empty instead of erroring.
    #[serde(default, skip_serializing)]
    pub value: Vec<BillingAccount>,
    /// URL of the next page, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A billing account resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingAccount {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BillingAccountProperties>,
}
/// Properties of a billing account. Fields marked `skip_serializing` are
/// read-only and only ever populated by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingAccountProperties {
    #[serde(rename = "displayName", skip_serializing)]
    pub display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub address: Option<AddressDetails>,
    #[serde(rename = "agreementType", skip_serializing)]
    pub agreement_type: Option<billing_account_properties::AgreementType>,
    #[serde(rename = "customerType", skip_serializing)]
    pub customer_type: Option<billing_account_properties::CustomerType>,
    #[serde(rename = "accountType", skip_serializing)]
    pub account_type: Option<billing_account_properties::AccountType>,
    #[serde(rename = "billingProfiles", default, skip_serializing_if = "Vec::is_empty")]
    pub billing_profiles: Vec<BillingProfile>,
    #[serde(rename = "enrollmentDetails", default, skip_serializing_if = "Option::is_none")]
    pub enrollment_details: Option<Enrollment>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub departments: Vec<Department>,
    #[serde(rename = "enrollmentAccounts", default, skip_serializing_if = "Vec::is_empty")]
    pub enrollment_accounts: Vec<EnrollmentAccount>,
    #[serde(rename = "organizationId", skip_serializing)]
    pub organization_id: Option<String>,
}
// Enums scoped to `BillingAccountProperties` (generated naming convention).
pub mod billing_account_properties {
    use super::*;
    /// Agreement governing the billing account.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AgreementType {
        MicrosoftCustomerAgreement,
        EnterpriseAgreement,
        MicrosoftOnlineServicesProgram,
        MicrosoftPartnerAgreement,
    }
    /// Kind of customer owning the account.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CustomerType {
        Enterprise,
        Individual,
        Partner,
    }
    /// Kind of account.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AccountType {
        Enterprise,
        Individual,
        Partner,
    }
}
/// Request payload for updating a billing account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingAccountUpdateRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BillingAccountProperties>,
}
/// A billing property resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingProperty {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BillingPropertyProperties>,
}
/// Read-only billing context for the current scope (all fields are
/// populated by the service, never serialized by the client).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingPropertyProperties {
    #[serde(rename = "billingTenantId", skip_serializing)]
    pub billing_tenant_id: Option<String>,
    #[serde(rename = "billingAccountId", skip_serializing)]
    pub billing_account_id: Option<String>,
    #[serde(rename = "billingAccountDisplayName", skip_serializing)]
    pub billing_account_display_name: Option<String>,
    #[serde(rename = "billingProfileId", skip_serializing)]
    pub billing_profile_id: Option<String>,
    #[serde(rename = "billingProfileDisplayName", skip_serializing)]
    pub billing_profile_display_name: Option<String>,
    #[serde(rename = "billingProfileStatus", skip_serializing)]
    pub billing_profile_status: Option<billing_property_properties::BillingProfileStatus>,
    #[serde(rename = "billingProfileStatusReasonCode", skip_serializing)]
    pub billing_profile_status_reason_code: Option<billing_property_properties::BillingProfileStatusReasonCode>,
    #[serde(rename = "billingProfileSpendingLimit", skip_serializing)]
    pub billing_profile_spending_limit: Option<billing_property_properties::BillingProfileSpendingLimit>,
    #[serde(rename = "costCenter", skip_serializing)]
    pub cost_center: Option<String>,
    #[serde(rename = "invoiceSectionId", skip_serializing)]
    pub invoice_section_id: Option<String>,
    #[serde(rename = "invoiceSectionDisplayName", skip_serializing)]
    pub invoice_section_display_name: Option<String>,
    #[serde(rename = "productId", skip_serializing)]
    pub product_id: Option<String>,
    #[serde(rename = "productName", skip_serializing)]
    pub product_name: Option<String>,
    #[serde(rename = "skuId", skip_serializing)]
    pub sku_id: Option<String>,
    #[serde(rename = "skuDescription", skip_serializing)]
    pub sku_description: Option<String>,
}
// Enums scoped to `BillingPropertyProperties` (generated naming convention).
pub mod billing_property_properties {
    use super::*;
    /// Status of the billing profile.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingProfileStatus {
        Active,
        Disabled,
        Warned,
    }
    /// Reason the billing profile is in its current status.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingProfileStatusReasonCode {
        PastDue,
        SpendingLimitReached,
        SpendingLimitExpired,
    }
    /// Whether a spending limit is in effect.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingProfileSpendingLimit {
        Off,
        On,
    }
}
/// Enterprise Agreement enrollment details. Fields marked `skip_serializing`
/// are read-only and populated by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Enrollment {
    #[serde(rename = "startDate", default, skip_serializing_if = "Option::is_none")]
    pub start_date: Option<String>,
    #[serde(rename = "endDate", default, skip_serializing_if = "Option::is_none")]
    pub end_date: Option<String>,
    #[serde(skip_serializing)]
    pub currency: Option<String>,
    #[serde(skip_serializing)]
    pub channel: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub policies: Option<EnrollmentPolicies>,
    #[serde(skip_serializing)]
    pub language: Option<String>,
    #[serde(rename = "countryCode", skip_serializing)]
    pub country_code: Option<String>,
    #[serde(skip_serializing)]
    pub status: Option<String>,
    #[serde(rename = "billingCycle", skip_serializing)]
    pub billing_cycle: Option<String>,
}
/// Read-only policy flags attached to an enrollment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnrollmentPolicies {
    #[serde(rename = "accountOwnerViewCharges", skip_serializing)]
    pub account_owner_view_charges: Option<bool>,
    #[serde(rename = "departmentAdminViewCharges", skip_serializing)]
    pub department_admin_view_charges: Option<bool>,
    #[serde(rename = "marketplacesEnabled", skip_serializing)]
    pub marketplaces_enabled: Option<bool>,
    #[serde(rename = "reservedInstancesEnabled", skip_serializing)]
    pub reserved_instances_enabled: Option<bool>,
}
/// Paginated list of departments.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DepartmentListResult {
    /// Items on this page; `default` added so a missing read-only `Vec`
    /// field deserializes as empty instead of erroring.
    #[serde(default, skip_serializing)]
    pub value: Vec<Department>,
    /// URL of the next page, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A department resource within an enrollment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Department {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DepartmentProperties>,
}
/// Properties of a department.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DepartmentProperties {
    #[serde(rename = "departmentName", default, skip_serializing_if = "Option::is_none")]
    pub department_name: Option<String>,
    #[serde(rename = "costCenter", default, skip_serializing_if = "Option::is_none")]
    pub cost_center: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(rename = "enrollmentAccounts", default, skip_serializing_if = "Vec::is_empty")]
    pub enrollment_accounts: Vec<EnrollmentAccount>,
}
/// Paginated list of enrollment accounts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnrollmentAccountListResult {
    /// Items on this page; `default` added so a missing read-only `Vec`
    /// field deserializes as empty instead of erroring.
    #[serde(default, skip_serializing)]
    pub value: Vec<EnrollmentAccount>,
    /// URL of the next page, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// An enrollment account resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnrollmentAccount {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<EnrollmentAccountProperties>,
}
/// Properties of an enrollment account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnrollmentAccountProperties {
    #[serde(rename = "accountName", default, skip_serializing_if = "Option::is_none")]
    pub account_name: Option<String>,
    #[serde(rename = "costCenter", default, skip_serializing_if = "Option::is_none")]
    pub cost_center: Option<String>,
    #[serde(rename = "accountOwner", default, skip_serializing_if = "Option::is_none")]
    pub account_owner: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(rename = "startDate", default, skip_serializing_if = "Option::is_none")]
    pub start_date: Option<String>,
    #[serde(rename = "endDate", default, skip_serializing_if = "Option::is_none")]
    pub end_date: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub department: Option<Department>,
}
/// Paginated list of instructions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InstructionListResult {
    /// Items on this page; `default` added so a missing read-only `Vec`
    /// field deserializes as empty instead of erroring.
    #[serde(default, skip_serializing)]
    pub value: Vec<Instruction>,
    /// URL of the next page, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// Paginated list of billing profiles.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingProfileListResult {
    /// Items on this page; `default` added for the same reason as above.
    #[serde(default, skip_serializing)]
    pub value: Vec<BillingProfile>,
    /// URL of the next page, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A billing profile resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingProfile {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BillingProfileProperties>,
}
/// An instruction resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Instruction {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<InstructionProperties>,
}
/// Properties of an instruction. `amount`, `start_date` and `end_date` are
/// required by the wire format (no `default`/`skip` attributes).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InstructionProperties {
    pub amount: f64,
    #[serde(rename = "startDate")]
    pub start_date: String,
    #[serde(rename = "endDate")]
    pub end_date: String,
    #[serde(rename = "creationDate", default, skip_serializing_if = "Option::is_none")]
    pub creation_date: Option<String>,
}
/// Request payload for creating a billing profile.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingProfileCreationRequest {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "poNumber", default, skip_serializing_if = "Option::is_none")]
    pub po_number: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub address: Option<AddressDetails>,
    #[serde(rename = "invoiceEmailOptIn", default, skip_serializing_if = "Option::is_none")]
    pub invoice_email_opt_in: Option<bool>,
    #[serde(rename = "enabledAzurePlans", default, skip_serializing_if = "Vec::is_empty")]
    pub enabled_azure_plans: Vec<AzurePlan>,
}
/// Properties of a billing profile. Fields marked `skip_serializing` are
/// read-only and only ever populated by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingProfileProperties {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "poNumber", default, skip_serializing_if = "Option::is_none")]
    pub po_number: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub address: Option<AddressDetails>,
    #[serde(rename = "billingRelationshipType", skip_serializing)]
    pub billing_relationship_type: Option<billing_profile_properties::BillingRelationshipType>,
    #[serde(rename = "invoiceEmailOptIn", default, skip_serializing_if = "Option::is_none")]
    pub invoice_email_opt_in: Option<bool>,
    #[serde(rename = "invoiceDay", skip_serializing)]
    pub invoice_day: Option<i32>,
    #[serde(skip_serializing)]
    pub currency: Option<String>,
    #[serde(rename = "enabledAzurePlans", default, skip_serializing_if = "Vec::is_empty")]
    pub enabled_azure_plans: Vec<AzurePlan>,
    #[serde(rename = "indirectRelationshipInfo", default, skip_serializing_if = "Option::is_none")]
    pub indirect_relationship_info: Option<IndirectRelationshipInfo>,
    #[serde(rename = "invoiceSections", default, skip_serializing_if = "Vec::is_empty")]
    pub invoice_sections: Vec<InvoiceSection>,
    #[serde(skip_serializing)]
    pub status: Option<billing_profile_properties::Status>,
    #[serde(rename = "statusReasonCode", skip_serializing)]
    pub status_reason_code: Option<billing_profile_properties::StatusReasonCode>,
    #[serde(rename = "spendingLimit", skip_serializing)]
    pub spending_limit: Option<billing_profile_properties::SpendingLimit>,
    /// Target clouds. `default` added: unlike `Option` fields (which serde
    /// fills with `None` when absent), a missing `Vec` field is a
    /// deserialization error without it.
    #[serde(rename = "targetClouds", default, skip_serializing)]
    pub target_clouds: Vec<TargetCloud>,
}
// Enums scoped to `BillingProfileProperties` (generated naming convention).
pub mod billing_profile_properties {
    use super::*;
    /// How the billing profile is related to the account.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingRelationshipType {
        Direct,
        IndirectCustomer,
        IndirectPartner,
        // Serialized as "CSPPartner" to match the wire format.
        #[serde(rename = "CSPPartner")]
        CspPartner,
    }
    /// Status of the billing profile.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Active,
        Disabled,
        Warned,
    }
    /// Reason the billing profile is in its current status.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum StatusReasonCode {
        PastDue,
        SpendingLimitReached,
        SpendingLimitExpired,
    }
    /// Whether a spending limit is in effect.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SpendingLimit {
        Off,
        On,
    }
}
/// Postal address used on billing accounts and profiles; all fields optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AddressDetails {
    #[serde(rename = "firstName", default, skip_serializing_if = "Option::is_none")]
    pub first_name: Option<String>,
    #[serde(rename = "lastName", default, skip_serializing_if = "Option::is_none")]
    pub last_name: Option<String>,
    #[serde(rename = "companyName", default, skip_serializing_if = "Option::is_none")]
    pub company_name: Option<String>,
    #[serde(rename = "addressLine1", default, skip_serializing_if = "Option::is_none")]
    pub address_line1: Option<String>,
    #[serde(rename = "addressLine2", default, skip_serializing_if = "Option::is_none")]
    pub address_line2: Option<String>,
    #[serde(rename = "addressLine3", default, skip_serializing_if = "Option::is_none")]
    pub address_line3: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub city: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub region: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub country: Option<String>,
    #[serde(rename = "postalCode", default, skip_serializing_if = "Option::is_none")]
    pub postal_code: Option<String>,
}
/// References to the partner billing account/profile in an indirect relationship.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IndirectRelationshipInfo {
    #[serde(rename = "billingAccountName", default, skip_serializing_if = "Option::is_none")]
    pub billing_account_name: Option<String>,
    #[serde(rename = "billingProfileName", default, skip_serializing_if = "Option::is_none")]
    pub billing_profile_name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
}
/// Request body for creating an invoice section.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceSectionCreationRequest {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
}
/// Paged list of invoice sections; `next_link` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceSectionListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<InvoiceSection>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// An invoice section resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceSection {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<InvoiceSectionProperties>,
}
/// Properties of an invoice section.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceSectionProperties {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    // Read-only: populated by the service, never serialized.
    #[serde(skip_serializing)]
    pub state: Option<invoice_section_properties::State>,
    #[serde(rename = "targetCloud", default, skip_serializing_if = "Option::is_none")]
    pub target_cloud: Option<TargetCloud>,
}
/// Enums scoped to `InvoiceSectionProperties`.
pub mod invoice_section_properties {
    use super::*;
    /// Lifecycle state of an invoice section.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Active,
        Restricted,
    }
}
/// Paged list of invoice sections the caller may create subscriptions under.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceSectionListWithCreateSubPermissionResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<InvoiceSectionWithCreateSubPermission>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// Invoice section plus its parent billing-profile status, flattened for
/// subscription-creation permission checks. All fields are service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceSectionWithCreateSubPermission {
    #[serde(rename = "invoiceSectionId", skip_serializing)]
    pub invoice_section_id: Option<String>,
    #[serde(rename = "invoiceSectionDisplayName", skip_serializing)]
    pub invoice_section_display_name: Option<String>,
    #[serde(rename = "billingProfileId", skip_serializing)]
    pub billing_profile_id: Option<String>,
    #[serde(rename = "billingProfileDisplayName", skip_serializing)]
    pub billing_profile_display_name: Option<String>,
    #[serde(rename = "billingProfileStatus", skip_serializing)]
    pub billing_profile_status: Option<invoice_section_with_create_sub_permission::BillingProfileStatus>,
    #[serde(rename = "billingProfileStatusReasonCode", skip_serializing)]
    pub billing_profile_status_reason_code: Option<invoice_section_with_create_sub_permission::BillingProfileStatusReasonCode>,
    #[serde(rename = "billingProfileSpendingLimit", skip_serializing)]
    pub billing_profile_spending_limit: Option<invoice_section_with_create_sub_permission::BillingProfileSpendingLimit>,
    #[serde(rename = "enabledAzurePlans", default, skip_serializing_if = "Vec::is_empty")]
    pub enabled_azure_plans: Vec<AzurePlan>,
}
/// Enums scoped to `InvoiceSectionWithCreateSubPermission`; mirror the
/// billing-profile enums of the same names.
pub mod invoice_section_with_create_sub_permission {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingProfileStatus {
        Active,
        Disabled,
        Warned,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingProfileStatusReasonCode {
        PastDue,
        SpendingLimitReached,
        SpendingLimitExpired,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingProfileSpendingLimit {
        Off,
        On,
    }
}
/// An Azure plan (SKU) enabled on a billing profile or invoice section.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzurePlan {
    #[serde(rename = "skuId", default, skip_serializing_if = "Option::is_none")]
    pub sku_id: Option<String>,
    // Read-only: resolved by the service from `sku_id`.
    #[serde(rename = "skuDescription", skip_serializing)]
    pub sku_description: Option<String>,
}
/// Time-limited URL for downloading a generated document (e.g. an invoice PDF).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DownloadUrl {
    #[serde(rename = "expiryTime", skip_serializing)]
    pub expiry_time: Option<String>,
    #[serde(skip_serializing)]
    pub url: Option<String>,
}
/// Details of a service error: machine-readable `code`, human-readable
/// `message`, and optional nested sub-details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetails {
    #[serde(skip_serializing)]
    pub code: Option<String>,
    #[serde(skip_serializing)]
    pub message: Option<String>,
    #[serde(skip_serializing)]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<ErrorSubDetails>,
}
/// Standard error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetails>,
}
/// Free-form list of additional error information (schema not fixed).
pub type ErrorSubDetails = Vec<serde_json::Value>;
/// Reseller details for CSP (partner-sold) subscriptions; service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Reseller {
    #[serde(rename = "resellerId", skip_serializing)]
    pub reseller_id: Option<String>,
    #[serde(skip_serializing)]
    pub description: Option<String>,
}
/// Common ARM resource envelope (`id`/`name`/`type`), flattened into the
/// concrete resource structs in this module. All fields are read-only.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(skip_serializing)]
    pub id: Option<String>,
    #[serde(skip_serializing)]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the trailing underscore + rename.
    #[serde(rename = "type", skip_serializing)]
    pub type_: Option<String>,
}
/// Paged list of invoices; `next_link` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<Invoice>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// An invoice resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Invoice {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<InvoiceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceProperties {
#[serde(rename = "dueDate", skip_serializing)]
pub due_date: Option<String>,
#[serde(rename = "invoiceDate", skip_serializing)]
pub invoice_date: Option<String>,
#[serde(skip_serializing)]
pub status: Option<invoice_properties::Status>,
#[serde(rename = "amountDue", default, skip_serializing_if = "Option::is_none")]
pub amount_due: Option<Amount>,
#[serde(rename = "azurePrepaymentApplied", default, skip_serializing_if = "Option::is_none")]
pub azure_prepayment_applied: Option<Amount>,
#[serde(rename = "billedAmount", default, skip_serializing_if = "Option::is_none")]
pub billed_amount: Option<Amount>,
#[serde(rename = "creditAmount", default, skip_serializing_if = "Option::is_none")]
pub credit_amount: Option<Amount>,
#[serde(rename = "freeAzureCreditApplied", default, skip_serializing_if = "Option::is_none")]
pub free_azure_credit_applied: Option<Amount>,
#[serde(rename = "subTotal", default, skip_serializing_if = "Option::is_none")]
pub sub_total: Option<Amount>,
#[serde(rename = "taxAmount", default, skip_serializing_if = "Option::is_none")]
pub tax_amount: Option<Amount>,
#[serde(rename = "totalAmount", default, skip_serializing_if = "Option::is_none")]
pub total_amount: Option<Amount>,
#[serde(rename = "invoicePeriodStartDate", skip_serializing)]
pub invoice_period_start_date: Option<String>,
#[serde(rename = "invoicePeriodEndDate", skip_serializing)]
pub invoice_period_end_date: Option<String>,
#[serde(rename = "invoiceType", skip_serializing)]
pub invoice_type: Option<invoice_properties::InvoiceType>,
#[serde(rename = "isMonthlyInvoice", skip_serializing)]
pub is_monthly_invoice: Option<bool>,
#[serde(rename = "billingProfileId", skip_serializing)]
pub billing_profile_id: Option<String>,
#[serde(rename = "billingProfileDisplayName", skip_serializing)]
pub billing_profile_display_name: Option<String>,
#[serde(rename = "purchaseOrderNumber", skip_serializing)]
pub purchase_order_number: Option<String>,
#[serde(skip_serializing)]
pub documents: Vec<Document>,
#[serde(skip_serializing)]
pub payments: Vec<PaymentProperties>,
#[serde(rename = "subscriptionId", skip_serializing)]
pub subscription_id: Option<String>,
#[serde(rename = "rebillDetails", default, skip_serializing_if = "Option::is_none")]
pub rebill_details: Option<InvoiceRebillDetails>,
}
pub mod invoice_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Status {
Due,
OverDue,
Paid,
Void,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum InvoiceType {
AzureService,
AzureMarketplace,
AzureSupport,
}
}
/// A payment applied to an invoice.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PaymentProperties {
    #[serde(rename = "paymentType", skip_serializing)]
    pub payment_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub amount: Option<Amount>,
    #[serde(skip_serializing)]
    pub date: Option<String>,
    #[serde(rename = "paymentMethodFamily", default, skip_serializing_if = "Option::is_none")]
    pub payment_method_family: Option<payment_properties::PaymentMethodFamily>,
    #[serde(rename = "paymentMethodType", skip_serializing)]
    pub payment_method_type: Option<String>,
}
/// Enums scoped to `PaymentProperties`.
pub mod payment_properties {
    use super::*;
    /// Broad family of the payment instrument.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PaymentMethodFamily {
        Credits,
        CheckWire,
        CreditCard,
        None,
    }
}
/// Rebill linkage for an invoice: its document role plus pointers to the
/// originally rebilled invoice and the latest invoice in the chain.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InvoiceRebillDetails {
    #[serde(rename = "rebillDocumentType", default, skip_serializing_if = "Option::is_none")]
    pub rebill_document_type: Option<invoice_rebill_details::RebillDocumentType>,
    #[serde(rename = "rebilledInvoiceId", skip_serializing)]
    pub rebilled_invoice_id: Option<String>,
    #[serde(rename = "latestInvoiceId", skip_serializing)]
    pub latest_invoice_id: Option<String>,
}
/// Enums scoped to `InvoiceRebillDetails`.
pub mod invoice_rebill_details {
    use super::*;
    /// Role of this invoice document within a rebill chain.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RebillDocumentType {
        Credit,
        Rebill,
        Original,
    }
}
/// A document attached to an invoice (the invoice itself, a credit note, …).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Document {
    #[serde(skip_serializing)]
    pub kind: Option<document::Kind>,
    #[serde(skip_serializing)]
    pub url: Option<String>,
    // `default` added: a missing `documentNumbers` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(rename = "documentNumbers", default, skip_serializing)]
    pub document_numbers: Vec<String>,
}
/// Enums scoped to `Document`.
pub mod document {
    use super::*;
    /// Type of the attached document.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        Invoice,
        VoidNote,
        TaxReceipt,
        CreditNote,
    }
}
/// Paged list of products; `next_link` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProductsListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<Product>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A product resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Product {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ProductProperties>,
}
/// Properties of a purchased product. Almost all fields are service-populated
/// (`skip_serializing`); `status` is the notable writable field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProductProperties {
    #[serde(rename = "displayName", skip_serializing)]
    pub display_name: Option<String>,
    #[serde(rename = "purchaseDate", skip_serializing)]
    pub purchase_date: Option<String>,
    #[serde(rename = "productTypeId", skip_serializing)]
    pub product_type_id: Option<String>,
    #[serde(rename = "productType", skip_serializing)]
    pub product_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<product_properties::Status>,
    #[serde(rename = "endDate", skip_serializing)]
    pub end_date: Option<String>,
    #[serde(rename = "billingFrequency", default, skip_serializing_if = "Option::is_none")]
    pub billing_frequency: Option<product_properties::BillingFrequency>,
    #[serde(rename = "lastCharge", default, skip_serializing_if = "Option::is_none")]
    pub last_charge: Option<Amount>,
    #[serde(rename = "lastChargeDate", skip_serializing)]
    pub last_charge_date: Option<String>,
    #[serde(skip_serializing)]
    pub quantity: Option<f64>,
    #[serde(rename = "skuId", skip_serializing)]
    pub sku_id: Option<String>,
    #[serde(rename = "skuDescription", skip_serializing)]
    pub sku_description: Option<String>,
    #[serde(rename = "tenantId", skip_serializing)]
    pub tenant_id: Option<String>,
    #[serde(rename = "availabilityId", skip_serializing)]
    pub availability_id: Option<String>,
    #[serde(rename = "parentProductId", skip_serializing)]
    pub parent_product_id: Option<String>,
    #[serde(rename = "invoiceSectionId", skip_serializing)]
    pub invoice_section_id: Option<String>,
    #[serde(rename = "invoiceSectionDisplayName", skip_serializing)]
    pub invoice_section_display_name: Option<String>,
    #[serde(rename = "billingProfileId", skip_serializing)]
    pub billing_profile_id: Option<String>,
    #[serde(rename = "billingProfileDisplayName", skip_serializing)]
    pub billing_profile_display_name: Option<String>,
    #[serde(rename = "customerId", skip_serializing)]
    pub customer_id: Option<String>,
    #[serde(rename = "customerDisplayName", skip_serializing)]
    pub customer_display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reseller: Option<Reseller>,
}
/// Enums scoped to `ProductProperties`.
pub mod product_properties {
    use super::*;
    /// Lifecycle status of the product.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Active,
        Inactive,
        PastDue,
        Expiring,
        Expired,
        Disabled,
        Cancelled,
        AutoRenew,
    }
    /// How often the product is billed.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingFrequency {
        OneTime,
        Monthly,
        UsageBased,
    }
}
/// Result of a product-transfer eligibility check; `error_details` is set
/// when the product is not eligible.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateProductTransferEligibilityResult {
    #[serde(rename = "isTransferEligible", skip_serializing)]
    pub is_transfer_eligible: Option<bool>,
    #[serde(rename = "errorDetails", default, skip_serializing_if = "Option::is_none")]
    pub error_details: Option<ValidateProductTransferEligibilityError>,
}
/// Error payload explaining why a product transfer is not eligible.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateProductTransferEligibilityError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<ProductTransferValidationErrorCode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<String>,
}
/// Machine-readable reasons a product transfer can be rejected.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProductTransferValidationErrorCode {
    InvalidSource,
    ProductNotActive,
    InsufficientPermissionOnSource,
    InsufficientPermissionOnDestination,
    DestinationBillingProfilePastDue,
    ProductTypeNotSupported,
    CrossBillingAccountNotAllowed,
    NotAvailableForDestinationMarket,
    OneTimePurchaseProductTransferNotAllowed,
}
/// Paged list of billing subscriptions; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingSubscriptionsListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<BillingSubscription>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A billing subscription resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingSubscription {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BillingSubscriptionProperties>,
}
/// Properties of a billing subscription; mostly service-populated identifiers
/// and display names plus charge summaries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingSubscriptionProperties {
    #[serde(rename = "displayName", skip_serializing)]
    pub display_name: Option<String>,
    #[serde(rename = "subscriptionId", skip_serializing)]
    pub subscription_id: Option<String>,
    #[serde(rename = "subscriptionBillingStatus", default, skip_serializing_if = "Option::is_none")]
    pub subscription_billing_status: Option<billing_subscription_properties::SubscriptionBillingStatus>,
    #[serde(rename = "lastMonthCharges", default, skip_serializing_if = "Option::is_none")]
    pub last_month_charges: Option<Amount>,
    #[serde(rename = "monthToDateCharges", default, skip_serializing_if = "Option::is_none")]
    pub month_to_date_charges: Option<Amount>,
    #[serde(rename = "billingProfileId", skip_serializing)]
    pub billing_profile_id: Option<String>,
    #[serde(rename = "billingProfileDisplayName", skip_serializing)]
    pub billing_profile_display_name: Option<String>,
    #[serde(rename = "customerId", skip_serializing)]
    pub customer_id: Option<String>,
    #[serde(rename = "customerDisplayName", skip_serializing)]
    pub customer_display_name: Option<String>,
    #[serde(rename = "invoiceSectionId", skip_serializing)]
    pub invoice_section_id: Option<String>,
    #[serde(rename = "invoiceSectionDisplayName", skip_serializing)]
    pub invoice_section_display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reseller: Option<Reseller>,
    #[serde(rename = "skuId", default, skip_serializing_if = "Option::is_none")]
    pub sku_id: Option<String>,
    #[serde(rename = "skuDescription", skip_serializing)]
    pub sku_description: Option<String>,
}
/// Enums scoped to `BillingSubscriptionProperties`.
pub mod billing_subscription_properties {
    use super::*;
    /// Billing status of the subscription.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SubscriptionBillingStatus {
        Active,
        Inactive,
        Abandoned,
        Deleted,
        Warning,
    }
}
/// Enrollment-account context (EA accounts): cost center plus validity window.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnrollmentAccountContext {
    #[serde(rename = "costCenter", default, skip_serializing_if = "Option::is_none")]
    pub cost_center: Option<String>,
    #[serde(rename = "startDate", default, skip_serializing_if = "Option::is_none")]
    pub start_date: Option<String>,
    #[serde(rename = "endDate", default, skip_serializing_if = "Option::is_none")]
    pub end_date: Option<String>,
    #[serde(rename = "enrollmentAccountName", default, skip_serializing_if = "Option::is_none")]
    pub enrollment_account_name: Option<String>,
}
/// Sovereign-cloud target for an invoice section; renames map to the wire
/// spellings ("USGov", "USNat", "USSec").
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum TargetCloud {
    #[serde(rename = "USGov")]
    UsGov,
    #[serde(rename = "USNat")]
    UsNat,
    #[serde(rename = "USSec")]
    UsSec,
}
/// Paged list of transactions; `next_link` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<Transaction>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A billing transaction resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Transaction {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TransactionProperties>,
}
/// Properties of a single billing transaction: what was bought, where it is
/// billed, and the amounts in billing and pricing currencies. All
/// `skip_serializing` fields are service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<transaction_properties::Kind>,
    #[serde(skip_serializing)]
    pub date: Option<String>,
    #[serde(skip_serializing)]
    pub invoice: Option<String>,
    #[serde(rename = "orderId", skip_serializing)]
    pub order_id: Option<String>,
    #[serde(rename = "orderName", skip_serializing)]
    pub order_name: Option<String>,
    #[serde(rename = "productFamily", skip_serializing)]
    pub product_family: Option<String>,
    #[serde(rename = "productTypeId", skip_serializing)]
    pub product_type_id: Option<String>,
    #[serde(rename = "productType", skip_serializing)]
    pub product_type: Option<String>,
    #[serde(rename = "productDescription", skip_serializing)]
    pub product_description: Option<String>,
    #[serde(rename = "transactionType", default, skip_serializing_if = "Option::is_none")]
    pub transaction_type: Option<transaction_properties::TransactionType>,
    #[serde(rename = "transactionAmount", default, skip_serializing_if = "Option::is_none")]
    pub transaction_amount: Option<Amount>,
    #[serde(skip_serializing)]
    pub quantity: Option<i32>,
    #[serde(rename = "invoiceSectionId", skip_serializing)]
    pub invoice_section_id: Option<String>,
    #[serde(rename = "invoiceSectionDisplayName", skip_serializing)]
    pub invoice_section_display_name: Option<String>,
    #[serde(rename = "billingProfileId", skip_serializing)]
    pub billing_profile_id: Option<String>,
    #[serde(rename = "billingProfileDisplayName", skip_serializing)]
    pub billing_profile_display_name: Option<String>,
    #[serde(rename = "customerId", skip_serializing)]
    pub customer_id: Option<String>,
    #[serde(rename = "customerDisplayName", skip_serializing)]
    pub customer_display_name: Option<String>,
    #[serde(rename = "subscriptionId", skip_serializing)]
    pub subscription_id: Option<String>,
    #[serde(rename = "subscriptionName", skip_serializing)]
    pub subscription_name: Option<String>,
    #[serde(rename = "azureCreditApplied", default, skip_serializing_if = "Option::is_none")]
    pub azure_credit_applied: Option<Amount>,
    #[serde(rename = "billingCurrency", skip_serializing)]
    pub billing_currency: Option<String>,
    #[serde(skip_serializing)]
    pub discount: Option<f64>,
    #[serde(rename = "effectivePrice", default, skip_serializing_if = "Option::is_none")]
    pub effective_price: Option<Amount>,
    #[serde(rename = "exchangeRate", skip_serializing)]
    pub exchange_rate: Option<f64>,
    #[serde(rename = "marketPrice", default, skip_serializing_if = "Option::is_none")]
    pub market_price: Option<Amount>,
    #[serde(rename = "pricingCurrency", skip_serializing)]
    pub pricing_currency: Option<String>,
    #[serde(rename = "servicePeriodStartDate", skip_serializing)]
    pub service_period_start_date: Option<String>,
    #[serde(rename = "servicePeriodEndDate", skip_serializing)]
    pub service_period_end_date: Option<String>,
    #[serde(rename = "subTotal", default, skip_serializing_if = "Option::is_none")]
    pub sub_total: Option<Amount>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tax: Option<Amount>,
    #[serde(rename = "unitOfMeasure", skip_serializing)]
    pub unit_of_measure: Option<String>,
    #[serde(skip_serializing)]
    pub units: Option<f64>,
    #[serde(rename = "unitType", skip_serializing)]
    pub unit_type: Option<String>,
}
/// Enums scoped to `TransactionProperties`; renames map to the exact wire
/// spellings (lowercase kinds, "Usage Charge" with a space).
pub mod transaction_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        #[serde(rename = "all")]
        All,
        #[serde(rename = "reservation")]
        Reservation,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum TransactionType {
        Purchase,
        #[serde(rename = "Usage Charge")]
        UsageCharge,
    }
}
/// A billing-profile policy resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Policy {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PolicyProperties>,
}
/// Policy switches controlling what purchases and views are permitted.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PolicyProperties {
    #[serde(rename = "marketplacePurchases", default, skip_serializing_if = "Option::is_none")]
    pub marketplace_purchases: Option<policy_properties::MarketplacePurchases>,
    #[serde(rename = "reservationPurchases", default, skip_serializing_if = "Option::is_none")]
    pub reservation_purchases: Option<policy_properties::ReservationPurchases>,
    #[serde(rename = "viewCharges", default, skip_serializing_if = "Option::is_none")]
    pub view_charges: Option<policy_properties::ViewCharges>,
}
/// Enums scoped to `PolicyProperties`.
pub mod policy_properties {
    use super::*;
    /// Marketplace purchase policy.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum MarketplacePurchases {
        AllAllowed,
        OnlyFreeAllowed,
        NotAllowed,
    }
    /// Reservation purchase policy.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ReservationPurchases {
        Allowed,
        NotAllowed,
    }
    /// Charge-visibility policy.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ViewCharges {
        Allowed,
        NotAllowed,
    }
}
/// A customer-level policy resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CustomerPolicy {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CustomerPolicyProperties>,
}
/// Customer policy settings (currently only charge visibility).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CustomerPolicyProperties {
    #[serde(rename = "viewCharges", default, skip_serializing_if = "Option::is_none")]
    pub view_charges: Option<customer_policy_properties::ViewCharges>,
}
/// Enums scoped to `CustomerPolicyProperties`.
pub mod customer_policy_properties {
    use super::*;
    /// Charge-visibility policy for the customer.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ViewCharges {
        Allowed,
        NotAllowed,
    }
}
/// Available-balance resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailableBalance {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AvailableBalanceProperties>,
}
/// Wrapper holding the available balance amount.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailableBalanceProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub amount: Option<Amount>,
}
/// A monetary amount: ISO currency code plus value.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Amount {
    #[serde(skip_serializing)]
    pub currency: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<f64>,
}
/// Paged list of payment methods; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PaymentMethodsListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<PaymentMethod>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A payment-method resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PaymentMethod {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PaymentMethodProperties>,
}
/// Properties of a payment method; detail fields are service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PaymentMethodProperties {
    #[serde(rename = "paymentMethodType", default, skip_serializing_if = "Option::is_none")]
    pub payment_method_type: Option<payment_method_properties::PaymentMethodType>,
    #[serde(skip_serializing)]
    pub details: Option<String>,
    #[serde(skip_serializing)]
    pub expiration: Option<String>,
    #[serde(skip_serializing)]
    pub currency: Option<String>,
}
/// Enums scoped to `PaymentMethodProperties`.
pub mod payment_method_properties {
    use super::*;
    /// Kind of payment instrument.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PaymentMethodType {
        Credits,
        ChequeWire,
    }
}
/// Request body for toggling a product's auto-renew setting.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateAutoRenewRequest {
    #[serde(rename = "autoRenew", default, skip_serializing_if = "Option::is_none")]
    pub auto_renew: Option<update_auto_renew_request::AutoRenew>,
}
/// Enums scoped to `UpdateAutoRenewRequest`.
pub mod update_auto_renew_request {
    use super::*;
    /// Wire encodes the flag as the strings "true"/"false", hence an enum
    /// with renames rather than a bool.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AutoRenew {
        #[serde(rename = "true")]
        True,
        #[serde(rename = "false")]
        False,
    }
}
/// A REST API operation exposed by the provider (for the Operations list API).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(skip_serializing)]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
/// Types scoped to `Operation`.
pub mod operation {
    use super::*;
    /// Human-readable description of an operation, all service-populated.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(skip_serializing)]
        pub provider: Option<String>,
        #[serde(skip_serializing)]
        pub resource: Option<String>,
        #[serde(skip_serializing)]
        pub operation: Option<String>,
    }
}
/// Paged list of provider operations; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// Request payload for creating a billing role assignment: who (`principal_id`)
/// gets which role (`billing_role_definition_id`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingRoleAssignmentPayload {
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "billingRoleDefinitionId", default, skip_serializing_if = "Option::is_none")]
    pub billing_role_definition_id: Option<String>,
}
/// Paged list of billing role assignments; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingRoleAssignmentListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<BillingRoleAssignment>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A billing role assignment resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingRoleAssignment {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BillingRoleAssignmentProperties>,
}
/// Properties of a billing role assignment. The `created_*` audit fields are
/// service-populated; principal/role fields are writable on create.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingRoleAssignmentProperties {
    #[serde(rename = "createdOn", skip_serializing)]
    pub created_on: Option<String>,
    #[serde(rename = "createdByPrincipalTenantId", skip_serializing)]
    pub created_by_principal_tenant_id: Option<String>,
    #[serde(rename = "createdByPrincipalId", skip_serializing)]
    pub created_by_principal_id: Option<String>,
    #[serde(rename = "createdByUserEmailAddress", skip_serializing)]
    pub created_by_user_email_address: Option<String>,
    #[serde(skip_serializing)]
    pub name: Option<String>,
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "principalTenantId", default, skip_serializing_if = "Option::is_none")]
    pub principal_tenant_id: Option<String>,
    #[serde(rename = "roleDefinitionId", default, skip_serializing_if = "Option::is_none")]
    pub role_definition_id: Option<String>,
    #[serde(skip_serializing)]
    pub scope: Option<String>,
    #[serde(rename = "userEmailAddress", default, skip_serializing_if = "Option::is_none")]
    pub user_email_address: Option<String>,
    #[serde(rename = "userAuthenticationType", default, skip_serializing_if = "Option::is_none")]
    pub user_authentication_type: Option<String>,
}
/// Paged list of billing role definitions; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingRoleDefinitionListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<BillingRoleDefinition>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// A billing role definition resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingRoleDefinition {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BillingRoleDefinitionProperties>,
}
/// Properties of a billing role definition: description, granted permissions,
/// and display name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingRoleDefinitionProperties {
    #[serde(skip_serializing)]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub permissions: Vec<BillingPermissionsProperties>,
    #[serde(rename = "roleName", skip_serializing)]
    pub role_name: Option<String>,
}
/// Paged list of billing permissions; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingPermissionsListResult {
    // `default` added on the Vec fields in both structs below: serde does not
    // treat a missing `Vec` field as empty (unlike `Option`), so
    // deserialization would fail whenever the service omits these arrays.
    #[serde(default, skip_serializing)]
    pub value: Vec<BillingPermissionsProperties>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// Allowed and denied actions granted by a billing role.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingPermissionsProperties {
    #[serde(default, skip_serializing)]
    pub actions: Vec<Action>,
    #[serde(rename = "notActions", default, skip_serializing)]
    pub not_actions: Vec<NotAction>,
}
/// Paged list of agreements; `next_link` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AgreementListResult {
    // `default` added: a missing `value` array would otherwise make
    // deserialization fail (serde requires non-Option fields unless `default`).
    #[serde(default, skip_serializing)]
    pub value: Vec<Agreement>,
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// An agreement resource (ARM envelope flattened in via `resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Agreement {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AgreementProperties>,
}
/// Properties of an agreement: link to the document, category, how it is
/// accepted, validity window, and its participants. Service-populated fields
/// use `skip_serializing`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AgreementProperties {
    #[serde(rename = "agreementLink", skip_serializing)]
    pub agreement_link: Option<String>,
    #[serde(skip_serializing)]
    pub category: Option<agreement_properties::Category>,
    #[serde(rename = "acceptanceMode", skip_serializing)]
    pub acceptance_mode: Option<agreement_properties::AcceptanceMode>,
    #[serde(rename = "effectiveDate", skip_serializing)]
    pub effective_date: Option<String>,
    #[serde(rename = "expirationDate", skip_serializing)]
    pub expiration_date: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub participants: Vec<Participants>,
    #[serde(skip_serializing)]
    pub status: Option<String>,
}
/// Enums scoped to `AgreementProperties`.
pub mod agreement_properties {
    use super::*;
    /// Category of the agreement.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Category {
        MicrosoftCustomerAgreement,
        AffiliatePurchaseTerms,
        Other,
    }
    /// How the agreement is accepted by the participant.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AcceptanceMode {
        ClickToAccept,
        ESignEmbedded,
        ESignOffline,
    }
}
/// List wrapper for transfer-validation results.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateTransferListResponse {
    // NOTE(review): no #[serde(default)] — deserialization fails if the
    // service omits `value`; confirm the API always sends it.
    #[serde(skip_serializing)]
    pub value: Vec<ValidateTransferResponse>,
}
/// Validation outcome for a single product transfer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateTransferResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ValidateTransferResponseProperties>,
}
/// Detailed validation outcome: overall status plus per-check results.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateTransferResponseProperties {
    /// Overall validation status.
    #[serde(skip_serializing)]
    pub status: Option<String>,
    /// Identifier of the product being transferred.
    #[serde(rename = "productId", skip_serializing)]
    pub product_id: Option<String>,
    /// Individual validation check results; omitted when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub results: Vec<ValidationResultProperties>,
}
/// Outcome of one validation check: severity level, machine-readable code,
/// and human-readable message.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidationResultProperties {
    #[serde(skip_serializing)]
    pub level: Option<String>,
    #[serde(skip_serializing)]
    pub code: Option<String>,
    #[serde(skip_serializing)]
    pub message: Option<String>,
}
/// A party to an agreement and their acceptance state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Participants {
    #[serde(skip_serializing)]
    pub status: Option<String>,
    /// Timestamp of the last status change.
    #[serde(rename = "statusDate", skip_serializing)]
    pub status_date: Option<String>,
    #[serde(skip_serializing)]
    pub email: Option<String>,
}
// Empty marker types mirroring the service schema's action entries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Action {}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NotAction {}
| 41.831254 | 127 | 0.722316 |
f9c43060129513c6f2679ae1eae794d304d9d42d | 4,147 | use std::{env, fs, collections::{HashSet}, cmp::{Ordering, max}};
#[allow(unused_imports)]
use itertools::Itertools;
/// One fold instruction from the puzzle input.
#[derive(Debug, Clone)]
struct Fold {
    /// True for a `fold along x=…` (vertical fold line), false for `y=…`.
    fold_along_x: bool,
    /// Coordinate of the fold line on the chosen axis.
    coord: i64,
}
impl From<&str> for Fold {
    /// Parses a puzzle line such as `fold along x=655` or `fold along y=447`.
    fn from(row: &str) -> Self {
        let spec = row.strip_prefix("fold along ").unwrap();
        let (axis, value) = spec.split_once('=').unwrap();
        // Only the two axes exist in this puzzle.
        assert!(axis == "x" || axis == "y");
        Fold {
            fold_along_x: axis == "x",
            coord: value.parse().unwrap(),
        }
    }
}
/// CLI entry point: `<exe> <part (1|2)> <input file>`.
fn main() {
    let args: Vec<String> = env::args().collect();
    // Pop from the back so each `pop()` yields the next positional argument.
    let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();
    reversed_args
        .pop()
        .expect("Expected the executable name to be the first argument, but was missing");
    let part = reversed_args.pop().expect("part number");
    let input_file = reversed_args.pop().expect("input file");
    let content = fs::read_to_string(input_file).unwrap();
    // Input is two blank-line-separated sections: dot coordinates, then folds.
    let (dots, fold_instructions) = content.trim_end().split_once("\n\n").unwrap();
    // Each dot line is "x,y".
    let dot_coords: Vec<(i64, i64)> = dots
        .split('\n')
        .map(|row| {
            let coords = row.split_once(',').unwrap();
            (coords.0.parse().unwrap(), coords.1.parse().unwrap())
        })
        .collect();
    let folds: Vec<Fold> = fold_instructions.split('\n').map(Fold::from).collect();
    match part {
        "1" => {
            let result = solve_part1(&dot_coords, &folds);
            println!("{}", result);
        }
        "2" => {
            let result = solve_part2(&dot_coords, &folds);
            println!("{}", result);
        }
        _ => unreachable!("{}", part),
    }
}
/// Folds the sheet in place: every dot strictly past the fold line is
/// reflected onto its mirror position; dots before the line stay put.
/// A dot exactly on the line would fold onto itself — the puzzle input never
/// produces one, hence the `unreachable!()`.
fn apply_fold(current_dots: &mut HashSet<(i64, i64)>, fold: &Fold) {
    // First collect (old, mirrored) pairs; we cannot mutate the set while
    // borrowing it for iteration.
    let mut reflections: Vec<((i64, i64), (i64, i64))> = Vec::new();
    for &(x, y) in current_dots.iter() {
        // Signed distance past the fold line, and the reflected position
        // (fold.coord - distance, i.e. 2 * fold.coord - coordinate).
        let (distance, mirrored) = if fold.fold_along_x {
            (x - fold.coord, (2 * fold.coord - x, y))
        } else {
            (y - fold.coord, (x, 2 * fold.coord - y))
        };
        match distance.cmp(&0) {
            Ordering::Less => {}
            Ordering::Greater => reflections.push(((x, y), mirrored)),
            Ordering::Equal => unreachable!(),
        }
    }
    for (old_dot, new_dot) in reflections {
        let removed = current_dots.remove(&old_dot);
        assert!(removed);
        // May land on an existing dot; either insert result is fine.
        current_dots.insert(new_dot);
    }
}
/// Part 1: apply only the first fold and count the surviving dots.
fn solve_part1(dots: &[(i64, i64)], folds: &[Fold]) -> usize {
    let mut sheet: HashSet<(i64, i64)> = dots.iter().copied().collect();
    apply_fold(&mut sheet, folds.first().unwrap());
    sheet.len()
}
/// Part 2: apply every fold and render the remaining dots as an ASCII grid
/// ('#' for a dot, '.' for empty), rows separated by newlines.
///
/// Fix: the original wrote a NUL byte (`*buffer.last_mut().unwrap() = 0;`)
/// into the last row's newline slot, so the returned `String` ended with an
/// embedded `'\0'` that leaked into the printed output.
fn solve_part2(dots: &[(i64, i64)], folds: &[Fold]) -> String {
    let mut current_dots: HashSet<(i64, i64)> = dots.iter().copied().collect();
    for fold in folds {
        apply_fold(&mut current_dots, fold);
    }
    // Grid bounds; all coordinates are non-negative after folding.
    let mut max_x = 0i64;
    let mut max_y = 0i64;
    for &(x, y) in current_dots.iter() {
        assert!(x >= 0);
        assert!(y >= 0);
        max_x = max(max_x, x);
        max_y = max(max_y, y);
    }
    let width = (max_x + 1) as usize;
    let height = (max_y + 1) as usize;
    // Build each row as bytes, then join with '\n' — no trailing terminator.
    let mut rows: Vec<Vec<u8>> = vec![vec![b'.'; width]; height];
    for &(x, y) in current_dots.iter() {
        rows[y as usize][x as usize] = b'#';
    }
    rows.iter()
        .map(|row| std::str::from_utf8(row).unwrap())
        .collect::<Vec<_>>()
        .join("\n")
}
| 30.718519 | 103 | 0.548107 |
9bff161f9c76f938d6820acc1eeb8adfe9b4fba7 | 12,329 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use std::borrow::Borrow;
use crate::{
data_cache::TransactionDataCache, native_functions::NativeContextExtensions, runtime::VMRuntime,
};
use move_binary_format::errors::*;
use move_core_types::{
account_address::AccountAddress,
effects::{ChangeSet, Event},
gas_schedule::GasAlgebra,
identifier::IdentStr,
language_storage::{ModuleId, TypeTag},
resolver::MoveResolver,
value::MoveTypeLayout,
};
use move_vm_types::{data_store::DataStore, gas_schedule::GasStatus};
/// A stateful execution context over the Move VM: a borrowed runtime plus the
/// per-transaction data cache and native-function extension state.
pub struct Session<'r, 'l, S> {
    pub(crate) runtime: &'l VMRuntime,
    pub(crate) data_cache: TransactionDataCache<'r, 'l, S>,
    pub(crate) native_extensions: NativeContextExtensions,
}
/// Result of executing a function in the VM.
///
/// `Success` carries the full set of observable effects; `Fail` guarantees
/// that execution had no side effects on global state.
pub enum ExecutionResult {
    /// Execution completed successfully and changed global state
    Success {
        /// Changes to global state that occurred during execution
        change_set: ChangeSet,
        /// Events emitted during execution
        events: Vec<Event>,
        /// Values returned by the function
        return_values: Vec<Vec<u8>>,
        /// Final value of inputs passed in to the entrypoint via a mutable reference
        mutable_ref_values: Vec<Vec<u8>>,
        /// Gas used during execution
        gas_used: u64,
        /// Native extensions end state
        native_extensions: NativeContextExtensions,
    },
    /// Execution failed and had no side effects
    Fail {
        /// The reason execution failed
        error: VMError,
        /// Gas used during execution
        gas_used: u64,
    },
}
impl<'r, 'l, S: MoveResolver> Session<'r, 'l, S> {
    /// Execute a Move function with the given arguments. This is mainly designed for an external
    /// environment to invoke system logic written in Move.
    ///
    /// The caller MUST ensure
    ///   - All types and modules referred to by the type arguments exist.
    ///
    /// The Move VM MUST return an invariant violation if the caller fails to follow any of the
    /// rules above.
    ///
    /// Currently if any other error occurs during execution, the Move VM will simply propagate that
    /// error back to the outer environment without handling/translating it. This behavior may be
    /// revised in the future.
    ///
    /// In case an invariant violation occurs, the whole Session should be considered corrupted and
    /// one shall not proceed with effect generation.
    pub fn execute_function(
        &mut self,
        module: &ModuleId,
        function_name: &IdentStr,
        ty_args: Vec<TypeTag>,
        args: Vec<Vec<u8>>,
        gas_status: &mut GasStatus,
    ) -> VMResult<Vec<Vec<u8>>> {
        self.runtime.execute_function(
            module,
            function_name,
            ty_args,
            args,
            &mut self.data_cache,
            gas_status,
            &mut self.native_extensions,
        )
    }
    /// Execute `module`::`function_name`<`ty_args`>(`args`) and return the effects in
    /// an `ExecutionResult`, including
    /// * the write set and events
    /// * return values of the function
    /// * changes to values passes by mutable reference to the function
    /// Arguments to the function in `args` can be any type--ground types, user-defined struct
    /// types, and references (including mutable references).
    /// A reference argument in `args[i]` with type `&T` or `&mut T` will be deserialized as a `T`.
    /// Pure arguments are deserialized in the obvious way.
    ///
    /// NOTE: The ability to deserialize `args` into arbitrary types is very powerful--e.g., it can
    /// used to manufacture `signer`'s or `Coin`'s from raw bytes. It is the responsibility of the
    /// caller (e.g. adapter) to ensure that this power is used responsibly/securely for its use-case.
    pub fn execute_function_for_effects<V>(
        self,
        module: &ModuleId,
        function_name: &IdentStr,
        ty_args: Vec<TypeTag>,
        args: Vec<V>,
        gas_status: &mut GasStatus,
    ) -> ExecutionResult
    where
        V: Borrow<[u8]>,
    {
        // Deconstruct the session.
        let Session {
            runtime,
            mut data_cache,
            mut native_extensions,
        } = self;
        // Snapshot the budget so gas consumed can be computed afterwards.
        let gas_budget = gas_status.remaining_gas().get();
        let execution_res = runtime.execute_function_for_effects(
            module,
            function_name,
            ty_args,
            args,
            &mut data_cache,
            gas_status,
            &mut native_extensions,
        );
        let gas_used = gas_budget - gas_status.remaining_gas().get();
        // Reconstruct the session for call to finish, but do not provide extensions as we
        // need to put them into the result.
        let session = Session {
            runtime,
            data_cache,
            native_extensions: NativeContextExtensions::default(),
        };
        match execution_res {
            Ok((return_values, mutable_ref_values)) => match session.finish() {
                Ok((change_set, events)) => ExecutionResult::Success {
                    change_set,
                    events,
                    return_values,
                    mutable_ref_values,
                    gas_used,
                    native_extensions,
                },
                Err(error) => ExecutionResult::Fail { error, gas_used },
            },
            Err(error) => ExecutionResult::Fail { error, gas_used },
        }
    }
    /// Execute a Move script function with the given arguments.
    ///
    /// Unlike `execute_function` which is designed for system logic, `execute_script_function` is
    /// mainly designed to call a script function in an existing module. It is similar to
    /// `execute_script` except that execution of the "script" begins with the specified function
    ///
    /// The Move VM MUST return a user error (in other words, an error that's not an invariant
    /// violation) if
    ///   - The function does not exist.
    ///   - The function does not have script visibility.
    ///   - The signature is not valid for a script. Not all script-visible module functions can
    ///     be invoked from this entry point. See `move_bytecode_verifier::script_signature` for the
    ///     rules.
    ///   - Type arguments refer to a non-existent type.
    ///   - Arguments (senders included) fail to deserialize or fail to match the signature of the
    ///     script function.
    ///
    /// If any other error occurs during execution, the Move VM MUST propagate that error back to
    /// the caller.
    /// Besides, no user input should cause the Move VM to return an invariant violation.
    ///
    /// In case an invariant violation occurs, the whole Session should be considered corrupted and
    /// one shall not proceed with effect generation.
    pub fn execute_script_function(
        &mut self,
        module: &ModuleId,
        function_name: &IdentStr,
        ty_args: Vec<TypeTag>,
        args: Vec<Vec<u8>>,
        senders: Vec<AccountAddress>,
        gas_status: &mut GasStatus,
    ) -> VMResult<()> {
        self.runtime.execute_script_function(
            module,
            function_name,
            ty_args,
            args,
            senders,
            &mut self.data_cache,
            gas_status,
            &mut self.native_extensions,
        )
    }
    /// Execute a transaction script.
    ///
    /// The Move VM MUST return a user error (in other words, an error that's not an invariant
    /// violation) if
    ///   - The script fails to deserialize or verify. Not all expressible signatures are valid.
    ///     See `move_bytecode_verifier::script_signature` for the rules.
    ///   - Type arguments refer to a non-existent type.
    ///   - Arguments (senders included) fail to deserialize or fail to match the signature of the
    ///     script function.
    ///
    /// If any other error occurs during execution, the Move VM MUST propagate that error back to
    /// the caller.
    /// Besides, no user input should cause the Move VM to return an invariant violation.
    ///
    /// In case an invariant violation occurs, the whole Session should be considered corrupted and
    /// one shall not proceed with effect generation.
    pub fn execute_script(
        &mut self,
        script: Vec<u8>,
        ty_args: Vec<TypeTag>,
        args: Vec<Vec<u8>>,
        senders: Vec<AccountAddress>,
        gas_status: &mut GasStatus,
    ) -> VMResult<()> {
        self.runtime.execute_script(
            script,
            ty_args,
            args,
            senders,
            &mut self.data_cache,
            gas_status,
            &mut self.native_extensions,
        )
    }
    /// Publish the given module.
    ///
    /// The Move VM MUST return a user error, i.e., an error that's not an invariant violation, if
    ///   - The module fails to deserialize or verify.
    ///   - The sender address does not match that of the module.
    ///   - (Republishing-only) the module to be updated is not backward compatible with the old module.
    ///   - (Republishing-only) the module to be updated introduces cyclic dependencies.
    ///
    /// The Move VM should not be able to produce other user errors.
    /// Besides, no user input should cause the Move VM to return an invariant violation.
    ///
    /// In case an invariant violation occurs, the whole Session should be considered corrupted and
    /// one shall not proceed with effect generation.
    pub fn publish_module(
        &mut self,
        module: Vec<u8>,
        sender: AccountAddress,
        gas_status: &mut GasStatus,
    ) -> VMResult<()> {
        self.publish_module_bundle(vec![module], sender, gas_status)
    }
    /// Publish a series of modules.
    ///
    /// The Move VM MUST return a user error, i.e., an error that's not an invariant violation, if
    /// any module fails to deserialize or verify (see the full list of failing conditions in the
    /// `publish_module` API). The publishing of the module series is an all-or-nothing action:
    /// either all modules are published to the data store or none is.
    ///
    /// Similar to the `publish_module` API, the Move VM should not be able to produce other user
    /// errors. Besides, no user input should cause the Move VM to return an invariant violation.
    ///
    /// In case an invariant violation occurs, the whole Session should be considered corrupted and
    /// one shall not proceed with effect generation.
    pub fn publish_module_bundle(
        &mut self,
        modules: Vec<Vec<u8>>,
        sender: AccountAddress,
        gas_status: &mut GasStatus,
    ) -> VMResult<()> {
        self.runtime
            .publish_module_bundle(modules, sender, &mut self.data_cache, gas_status)
    }
    /// Number of accounts whose data `sender` has mutated so far in this session.
    pub fn num_mutated_accounts(&self, sender: &AccountAddress) -> u64 {
        self.data_cache.num_mutated_accounts(sender)
    }
    /// Finish up the session and produce the side effects.
    ///
    /// This function should always succeed with no user errors returned, barring invariant violations.
    ///
    /// This MUST NOT be called if there is a previous invocation that failed with an invariant violation.
    pub fn finish(self) -> VMResult<(ChangeSet, Vec<Event>)> {
        self.data_cache
            .into_effects()
            .map_err(|e| e.finish(Location::Undefined))
    }
    /// Same like `finish`, but also extracts the native context extensions from the session.
    pub fn finish_with_extensions(
        self,
    ) -> VMResult<(ChangeSet, Vec<Event>, NativeContextExtensions)> {
        let Session {
            data_cache,
            native_extensions,
            ..
        } = self;
        let (change_set, events) = data_cache
            .into_effects()
            .map_err(|e| e.finish(Location::Undefined))?;
        Ok((change_set, events, native_extensions))
    }
    /// Resolves the memory layout of the type named by `type_tag` via the loader.
    pub fn get_type_layout(&self, type_tag: &TypeTag) -> VMResult<MoveTypeLayout> {
        self.runtime
            .loader()
            .get_type_layout(type_tag, &self.data_cache)
    }
    /// Direct, type-erased access to this session's data store.
    pub fn get_data_store(&mut self) -> &mut dyn DataStore {
        &mut self.data_cache
    }
}
| 38.892744 | 106 | 0.625355 |
4b56c0e8d70b0f5526649164c431ea45be98a71c | 6,772 | // fills
use structs::Cells;
use structs::Column;
use structs::MergeCells;
use structs::Stylesheet;
use quick_xml::events::{BytesStart, Event};
use quick_xml::Reader;
use quick_xml::Writer;
use reader::driver::*;
use std::io::Cursor;
use writer::driver::*;
/// Per-column settings of a worksheet (`<cols>`/`<col>` in the sheet XML).
/// Entries are stored unsorted, one per column number.
#[derive(Clone, Default, Debug)]
pub(crate) struct Columns {
    column: Vec<Column>,
}
impl Columns {
    /// Immutable access to the underlying column list.
    pub(crate) fn get_column_collection(&self) -> &Vec<Column> {
        &self.column
    }
    /// Mutable access to the underlying column list.
    pub(crate) fn get_column_collection_mut(&mut self) -> &mut Vec<Column> {
        &mut self.column
    }
    /// Linear search for the column with number `value`.
    pub(crate) fn get_column(&self, value: &u32) -> Option<&Column> {
        for column in &self.column {
            if value == column.get_col_num() {
                return Some(column);
            }
        }
        None
    }
    /// Returns the column with number `value`, creating a default entry first
    /// if none exists yet.
    pub(crate) fn get_column_mut(&mut self, value: &u32) -> &mut Column {
        // Ensure an entry exists before taking the mutable borrow of the list.
        match self.get_column(value) {
            Some(_) => {}
            None => {
                let mut obj = Column::default();
                obj.set_col_num(*value);
                self.set_column(obj);
            }
        }
        for column in self.get_column_collection_mut() {
            if value == column.get_col_num() {
                return column;
            }
        }
        // Unreachable in practice: the matching entry was inserted above.
        panic!("Column not found.");
    }
    /// Appends a column entry (no de-duplication by column number).
    pub(crate) fn set_column(&mut self, value: Column) -> &mut Self {
        self.column.push(value);
        self
    }
    /// Auto-sizes every column from its cell contents, skipping columns that
    /// are covered by a horizontal merge (their width would be ambiguous).
    pub(crate) fn calculation_auto_width(
        &mut self,
        cells: &Cells,
        merge_cells: &MergeCells,
    ) -> &mut Self {
        for column in self.get_column_collection_mut() {
            let has_horizontal = merge_cells.has_horizontal(column.get_col_num());
            if has_horizontal {
                continue;
            }
            column.calculation_auto_width(cells);
        }
        self
    }
    /// Shifts stored column numbers right to account for inserted columns.
    pub(crate) fn adjustment_insert_coordinate(
        &mut self,
        root_col_num: &u32,
        offset_col_num: &u32,
    ) {
        for column_dimension in self.get_column_collection_mut() {
            column_dimension.adjustment_insert_coordinate(root_col_num, offset_col_num);
        }
    }
    /// Drops entries inside the removed column range and shifts the rest left.
    pub(crate) fn adjustment_remove_coordinate(
        &mut self,
        root_col_num: &u32,
        offset_col_num: &u32,
    ) {
        self.get_column_collection_mut().retain(|x| {
            !(x.get_col_num() > root_col_num && x.get_col_num() < &(root_col_num + offset_col_num))
        });
        for column_dimension in self.get_column_collection_mut() {
            column_dimension.adjustment_remove_coordinate(root_col_num, offset_col_num);
        }
    }
    /// Reads a `<cols>` element, expanding each `<col min max>` range into one
    /// `Column` entry per column index in `min..=max`.
    pub(crate) fn set_attributes<R: std::io::BufRead>(
        &mut self,
        reader: &mut Reader<R>,
        _e: &BytesStart,
        stylesheet: &Stylesheet,
    ) {
        let mut buf = Vec::new();
        loop {
            match reader.read_event(&mut buf) {
                Ok(Event::Empty(ref e)) => match e.name() {
                    b"col" => {
                        let mut obj = Column::default();
                        obj.set_attributes(reader, e, stylesheet);
                        // min/max are 1-based, inclusive column bounds.
                        let min = get_attribute(e, b"min").unwrap().parse::<u32>().unwrap();
                        let max = get_attribute(e, b"max").unwrap().parse::<u32>().unwrap();
                        for i in min..=max {
                            obj.set_col_num(i);
                            self.set_column(obj.clone());
                        }
                    }
                    _ => (),
                },
                Ok(Event::End(ref e)) => match e.name() {
                    b"cols" => return,
                    _ => (),
                },
                Ok(Event::Eof) => panic!("Error not find {} end element", "cols"),
                Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
                _ => (),
            }
            buf.clear();
        }
    }
    /// Writes the `<cols>` element, run-length-grouping adjacent columns that
    /// share identical settings and style into a single `<col min max>`.
    pub(crate) fn write_to(
        &self,
        writer: &mut Writer<Cursor<Vec<u8>>>,
        stylesheet: &mut Stylesheet,
    ) {
        if !self.column.is_empty() {
            // cols
            write_start_tag(writer, "cols", vec![], false);
            // col
            let mut column_copy = self.column.clone();
            column_copy.sort_by(|a, b| a.get_col_num().cmp(b.get_col_num()));
            let mut column_iter = column_copy.iter();
            let mut column_raw = column_iter.next();
            let mut obj = column_raw.unwrap();
            let mut min = obj.get_col_num().clone();
            let mut max = min;
            loop {
                column_raw = column_iter.next();
                match column_raw {
                    Some(column) => {
                        // Extend the current run only for the directly adjacent
                        // column with identical content hash and style.
                        if column.get_col_num() == &(max + 1)
                            && column.get_hash_code() == obj.get_hash_code()
                            && column.get_style() == obj.get_style()
                        {
                            max += 1;
                        } else {
                            self.write_to_column(writer, &min, &max, obj, stylesheet);
                            obj = column;
                            min = obj.get_col_num().clone();
                            max = min;
                        }
                    }
                    None => {
                        // Flush the final run.
                        self.write_to_column(writer, &min, &max, obj, stylesheet);
                        break;
                    }
                }
            }
            write_end_tag(writer, "cols");
        }
    }
    /// Emits one `<col>` element covering columns `min..=max`.
    pub(crate) fn write_to_column(
        &self,
        writer: &mut Writer<Cursor<Vec<u8>>>,
        min: &u32,
        max: &u32,
        column: &Column,
        stylesheet: &mut Stylesheet,
    ) {
        // col
        let mut attributes: Vec<(&str, &str)> = Vec::new();
        let min_str = min.to_string();
        let max_str = max.to_string();
        attributes.push(("min", min_str.as_str()));
        attributes.push(("max", max_str.as_str()));
        let width = column.width.get_value_string();
        attributes.push(("width", &width));
        if column.hidden.get_value() == &true {
            attributes.push(("hidden", column.hidden.get_value_string()));
        }
        if column.best_fit.get_value() == &true {
            attributes.push(("bestFit", column.best_fit.get_value_string()));
        }
        attributes.push(("customWidth", "1"));
        // Style is registered with the stylesheet; index 0 (default) is omitted.
        let xf_index_str: String;
        let xf_index = stylesheet.set_style(column.get_style());
        if xf_index > 0 {
            xf_index_str = xf_index.to_string();
            attributes.push(("style", &xf_index_str));
        }
        write_start_tag(writer, "col", attributes, true);
    }
}
| 32.873786 | 99 | 0.488925 |
e6db99da2b1604e3ea6c5e6bd6148466631bcb55 | 511 | use crate::Error;
/// Looks up package `name` in the database and prints its metadata to stdout.
pub fn info(name: &str) -> Result<(), Error> {
    use library::database::core;
    // Open the database, fetch the record, and map each failure to its
    // dedicated error variant.
    let entry = core()
        .map_err(Error::SQLite3)?
        .info(name)
        .map_err(Error::Info)?;
    print!(
        "name: {}\nversion: {}\ndescription: {}\nbuild dependencies: {}\nrun dependencies: {}\n",
        entry.name, entry.version, entry.description, entry.build_depend, entry.run_depend
    );
    Ok(())
}
| 21.291667 | 97 | 0.536204 |
b98a193abe59bf76c0f007b6b7424f0a8c83b715 | 2,642 | use log::info;
use babycat::Waveform;
use babycat::WaveformArgs;
use babycat::DECODING_BACKEND_SYMPHONIA;
use babycat::RESAMPLE_MODE_BABYCAT_LANCZOS;
use babycat::RESAMPLE_MODE_BABYCAT_SINC;
use babycat::RESAMPLE_MODE_LIBSAMPLERATE;
use crate::common::exit_with_msg;
use crate::common::UnwrapOrExit;
/// Decodes `input_filename`, optionally resamples/remixes the waveform per
/// the arguments, and encodes the result as WAV to `output_filename`.
///
/// Fix: invalid `resample_mode`/`decoding_backend` values previously hit
/// `panic!("NO")`; they now terminate via `exit_with_msg` with a readable
/// message, consistent with the `output_format` check.
#[allow(clippy::too_many_arguments)]
pub fn convert(
    input_filename: &str,
    output_filename: &str,
    output_format: &str,
    start_time_milliseconds: u64,
    end_time_milliseconds: u64,
    frame_rate_hz: u32,
    num_channels: u32,
    convert_to_mono: bool,
    zero_pad_ending: bool,
    resample_mode: &str,
    decoding_backend: &str,
) {
    //
    // Input validation.
    if output_format != "wav" {
        exit_with_msg(&format!(
            "Unsupported output file format: {}",
            output_format
        ));
    }
    let resample_mode_int = match resample_mode {
        "libsamplerate" => RESAMPLE_MODE_LIBSAMPLERATE,
        "babycat_lanczos" => RESAMPLE_MODE_BABYCAT_LANCZOS,
        "babycat_sinc" => RESAMPLE_MODE_BABYCAT_SINC,
        other => {
            exit_with_msg(&format!("Unsupported resample mode: {}", other));
            // exit_with_msg terminates the process; this only satisfies the
            // type checker.
            unreachable!()
        }
    };
    let decoding_backend_int = match decoding_backend {
        "symphonia" => DECODING_BACKEND_SYMPHONIA,
        other => {
            exit_with_msg(&format!("Unsupported decoding backend: {}", other));
            unreachable!()
        }
    };
    //
    // Set up decoding.
    let waveform_args = WaveformArgs {
        start_time_milliseconds,
        end_time_milliseconds,
        frame_rate_hz,
        num_channels,
        convert_to_mono,
        zero_pad_ending,
        resample_mode: resample_mode_int,
        decoding_backend: decoding_backend_int,
    };
    //
    // Decode from the filesystem, timing the operation for the log line.
    let decoding_start_time = std::time::Instant::now();
    let waveform = Waveform::from_file(input_filename, waveform_args).unwrap_or_exit();
    let decoding_elapsed = std::time::Instant::now() - decoding_start_time;
    info!(
        "Decoded {} frames of {} channels at {} hz in {} seconds from {}",
        waveform.num_frames(),
        waveform.num_channels(),
        waveform.frame_rate_hz(),
        decoding_elapsed.as_secs_f64(),
        input_filename,
    );
    //
    // Waveform is now in memory. Time to encode.
    let encoding_start_time = std::time::Instant::now();
    waveform.to_wav_file(output_filename).unwrap_or_exit();
    let encoding_elapsed = std::time::Instant::now() - encoding_start_time;
    info!(
        "Encoded as {} and saved in {} seconds to {}",
        output_format,
        encoding_elapsed.as_secs_f64(),
        output_filename,
    );
}
| 29.355556 | 87 | 0.634746 |
d6b4a2d68794b282e6ed8ad44e3540dbfd02c51f | 4,257 | //! Fills out the call summary information for `RFunction`
use crate::analysis::analyzer::{
Action, Analyzer, AnalyzerInfo, AnalyzerKind, AnalyzerResult, Change, ModuleAnalyzer,
};
use crate::analysis::interproc::transfer::InterProcAnalysis;
use crate::frontend::radeco_containers::RadecoModule;
use std::any::Any;
use std::collections::HashSet;
use std::fmt::Debug;
/// Driver that walks the call graph bottom-up and runs the interprocedural
/// analysis `T` exactly once per function.
#[derive(Debug)]
pub struct InterProcAnalyzer<T>
where
    T: InterProcAnalysis,
{
    /// Offsets of functions already processed (memoization for the
    /// post-order traversal; also breaks recursion cycles).
    analyzed: HashSet<u64>,
    /// The underlying propagate/transfer analysis.
    analyzer: T,
}
/// Registry name of this analyzer.
const NAME: &str = "interproc";
/// No prerequisite analyzers.
const REQUIRES: &[AnalyzerKind] = &[];
/// Static descriptor consumed by the analyzer framework.
pub const INFO: AnalyzerInfo = AnalyzerInfo {
    name: NAME,
    kind: AnalyzerKind::InterProc,
    requires: REQUIRES,
    uses_policy: false,
};
impl<T> InterProcAnalyzer<T>
where
    T: InterProcAnalysis + Debug,
{
    /// Creates a driver with an empty memo set and a fresh analysis state.
    pub fn new() -> InterProcAnalyzer<T> {
        InterProcAnalyzer {
            analyzed: HashSet::new(),
            analyzer: T::new(),
        }
    }
    /// Analyzes `func_addr` after all of its callees (post-order walk over
    /// the call graph), then records it as done. Re-entry for an
    /// already-analyzed function is a no-op, which also bounds recursion.
    fn analyze_function(&mut self, rmod: &mut RadecoModule, func_addr: u64) {
        // If the current function has already been analyzed, return.
        if self.analyzed.contains(&func_addr) {
            return;
        }
        // Analyze all children of the present node in call graph.
        // (idiom fix: `unwrap_or_default()` instead of `unwrap_or(Vec::new())`
        // — clippy `or_fun_call`.)
        let callees = rmod
            .function(func_addr)
            .map(|rfn| rmod.callees_of(rfn))
            .unwrap_or_default();
        for (call, _) in callees {
            self.analyze_function(rmod, call);
        }
        // Pull summaries up from the callees, then compute this function's
        // own transfer function. Context translation caller->callee is TODO.
        self.analyzer.propagate(rmod, func_addr);
        self.analyzer.transfer(rmod, func_addr);
        // Insert the current function into the analyzed set.
        self.analyzed.insert(func_addr);
    }
}
impl<T: 'static> Analyzer for InterProcAnalyzer<T>
where
    T: InterProcAnalysis + Debug,
{
    /// Static descriptor for the analyzer registry.
    fn info(&self) -> &'static AnalyzerInfo {
        &INFO
    }
    /// Type-erasure hook used by the framework for downcasting.
    fn as_any(&self) -> &dyn Any {
        self
    }
}
/// Module-level entry point: runs the interprocedural analysis over every
/// function in the module (each call walks callees first).
impl<T: 'static> ModuleAnalyzer for InterProcAnalyzer<T>
where
    T: InterProcAnalysis + Debug,
{
    fn analyze<F: FnMut(Box<dyn Change>) -> Action>(
        &mut self,
        rmod: &mut RadecoModule,
        _policy: Option<F>,
    ) -> Option<Box<dyn AnalyzerResult>> {
        // Clone the function map so `rmod` can be mutated while iterating.
        let fs = rmod.functions.clone();
        for (_, f) in fs {
            self.analyze_function(rmod, f.offset);
        }
        None
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::analysis::analyzer::{all, FuncAnalyzer};
    use crate::analysis::dce::DCE;
    use crate::analysis::interproc::summary;
    use crate::frontend::radeco_containers::ProjectLoader;
    use crate::frontend::radeco_source::FileSource;
    use crate::middle::ir_writer;
    use std::rc::Rc;
    // Smoke test: call-summary propagation followed by DCE, dumping the IL of
    // every function. Ignored by default because it needs the prebuilt sample
    // binary under ./test_files.
    #[test]
    #[ignore]
    fn ipa_t1() {
        // let mut rproj = ProjectLoader::new().path("./ct1_sccp_ex.o").load();
        let fsource = FileSource::open("./test_files/ct1_sccp_ex/ct1_sccp_ex");
        let mut rproj = ProjectLoader::new().source(Rc::new(fsource)).load();
        for mut xy in rproj.iter_mut() {
            let mut rmod = &mut xy.module;
            {
                let mut analyzer: InterProcAnalyzer<summary::CallSummary> =
                    InterProcAnalyzer::new();
                analyzer.analyze(&mut rmod, Some(all));
            }
            for (ref addr, ref mut rfn) in rmod.functions.iter_mut() {
                let mut dce = DCE::new();
                dce.analyze(rfn, Some(all));
                //println!("Binds: {:?}", rfn.bindings.bindings());
                println!("Info for: {:#x}", addr);
                println!("Local Variable info: {:#?}", rfn.locals());
                println!("Arg info: {:#?}", rfn.args());
                //println!("Returns info: {:?}", rfn.returns());
                let mut il = String::new();
                ir_writer::emit_il(&mut il, Some(rfn.name.clone().to_string()), rfn.ssa()).unwrap();
                println!("{}", il);
            }
        }
    }
}
| 29.5625 | 100 | 0.580456 |
331b56a7a0b54ab7c5eb6185285501412b35acb3 | 14,118 | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use configuration::ConfigValue;
pub use rocksdb::PerfLevel;
use rocksdb::{DBCompressionType, DBInfoLogLevel, DBTitanDBBlobRunMode};
use std::str::FromStr;
/// Config-facing RocksDB info-log verbosity; serialized in kebab-case.
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum LogLevel {
    Error,
    Fatal,
    Info,
    Warn,
    Debug,
}
impl From<LogLevel> for DBInfoLogLevel {
    /// Maps the config-facing log level onto RocksDB's internal level.
    ///
    /// Fix: the parameter was misleadingly named `compression_type`
    /// (copy-paste from the `CompressionType` conversion below).
    fn from(log_level: LogLevel) -> DBInfoLogLevel {
        match log_level {
            LogLevel::Error => DBInfoLogLevel::Error,
            LogLevel::Fatal => DBInfoLogLevel::Fatal,
            LogLevel::Info => DBInfoLogLevel::Info,
            LogLevel::Warn => DBInfoLogLevel::Warn,
            LogLevel::Debug => DBInfoLogLevel::Debug,
        }
    }
}
/// Config-facing compression algorithm selector; serialized in kebab-case
/// (see the serde helper modules below for the level-array form).
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum CompressionType {
    No,
    Snappy,
    Zlib,
    Bz2,
    Lz4,
    Lz4hc,
    Zstd,
    ZstdNotFinal,
}
/// Maps the config-facing compression type onto RocksDB's enum.
// NOTE(review): only the config -> rocksdb direction is provided; this enum
// has no counterpart for DBCompressionType::Disable.
impl From<CompressionType> for DBCompressionType {
    fn from(compression_type: CompressionType) -> DBCompressionType {
        match compression_type {
            CompressionType::No => DBCompressionType::No,
            CompressionType::Snappy => DBCompressionType::Snappy,
            CompressionType::Zlib => DBCompressionType::Zlib,
            CompressionType::Bz2 => DBCompressionType::Bz2,
            CompressionType::Lz4 => DBCompressionType::Lz4,
            CompressionType::Lz4hc => DBCompressionType::Lz4hc,
            CompressionType::Zstd => DBCompressionType::Zstd,
            CompressionType::ZstdNotFinal => DBCompressionType::ZstdNotFinal,
        }
    }
}
/// (De)serialization of the per-level compression configuration: exactly
/// seven compression-type names, one per RocksDB level.
pub mod compression_type_level_serde {
    use std::fmt;
    use serde::de::{Error, SeqAccess, Unexpected, Visitor};
    use serde::ser::SerializeSeq;
    use serde::{Deserializer, Serializer};
    use rocksdb::DBCompressionType;
    /// Serializes the seven per-level types as a sequence of kebab-case names.
    pub fn serialize<S>(ts: &[DBCompressionType; 7], serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(ts.len()))?;
        for t in ts {
            let name = match *t {
                DBCompressionType::No => "no",
                DBCompressionType::Snappy => "snappy",
                DBCompressionType::Zlib => "zlib",
                DBCompressionType::Bz2 => "bzip2",
                DBCompressionType::Lz4 => "lz4",
                DBCompressionType::Lz4hc => "lz4hc",
                DBCompressionType::Zstd => "zstd",
                DBCompressionType::ZstdNotFinal => "zstd-not-final",
                DBCompressionType::Disable => "disable",
            };
            s.serialize_element(name)?;
        }
        s.end()
    }
    /// Deserializes exactly seven names (case-insensitive, trimmed); fewer
    /// or more than seven entries is an error.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<[DBCompressionType; 7], D::Error>
    where
        D: Deserializer<'de>,
    {
        struct SeqVisitor;
        impl<'de> Visitor<'de> for SeqVisitor {
            type Value = [DBCompressionType; 7];
            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(formatter, "a compression type vector")
            }
            fn visit_seq<S>(self, mut seq: S) -> Result<[DBCompressionType; 7], S::Error>
            where
                S: SeqAccess<'de>,
            {
                let mut seqs = [DBCompressionType::No; 7];
                let mut i = 0;
                while let Some(value) = seq.next_element::<String>()? {
                    // Reject an eighth element before parsing it.
                    if i == 7 {
                        return Err(S::Error::invalid_value(
                            Unexpected::Str(&value),
                            &"only 7 compression types",
                        ));
                    }
                    seqs[i] = match &*value.trim().to_lowercase() {
                        "no" => DBCompressionType::No,
                        "snappy" => DBCompressionType::Snappy,
                        "zlib" => DBCompressionType::Zlib,
                        "bzip2" => DBCompressionType::Bz2,
                        "lz4" => DBCompressionType::Lz4,
                        "lz4hc" => DBCompressionType::Lz4hc,
                        "zstd" => DBCompressionType::Zstd,
                        "zstd-not-final" => DBCompressionType::ZstdNotFinal,
                        "disable" => DBCompressionType::Disable,
                        _ => {
                            return Err(S::Error::invalid_value(
                                Unexpected::Str(&value),
                                &"invalid compression type",
                            ));
                        }
                    };
                    i += 1;
                }
                if i < 7 {
                    return Err(S::Error::invalid_length(i, &"7 compression types"));
                }
                Ok(seqs)
            }
        }
        deserializer.deserialize_seq(SeqVisitor)
    }
}
/// (De)serialization of a single compression type as its kebab-case name.
///
/// Fixes: the deserializer reported errors via
/// `Unexpected::Other(&"invalid compression type".to_string())`, i.e. the
/// *expectation* text in the *unexpected-value* slot (plus a needless
/// allocation); it now reports the offending input with `Unexpected::Str`,
/// matching `compression_type_level_serde` above. Also renames the typo'd
/// local visitor `StrVistor` -> `StrVisitor`.
pub mod compression_type_serde {
    use std::fmt;
    use serde::de::{Error, Unexpected, Visitor};
    use serde::{Deserializer, Serializer};
    use rocksdb::DBCompressionType;
    /// Serializes `t` as its kebab-case name.
    pub fn serialize<S>(t: &DBCompressionType, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let name = match *t {
            DBCompressionType::No => "no",
            DBCompressionType::Snappy => "snappy",
            DBCompressionType::Zlib => "zlib",
            DBCompressionType::Bz2 => "bzip2",
            DBCompressionType::Lz4 => "lz4",
            DBCompressionType::Lz4hc => "lz4hc",
            DBCompressionType::Zstd => "zstd",
            DBCompressionType::ZstdNotFinal => "zstd-not-final",
            DBCompressionType::Disable => "disable",
        };
        serializer.serialize_str(name)
    }
    /// Deserializes a compression type name (case-insensitive, trimmed).
    pub fn deserialize<'de, D>(deserializer: D) -> Result<DBCompressionType, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct StrVisitor;
        impl<'de> Visitor<'de> for StrVisitor {
            type Value = DBCompressionType;
            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(formatter, "a compression type")
            }
            fn visit_str<E>(self, value: &str) -> Result<DBCompressionType, E>
            where
                E: Error,
            {
                let compression_type = match &*value.trim().to_lowercase() {
                    "no" => DBCompressionType::No,
                    "snappy" => DBCompressionType::Snappy,
                    "zlib" => DBCompressionType::Zlib,
                    "bzip2" => DBCompressionType::Bz2,
                    "lz4" => DBCompressionType::Lz4,
                    "lz4hc" => DBCompressionType::Lz4hc,
                    "zstd" => DBCompressionType::Zstd,
                    "zstd-not-final" => DBCompressionType::ZstdNotFinal,
                    "disable" => DBCompressionType::Disable,
                    _ => {
                        // Report the offending input itself, consistent with
                        // compression_type_level_serde.
                        return Err(E::invalid_value(
                            Unexpected::Str(value),
                            &"invalid compression type",
                        ));
                    }
                };
                Ok(compression_type)
            }
        }
        deserializer.deserialize_str(StrVisitor)
    }
}
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum BlobRunMode {
Normal,
ReadOnly,
Fallback,
}
impl From<BlobRunMode> for ConfigValue {
fn from(mode: BlobRunMode) -> ConfigValue {
ConfigValue::BlobRunMode(format!("k{:?}", mode))
}
}
impl From<ConfigValue> for BlobRunMode {
    /// Converts a `ConfigValue::BlobRunMode` payload back into a `BlobRunMode`.
    ///
    /// Implemented as `From` rather than a hand-written `Into`: the matching
    /// `Into` impl is provided automatically by the standard blanket
    /// implementation, so existing `.into()` call sites keep compiling.
    ///
    /// # Panics
    /// Panics if the value is not a `ConfigValue::BlobRunMode`, or if its
    /// payload is not one of "kNormal", "kReadOnly" or "kFallback".
    fn from(v: ConfigValue) -> BlobRunMode {
        if let ConfigValue::BlobRunMode(s) = v {
            match s.as_str() {
                "kNormal" => BlobRunMode::Normal,
                "kReadOnly" => BlobRunMode::ReadOnly,
                "kFallback" => BlobRunMode::Fallback,
                m => panic!("expect: kNormal, kReadOnly or kFallback, got: {:?}", m),
            }
        } else {
            panic!("expect: ConfigValue::BlobRunMode, got: {:?}", v)
        }
    }
}
impl FromStr for BlobRunMode {
    type Err = String;
    /// Parses the kebab-case names used in configuration files; any other
    /// input yields a descriptive error string.
    fn from_str(s: &str) -> Result<BlobRunMode, String> {
        match s {
            "normal" => Ok(BlobRunMode::Normal),
            "read-only" => Ok(BlobRunMode::ReadOnly),
            "fallback" => Ok(BlobRunMode::Fallback),
            m => Err(format!(
                "expect: normal, read-only or fallback, got: {:?}",
                m
            )),
        }
    }
}
impl From<BlobRunMode> for DBTitanDBBlobRunMode {
    /// Maps the config-level run mode onto the engine's Titan enum.
    ///
    /// Implemented as `From` rather than a hand-written `Into`; the matching
    /// `Into` impl comes for free from the standard blanket implementation,
    /// so existing `.into()` call sites keep compiling.
    fn from(m: BlobRunMode) -> DBTitanDBBlobRunMode {
        match m {
            BlobRunMode::Normal => DBTitanDBBlobRunMode::Normal,
            BlobRunMode::ReadOnly => DBTitanDBBlobRunMode::ReadOnly,
            BlobRunMode::Fallback => DBTitanDBBlobRunMode::Fallback,
        }
    }
}
// Generates a serde helper module `$name` that (de)serializes the RocksDB
// enum `$enum` as its numeric (i64) value, plus a TOML round-trip test for
// every listed `variant = value` pair.
macro_rules! numeric_enum_mod {
    ($name:ident $enum:ident { $($variant:ident = $value:expr, )* }) => {
        pub mod $name {
            use std::fmt;
            use serde::{Serializer, Deserializer};
            use serde::de::{self, Unexpected, Visitor};
            use rocksdb::$enum;
            pub fn serialize<S>(mode: &$enum, serializer: S) -> Result<S::Ok, S::Error>
                where S: Serializer
            {
                serializer.serialize_i64(*mode as i64)
            }
            pub fn deserialize<'de, D>(deserializer: D) -> Result<$enum, D::Error>
                where D: Deserializer<'de>
            {
                struct EnumVisitor;
                impl<'de> Visitor<'de> for EnumVisitor {
                    type Value = $enum;
                    fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
                        write!(formatter, concat!("valid ", stringify!($enum)))
                    }
                    fn visit_i64<E>(self, value: i64) -> Result<$enum, E>
                        where E: de::Error
                    {
                        // Any integer not listed in the macro invocation is
                        // rejected with an `invalid_value` error.
                        match value {
                            $( $value => Ok($enum::$variant), )*
                            _ => Err(E::invalid_value(Unexpected::Signed(value), &self))
                        }
                    }
                }
                deserializer.deserialize_i64(EnumVisitor)
            }
            #[cfg(test)]
            mod tests {
                use toml;
                use rocksdb::$enum;
                #[test]
                fn test_serde() {
                    #[derive(Serialize, Deserialize, PartialEq)]
                    struct EnumHolder {
                        #[serde(with = "super")]
                        e: $enum,
                    }
                    let cases = vec![
                        $(($enum::$variant, $value), )*
                    ];
                    // Round-trip every variant through TOML and back.
                    for (e, v) in cases {
                        let holder = EnumHolder { e };
                        let res = toml::to_string(&holder).unwrap();
                        let exp = format!("e = {}\n", v);
                        assert_eq!(res, exp);
                        let h: EnumHolder = toml::from_str(&exp).unwrap();
                        assert!(h == holder);
                    }
                }
            }
        }
    }
}
// Serde helper modules for RocksDB's numeric enums, generated by
// `numeric_enum_mod!`: each module maps its enum to/from the i64 values
// listed here.
numeric_enum_mod! {compaction_pri_serde CompactionPriority {
    ByCompensatedSize = 0,
    OldestLargestSeqFirst = 1,
    OldestSmallestSeqFirst = 2,
    MinOverlappingRatio = 3,
}}
numeric_enum_mod! {rate_limiter_mode_serde DBRateLimiterMode {
    ReadOnly = 1,
    WriteOnly = 2,
    AllIo = 3,
}}
numeric_enum_mod! {compaction_style_serde DBCompactionStyle {
    Level = 0,
    Universal = 1,
}}
numeric_enum_mod! {recovery_mode_serde DBRecoveryMode {
    TolerateCorruptedTailRecords = 0,
    AbsoluteConsistency = 1,
    PointInTime = 2,
    SkipAnyCorruptedRecords = 3,
}}
numeric_enum_mod! {perf_level_serde PerfLevel {
    Uninitialized = 0,
    Disable = 1,
    EnableCount = 2,
    EnableTimeExceptForMutex = 3,
    EnableTimeAndCPUTimeExceptForMutex = 4,
    EnableTime = 5,
    OutOfBounds = 6,
}}
#[cfg(test)]
mod tests {
    use super::*;
    use rocksdb::DBCompressionType;
    /// Round-trips every length-7 window over the known compression types
    /// through TOML, then checks that wrong array lengths and unknown names
    /// are rejected.
    #[test]
    fn test_parse_compression_type() {
        #[derive(Serialize, Deserialize)]
        struct CompressionTypeHolder {
            #[serde(with = "compression_type_level_serde")]
            tp: [DBCompressionType; 7],
        }
        let all_tp = vec![
            (DBCompressionType::No, "no"),
            (DBCompressionType::Snappy, "snappy"),
            (DBCompressionType::Zlib, "zlib"),
            (DBCompressionType::Bz2, "bzip2"),
            (DBCompressionType::Lz4, "lz4"),
            (DBCompressionType::Lz4hc, "lz4hc"),
            (DBCompressionType::Zstd, "zstd"),
            (DBCompressionType::ZstdNotFinal, "zstd-not-final"),
            (DBCompressionType::Disable, "disable"),
        ];
        // `0..=len - 7` covers every length-7 window including the last one;
        // the previous exclusive range `0..len - 7` skipped the final window.
        for i in 0..=all_tp.len() - 7 {
            let mut src = [DBCompressionType::No; 7];
            let mut exp = ["no"; 7];
            for (i, &t) in all_tp[i..i + 7].iter().enumerate() {
                src[i] = t.0;
                exp[i] = t.1;
            }
            let holder = CompressionTypeHolder { tp: src };
            let res_str = toml::to_string(&holder).unwrap();
            let exp_str = format!("tp = [\"{}\"]\n", exp.join("\", \""));
            assert_eq!(res_str, exp_str);
            let h: CompressionTypeHolder = toml::from_str(&exp_str).unwrap();
            assert_eq!(h.tp, holder.tp);
        }
        // length is wrong.
        assert!(toml::from_str::<CompressionTypeHolder>("tp = [\"no\"]").is_err());
        assert!(toml::from_str::<CompressionTypeHolder>(
            r#"tp = [
    "no", "no", "no", "no", "no", "no", "no", "no"
]"#
        )
        .is_err());
        // value is wrong.
        assert!(toml::from_str::<CompressionTypeHolder>(
            r#"tp = [
    "no", "no", "no", "no", "no", "no", "yes"
]"#
        )
        .is_err());
    }
}
| 33.140845 | 94 | 0.503754 |
0ef4825e1e535f37f9ea4eda965cc592ff5f5ce2 | 6,002 | // Copyright (c) 2015-2016 Brandon Thomas <[email protected]>
//! Low level parsing that returns headers and data fields closer to the
//! underlying ILDA data model.
use data::COLOR_PALETTE_SIZE;
use data::ColorPalette;
use data::Format;
use data::HEADER_SIZE;
use data::Header;
use data::INDEXED_2D_DATA_SIZE;
use data::INDEXED_3D_DATA_SIZE;
use data::IldaEntry;
use data::IndexedPoint2d;
use data::IndexedPoint3d;
use data::TRUE_COLOR_2D_DATA_SIZE;
use data::TRUE_COLOR_3D_DATA_SIZE;
use data::TrueColorPoint2d;
use data::TrueColorPoint3d;
use error::IldaError;
use std::fs::File;
use std::io::Read;
/// The ILDA format header; "ILDA" in ASCII.
const ILDA_HEADER : [u8; 4] = [73u8, 76u8, 68u8, 65u8];
/// Read ILDA data from a file.
///
/// Opens `filename`, reads it fully into memory and delegates parsing to
/// [`read_bytes`].
pub fn read_file(filename: &str) -> Result<Vec<IldaEntry>, IldaError> {
  let mut contents = Vec::new();
  let mut file = File::open(filename)?;
  // Propagate read errors instead of silently discarding them (the previous
  // `let _r = ...` ignored the result); a partial read would otherwise
  // surface later as a confusing parse failure.
  file.read_to_end(&mut contents)?;
  read_bytes(&contents[..])
}
/// Read ILDA data from raw bytes.
///
/// Parses an alternating sequence of 32-byte headers and fixed-size data
/// records into a flat list of `IldaEntry` values. Each header announces
/// the format and record count of the data section that follows it.
///
/// NOTE(review): the slice expressions below assume the buffer contains
/// every record a header announces; a truncated input would panic on the
/// slice rather than return an error — TODO confirm callers' expectations.
pub fn read_bytes(ilda_bytes: &[u8]) -> Result<Vec<IldaEntry>, IldaError> {
  if ilda_bytes.len() < 32 {
    return Err(IldaError::FileTooSmall);
  }
  // State machine: what the next chunk of bytes should be parsed as.
  enum NextRead { Header, I3d, I2d, Color, Tc3d, Tc2d };
  let mut vec = Vec::new();
  let mut i : usize = 0;
  let mut next_read = NextRead::Header;
  let mut frames_to_read = 0;
  // TODO(echelon): This isn't very concise.
  while i < ilda_bytes.len() {
    match next_read {
      NextRead::Header => {
        let header = read_header(&ilda_bytes[i .. i + HEADER_SIZE])
          .map_err(|_| IldaError::InvalidHeader)?;
        // The header's format decides how the following records are read.
        next_read = match header.get_format() {
          Format::Indexed3d => NextRead::I3d,
          Format::Indexed2d => NextRead::I2d,
          Format::ColorPalette => NextRead::Color,
          Format::TrueColor3d => NextRead::Tc3d,
          Format::TrueColor2d => NextRead::Tc2d,
          Format::Unknown => return Err(IldaError::InvalidHeader),
        };
        frames_to_read = header.record_count;
        vec.push(IldaEntry::HeaderEntry(header));
        i += HEADER_SIZE;
      },
      // Each data arm below reads `frames_to_read` fixed-size records,
      // wraps them in the matching `IldaEntry` variant, and switches the
      // state machine back to expecting a header.
      NextRead::I3d => {
        let end = INDEXED_3D_DATA_SIZE * frames_to_read as usize;
        let points = IndexedPoint3d::read_bytes(&ilda_bytes[i .. i + end])?;
        let mut entries = points.iter()
            .map(|x| IldaEntry::IdxPoint3dEntry(x.clone()))
            .collect();
        vec.append(&mut entries);
        next_read = NextRead::Header;
        i += end;
      },
      NextRead::I2d => {
        let end = INDEXED_2D_DATA_SIZE * frames_to_read as usize;
        let points = IndexedPoint2d::read_bytes(&ilda_bytes[i .. i + end])?;
        let mut entries = points.iter()
            .map(|x| IldaEntry::IdxPoint2dEntry(x.clone()))
            .collect();
        vec.append(&mut entries);
        next_read = NextRead::Header;
        i += end;
      },
      NextRead::Color => {
        let end = COLOR_PALETTE_SIZE * frames_to_read as usize;
        let points = ColorPalette::read_bytes(&ilda_bytes[i .. i + end])?;
        let mut entries = points.iter()
            .map(|x| IldaEntry::ColorPaletteEntry(x.clone()))
            .collect();
        vec.append(&mut entries);
        next_read = NextRead::Header;
        i += end;
      },
      NextRead::Tc3d => {
        let end = TRUE_COLOR_3D_DATA_SIZE * frames_to_read as usize;
        let points = TrueColorPoint3d::read_bytes(&ilda_bytes[i .. i + end])?;
        let mut entries = points.iter()
            .map(|x| IldaEntry::TcPoint3dEntry(x.clone()))
            .collect();
        vec.append(&mut entries);
        next_read = NextRead::Header;
        i += end;
      },
      NextRead::Tc2d => {
        let end = TRUE_COLOR_2D_DATA_SIZE * frames_to_read as usize;
        let points = TrueColorPoint2d::read_bytes(&ilda_bytes[i .. i + end])?;
        let mut entries = points.iter()
            .map(|x| IldaEntry::TcPoint2dEntry(x.clone()))
            .collect();
        vec.append(&mut entries);
        next_read = NextRead::Header;
        i += end;
      },
    };
  }
  Ok(vec)
}
/// Parses one 32-byte ILDA section header.
///
/// Layout as read below: bytes 0-3 must be the ASCII magic "ILDA", byte 7
/// is the format code, bytes 8-15 and 16-23 are the frame and company
/// names, 24-25 the record count, 26-27 the frame number, 28-29 the total
/// frame count and byte 31 the projector number. The reserved fields are
/// not read yet (see the TODOs).
fn read_header(header_bytes: &[u8]) -> Result<Header, IldaError> {
  if header_bytes.len() != 32 || &header_bytes[0..4] != &ILDA_HEADER {
    return Err(IldaError::InvalidHeader);
  }
  let name = read_name(&header_bytes[8..16]);
  let company_name = read_name(&header_bytes[16..24]);
  let number_of_records = read_u16(&header_bytes[24..26]);
  let frame_number = read_u16(&header_bytes[26..28]);
  let total_frames = read_u16(&header_bytes[28..30]);
  let projector_number = header_bytes[31];
  Ok(Header {
    reserved: 0, // TODO: Read in.
    format_code: header_bytes[7],
    name: name,
    company_name: company_name,
    record_count: number_of_records,
    number: frame_number,
    total_frames: total_frames,
    projector_number: projector_number,
    reserved_2: 0, // TODO: Read in.
  })
}
/// Extracts a printable ASCII name from a fixed-width, NUL-padded header
/// field.
///
/// Reading stops at the first NUL byte; non-printable control characters
/// are skipped. Returns `None` if no printable characters were found.
fn read_name(bytes: &[u8]) -> Option<String> {
  let mut name = String::with_capacity(8);
  for byte in bytes {
    if *byte == 0 {
      break;
    } else if *byte < 32 {
      // Printable ASCII starts at 32 (space); the previous cut-off of 31
      // incorrectly let the unit-separator control character (31) through.
      continue; // unprintable characters
    } else {
      name.push(*byte as char);
    }
  }
  match name.len() {
    0 => None,
    _ => Some(name),
  }
}
/// Decodes a big-endian u16 from the first two bytes of the slice.
fn read_u16(bytes: &[u8]) -> u16 {
  u16::from_be_bytes([bytes[0], bytes[1]])
}
#[cfg(test)]
mod tests {
  use super::read_name;
  use super::read_u16;
  // A leading NUL terminates the name immediately; NULs in the middle cut
  // off the remainder.
  #[test]
  fn test_read_name() {
    assert_eq!(read_name(&[0, 0, 0, 0]), None);
    assert_eq!(read_name(&[0, 100, 100, 100]), None);
    assert_eq!(read_name(&[102, 111, 111]), Some("foo".to_string()));
    assert_eq!(read_name(&[102, 111, 111, 0, 111]),
               Some("foo".to_string()));
  }
  // Big-endian: the first byte is the high-order byte.
  #[test]
  fn test_read_u16() {
    assert_eq!(read_u16(&[0u8, 0u8]), 0u16);
    assert_eq!(read_u16(&[0u8, 100u8]), 100u16);
    assert_eq!(read_u16(&[0u8, 255u8]), 255u16);
    assert_eq!(read_u16(&[1u8, 0u8]), 256u16);
    assert_eq!(read_u16(&[255u8, 0u8]), 65280u16);
    assert_eq!(read_u16(&[255u8, 255u8]), 65535u16);
  }
}
| 31.260417 | 78 | 0.611796 |
cce90f8a2cd29339edf892cfdfce9f55f0b3618a | 541 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
    // NOTE(review): pre-1.0 Rust syntax (`int` type, stack-closure type
    // `|| -> int`) — this appears to be a historical compiler test fixture
    // and will not compile on modern Rust. Left unchanged on purpose.
    let x = 1;
    let y: || -> int = || x;
    let _z = y();
}
| 33.8125 | 68 | 0.695009 |
50aed51268c862af559b246ef4d1982c5b2c04b5 | 576 |
Osnovice trapeza @"ABCD"@ su @"AB"@ i @"CD"@ i dati su uglovi (vidi sliku)
@center@ @number_f@ = @dat_f@, @hspacept(20)@ @number_s@ = @dat_s@.
@center@ @mycanvas()@
Odredi ostale unutrašnje i spoljašnje uglove trapeza:
@center@ @numb[ind[1]]@ @hspacept(5)@ @answ[1]@ @hspacept(25)@ @numb[ind[2]]@ @hspacept(5)@ @answ[2]@
@center@ @numb[ind[3]]@ @hspacept(5)@ @answ[3]@ @hspacept(25)@ @numb[ind[4]]@ @hspacept(5)@ @answ[4]@
@center@ @numb[ind[5]]@ @hspacept(5)@ @answ[5]@ @hspacept(25)@ @numb[ind[6]]@ @hspacept(5)@ @answ[6]@
| 41.142857 | 108 | 0.572917 |
6add9580b8729f874da55391849f4309fb9b96be | 2,984 | use super::Optimizer;
use fxhash::FxHashMap;
use swc_ecma_ast::*;
use swc_ecma_utils::{ident::IdentLike, Id};
use swc_ecma_visit::{noop_visit_mut_type, VisitMut, VisitMutWith};
/// Methods related to the option `collapse_vars`.
impl Optimizer<'_> {
    /// If `e` is an assignment `ident = <simple value>` that is provably the
    /// variable's single assignment, records the value in `self.lits` so
    /// later reads of the identifier can be inlined.
    pub(super) fn collapse_assignment_to_vars(&mut self, e: &mut Expr) {
        if !self.options.collapse_vars {
            return;
        }
        // Bail out in positions where the assignment may not dominate later
        // uses (try blocks, code executed multiple times, conditionals).
        if self.ctx.in_try_block || self.ctx.executed_multiple_time || self.ctx.in_cond {
            return;
        }
        match &*e {
            Expr::Assign(assign) => {
                //
                // Only plain `ident = ...` assignments qualify; destructuring
                // patterns and member expressions are skipped.
                let left = match &assign.left {
                    PatOrExpr::Expr(_) => return,
                    PatOrExpr::Pat(left) => match &**left {
                        Pat::Ident(i) => i,
                        _ => return,
                    },
                };
                if let Some(usage) = self
                    .data
                    .as_ref()
                    .and_then(|data| data.vars.get(&left.to_id()))
                {
                    // Require a declared, function-local, non-const variable
                    // with exactly one assignment.
                    if !usage.declared
                        || !usage.is_fn_local
                        || usage.assign_count != 1
                        || usage.var_kind == Some(VarDeclKind::Const)
                    {
                        return;
                    }
                    // Inside loops only literals and identifiers are cheap
                    // and safe enough to duplicate.
                    if usage.used_in_loop {
                        match &*assign.right {
                            Expr::Lit(..) | Expr::Ident(..) => {}
                            _ => return,
                        }
                    }
                    // Multiple uses: restrict to literals to avoid repeating
                    // potentially effectful or expensive expressions.
                    if usage.usage_count >= 2 {
                        match &*assign.right {
                            Expr::Lit(..) => {}
                            _ => return,
                        }
                    }
                }
                // Only literals and non-computed member accesses are treated
                // as inlinable values.
                let value = match &*assign.right {
                    Expr::Lit(..)
                    | Expr::Member(MemberExpr {
                        computed: false, ..
                    }) => assign.right.clone(),
                    _ => return,
                };
                // (sic: "collpase" typo kept — runtime log text unchanged)
                log::debug!(
                    "collpase_vars: Decided to inline {}{:?}",
                    left.id.sym,
                    left.id.span.ctxt
                );
                self.lits.insert(left.to_id(), value);
            }
            _ => {}
        }
    }
}
// Visitor that replaces identifiers with pre-recorded values; each map
// entry is consumed on first use (see `visit_mut_expr` below, which
// `remove`s the entry). Presumably populated from collected single-use
// assignments — the construction site is not visible here.
struct Inliner<'a> {
    values: &'a mut FxHashMap<Id, Option<Box<Expr>>>,
}
impl VisitMut for Inliner<'_> {
    noop_visit_mut_type!();
    /// Bottom-up rewrite: children first, then replace this node if it is an
    /// identifier with a recorded value.
    fn visit_mut_expr(&mut self, e: &mut Expr) {
        e.visit_mut_children_with(self);
        if let Expr::Ident(i) = e {
            // `remove` consumes the entry so a value is inlined exactly once.
            if let Some(value) = self.values.remove(&i.to_id()) {
                log::debug!("collapse_vars: Inlining {}{:?}", i.sym, i.span.ctxt);
                *e = *value.expect("should be used only once");
            }
        }
    }
}
| 29.84 | 89 | 0.387735 |
0835d7fca961207b89762d0eee31d4c7140b1ee4 | 281 | fn main() -> Result<(), std::io::Error> {
let abi = <contract::Sample as ink_lang::GenerateAbi>::generate_abi();
let contents = serde_json::to_string_pretty(&abi)?;
std::fs::create_dir("target").ok();
std::fs::write("target/metadata.json", contents)?;
Ok(())
}
| 35.125 | 74 | 0.626335 |
d7c0f6fa14d1311ae6af05ffe9b41f83b83f9c0d | 234 | mod mutator;
pub use mutator::Mutator;
mod circle_mutator;
pub use circle_mutator::CircleMutator;
mod triangle_mutator;
pub use triangle_mutator::TriangleMutator;
mod rectangle_mutator;
pub use rectangle_mutator::RectangleMutator;
| 19.5 | 44 | 0.833333 |
0e4691c1245ee3829d28cb657fdf7c031547d4a2 | 5,818 | // Copyright 2018 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use bigint::U256;
use libp2p_core::PeerId;
use multihash::Multihash;
use sha2::{Digest, Sha256, digest::generic_array::{GenericArray, typenum::U32}};
/// A `Key` identifies both the nodes participating in the Kademlia DHT, as well as
/// records stored in the DHT.
///
/// The set of all `Key`s defines the Kademlia keyspace.
///
/// `Key`s have an XOR metric as defined in the Kademlia paper, i.e. the bitwise XOR of
/// the hash digests, interpreted as an integer. See [`Key::distance`].
///
/// A `Key` preserves the preimage of type `T` of the hash function. See [`Key::preimage`].
#[derive(Clone, Debug)]
pub struct Key<T> {
    // The original value the key was derived from (e.g. a `PeerId`).
    preimage: T,
    // SHA-256 digest of the preimage: the key's position in the keyspace.
    bytes: KeyBytes,
}
/// The raw bytes of a key in the DHT keyspace.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct KeyBytes(GenericArray<u8, U32>);
impl KeyBytes {
    /// Computes the distance of the keys according to the XOR metric.
    pub fn distance<U>(&self, other: &U) -> Distance
    where
        U: AsRef<KeyBytes>
    {
        // Interpret both 32-byte digests as 256-bit integers and XOR them,
        // per the Kademlia XOR metric.
        let a = U256::from(self.0.as_ref());
        let b = U256::from(other.as_ref().0.as_ref());
        Distance(a ^ b)
    }
}
// Identity `AsRef`, so `distance` can accept either raw `KeyBytes` or a
// `Key<T>` through the same `U: AsRef<KeyBytes>` bound.
impl AsRef<KeyBytes> for KeyBytes {
    fn as_ref(&self) -> &KeyBytes {
        self
    }
}
impl<T> AsRef<KeyBytes> for Key<T> {
    fn as_ref(&self) -> &KeyBytes {
        &self.bytes
    }
}
// Keys compare equal by their hashed bytes only; the preimages (and even
// their types) are ignored.
impl<T, U> PartialEq<Key<U>> for Key<T> {
    fn eq(&self, other: &Key<U>) -> bool {
        self.bytes == other.bytes
    }
}
impl<T> Eq for Key<T> {}
impl<TPeerId> AsRef<Key<TPeerId>> for Key<TPeerId> {
    fn as_ref(&self) -> &Key<TPeerId> {
        self
    }
}
impl<T> Key<T> {
    /// Construct a new `Key` by hashing the bytes of the given `preimage`.
    ///
    /// The preimage of type `T` is preserved. See [`Key::preimage`] and
    /// [`Key::into_preimage`].
    pub fn new(preimage: T) -> Key<T>
    where
        T: AsRef<[u8]>
    {
        let bytes = KeyBytes(Sha256::digest(preimage.as_ref()));
        Key { preimage, bytes }
    }
    /// Returns the uniquely determined key with the given distance to `self`.
    ///
    /// This implements the following equivalence:
    ///
    /// `self xor other = distance <==> other = self xor distance`
    pub fn for_distance(&self, d: Distance) -> KeyBytes {
        // XOR is its own inverse, so `self ^ distance` recovers `other`.
        let key_int = U256::from(self.bytes.0.as_ref()) ^ d.0;
        KeyBytes(GenericArray::from(<[u8; 32]>::from(key_int)))
    }
    /// Borrows the preimage of the key.
    pub fn preimage(&self) -> &T {
        &self.preimage
    }
    /// Converts the key into its preimage.
    pub fn into_preimage(self) -> T {
        self.preimage
    }
    /// Computes the distance of the keys according to the XOR metric.
    pub fn distance<U>(&self, other: &U) -> Distance
    where
        U: AsRef<KeyBytes>
    {
        self.bytes.distance(other)
    }
}
impl<T> From<Key<T>> for KeyBytes {
    /// Discards the preimage, keeping only the raw key bytes.
    ///
    /// Implemented as `From` rather than a hand-written `Into`; the matching
    /// `Into` impl comes for free from the standard blanket implementation,
    /// so existing `.into()` call sites keep compiling.
    fn from(key: Key<T>) -> KeyBytes {
        key.bytes
    }
}
// Both conversions hash the input's bytes again via `Key::new` (SHA-256).
impl From<Multihash> for Key<Multihash> {
    fn from(m: Multihash) -> Self {
        Key::new(m)
    }
}
impl From<PeerId> for Key<PeerId> {
    fn from(p: PeerId) -> Self {
        Key::new(p)
    }
}
/// A distance between two `Key`s.
// Newtype over a 256-bit unsigned integer; ordering and equality derive
// from `U256`.
#[derive(Copy, Clone, PartialEq, Eq, Default, PartialOrd, Ord, Debug)]
pub struct Distance(pub(super) bigint::U256);
#[cfg(test)]
mod tests {
    use super::*;
    use quickcheck::*;
    impl Arbitrary for Key<PeerId> {
        fn arbitrary<G: Gen>(_: &mut G) -> Key<PeerId> {
            Key::from(PeerId::random())
        }
    }
    // Metric axiom: d(a, a) == 0.
    #[test]
    fn identity() {
        fn prop(a: Key<PeerId>) -> bool {
            a.distance(&a) == Distance::default()
        }
        quickcheck(prop as fn(_) -> _)
    }
    // Metric axiom: d(a, b) == d(b, a).
    #[test]
    fn symmetry() {
        fn prop(a: Key<PeerId>, b: Key<PeerId>) -> bool {
            a.distance(&b) == b.distance(&a)
        }
        quickcheck(prop as fn(_,_) -> _)
    }
    // Metric axiom: d(a, c) <= d(a, b) + d(b, c); cases where the 256-bit
    // sum would overflow are discarded.
    #[test]
    fn triangle_inequality() {
        fn prop(a: Key<PeerId>, b: Key<PeerId>, c: Key<PeerId>) -> TestResult {
            let ab = a.distance(&b);
            let bc = b.distance(&c);
            let (ab_plus_bc, overflow) = ab.0.overflowing_add(bc.0);
            if overflow {
                TestResult::discard()
            } else {
                TestResult::from_bool(a.distance(&c) <= Distance(ab_plus_bc))
            }
        }
        quickcheck(prop as fn(_,_,_) -> _)
    }
    // XOR-metric property: for fixed `a` and distance `d`, at most one key
    // lies at distance `d` from `a` (sampled with 100 random keys).
    #[test]
    fn unidirectionality() {
        fn prop(a: Key<PeerId>, b: Key<PeerId>) -> bool {
            let d = a.distance(&b);
            (0 .. 100).all(|_| {
                let c = Key::from(PeerId::random());
                a.distance(&c) != d || b == c
            })
        }
        quickcheck(prop as fn(_,_) -> _)
    }
}
22103810d6a360b0022ae0f531e105a647320cda | 4,878 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
// Allow KiB, MiB consts
#![allow(non_upper_case_globals, non_snake_case)]
// Allow fns to take &usize, since criterion only passes parameters by ref
#![allow(clippy::trivially_copy_pass_by_ref)]
// Allow writing 1 * KiB or 1 * MiB
#![allow(clippy::identity_op)]
use criterion::{
criterion_group, criterion_main, AxisScale, Bencher, Criterion, ParameterizedBenchmark,
PlotConfiguration, Throughput,
};
use futures::{
channel::mpsc,
executor::block_on,
sink::SinkExt,
stream::{FuturesUnordered, StreamExt},
};
use libra_types::PeerId;
use network::protocols::{
network::{
dummy::{setup_network, DummyMsg, DummyNetworkSender},
Event,
},
rpc::error::RpcError,
};
use std::time::Duration;
const KiB: usize = 1 << 10;
const MiB: usize = 1 << 20;
const NUM_MSGS: u32 = 100;
const TOLERANCE: u32 = 20;
/// Benchmarks the DirectSend protocol: each iteration fires NUM_MSGS
/// messages of `msg_len` bytes from the dialer, and completes once the
/// listener has observed NUM_MSGS - TOLERANCE of them (DirectSend may drop
/// messages on temporary connection breaks, hence the tolerance).
fn direct_send_bench(b: &mut Bencher, msg_len: &usize) {
    let tn = setup_network();
    let runtime = tn.runtime;
    let mut dialer_sender = tn.dialer_sender;
    let listener_peer_id = tn.listener_peer_id;
    let mut listener_events = tn.listener_events;
    // Compose Proposal message with `msg_len` bytes payload
    let msg = DummyMsg(vec![0u8; *msg_len]);
    // Zero-capacity channel: the listener signals iteration completion.
    let (mut tx, mut rx) = mpsc::channel(0);
    // The listener side keeps receiving messages and send signal back to the bencher to finish
    // the iteration once NUM_MSGS messages are received.
    let f_listener = async move {
        let mut counter = 0u32;
        while let Some(_) = listener_events.next().await {
            counter += 1;
            // By the nature of DirectSend protocol, some messages may be lost when a connection is
            // broken temporarily.
            if counter == NUM_MSGS - TOLERANCE {
                let _ = tx.send(()).await;
                counter = 0;
            }
        }
    };
    runtime.spawn(f_listener);
    // The dialer side keeps sending messages. In each iteration of the benchmark, it sends
    // NUM_MSGS messages and wait until the listener side sends signal back.
    b.iter(|| {
        for _ in 0..NUM_MSGS {
            dialer_sender
                .send_to(listener_peer_id, msg.clone())
                .unwrap();
        }
        block_on(rx.next()).unwrap();
    });
}
/// Benchmarks the RPC protocol: each iteration issues NUM_MSGS concurrent
/// requests (empty payload) and blocks until every `msg_len`-byte response
/// has arrived.
fn rpc_bench(b: &mut Bencher, msg_len: &usize) {
    let tn = setup_network();
    let runtime = tn.runtime;
    let dialer_sender = tn.dialer_sender;
    let listener_peer_id = tn.listener_peer_id;
    let mut listener_events = tn.listener_events;
    // Compose RequestBlock message and RespondBlock message with `msg_len` bytes payload
    let req = DummyMsg(vec![]);
    let res = DummyMsg(vec![0u8; *msg_len]);
    // The listener side keeps receiving RPC requests and sending responses back
    let f_listener = async move {
        while let Some(Ok(event)) = listener_events.next().await {
            match event {
                Event::RpcRequest((_, _, res_tx)) => res_tx
                    .send(Ok(lcs::to_bytes(&res)
                        .expect("fail to serialize proto")
                        .into()))
                    .expect("fail to send rpc response to network"),
                event => panic!("Unexpected event: {:?}", event),
            }
        }
    };
    runtime.spawn(f_listener);
    // The dialer side keeps sending RPC requests. In each iteration of the benchmark, it sends
    // NUM_MSGS requests and blocks on getting the responses.
    b.iter(|| {
        // All NUM_MSGS requests are in flight concurrently; completion order
        // does not matter, hence `FuturesUnordered`.
        let mut requests = FuturesUnordered::new();
        for _ in 0..NUM_MSGS {
            requests.push(send_rpc(
                dialer_sender.clone(),
                listener_peer_id,
                req.clone(),
            ));
        }
        while let Some(res) = block_on(requests.next()) {
            let _ = res.unwrap();
        }
    });
}
/// Issues one RPC to `recipient` and awaits the response.
async fn send_rpc(
    mut sender: DummyNetworkSender,
    recipient: PeerId,
    req_msg: DummyMsg,
) -> Result<DummyMsg, RpcError> {
    // Same 15-second deadline as before, just named for readability.
    let timeout = Duration::from_secs(15);
    sender.send_rpc(recipient, req_msg, timeout).await
}
/// Registers both benchmarks with criterion, parameterized over the message
/// payload size, with logarithmic plots and byte-throughput reporting.
fn network_crate_benchmark(c: &mut Criterion) {
    ::libra_logger::Logger::new().environment_only(true).init();
    // Parameterize benchmarks over the message length.
    let msg_lens = vec![32usize, 256, 1 * KiB, 4 * KiB, 64 * KiB, 256 * KiB, 1 * MiB];
    c.bench(
        "network_crate_benchmark",
        ParameterizedBenchmark::new("direct_send", direct_send_bench, msg_lens)
            .with_function("rpc", rpc_bench)
            .sample_size(10)
            .plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic))
            // Throughput counts NUM_MSGS messages of `msg_len` bytes each.
            .throughput(|msg_len| Throughput::Bytes(((*msg_len as u32) * NUM_MSGS).into())),
    );
}
criterion_group!(benches, network_crate_benchmark);
criterion_main!(benches);
| 33.410959 | 99 | 0.620951 |
56b0fc2970c393983bce7c9ec856d93ac9ea7db9 | 755 | // quiz1.rs
// This is a quiz for the following sections:
// - Variables
// - Functions
// - If
// Mary is buying apples. One apple usually costs 2 Rustbucks, but if you buy
// more than 40 at once, each apple only costs 1! Write a function that calculates
// the price of an order of apples given the quantity bought. No hints this time!
// Put your function here!
/// Price of `quantity` apples in Rustbucks: 2 each normally, 1 each for
/// bulk orders of more than 40 apples.
fn calculate_apple_price(quantity: u16) -> u16 {
    match quantity {
        q if q > 40 => q,
        q => q * 2,
    }
}
// Don't modify this function!
#[test]
fn verify_test() {
    // 35 and 40 apples are at or below the bulk threshold (2 each);
    // 65 is above it (1 each).
    let price1 = calculate_apple_price(35);
    let price2 = calculate_apple_price(40);
    let price3 = calculate_apple_price(65);
    assert_eq!(70, price1);
    assert_eq!(80, price2);
    assert_eq!(65, price3);
}
| 26.034483 | 82 | 0.688742 |
623a1ac3c5f777deeebf5e73a8da1157b484a924 | 57,085 | #![allow(missing_docs)]
#![allow(non_camel_case_types)] // For the simd type aliases.
//! SIMD values based on auto-vectorization.
use crate::scalar::{Field, SubsetOf, SupersetOf};
use crate::simd::{
PrimitiveSimdValue, SimdBool, SimdComplexField, SimdPartialOrd, SimdRealField, SimdSigned,
SimdValue,
};
use approx::AbsDiffEq;
#[cfg(feature = "decimal")]
use decimal::d128;
use num::{FromPrimitive, Num, One, Zero};
use std::{
fmt,
ops::{
Add, AddAssign, BitAnd, BitOr, BitXor, Div, DivAssign, Mul, MulAssign, Neg, Not, Rem,
RemAssign, Sub, SubAssign,
},
};
// This is a hack to allow use to reuse `_0` as integers or as identifier,
// depending on whether or not `ident_to_value` has been called in scope.
// This helps writing macros that define both `::new` and `From([T; lanes()])`.
macro_rules! ident_to_value(
() => {
const _0: usize = 0; const _1: usize = 1; const _2: usize = 2; const _3: usize = 3; const _4: usize = 4; const _5: usize = 5; const _6: usize = 6; const _7: usize = 7;
const _8: usize = 8; const _9: usize = 9; const _10: usize = 10; const _11: usize = 11; const _12: usize = 12; const _13: usize = 13; const _14: usize = 14; const _15: usize = 15;
const _16: usize = 16; const _17: usize = 17; const _18: usize = 18; const _19: usize = 19; const _20: usize = 20; const _21: usize = 21; const _22: usize = 22; const _23: usize = 23;
const _24: usize = 24; const _25: usize = 25; const _26: usize = 26; const _27: usize = 27; const _28: usize = 28; const _29: usize = 29; const _30: usize = 30; const _31: usize = 31;
const _32: usize = 32; const _33: usize = 33; const _34: usize = 34; const _35: usize = 35; const _36: usize = 36; const _37: usize = 37; const _38: usize = 38; const _39: usize = 39;
const _40: usize = 40; const _41: usize = 41; const _42: usize = 42; const _43: usize = 43; const _44: usize = 44; const _45: usize = 45; const _46: usize = 46; const _47: usize = 47;
const _48: usize = 48; const _49: usize = 49; const _50: usize = 50; const _51: usize = 51; const _52: usize = 52; const _53: usize = 53; const _54: usize = 54; const _55: usize = 55;
const _56: usize = 56; const _57: usize = 57; const _58: usize = 58; const _59: usize = 59; const _60: usize = 60; const _61: usize = 61; const _62: usize = 62; const _63: usize = 63;
}
);
/// An Simd structure that implements all the relevant traits from `num` an `simba`.
///
/// This is needed to overcome the orphan rules.
#[repr(align(16))]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct AutoSimd<N>(pub N);
/// An Simd boolean structure that implements all the relevant traits from `num` an `simba`.
///
/// This is needed to overcome the orphan rules.
#[repr(align(16))]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct AutoBoolSimd<N>(pub N);
macro_rules! impl_bool_simd(
($($t: ty, $lanes: expr, $($i: ident),*;)*) => {$(
impl_simd_value!($t, bool, $lanes, AutoSimd<$t> $(, $i)*;);
impl From<[bool; $lanes]> for AutoSimd<$t> {
#[inline(always)]
fn from(vals: [bool; $lanes]) -> Self {
Self(vals)
}
}
impl Not for AutoSimd<$t> {
type Output = Self;
#[inline]
fn not(self) -> Self {
self.map(|x| !x)
}
}
impl BitAnd<AutoSimd<$t>> for AutoSimd<$t> {
type Output = Self;
fn bitand(self, rhs: Self) -> Self {
self.zip_map(rhs, |x, y| x & y)
}
}
impl BitOr<AutoSimd<$t>> for AutoSimd<$t> {
type Output = Self;
fn bitor(self, rhs: Self) -> Self {
self.zip_map(rhs, |x, y| x | y)
}
}
impl BitXor<AutoSimd<$t>> for AutoSimd<$t> {
type Output = Self;
fn bitxor(self, rhs: Self) -> Self {
self.zip_map(rhs, |x, y| x ^ y)
}
}
impl SimdBool for AutoSimd<$t> {
#[inline(always)]
fn bitmask(self) -> u64 {
ident_to_value!();
0u64 $(
| ((self.0[$i] as u64) << $i)
)*
}
#[inline(always)]
fn and(self) -> bool {
ident_to_value!();
true $(
&& self.0[$i]
)*
}
#[inline(always)]
fn or(self) -> bool {
ident_to_value!();
false $(
|| self.0[$i]
)*
}
#[inline(always)]
fn xor(self) -> bool {
ident_to_value!();
false $(
^ self.0[$i]
)*
}
#[inline(always)]
fn all(self) -> bool {
self.and()
}
#[inline(always)]
fn any(self) -> bool {
self.or()
}
#[inline(always)]
fn none(self) -> bool {
!self.any()
}
#[inline(always)]
fn if_else<Res: SimdValue<SimdBool = Self>>(
self,
if_value: impl FnOnce() -> Res,
else_value: impl FnOnce() -> Res,
) -> Res {
let a = if_value();
let b = else_value();
a.select(self, b)
}
#[inline(always)]
fn if_else2<Res: SimdValue<SimdBool = Self>>(
self,
if_value: impl FnOnce() -> Res,
else_if: (impl FnOnce() -> Self, impl FnOnce() -> Res),
else_value: impl FnOnce() -> Res,
) -> Res {
let a = if_value();
let b = else_if.1();
let c = else_value();
let cond_a = self;
let cond_b = else_if.0();
a.select(cond_a, b.select(cond_b, c))
}
#[inline(always)]
fn if_else3<Res: SimdValue<SimdBool = Self>>(
self,
if_value: impl FnOnce() -> Res,
else_if: (impl FnOnce() -> Self, impl FnOnce() -> Res),
else_else_if: (impl FnOnce() -> Self, impl FnOnce() -> Res),
else_value: impl FnOnce() -> Res,
) -> Res {
let a = if_value();
let b = else_if.1();
let c = else_else_if.1();
let d = else_value();
let cond_a = self;
let cond_b = else_if.0();
let cond_c = else_else_if.0();
a.select(cond_a, b.select(cond_b, c.select(cond_c, d)))
}
}
)*}
);
// Implements `SubsetOf<AutoSimd<N2>>` for each listed scalar type: a scalar
// embeds into a SIMD value by splatting it across all lanes, and a SIMD
// value is "in the subset" only when every lane holds the same convertible
// value as lane 0.
macro_rules! impl_scalar_subset_of_simd(
    ($($t: ty),*) => {$(
        impl<N2> SubsetOf<AutoSimd<N2>> for $t
            where AutoSimd<N2>: SimdValue + Copy,
                  <AutoSimd<N2> as SimdValue>::Element: SupersetOf<$t> + PartialEq, {
            #[inline(always)]
            fn to_superset(&self) -> AutoSimd<N2> {
                AutoSimd::<N2>::splat(<AutoSimd<N2> as SimdValue>::Element::from_subset(self))
            }
            #[inline(always)]
            fn from_superset_unchecked(element: &AutoSimd<N2>) -> $t {
                element.extract(0).to_subset_unchecked()
            }
            #[inline(always)]
            fn is_in_subset(c: &AutoSimd<N2>) -> bool {
                let elt0 = c.extract(0);
                elt0.is_in_subset() &&
                (1..AutoSimd::<N2>::lanes()).all(|i| c.extract(i) == elt0)
            }
        }
    )*}
);
impl_scalar_subset_of_simd!(u8, u16, u32, u64, usize, i8, i16, i32, i64, isize, f32, f64);
// d128 support is gated behind the optional "decimal" feature.
#[cfg(feature = "decimal")]
impl_scalar_subset_of_simd!(d128);
macro_rules! impl_simd_value(
($($t: ty, $elt: ty, $lanes: expr, $bool: ty, $($i: ident),*;)*) => ($(
impl ArrTransform for AutoSimd<$t> {
#[inline(always)]
fn map(self, f: impl Fn(Self::Element) -> Self::Element) -> Self {
ident_to_value!();
Self([$(f(self.0[$i])),*])
}
#[inline(always)]
fn zip_map(self, other: Self, f: impl Fn(Self::Element, Self::Element) -> Self::Element) -> Self {
ident_to_value!();
Self([$(f(self.0[$i], other.0[$i])),*])
}
#[inline(always)]
fn zip_zip_map(self, b: Self, c: Self, f: impl Fn(Self::Element, Self::Element, Self::Element) -> Self::Element) -> Self {
ident_to_value!();
Self([$(f(self.0[$i], b.0[$i], c.0[$i])),*])
}
#[inline(always)]
fn map_bool(self, f: impl Fn(Self::Element) -> bool) -> Self::SimdBool {
ident_to_value!();
AutoSimd([$(f(self.0[$i])),*])
}
#[inline(always)]
fn zip_map_bool(self, other: Self, f: impl Fn(Self::Element, Self::Element) -> bool) -> Self::SimdBool {
ident_to_value!();
AutoSimd([$(f(self.0[$i], other.0[$i])),*])
}
}
impl fmt::Display for AutoSimd<$t> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if Self::lanes() == 1 {
return self.extract(0).fmt(f);
}
write!(f, "({}", self.extract(0))?;
for i in 1..Self::lanes() {
write!(f, ", {}", self.extract(i))?;
}
write!(f, ")")
}
}
impl AutoSimd<$t> {
pub fn new($($i: $elt),*) -> Self {
AutoSimd([$($i),*])
}
}
impl PrimitiveSimdValue for AutoSimd<$t> {}
impl SimdValue for AutoSimd<$t> {
type Element = $elt;
type SimdBool = $bool;
#[inline(always)]
fn lanes() -> usize {
$lanes
}
#[inline(always)]
fn splat(val: Self::Element) -> Self {
AutoSimd([val; $lanes])
}
#[inline(always)]
fn extract(&self, i: usize) -> Self::Element {
self.0[i]
}
#[inline(always)]
unsafe fn extract_unchecked(&self, i: usize) -> Self::Element {
*self.0.get_unchecked(i)
}
#[inline(always)]
fn replace(&mut self, i: usize, val: Self::Element) {
self.0[i] = val
}
#[inline(always)]
unsafe fn replace_unchecked(&mut self, i: usize, val: Self::Element) {
*self.0.get_unchecked_mut(i) = val
}
#[inline(always)]
fn select(self, cond: Self::SimdBool, other: Self) -> Self {
ident_to_value!();
Self([
$(if cond.0[$i] { self.0[$i] } else { other.0[$i] }),*
])
}
}
)*)
);
// Generates the trait stack shared by every element kind (including unsigned
// integers, which cannot be negated) for `AutoSimd<$t>`:
//   - conversions to/from the plain backing array,
//   - the reflexive `SubsetOf` relation,
//   - num-traits constructors (`Num`, `FromPrimitive`, `Zero`, `One`) that
//     splat a scalar result across all lanes,
//   - lane-wise arithmetic operators and their `*Assign` forms,
//   - lane-wise comparisons and reductions (`SimdPartialOrd`).
// Parameters: `$t` backing array type (e.g. `[u32; 4]`), `$elt` element type,
// `$lanes` lane count, `$bool` the matching boolean SIMD type, `$i` per-lane
// identifiers consumed via `ident_to_value!` in the delegated macros.
macro_rules! impl_uint_simd(
    ($($t: ty, $elt: ty, $lanes: expr, $bool: ty, $($i: ident),*;)*) => ($(
        // Base `SimdValue` + array-helper impls common to all element kinds.
        impl_simd_value!($t, $elt, $lanes, $bool $(, $i)*;);

        // Lossless conversions between the wrapper and its backing array.
        impl From<[$elt; $lanes]> for AutoSimd<$t> {
            #[inline(always)]
            fn from(vals: [$elt; $lanes]) -> Self {
                AutoSimd(vals)
            }
        }

        impl From<AutoSimd<$t>> for [$elt; $lanes] {
            #[inline(always)]
            fn from(val: AutoSimd<$t>) -> [$elt; $lanes] {
                val.0
            }
        }

        // Reflexive subset relation: a type is trivially a subset of itself,
        // so every conversion is the identity and always succeeds.
        impl SubsetOf<AutoSimd<$t>> for AutoSimd<$t> {
            #[inline(always)]
            fn to_superset(&self) -> Self {
                *self
            }

            #[inline(always)]
            fn from_superset(element: &Self) -> Option<Self> {
                Some(*element)
            }

            #[inline(always)]
            fn from_superset_unchecked(element: &Self) -> Self {
                *element
            }

            #[inline(always)]
            fn is_in_subset(_: &Self) -> bool {
                true
            }
        }

        // Parses a scalar and splats it across all lanes; the error type is
        // borrowed from the element's own `Num` impl.
        impl Num for AutoSimd<$t> {
            type FromStrRadixErr = <$elt as Num>::FromStrRadixErr;

            #[inline(always)]
            fn from_str_radix(str: &str, radix: u32) -> Result<Self, Self::FromStrRadixErr> {
                <$elt>::from_str_radix(str, radix).map(Self::splat)
            }
        }

        // Every numeric conversion converts the scalar first, then splats the
        // result across all lanes; `None` propagates from the element type.
        impl FromPrimitive for AutoSimd<$t> {
            #[inline(always)]
            fn from_i64(n: i64) -> Option<Self> {
                <$elt>::from_i64(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_u64(n: u64) -> Option<Self> {
                <$elt>::from_u64(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_isize(n: isize) -> Option<Self> {
                <$elt>::from_isize(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_i8(n: i8) -> Option<Self> {
                <$elt>::from_i8(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_i16(n: i16) -> Option<Self> {
                <$elt>::from_i16(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_i32(n: i32) -> Option<Self> {
                <$elt>::from_i32(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_usize(n: usize) -> Option<Self> {
                <$elt>::from_usize(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_u8(n: u8) -> Option<Self> {
                <$elt>::from_u8(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_u16(n: u16) -> Option<Self> {
                <$elt>::from_u16(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_u32(n: u32) -> Option<Self> {
                <$elt>::from_u32(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_f32(n: f32) -> Option<Self> {
                <$elt>::from_f32(n).map(Self::splat)
            }

            #[inline(always)]
            fn from_f64(n: f64) -> Option<Self> {
                <$elt>::from_f64(n).map(Self::splat)
            }
        }

        // Additive identity: all lanes zero. Equality is whole-vector, so
        // `is_zero` holds only when *every* lane is zero.
        impl Zero for AutoSimd<$t> {
            #[inline(always)]
            fn zero() -> Self {
                AutoSimd([<$elt>::zero(); $lanes])
            }

            #[inline(always)]
            fn is_zero(&self) -> bool {
                *self == Self::zero()
            }
        }

        // Multiplicative identity: all lanes one.
        impl One for AutoSimd<$t> {
            #[inline(always)]
            fn one() -> Self {
                AutoSimd([<$elt>::one(); $lanes])
            }
        }

        // Arithmetic operators are applied independently per lane via
        // `zip_map`; overflow/division-by-zero semantics are the element's.
        impl Add<AutoSimd<$t>> for AutoSimd<$t> {
            type Output = Self;

            #[inline(always)]
            fn add(self, rhs: Self) -> Self {
                self.zip_map(rhs, |x, y| x + y)
            }
        }

        impl Sub<AutoSimd<$t>> for AutoSimd<$t> {
            type Output = Self;

            #[inline(always)]
            fn sub(self, rhs: Self) -> Self {
                self.zip_map(rhs, |x, y| x - y)
            }
        }

        impl Mul<AutoSimd<$t>> for AutoSimd<$t> {
            type Output = Self;

            #[inline(always)]
            fn mul(self, rhs: Self) -> Self {
                self.zip_map(rhs, |x, y| x * y)
            }
        }

        impl Div<AutoSimd<$t>> for AutoSimd<$t> {
            type Output = Self;

            #[inline(always)]
            fn div(self, rhs: Self) -> Self {
                self.zip_map(rhs, |x, y| x / y)
            }
        }

        impl Rem<AutoSimd<$t>> for AutoSimd<$t> {
            type Output = Self;

            #[inline(always)]
            fn rem(self, rhs: Self) -> Self {
                self.zip_map(rhs, |x, y| x % y)
            }
        }

        // Compound assignments delegate to the by-value operators above.
        impl AddAssign<AutoSimd<$t>> for AutoSimd<$t> {
            #[inline(always)]
            fn add_assign(&mut self, rhs: Self) {
                *self = *self + rhs;
            }
        }

        impl SubAssign<AutoSimd<$t>> for AutoSimd<$t> {
            #[inline(always)]
            fn sub_assign(&mut self, rhs: Self) {
                *self = *self - rhs;
            }
        }

        impl DivAssign<AutoSimd<$t>> for AutoSimd<$t> {
            #[inline(always)]
            fn div_assign(&mut self, rhs: Self) {
                *self = *self / rhs;
            }
        }

        impl MulAssign<AutoSimd<$t>> for AutoSimd<$t> {
            #[inline(always)]
            fn mul_assign(&mut self, rhs: Self) {
                *self = *self * rhs;
            }
        }

        impl RemAssign<AutoSimd<$t>> for AutoSimd<$t> {
            #[inline(always)]
            fn rem_assign(&mut self, rhs: Self) {
                *self = *self % rhs;
            }
        }

        // Lane-wise comparisons produce the matching boolean SIMD type;
        // `simd_min`/`simd_max`/`simd_clamp` are lane-wise as well, and the
        // `simd_horizontal_*` methods reduce all lanes to a single element.
        impl SimdPartialOrd for AutoSimd<$t> {
            #[inline(always)]
            fn simd_gt(self, other: Self) -> Self::SimdBool {
                self.zip_map_bool(other, |x, y| x.simd_gt(y))
            }

            #[inline(always)]
            fn simd_lt(self, other: Self) -> Self::SimdBool {
                self.zip_map_bool(other, |x, y| x.simd_lt(y))
            }

            #[inline(always)]
            fn simd_ge(self, other: Self) -> Self::SimdBool {
                self.zip_map_bool(other, |x, y| x.simd_ge(y))
            }

            #[inline(always)]
            fn simd_le(self, other: Self) -> Self::SimdBool {
                self.zip_map_bool(other, |x, y| x.simd_le(y))
            }

            #[inline(always)]
            fn simd_eq(self, other: Self) -> Self::SimdBool {
                self.zip_map_bool(other, |x, y| x.simd_eq(y))
            }

            #[inline(always)]
            fn simd_ne(self, other: Self) -> Self::SimdBool {
                self.zip_map_bool(other, |x, y| x.simd_ne(y))
            }

            #[inline(always)]
            fn simd_max(self, other: Self) -> Self {
                self.zip_map(other, |x, y| x.simd_max(y))
            }

            #[inline(always)]
            fn simd_min(self, other: Self) -> Self {
                self.zip_map(other, |x, y| x.simd_min(y))
            }

            #[inline(always)]
            fn simd_clamp(self, min: Self, max: Self) -> Self {
                self.simd_max(min).simd_min(max)
            }

            // Folds all lanes with `simd_min`; lane 0 is harmlessly compared
            // with itself since the `$i` list starts at `_0`.
            #[inline(always)]
            fn simd_horizontal_min(self) -> Self::Element {
                ident_to_value!();
                self.0[0] $(.simd_min(self.0[$i]))*
            }

            #[inline(always)]
            fn simd_horizontal_max(self) -> Self::Element {
                ident_to_value!();
                self.0[0] $(.simd_max(self.0[$i]))*
            }
        }

        // impl MeetSemilattice for AutoSimd<$t> {
        //     #[inline(always)]
        //     fn meet(&self, other: &Self) -> Self {
        //         AutoSimd(self.0.min(other.0))
        //     }
        // }
        //
        // impl JoinSemilattice for AutoSimd<$t> {
        //     #[inline(always)]
        //     fn join(&self, other: &Self) -> Self {
        //         AutoSimd(self.0.max(other.0))
        //     }
        // }
    )*)
);
// Extends the unsigned stack (`impl_uint_simd!`) with lane-wise negation,
// which only exists for signed element types.
macro_rules! impl_int_simd(
    ($($t: ty, $elt: ty, $lanes: expr, $bool: ty, $($i: ident),*;)*) => ($(
        impl_uint_simd!($t, $elt, $lanes, $bool $(, $i)*;);

        impl Neg for AutoSimd<$t> {
            type Output = Self;

            #[inline(always)]
            fn neg(self) -> Self {
                self.map(|x| -x)
            }
        }
    )*)
);
// Extends the signed-integer stack (`impl_int_simd!`) with float-only traits
// (`SimdSigned`, `Field`, and — below — `SimdRealField`/`SimdComplexField`).
// NOTE(review): the `$int` capture is never referenced inside this macro
// body; presumably kept for signature symmetry with other backends — confirm.
macro_rules! impl_float_simd(
    ($($t: ty, $elt: ty, $lanes: expr, $int: ty, $bool: ty, $($i: ident),*;)*) => ($(
        impl_int_simd!($t, $elt, $lanes, $bool $(, $i)*;);

        // FIXME: this should be part of impl_int_simd
        // but those methods do not seem to be implemented
        // by packed_simd for integers.
        // Every operation is applied independently per lane.
        impl SimdSigned for AutoSimd<$t> {
            #[inline(always)]
            fn simd_abs(&self) -> Self {
                self.map(|x| x.simd_abs())
            }

            #[inline(always)]
            fn simd_abs_sub(&self, other: &Self) -> Self {
                self.zip_map(*other, |x, y| x.simd_abs_sub(&y))
            }

            #[inline(always)]
            fn simd_signum(&self) -> Self {
                self.map(|x| x.simd_signum())
            }

            #[inline(always)]
            fn is_simd_positive(&self) -> Self::SimdBool {
                self.map_bool(|x| x.is_simd_positive())
            }

            #[inline(always)]
            fn is_simd_negative(&self) -> Self::SimdBool {
                self.map_bool(|x| x.is_simd_negative())
            }
        }

        // Marker trait: the lane-wise operators above form a field.
        impl Field for AutoSimd<$t> {}
        // Real-field operations: the binary ops are lane-wise, and every
        // mathematical constant is the scalar constant splatted to all lanes.
        // Gated on a math backend since the element-level functions need one.
        #[cfg(any(feature = "std", feature = "libm", feature = "libm_force"))]
        impl SimdRealField for AutoSimd<$t> {
            #[inline(always)]
            fn simd_atan2(self, other: Self) -> Self {
                self.zip_map(other, |x, y| x.simd_atan2(y))
            }

            #[inline(always)]
            fn simd_copysign(self, sign: Self) -> Self {
                self.zip_map(sign, |me, sgn| me.simd_copysign(sgn))
            }

            #[inline(always)]
            fn simd_default_epsilon() -> Self {
                Self::splat(<$elt>::default_epsilon())
            }

            #[inline(always)]
            fn simd_pi() -> Self {
                Self::splat(<$elt>::simd_pi())
            }

            #[inline(always)]
            fn simd_two_pi() -> Self {
                Self::splat(<$elt>::simd_two_pi())
            }

            #[inline(always)]
            fn simd_frac_pi_2() -> Self {
                Self::splat(<$elt>::simd_frac_pi_2())
            }

            #[inline(always)]
            fn simd_frac_pi_3() -> Self {
                Self::splat(<$elt>::simd_frac_pi_3())
            }

            #[inline(always)]
            fn simd_frac_pi_4() -> Self {
                Self::splat(<$elt>::simd_frac_pi_4())
            }

            #[inline(always)]
            fn simd_frac_pi_6() -> Self {
                Self::splat(<$elt>::simd_frac_pi_6())
            }

            #[inline(always)]
            fn simd_frac_pi_8() -> Self {
                Self::splat(<$elt>::simd_frac_pi_8())
            }

            #[inline(always)]
            fn simd_frac_1_pi() -> Self {
                Self::splat(<$elt>::simd_frac_1_pi())
            }

            #[inline(always)]
            fn simd_frac_2_pi() -> Self {
                Self::splat(<$elt>::simd_frac_2_pi())
            }

            #[inline(always)]
            fn simd_frac_2_sqrt_pi() -> Self {
                Self::splat(<$elt>::simd_frac_2_sqrt_pi())
            }

            #[inline(always)]
            fn simd_e() -> Self {
                Self::splat(<$elt>::simd_e())
            }

            #[inline(always)]
            fn simd_log2_e() -> Self {
                Self::splat(<$elt>::simd_log2_e())
            }

            #[inline(always)]
            fn simd_log10_e() -> Self {
                Self::splat(<$elt>::simd_log10_e())
            }

            #[inline(always)]
            fn simd_ln_2() -> Self {
                Self::splat(<$elt>::simd_ln_2())
            }

            #[inline(always)]
            fn simd_ln_10() -> Self {
                Self::splat(<$elt>::simd_ln_10())
            }
        }
#[cfg(any(feature = "std", feature = "libm", feature = "libm_force"))]
impl SimdComplexField for AutoSimd<$t> {
type SimdRealField = Self;
#[inline(always)]
fn simd_horizontal_sum(self) -> Self::Element {
self.0.iter().sum()
}
#[inline(always)]
fn simd_horizontal_product(self) -> Self::Element {
self.0.iter().product()
}
#[inline(always)]
fn from_simd_real(re: Self::SimdRealField) -> Self {
re
}
#[inline(always)]
fn simd_real(self) -> Self::SimdRealField {
self
}
#[inline(always)]
fn simd_imaginary(self) -> Self::SimdRealField {
Self::zero()
}
#[inline(always)]
fn simd_norm1(self) -> Self::SimdRealField {
self.map(|x| x.simd_norm1())
}
#[inline(always)]
fn simd_modulus(self) -> Self::SimdRealField {
self.map(|x| x.simd_modulus())
}
#[inline(always)]
fn simd_modulus_squared(self) -> Self::SimdRealField {
self.map(|x| x.simd_modulus_squared())
}
#[inline(always)]
fn simd_argument(self) -> Self::SimdRealField {
self.map(|x| x.simd_argument())
}
#[inline(always)]
fn simd_to_exp(self) -> (Self::SimdRealField, Self) {
let ge = self.simd_ge(Self::one());
let exp = Self::one().select(ge, -Self::one());
(self * exp, exp)
}
#[inline(always)]
fn simd_recip(self) -> Self {
self.map(|x| x.simd_recip())
}
#[inline(always)]
fn simd_conjugate(self) -> Self {
self.map(|x| x.simd_conjugate())
}
#[inline(always)]
fn simd_scale(self, factor: Self::SimdRealField) -> Self {
self.zip_map(factor, |x, y| x.simd_scale(y))
}
#[inline(always)]
fn simd_unscale(self, factor: Self::SimdRealField) -> Self {
self.zip_map(factor, |x, y| x.simd_unscale(y))
}
#[inline(always)]
fn simd_floor(self) -> Self {
self.map(|e| e.simd_floor())
}
#[inline(always)]
fn simd_ceil(self) -> Self {
self.map(|e| e.simd_ceil())
}
#[inline(always)]
fn simd_round(self) -> Self {
self.map(|e| e.simd_round())
}
#[inline(always)]
fn simd_trunc(self) -> Self {
self.map(|e| e.simd_trunc())
}
#[inline(always)]
fn simd_fract(self) -> Self {
self.map(|e| e.simd_fract())
}
#[inline(always)]
fn simd_abs(self) -> Self {
self.map(|e| e.simd_abs())
}
#[inline(always)]
fn simd_signum(self) -> Self {
self.map(|e| e.simd_signum())
}
#[inline(always)]
fn simd_mul_add(self, a: Self, b: Self) -> Self {
self.zip_zip_map(a, b, |x, y, z| x.simd_mul_add(y, z))
}
#[inline(always)]
fn simd_powi(self, n: i32) -> Self {
self.map(|e| e.simd_powi(n))
}
#[inline(always)]
fn simd_powf(self, n: Self) -> Self {
self.zip_map(n, |x, y| x.simd_powf(y))
}
#[inline(always)]
fn simd_powc(self, n: Self) -> Self {
self.zip_map(n, |x, y| x.simd_powc(y))
}
#[inline(always)]
fn simd_sqrt(self) -> Self {
self.map(|x| x.simd_sqrt())
}
#[inline(always)]
fn simd_exp(self) -> Self {
self.map(|x| x.simd_exp())
}
#[inline(always)]
fn simd_exp2(self) -> Self {
self.map(|x| x.simd_exp2())
}
#[inline(always)]
fn simd_exp_m1(self) -> Self {
self.map(|x| x.simd_exp_m1())
}
#[inline(always)]
fn simd_ln_1p(self) -> Self {
self.map(|x| x.simd_ln_1p())
}
#[inline(always)]
fn simd_ln(self) -> Self {
self.map(|x| x.simd_ln())
}
#[inline(always)]
fn simd_log(self, base: Self) -> Self {
self.zip_map(base, |x, y| x.simd_log(y))
}
#[inline(always)]
fn simd_log2(self) -> Self {
self.map(|x| x.simd_log2())
}
#[inline(always)]
fn simd_log10(self) -> Self {
self.map(|x| x.simd_log10())
}
#[inline(always)]
fn simd_cbrt(self) -> Self {
self.map(|x| x.simd_cbrt())
}
#[inline(always)]
fn simd_hypot(self, other: Self) -> Self::SimdRealField {
self.zip_map(other, |x, y| x.simd_hypot(y))
}
#[inline(always)]
fn simd_sin(self) -> Self {
self.map(|x| x.simd_sin())
}
#[inline(always)]
fn simd_cos(self) -> Self {
self.map(|x| x.simd_cos())
}
#[inline(always)]
fn simd_tan(self) -> Self {
self.map(|x| x.simd_tan())
}
#[inline(always)]
fn simd_asin(self) -> Self {
self.map(|x| x.simd_asin())
}
#[inline(always)]
fn simd_acos(self) -> Self {
self.map(|x| x.simd_acos())
}
#[inline(always)]
fn simd_atan(self) -> Self {
self.map(|x| x.simd_atan())
}
#[inline(always)]
fn simd_sin_cos(self) -> (Self, Self) {
(self.simd_sin(), self.simd_cos())
}
// #[inline(always]
// fn simd_exp_m1(self) -> Self {
// $libm::exp_m1(self)
// }
//
// #[inline(always]
// fn simd_ln_1p(self) -> Self {
// $libm::ln_1p(self)
// }
//
#[inline(always)]
fn simd_sinh(self) -> Self {
self.map(|x| x.simd_sinh())
}
#[inline(always)]
fn simd_cosh(self) -> Self {
self.map(|x| x.simd_cosh())
}
#[inline(always)]
fn simd_tanh(self) -> Self {
self.map(|x| x.simd_tanh())
}
#[inline(always)]
fn simd_asinh(self) -> Self {
self.map(|x| x.simd_asinh())
}
#[inline(always)]
fn simd_acosh(self) -> Self {
self.map(|x| x.simd_acosh())
}
#[inline(always)]
fn simd_atanh(self) -> Self {
self.map(|x| x.simd_atanh())
}
}
        // NOTE: most of the impls in there are copy-paste from the implementation of
        // ComplexField for num_complex::Complex. Unfortunately, we can't reuse the implementations
        // so easily.
        // `SimdComplexField` for a true complex number whose real/imaginary
        // parts are themselves SIMD vectors; all formulas operate lane-wise
        // through the component-wise `AutoSimd` operations.
        #[cfg(any(feature = "std", feature = "libm", feature = "libm_force"))]
        impl SimdComplexField for num_complex::Complex<AutoSimd<$t>> {
            type SimdRealField = AutoSimd<$t>;

            // Reduces each component across lanes independently.
            #[inline(always)]
            fn simd_horizontal_sum(self) -> Self::Element {
                num_complex::Complex::new(self.re.simd_horizontal_sum(), self.im.simd_horizontal_sum())
            }

            // Complex multiplication is not component-wise, so the product is
            // folded lane by lane via `extract`.
            #[inline(always)]
            fn simd_horizontal_product(self) -> Self::Element {
                let mut prod = self.extract(0);
                for ii in 1..$lanes {
                    prod = prod * self.extract(ii)
                }
                prod
            }

            #[inline]
            fn from_simd_real(re: Self::SimdRealField) -> Self {
                Self::new(re, Self::SimdRealField::zero())
            }

            #[inline]
            fn simd_real(self) -> Self::SimdRealField {
                self.re
            }

            #[inline]
            fn simd_imaginary(self) -> Self::SimdRealField {
                self.im
            }

            // arg(z) = atan2(im, re), lane-wise.
            #[inline]
            fn simd_argument(self) -> Self::SimdRealField {
                self.im.simd_atan2(self.re)
            }

            // |z| = hypot(re, im), lane-wise.
            #[inline]
            fn simd_modulus(self) -> Self::SimdRealField {
                self.re.simd_hypot(self.im)
            }

            #[inline]
            fn simd_modulus_squared(self) -> Self::SimdRealField {
                self.re * self.re + self.im * self.im
            }

            // L1 norm: |re| + |im|.
            #[inline]
            fn simd_norm1(self) -> Self::SimdRealField {
                self.re.simd_abs() + self.im.simd_abs()
            }

            #[inline]
            fn simd_recip(self) -> Self {
                Self::one() / self
            }

            #[inline]
            fn simd_conjugate(self) -> Self {
                self.conj()
            }

            // Scaling by a real multiplies/divides both components.
            #[inline]
            fn simd_scale(self, factor: Self::SimdRealField) -> Self {
                self * factor
            }

            #[inline]
            fn simd_unscale(self, factor: Self::SimdRealField) -> Self {
                self / factor
            }

            // Rounding-style functions are applied component-wise.
            #[inline]
            fn simd_floor(self) -> Self {
                Self::new(self.re.simd_floor(), self.im.simd_floor())
            }

            #[inline]
            fn simd_ceil(self) -> Self {
                Self::new(self.re.simd_ceil(), self.im.simd_ceil())
            }

            #[inline]
            fn simd_round(self) -> Self {
                Self::new(self.re.simd_round(), self.im.simd_round())
            }

            #[inline]
            fn simd_trunc(self) -> Self {
                Self::new(self.re.simd_trunc(), self.im.simd_trunc())
            }

            #[inline]
            fn simd_fract(self) -> Self {
                Self::new(self.re.simd_fract(), self.im.simd_fract())
            }

            #[inline]
            fn simd_mul_add(self, a: Self, b: Self) -> Self {
                self * a + b
            }

            // For complex numbers, "abs" is the modulus (a real value).
            #[inline]
            fn simd_abs(self) -> Self::SimdRealField {
                self.simd_modulus()
            }

            // 2^z computed as (2 + 0i)^z.
            #[inline]
            fn simd_exp2(self) -> Self {
                let _2 = AutoSimd::<$t>::one() + AutoSimd::<$t>::one();
                num_complex::Complex::new(_2, AutoSimd::<$t>::zero()).simd_powc(self)
            }

            #[inline]
            fn simd_exp_m1(self) -> Self {
                self.simd_exp() - Self::one()
            }

            #[inline]
            fn simd_ln_1p(self) -> Self {
                (Self::one() + self).simd_ln()
            }

            #[inline]
            fn simd_log2(self) -> Self {
                let _2 = AutoSimd::<$t>::one() + AutoSimd::<$t>::one();
                self.simd_log(_2)
            }

            #[inline]
            fn simd_log10(self) -> Self {
                let _10 = AutoSimd::<$t>::from_subset(&10.0f64);
                self.simd_log(_10)
            }

            // Principal cube root via z^(1/3).
            #[inline]
            fn simd_cbrt(self) -> Self {
                let one_third = AutoSimd::<$t>::from_subset(&(1.0 / 3.0));
                self.simd_powf(one_third)
            }

            #[inline]
            fn simd_powi(self, n: i32) -> Self {
                // FIXME: is there a more accurate solution?
                let n = AutoSimd::<$t>::from_subset(&(n as f64));
                self.simd_powf(n)
            }

            /*
             *
             *
             * Unfortunately we are forced to copy-paste all
             * those impls from https://github.com/rust-num/num-complex/blob/master/src/lib.rs
             * to avoid requiring `std`.
             *
             *
             */
            /// Computes `e^(self)`, where `e` is the base of the natural logarithm.
            #[inline]
            fn simd_exp(self) -> Self {
                // formula: e^(a + bi) = e^a (cos(b) + i*sin(b))
                // = from_polar(e^a, b)
                simd_complex_from_polar(self.re.simd_exp(), self.im)
            }

            /// Computes the principal value of natural logarithm of `self`.
            ///
            /// This function has one branch cut:
            ///
            /// * `(-∞, 0]`, continuous from above.
            ///
            /// The branch satisfies `-π ≤ arg(ln(z)) ≤ π`.
            #[inline]
            fn simd_ln(self) -> Self {
                // formula: ln(z) = ln|z| + i*arg(z)
                let (r, theta) = self.simd_to_polar();
                Self::new(r.simd_ln(), theta)
            }

            /// Computes the principal value of the square root of `self`.
            ///
            /// This function has one branch cut:
            ///
            /// * `(-∞, 0)`, continuous from above.
            ///
            /// The branch satisfies `-π/2 ≤ arg(sqrt(z)) ≤ π/2`.
            #[inline]
            fn simd_sqrt(self) -> Self {
                // formula: sqrt(r e^(it)) = sqrt(r) e^(it/2)
                let two = AutoSimd::<$t>::one() + AutoSimd::<$t>::one();
                let (r, theta) = self.simd_to_polar();
                simd_complex_from_polar(r.simd_sqrt(), theta / two)
            }

            #[inline]
            fn simd_hypot(self, b: Self) -> Self::SimdRealField {
                (self.simd_modulus_squared() + b.simd_modulus_squared()).simd_sqrt()
            }

            /// Raises `self` to a floating point power.
            #[inline]
            fn simd_powf(self, exp: Self::SimdRealField) -> Self {
                // formula: x^y = (ρ e^(i θ))^y = ρ^y e^(i θ y)
                // = from_polar(ρ^y, θ y)
                let (r, theta) = self.simd_to_polar();
                simd_complex_from_polar(r.simd_powf(exp), theta * exp)
            }

            /// Returns the logarithm of `self` with respect to an arbitrary base.
            #[inline]
            fn simd_log(self, base: AutoSimd<$t>) -> Self {
                // formula: log_y(x) = log_y(ρ e^(i θ))
                // = log_y(ρ) + log_y(e^(i θ)) = log_y(ρ) + ln(e^(i θ)) / ln(y)
                // = log_y(ρ) + i θ / ln(y)
                let (r, theta) = self.simd_to_polar();
                Self::new(r.simd_log(base), theta / base.simd_ln())
            }

            /// Raises `self` to a complex power.
            #[inline]
            fn simd_powc(self, exp: Self) -> Self {
                // formula: x^y = (a + i b)^(c + i d)
                // = (ρ e^(i θ))^c (ρ e^(i θ))^(i d)
                //    where ρ=|x| and θ=arg(x)
                // = ρ^c e^(−d θ) e^(i c θ) ρ^(i d)
                // = p^c e^(−d θ) (cos(c θ)
                //   + i sin(c θ)) (cos(d ln(ρ)) + i sin(d ln(ρ)))
                // = p^c e^(−d θ) (
                //   cos(c θ) cos(d ln(ρ)) − sin(c θ) sin(d ln(ρ))
                //   + i(cos(c θ) sin(d ln(ρ)) + sin(c θ) cos(d ln(ρ))))
                // = p^c e^(−d θ) (cos(c θ + d ln(ρ)) + i sin(c θ + d ln(ρ)))
                // = from_polar(p^c e^(−d θ), c θ + d ln(ρ))
                let (r, theta) = self.simd_to_polar();
                simd_complex_from_polar(
                    r.simd_powf(exp.re) * (-exp.im * theta).simd_exp(),
                    exp.re * theta + exp.im * r.simd_ln(),
                )
            }

            /*
            /// Raises a floating point number to the complex power `self`.
            #[inline]
            fn simd_expf(&self, base: T) -> Self {
                // formula: x^(a+bi) = x^a x^bi = x^a e^(b ln(x) i)
                // = from_polar(x^a, b ln(x))
                Self::from_polar(&base.powf(self.re), &(self.im * base.ln()))
            }
            */

            /// Computes the sine of `self`.
            #[inline]
            fn simd_sin(self) -> Self {
                // formula: sin(a + bi) = sin(a)cosh(b) + i*cos(a)sinh(b)
                Self::new(
                    self.re.simd_sin() * self.im.simd_cosh(),
                    self.re.simd_cos() * self.im.simd_sinh(),
                )
            }

            /// Computes the cosine of `self`.
            #[inline]
            fn simd_cos(self) -> Self {
                // formula: cos(a + bi) = cos(a)cosh(b) - i*sin(a)sinh(b)
                Self::new(
                    self.re.simd_cos() * self.im.simd_cosh(),
                    -self.re.simd_sin() * self.im.simd_sinh(),
                )
            }

            // Computes sine and cosine together, sharing the component
            // sin/cos and sinh/cosh evaluations.
            #[inline]
            fn simd_sin_cos(self) -> (Self, Self) {
                let (rsin, rcos) = self.re.simd_sin_cos();
                let (isinh, icosh) = self.im.simd_sinh_cosh();
                let sin = Self::new(rsin * icosh, rcos * isinh);
                let cos = Self::new(rcos * icosh, -rsin * isinh);
                (sin, cos)
            }

            /// Computes the tangent of `self`.
            #[inline]
            fn simd_tan(self) -> Self {
                // formula: tan(a + bi) = (sin(2a) + i*sinh(2b))/(cos(2a) + cosh(2b))
                let (two_re, two_im) = (self.re + self.re, self.im + self.im);
                Self::new(two_re.simd_sin(), two_im.simd_sinh()).unscale(two_re.simd_cos() + two_im.simd_cosh())
            }

            /// Computes the principal value of the inverse sine of `self`.
            ///
            /// This function has two branch cuts:
            ///
            /// * `(-∞, -1)`, continuous from above.
            /// * `(1, ∞)`, continuous from below.
            ///
            /// The branch satisfies `-π/2 ≤ Re(asin(z)) ≤ π/2`.
            #[inline]
            fn simd_asin(self) -> Self {
                // formula: arcsin(z) = -i ln(sqrt(1-z^2) + iz)
                let i = Self::i();
                -i * ((Self::one() - self * self).simd_sqrt() + i * self).simd_ln()
            }

            /// Computes the principal value of the inverse cosine of `self`.
            ///
            /// This function has two branch cuts:
            ///
            /// * `(-∞, -1)`, continuous from above.
            /// * `(1, ∞)`, continuous from below.
            ///
            /// The branch satisfies `0 ≤ Re(acos(z)) ≤ π`.
            #[inline]
            fn simd_acos(self) -> Self {
                // formula: arccos(z) = -i ln(i sqrt(1-z^2) + z)
                let i = Self::i();
                -i * (i * (Self::one() - self * self).simd_sqrt() + self).simd_ln()
            }

            /// Computes the principal value of the inverse tangent of `self`.
            ///
            /// This function has two branch cuts:
            ///
            /// * `(-∞i, -i]`, continuous from the left.
            /// * `[i, ∞i)`, continuous from the right.
            ///
            /// The branch satisfies `-π/2 ≤ Re(atan(z)) ≤ π/2`.
            #[inline]
            fn simd_atan(self) -> Self {
                // formula: arctan(z) = (ln(1+iz) - ln(1-iz))/(2i)
                let i = Self::i();
                let one = Self::one();
                let two = one + one;
                // Poles at ±i map to ±∞i via an explicit division by zero.
                // NOTE(review): `==` here is whole-vector equality, so the
                // special case only triggers when *every* lane is at the pole.
                if self == i {
                    return Self::new(AutoSimd::<$t>::zero(), AutoSimd::<$t>::one() / AutoSimd::<$t>::zero());
                } else if self == -i {
                    return Self::new(AutoSimd::<$t>::zero(), -AutoSimd::<$t>::one() / AutoSimd::<$t>::zero());
                }
                ((one + i * self).simd_ln() - (one - i * self).simd_ln()) / (two * i)
            }

            /// Computes the hyperbolic sine of `self`.
            #[inline]
            fn simd_sinh(self) -> Self {
                // formula: sinh(a + bi) = sinh(a)cos(b) + i*cosh(a)sin(b)
                Self::new(
                    self.re.simd_sinh() * self.im.simd_cos(),
                    self.re.simd_cosh() * self.im.simd_sin(),
                )
            }

            /// Computes the hyperbolic cosine of `self`.
            #[inline]
            fn simd_cosh(self) -> Self {
                // formula: cosh(a + bi) = cosh(a)cos(b) + i*sinh(a)sin(b)
                Self::new(
                    self.re.simd_cosh() * self.im.simd_cos(),
                    self.re.simd_sinh() * self.im.simd_sin(),
                )
            }

            // Computes sinh and cosh together, sharing component evaluations.
            #[inline]
            fn simd_sinh_cosh(self) -> (Self, Self) {
                let (rsinh, rcosh) = self.re.simd_sinh_cosh();
                let (isin, icos) = self.im.simd_sin_cos();
                let sin = Self::new(rsinh * icos, rcosh * isin);
                let cos = Self::new(rcosh * icos, rsinh * isin);
                (sin, cos)
            }

            /// Computes the hyperbolic tangent of `self`.
            #[inline]
            fn simd_tanh(self) -> Self {
                // formula: tanh(a + bi) = (sinh(2a) + i*sin(2b))/(cosh(2a) + cos(2b))
                let (two_re, two_im) = (self.re + self.re, self.im + self.im);
                Self::new(two_re.simd_sinh(), two_im.simd_sin()).unscale(two_re.simd_cosh() + two_im.simd_cos())
            }

            /// Computes the principal value of inverse hyperbolic sine of `self`.
            ///
            /// This function has two branch cuts:
            ///
            /// * `(-∞i, -i)`, continuous from the left.
            /// * `(i, ∞i)`, continuous from the right.
            ///
            /// The branch satisfies `-π/2 ≤ Im(asinh(z)) ≤ π/2`.
            #[inline]
            fn simd_asinh(self) -> Self {
                // formula: arcsinh(z) = ln(z + sqrt(1+z^2))
                let one = Self::one();
                (self + (one + self * self).simd_sqrt()).simd_ln()
            }

            /// Computes the principal value of inverse hyperbolic cosine of `self`.
            ///
            /// This function has one branch cut:
            ///
            /// * `(-∞, 1)`, continuous from above.
            ///
            /// The branch satisfies `-π ≤ Im(acosh(z)) ≤ π` and `0 ≤ Re(acosh(z)) < ∞`.
            #[inline]
            fn simd_acosh(self) -> Self {
                // formula: arccosh(z) = 2 ln(sqrt((z+1)/2) + sqrt((z-1)/2))
                let one = Self::one();
                let two = one + one;
                two * (((self + one) / two).simd_sqrt() + ((self - one) / two).simd_sqrt()).simd_ln()
            }

            /// Computes the principal value of inverse hyperbolic tangent of `self`.
            ///
            /// This function has two branch cuts:
            ///
            /// * `(-∞, -1]`, continuous from above.
            /// * `[1, ∞)`, continuous from below.
            ///
            /// The branch satisfies `-π/2 ≤ Im(atanh(z)) ≤ π/2`.
            #[inline]
            fn simd_atanh(self) -> Self {
                // formula: arctanh(z) = (ln(1+z) - ln(1-z))/2
                let one = Self::one();
                let two = one + one;
                // Poles at ±1 map to ±∞ via an explicit division by zero
                // (whole-vector equality, as in `simd_atan`).
                if self == one {
                    return Self::new(AutoSimd::<$t>::one() / AutoSimd::<$t>::zero(), AutoSimd::<$t>::zero());
                } else if self == -one {
                    return Self::new(-AutoSimd::<$t>::one() / AutoSimd::<$t>::zero(), AutoSimd::<$t>::zero());
                }
                ((one + self).simd_ln() - (one - self).simd_ln()) / two
            }
        }
    )*)
);
/// Builds a complex number from polar coordinates:
/// `r * (cos(theta) + i * sin(theta))`.
#[inline]
fn simd_complex_from_polar<N: SimdRealField>(r: N, theta: N) -> num_complex::Complex<N> {
    let re = r.clone() * theta.clone().simd_cos();
    let im = r * theta.simd_sin();
    num_complex::Complex::new(re, im)
}
// Instantiate the float stack (through SimdComplexField) for f32/f64 widths.
impl_float_simd!(
    [f32; 2], f32, 2, [i32; 2], AutoBoolx2, _0, _1;
    [f32; 4], f32, 4, [i32; 4], AutoBoolx4, _0, _1, _2, _3;
    [f32; 8], f32, 8, [i32; 8], AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [f32; 16], f32, 16, [i32; 16], AutoBoolx16, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15;
    [f64; 2], f64, 2, [i64; 2], AutoBoolx2, _0, _1;
    [f64; 4], f64, 4, [i64; 4], AutoBoolx4, _0, _1, _2, _3;
    [f64; 8], f64, 8, [i64; 8], AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
);

// Instantiate the signed-integer stack (adds `Neg`) for the signed widths.
impl_int_simd!(
    [i128; 1], i128, 1, AutoBoolx1, _0;
    [i128; 2], i128, 2, AutoBoolx2, _0, _1;
    [i128; 4], i128, 4, AutoBoolx4, _0, _1, _2, _3;
    [i16; 2], i16, 2, AutoBoolx2, _0, _1;
    [i16; 4], i16, 4, AutoBoolx4, _0, _1, _2, _3;
    [i16; 8], i16, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [i16; 16], i16, 16, AutoBoolx16, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15;
    [i16; 32], i16, 32, AutoBoolx32, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31;
    [i32; 2], i32, 2, AutoBoolx2, _0, _1;
    [i32; 4], i32, 4, AutoBoolx4, _0, _1, _2, _3;
    [i32; 8], i32, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [i32; 16], i32, 16, AutoBoolx16, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15;
    [i64; 2], i64, 2, AutoBoolx2, _0, _1;
    [i64; 4], i64, 4, AutoBoolx4, _0, _1, _2, _3;
    [i64; 8], i64, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [i8; 2], i8, 2, AutoBoolx2, _0, _1;
    [i8; 4], i8, 4, AutoBoolx4, _0, _1, _2, _3;
    [i8; 8], i8, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [i8; 16], i8, 16, AutoBoolx16, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15;
    [i8; 32], i8, 32, AutoBoolx32, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31;
    // [i8; 64], i8, 64, AutoBoolx64, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63;
    [isize; 2], isize, 2, AutoBoolx2, _0, _1;
    [isize; 4], isize, 4, AutoBoolx4, _0, _1, _2, _3;
    [isize; 8], isize, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
);

// Instantiate the base stack (no `Neg`) for the unsigned widths.
impl_uint_simd!(
    [u128; 1], u128, 1, AutoBoolx1, _0;
    [u128; 2], u128, 2, AutoBoolx2, _0, _1;
    [u128; 4], u128, 4, AutoBoolx4, _0, _1, _2, _3;
    [u16; 2], u16, 2, AutoBoolx2, _0, _1;
    [u16; 4], u16, 4, AutoBoolx4, _0, _1, _2, _3;
    [u16; 8], u16, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [u16; 16], u16, 16, AutoBoolx16, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15;
    [u16; 32], u16, 32, AutoBoolx32, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31;
    [u32; 2], u32, 2, AutoBoolx2, _0, _1;
    [u32; 4], u32, 4, AutoBoolx4, _0, _1, _2, _3;
    [u32; 8], u32, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [u32; 16], u32, 16, AutoBoolx16, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15;
    [u64; 2], u64, 2, AutoBoolx2, _0, _1;
    [u64; 4], u64, 4, AutoBoolx4, _0, _1, _2, _3;
    [u64; 8], u64, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [u8; 2], u8, 2, AutoBoolx2, _0, _1;
    [u8; 4], u8, 4, AutoBoolx4, _0, _1, _2, _3;
    [u8; 8], u8, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
    [u8; 16], u8, 16, AutoBoolx16, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15;
    [u8; 32], u8, 32, AutoBoolx32, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31;
    // [u8; 64], u8, 64, AutoBoolx64, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63;
    [usize; 2], usize, 2, AutoBoolx2, _0, _1;
    [usize; 4], usize, 4, AutoBoolx4, _0, _1, _2, _3;
    [usize; 8], usize, 8, AutoBoolx8, _0, _1, _2, _3, _4, _5, _6, _7;
);

// Instantiate the boolean SIMD types (macro defined earlier in this file).
impl_bool_simd!(
    [bool; 1], 1, _0;
    [bool; 2], 2, _0, _1;
    [bool; 4], 4, _0, _1, _2, _3;
    [bool; 8], 8, _0, _1, _2, _3, _4, _5, _6, _7;
    [bool; 16], 16, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15;
    [bool; 32], 32, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31;
    // [bool; 64], 64, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63;
);
//
// NOTE: the following does not work because of the orphan rules.
//
//macro_rules! impl_simd_complex_from(
// ($($t: ty, $elt: ty $(, $i: expr)*;)*) => ($(
// impl From<[num_complex::Complex<$elt>; $lanes]> for num_complex::Complex<AutoSimd<$t>> {
// #[inline(always)]
// fn from(vals: [num_complex::Complex<$elt>; $lanes]) -> Self {
// num_complex::Complex {
// re: <$t>::from([$(vals[$i].re),*]),
// im: <$t>::from([$(vals[$i].im),*]),
// }
// }
// }
// )*)
//);
//
//impl_simd_complex_from!(
// [f32; 2], f32, 0, 1;
// [f32; 4], f32, 0, 1, 2, 3;
// [f32; 8], f32, 0, 1, 2, 3, 4, 5, 6, 7;
// [f32; 16], f32, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15;
//);
//////////////////////////////////////////
//               Aliases                 //
//////////////////////////////////////////
// Short names for every instantiated `AutoSimd` width, mirroring the
// element-type/lane-count naming used by the macro invocations above.

// Floating-point vectors.
pub type AutoF32x2 = AutoSimd<[f32; 2]>;
pub type AutoF32x4 = AutoSimd<[f32; 4]>;
pub type AutoF32x8 = AutoSimd<[f32; 8]>;
pub type AutoF32x16 = AutoSimd<[f32; 16]>;
pub type AutoF64x2 = AutoSimd<[f64; 2]>;
pub type AutoF64x4 = AutoSimd<[f64; 4]>;
pub type AutoF64x8 = AutoSimd<[f64; 8]>;

// Signed-integer vectors.
pub type AutoI128x1 = AutoSimd<[i128; 1]>;
pub type AutoI128x2 = AutoSimd<[i128; 2]>;
pub type AutoI128x4 = AutoSimd<[i128; 4]>;
pub type AutoI16x2 = AutoSimd<[i16; 2]>;
pub type AutoI16x4 = AutoSimd<[i16; 4]>;
pub type AutoI16x8 = AutoSimd<[i16; 8]>;
pub type AutoI16x16 = AutoSimd<[i16; 16]>;
pub type AutoI16x32 = AutoSimd<[i16; 32]>;
pub type AutoI32x2 = AutoSimd<[i32; 2]>;
pub type AutoI32x4 = AutoSimd<[i32; 4]>;
pub type AutoI32x8 = AutoSimd<[i32; 8]>;
pub type AutoI32x16 = AutoSimd<[i32; 16]>;
pub type AutoI64x2 = AutoSimd<[i64; 2]>;
pub type AutoI64x4 = AutoSimd<[i64; 4]>;
pub type AutoI64x8 = AutoSimd<[i64; 8]>;
pub type AutoI8x2 = AutoSimd<[i8; 2]>;
pub type AutoI8x4 = AutoSimd<[i8; 4]>;
pub type AutoI8x8 = AutoSimd<[i8; 8]>;
pub type AutoI8x16 = AutoSimd<[i8; 16]>;
pub type AutoI8x32 = AutoSimd<[i8; 32]>;
// pub type AutoI8x64 = AutoSimd<[i8; 64]>;
pub type AutoIsizex2 = AutoSimd<[isize; 2]>;
pub type AutoIsizex4 = AutoSimd<[isize; 4]>;
pub type AutoIsizex8 = AutoSimd<[isize; 8]>;

// Unsigned-integer vectors.
pub type AutoU128x1 = AutoSimd<[u128; 1]>;
pub type AutoU128x2 = AutoSimd<[u128; 2]>;
pub type AutoU128x4 = AutoSimd<[u128; 4]>;
pub type AutoU16x2 = AutoSimd<[u16; 2]>;
pub type AutoU16x4 = AutoSimd<[u16; 4]>;
pub type AutoU16x8 = AutoSimd<[u16; 8]>;
pub type AutoU16x16 = AutoSimd<[u16; 16]>;
pub type AutoU16x32 = AutoSimd<[u16; 32]>;
pub type AutoU32x2 = AutoSimd<[u32; 2]>;
pub type AutoU32x4 = AutoSimd<[u32; 4]>;
pub type AutoU32x8 = AutoSimd<[u32; 8]>;
pub type AutoU32x16 = AutoSimd<[u32; 16]>;
pub type AutoU64x2 = AutoSimd<[u64; 2]>;
pub type AutoU64x4 = AutoSimd<[u64; 4]>;
pub type AutoU64x8 = AutoSimd<[u64; 8]>;
pub type AutoU8x2 = AutoSimd<[u8; 2]>;
pub type AutoU8x4 = AutoSimd<[u8; 4]>;
pub type AutoU8x8 = AutoSimd<[u8; 8]>;
pub type AutoU8x16 = AutoSimd<[u8; 16]>;
pub type AutoU8x32 = AutoSimd<[u8; 32]>;
// pub type AutoU8x64 = AutoSimd<[u8; 64]>;
pub type AutoUsizex2 = AutoSimd<[usize; 2]>;
pub type AutoUsizex4 = AutoSimd<[usize; 4]>;
pub type AutoUsizex8 = AutoSimd<[usize; 8]>;

// Boolean vectors (lane masks).
pub type AutoBoolx1 = AutoSimd<[bool; 1]>;
pub type AutoBoolx16 = AutoSimd<[bool; 16]>;
pub type AutoBoolx2 = AutoSimd<[bool; 2]>;
pub type AutoBoolx32 = AutoSimd<[bool; 32]>;
pub type AutoBoolx4 = AutoSimd<[bool; 4]>;
// pub type AutoBoolx64 = AutoSimd<[bool; 64]>;
pub type AutoBoolx8 = AutoSimd<[bool; 8]>;
/// Helper trait to transform an array lane-by-lane.
///
/// Implementors provide element-wise mapping over one, two, or three
/// operands, plus boolean-producing variants that yield the SIMD mask type.
trait ArrTransform: SimdValue {
    /// Applies `f` to every lane of `self`.
    fn map(self, f: impl Fn(Self::Element) -> Self::Element) -> Self;
    /// Applies `f` pairwise to the lanes of `self` and `other`.
    fn zip_map(
        self,
        other: Self,
        f: impl Fn(Self::Element, Self::Element) -> Self::Element,
    ) -> Self;
    /// Applies `f` lane-wise across three operands (`self`, `b`, `c`).
    fn zip_zip_map(
        self,
        b: Self,
        c: Self,
        f: impl Fn(Self::Element, Self::Element, Self::Element) -> Self::Element,
    ) -> Self;
    /// Applies the predicate `f` to every lane, producing a boolean mask.
    fn map_bool(self, f: impl Fn(Self::Element) -> bool) -> Self::SimdBool;
    /// Applies the pairwise predicate `f`, producing a boolean mask.
    fn zip_map_bool(
        self,
        other: Self,
        f: impl Fn(Self::Element, Self::Element) -> bool,
    ) -> Self::SimdBool;
}
| 34.638956 | 347 | 0.45399 |
use serde::{Deserialize, Serialize};
use crate::{
document::{Document, Header},
schema::view::{self, Key, SerializedView},
};
/// A document's entry in a View's mappings: a `(key, value)` pair together
/// with the header of the document that produced it.
#[derive(PartialEq, Debug)]
pub struct Map<K: Key = (), V = ()> {
    /// The header of the document that emitted this entry.
    pub source: Header,
    /// The key used to index the View.
    pub key: K,
    /// An associated value stored in the view.
    pub value: V,
}
impl<K: Key, V> Map<K, V> {
    /// Serializes this map entry: the key through its big-endian byte
    /// representation and the value through the `View`'s serializer.
    ///
    /// # Errors
    /// Returns a key-serialization error if the key cannot be encoded, or a
    /// view error if value serialization fails.
    pub(crate) fn serialized<View: SerializedView<Value = V>>(
        &self,
    ) -> Result<Serialized, view::Error> {
        Ok(Serialized {
            source: self.source.clone(),
            key: self
                .key
                .as_big_endian_bytes()
                .map_err(view::Error::key_serialization)?
                .to_vec(),
            value: View::serialize(&self.value)?,
        })
    }
}
/// A collection of [`Map`]s.
///
/// Stores zero or one entries inline (`Simple`) and only allocates a `Vec`
/// once a second entry is pushed (`List`).
#[derive(Debug, PartialEq)]
pub enum Mappings<K: Key = (), V = ()> {
    /// Zero or one mappings.
    Simple(Option<Map<K, V>>),
    /// More than one mapping.
    List(Vec<Map<K, V>>),
}
impl<K: Key, V> Mappings<K, V> {
    /// Creates an empty set of mappings.
    pub fn none() -> Self {
        Self::Simple(None)
    }
    /// Appends a single `mapping`, upgrading from the inline representation
    /// to the list representation once a second entry arrives.
    pub fn push(&mut self, mapping: Map<K, V>) {
        match self {
            Self::List(entries) => entries.push(mapping),
            Self::Simple(slot) => match slot.take() {
                Some(first) => *self = Self::List(vec![first, mapping]),
                None => *slot = Some(mapping),
            },
        }
    }
    /// Consumes `self`, appends every entry of `mappings`, and returns the
    /// combined collection.
    pub fn and(mut self, mappings: Self) -> Self {
        self.extend(mappings);
        self
    }
}
impl<K: Key, V> Extend<Map<K, V>> for Mappings<K, V> {
    /// Pushes every yielded mapping onto this collection in order.
    fn extend<T: IntoIterator<Item = Map<K, V>>>(&mut self, iter: T) {
        iter.into_iter().for_each(|mapping| self.push(mapping));
    }
}
impl<K: Key, V> FromIterator<Map<K, V>> for Mappings<K, V> {
    /// Collects individual mappings into a `Mappings`, starting from the
    /// empty collection and pushing each entry in turn.
    fn from_iter<T: IntoIterator<Item = Map<K, V>>>(iter: T) -> Self {
        iter.into_iter().fold(Self::none(), |mut mappings, map| {
            mappings.push(map);
            mappings
        })
    }
}
impl<K: Key, V> FromIterator<Self> for Mappings<K, V> {
    /// Flattens a sequence of `Mappings` into one by folding each collection
    /// into the first; yields the empty collection if the iterator is empty.
    fn from_iter<T: IntoIterator<Item = Self>>(iter: T) -> Self {
        iter.into_iter()
            .reduce(|mut combined, next| {
                combined.extend(next);
                combined
            })
            .unwrap_or_else(Self::none)
    }
}
impl<K: Key, V> IntoIterator for Mappings<K, V> {
    type Item = Map<K, V>;
    type IntoIter = MappingsIter<K, V>;
    /// Converts into an iterator matching the underlying representation:
    /// the inline `Option` or the owned `Vec` iterator.
    fn into_iter(self) -> Self::IntoIter {
        match self {
            Mappings::Simple(option) => MappingsIter::Inline(option),
            Mappings::List(list) => MappingsIter::Vec(list.into_iter()),
        }
    }
}
/// An iterator over [`Mappings`], mirroring its two representations.
pub enum MappingsIter<K: Key = (), V = ()> {
    /// An iterator over a [`Mappings::Simple`] value.
    Inline(Option<Map<K, V>>),
    /// An iterator over a [`Mappings::List`] value.
    Vec(std::vec::IntoIter<Map<K, V>>),
}
impl<K: Key, V> Iterator for MappingsIter<K, V> {
    type Item = Map<K, V>;
    fn next(&mut self) -> Option<Self::Item> {
        match self {
            // `take` yields the single inline entry once, then `None`.
            MappingsIter::Inline(opt) => opt.take(),
            MappingsIter::Vec(iter) => iter.next(),
        }
    }
}
/// A document's entry in a View's mappings, paired with the full source
/// document rather than just its header.
#[derive(Debug)]
pub struct MappedDocument<K: Key = (), V = ()> {
    /// The document that emitted this entry.
    pub document: Document,
    /// The key used to index the View.
    pub key: K,
    /// An associated value stored in the view.
    pub value: V,
}
impl<K: Key, V> Map<K, V> {
    /// Creates a new Map entry for the document identified by `source`.
    pub fn new(source: Header, key: K, value: V) -> Self {
        Self { source, key, value }
    }
}
/// Represents a document's entry in a View's mappings, serialized and ready to store.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Serialized {
    /// The header of the document that emitted this entry.
    pub source: Header,
    /// The serialized (big-endian) key used to index the View.
    #[serde(with = "serde_bytes")]
    pub key: Vec<u8>,
    /// The serialized value stored in the view.
    #[serde(with = "serde_bytes")]
    pub value: Vec<u8>,
}
impl Serialized {
    /// Deserializes this entry back into a typed [`Map`] for `View`.
    ///
    /// # Errors
    /// Returns a key-serialization error if the stored key bytes cannot be
    /// decoded, or a view error if the value fails to deserialize.
    pub fn deserialized<View: SerializedView>(
        &self,
    ) -> Result<Map<View::Key, View::Value>, view::Error> {
        Ok(Map {
            source: self.source.clone(),
            key: <View::Key as Key>::from_big_endian_bytes(&self.key)
                .map_err(view::Error::key_serialization)?,
            value: View::deserialize(&self.value)?,
        })
    }
}
/// A serialized [`MappedDocument`]: the raw mapping plus its source document.
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct MappedSerialized {
    /// The serialized mapped value.
    pub mapping: MappedSerializedValue,
    /// The source document.
    pub source: Document,
}
impl MappedSerialized {
    /// Deserializes into a typed [`MappedDocument`] for `View`.
    ///
    /// # Errors
    /// Key decoding failures are wrapped as a database error string;
    /// value deserialization errors propagate as-is.
    pub fn deserialized<View: SerializedView>(
        self,
    ) -> Result<MappedDocument<View::Key, View::Value>, crate::Error> {
        let key = Key::from_big_endian_bytes(&self.mapping.key).map_err(
            |err: <View::Key as Key>::Error| {
                crate::Error::Database(view::Error::key_serialization(err).to_string())
            },
        )?;
        let value = View::deserialize(&self.mapping.value)?;
        Ok(MappedDocument {
            document: self.source,
            key,
            value,
        })
    }
}
/// A key/value pair produced by a `View`.
#[derive(Clone, PartialEq, Debug)]
pub struct MappedValue<K: Key, V> {
    /// The key responsible for generating the value.
    pub key: K,
    /// The value generated by the `View`.
    pub value: V,
}
/// A serialized [`MappedValue`].
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct MappedSerializedValue {
    /// The serialized key.
    #[serde(with = "serde_bytes")]
    pub key: Vec<u8>,
    /// The serialized value.
    #[serde(with = "serde_bytes")]
    pub value: Vec<u8>,
}
| 27.92766 | 87 | 0.569252 |
extern crate chrono;
use crate::common::models::addresses::AddressEx;
use crate::common::models::backend::transactions::{ModuleTransaction, MultisigTransaction};
use crate::providers::ext::InfoProviderExt;
use crate::providers::info::{InfoProvider, SafeInfo, TokenInfo};
use crate::routes::transactions::converters::safe_app_info::safe_app_info_from;
use crate::routes::transactions::models::details::{
DetailedExecutionInfo, ModuleExecutionDetails, MultisigConfirmation, MultisigExecutionDetails,
TransactionData, TransactionDetails,
};
use crate::utils::errors::ApiResult;
use rocket::futures::future::OptionFuture;
impl MultisigTransaction {
    /// Converts this multisig transaction into the client-facing
    /// `TransactionDetails`, resolving auxiliary data (safe info, gas token
    /// info, decoded-data address index, safe-app origin) via `info_provider`.
    ///
    /// `rejections` optionally lists addresses that rejected this
    /// transaction; they are surfaced in the execution details.
    pub async fn to_transaction_details(
        &self,
        rejections: Option<Vec<String>>,
        info_provider: &(impl InfoProvider + Sync),
    ) -> ApiResult<TransactionDetails> {
        let safe_info = info_provider
            .safe_info(&self.safe_transaction.safe.to_string())
            .await?;
        let gas_token = info_provider.address_to_token_info(&self.gas_token).await;
        Ok(TransactionDetails {
            tx_id: self.generate_id(),
            executed_at: self.execution_date.map(|data| data.timestamp_millis()),
            tx_status: self.map_status(&safe_info),
            tx_info: self.transaction_info(info_provider).await,
            tx_data: Some(TransactionData {
                to: AddressEx::address_only(&self.safe_transaction.to),
                value: self.safe_transaction.value.to_owned(),
                hex_data: self.safe_transaction.data.to_owned(),
                data_decoded: self.safe_transaction.data_decoded.clone(),
                operation: self.safe_transaction.operation,
                // Only build the address-info index when decoded data exists;
                // `OptionFuture` turns `Option<future>` into `future<Option>`.
                address_info_index: OptionFuture::from(
                    self.safe_transaction
                        .data_decoded
                        .as_ref()
                        .map(|data_decoded| async move {
                            data_decoded.build_address_info_index(info_provider).await
                        }),
                )
                .await
                .flatten(),
            }),
            tx_hash: self.transaction_hash.as_ref().map(|hash| hash.to_owned()),
            detailed_execution_info: Some(DetailedExecutionInfo::Multisig(
                self.build_execution_details(safe_info, gas_token, rejections),
            )),
            // Resolved only when the transaction carries an origin payload.
            safe_app_info: OptionFuture::from(
                self.origin
                    .as_ref()
                    .map(|origin| async move { safe_app_info_from(origin, info_provider).await }),
            )
            .await
            .flatten(),
        })
    }
    /// Assembles the multisig-specific execution details (confirmations,
    /// gas settings, signers, rejectors) from this transaction plus the
    /// already-fetched safe and gas-token information.
    fn build_execution_details(
        &self,
        safe_info: SafeInfo,
        gas_token_info: Option<TokenInfo>,
        rejections: Option<Vec<String>>,
    ) -> MultisigExecutionDetails {
        MultisigExecutionDetails {
            submitted_at: self.submission_date.timestamp_millis(),
            nonce: self.nonce,
            safe_tx_hash: self.safe_tx_hash.to_owned(),
            executor: self
                .executor
                .as_ref()
                .map(|address| AddressEx::address_only(&address)),
            // NOTE(review): this maps the safe's *owners*; the closure
            // parameter name `rejection` looks copy-pasted from the
            // `rejectors` mapping below — rename for clarity.
            signers: safe_info
                .owners
                .iter()
                .map(|rejection| AddressEx::address_only(rejection))
                .collect(),
            // Falls back to the safe's current threshold when the service
            // did not report a per-transaction requirement.
            confirmations_required: self.confirmations_required.unwrap_or(safe_info.threshold),
            confirmations: self
                .confirmations
                .as_ref()
                .unwrap_or(&vec![])
                .into_iter()
                .map(|confirmation| MultisigConfirmation {
                    signer: AddressEx::address_only(&confirmation.owner),
                    signature: confirmation.signature.to_owned(),
                    submitted_at: confirmation.submission_date.timestamp_millis(),
                })
                .collect(),
            // Zero address when no explicit refund receiver is configured.
            refund_receiver: self
                .refund_receiver
                .as_ref()
                .map(|address| AddressEx::address_only(address))
                .unwrap_or(AddressEx::zero()),
            // Zero address string means "pay gas in the native token".
            gas_token: self
                .gas_token
                .as_ref()
                .unwrap_or(&String::from("0x0000000000000000000000000000000000000000"))
                .to_owned(),
            base_gas: self.base_gas.unwrap_or(0).to_string(),
            safe_tx_gas: self.safe_tx_gas.unwrap_or(0).to_string(),
            gas_price: self
                .gas_price
                .as_ref()
                .unwrap_or(&String::from("0"))
                .to_owned(),
            gas_token_info,
            rejectors: rejections.map(|r| {
                r.iter()
                    .map(|rejection| AddressEx::address_only(rejection))
                    .collect()
            }),
        }
    }
}
impl ModuleTransaction {
    /// Converts this module transaction into the client-facing
    /// `TransactionDetails`, resolving the executing module's contract info
    /// via `info_provider`.
    pub async fn to_transaction_details(
        &self,
        info_provider: &(impl InfoProvider + Sync),
    ) -> ApiResult<TransactionDetails> {
        let safe_transaction = &self.safe_transaction;
        let module_info = info_provider
            .address_ex_from_contracts_or_default(&self.module)
            .await;
        Ok(TransactionDetails {
            tx_id: self.generate_id(),
            // Module transactions are always executed once indexed.
            executed_at: Some(self.execution_date.timestamp_millis()),
            tx_status: self.map_status(),
            tx_info: self.transaction_info(info_provider).await,
            tx_data: Some(TransactionData {
                to: AddressEx::address_only(&safe_transaction.to),
                value: safe_transaction.value.to_owned(),
                hex_data: safe_transaction.data.to_owned(),
                data_decoded: safe_transaction.data_decoded.clone(),
                operation: safe_transaction.operation,
                // Only built when decoded calldata is available.
                address_info_index: OptionFuture::from(safe_transaction.data_decoded.as_ref().map(
                    |data_decoded| async move {
                        data_decoded.build_address_info_index(info_provider).await
                    },
                ))
                .await
                .flatten(),
            }),
            tx_hash: Some(self.transaction_hash.to_owned()),
            detailed_execution_info: Some(DetailedExecutionInfo::Module(ModuleExecutionDetails {
                address: module_info,
            })),
            // Module transactions carry no safe-app origin.
            safe_app_info: None,
        })
    }
}
| 41.217949 | 98 | 0.571073 |
use ark_ec::ProjectiveCurve;
use rand::thread_rng;
use beserial::{Deserialize, Serialize};
use nimiq_bls::*;
use nimiq_utils::key_rng::SecureGenerate;
// Warning: You really should run these tests on release mode. Otherwise it will take too long.
#[test]
fn sign_verify() {
    // A signature produced by a key pair must verify under that same pair.
    let rng = &mut thread_rng();
    for i in 0..100 {
        let keypair = KeyPair::generate(rng);
        let message = format!("Message {}", i);
        let sig = keypair.sign(&message);
        assert!(keypair.verify(&message, &sig));
    }
}
#[test]
fn compress_uncompress() {
    // Compress/uncompress must round-trip for public keys and signatures.
    let rng = &mut thread_rng();
    for i in 0..100 {
        let keypair = KeyPair::generate(rng);
        let message = format!("Message {}", i);
        let sig = keypair.sign(&message);
        assert_eq!(
            keypair.public_key.compress().uncompress().unwrap(),
            keypair.public_key
        );
        assert_eq!(sig.compress().uncompress().unwrap(), sig);
    }
}
#[test]
fn serialize_deserialize() {
    // Serialization round-trips for public keys and signatures, both in
    // their uncompressed and compressed encodings.
    let rng = &mut thread_rng();
    for i in 0..100 {
        let keypair = KeyPair::generate(rng);
        let ser_pub_key = keypair.public_key.serialize_to_vec();
        let compress_pub_key = keypair.public_key.compress();
        let ser_comp_pub_key = compress_pub_key.serialize_to_vec();
        let message = format!("Message {}", i);
        let sig = keypair.sign(&message);
        let ser_signature = sig.serialize_to_vec();
        let ser_comp_signature = sig.compress().serialize_to_vec();
        // Check that we can deserialize a serialized public key
        assert_eq!(
            PublicKey::deserialize_from_vec(&ser_pub_key).unwrap(),
            keypair.public_key
        );
        // Check that we can deserialize a serialized compressed public key
        assert_eq!(
            CompressedPublicKey::deserialize_from_vec(&ser_comp_pub_key)
                .unwrap()
                .uncompress()
                .unwrap(),
            keypair.public_key
        );
        // Check that we can deserialize a serialized signature
        assert_eq!(
            Signature::deserialize_from_vec(&ser_signature).unwrap(),
            sig
        );
        assert_eq!(
            Signature::deserialize_from_vec(&ser_signature)
                .unwrap()
                .compressed,
            sig.compressed
        );
        // Check that we can deserialize a serialized compressed signature
        assert_eq!(
            CompressedSignature::deserialize_from_vec(&ser_comp_signature)
                .unwrap()
                .uncompress()
                .unwrap(),
            sig
        );
        assert_eq!(
            sig.compressed,
            CompressedSignature::deserialize_from_vec(&ser_comp_signature)
                .unwrap()
                .uncompress()
                .unwrap()
                .compressed
        );
    }
}
#[test]
fn uncompress_compress() {
    // A known-good compressed public key (hex fixture) must survive an
    // uncompress → compress round-trip unchanged.
    let hex_public_key = "01535b85d472b233642cce4f5ffd3b32e3dbd518a0124614a91cc6628d0d77a7e9d955125548c56b6c7812daa41519aaf8a2d9dbfb84f4b30ac6d18ee2619a015a1097fa25bd885bbc31ae4fb961884e4cf941cecdd25a70e6a0a726ba4b2d01696d325876808c592716569672d403fb41f19bc50e18e3df855bf6f053484de4be63658875dff127681681c9574d1d0c5d048053ec1b291234145f46167de7628bbaf971d8d89e8c6c29b5e2bc47cbd3be65331194822096b4cf092f644e004b7a2fc2cbeebc88d375095e2913127ca2de9eae486fbb0a8a671ff517a81169066ea1dca6e6745498f9ad5586b4c74ba5de7cbbe39ed4ec10714ca253d5f4fcc379f0a06a762b83e676bec4e6835899d6e639f4c90a00f1d3852f239b71";
    let raw_public_key: Vec<u8> = hex::decode(hex_public_key).unwrap();
    let compressed_public_key: CompressedPublicKey =
        Deserialize::deserialize_from_vec(&raw_public_key).unwrap();
    // Debug-print the affine point for manual inspection when run with
    // `--nocapture`.
    println!(
        "{:?}",
        compressed_public_key
            .uncompress()
            .unwrap()
            .public_key
            .into_affine()
    );
    assert_eq!(
        compressed_public_key.uncompress().unwrap().compress(),
        compressed_public_key
    );
}
#[test]
fn aggregate_signatures_same_message() {
    // Aggregating signatures over the same message must verify against the
    // aggregate of the corresponding public keys.
    let rng = &mut thread_rng();
    let message = "Same message";
    let mut public_keys = Vec::new();
    let mut signatures = Vec::new();
    for _ in 0..100 {
        let keypair = KeyPair::generate(rng);
        let signature = keypair.sign(&message);
        public_keys.push(keypair.public_key);
        signatures.push(signature);
    }
    let agg_key = AggregatePublicKey::from_public_keys(&public_keys);
    let agg_sig = AggregateSignature::from_signatures(&signatures);
    assert!(agg_key.verify(&message, &agg_sig));
}
#[test]
fn aggregate_signatures_serialization() {
    // An aggregate signature must survive serialization: deserializing it
    // yields an equal value that still verifies under the aggregate key.
    let rng = &mut thread_rng();
    let message = "Same message";
    let mut public_keys = Vec::new();
    let mut signatures = Vec::new();
    for _ in 0..100 {
        let keypair = KeyPair::generate(rng);
        let signature = keypair.sign(&message);
        public_keys.push(keypair.public_key);
        signatures.push(signature);
    }
    let agg_key = AggregatePublicKey::from_public_keys(&public_keys);
    let agg_sig = AggregateSignature::from_signatures(&signatures);
    let ser_agg_sig = agg_sig.serialize_to_vec();
    assert_eq!(
        AggregateSignature::deserialize_from_vec(&ser_agg_sig).unwrap(),
        agg_sig
    );
    assert!(agg_key.verify(
        &message,
        &AggregateSignature::deserialize_from_vec(&ser_agg_sig).unwrap()
    ));
}
| 28.302083 | 598 | 0.641148 |
// AsRef and AsMut allow for cheap reference-to-reference conversions.
// Read more about them at https://doc.rust-lang.org/std/convert/trait.AsRef.html
// and https://doc.rust-lang.org/std/convert/trait.AsMut.html, respectively.
// Obtain the number of bytes (not characters) in the given argument.
// The `AsRef<str>` bound lets callers pass `&str`, `String`, `&String`, etc.
// (Fix: removed unresolved merge-conflict markers that made the file
// uncompilable.)
fn byte_counter<T: AsRef<str>>(arg: T) -> usize {
    arg.as_ref().as_bytes().len()
}
// Obtain the number of characters (not bytes) in the given argument.
// Accepts anything that can be viewed as a string slice.
fn char_counter<T: AsRef<str>>(arg: T) -> usize {
    let text: &str = arg.as_ref();
    text.chars().fold(0, |count, _| count + 1)
}
fn main() {
    // Demonstrate that character count and byte count differ for non-ASCII
    // text: prints the char count, then the byte count.
    let sample = "Café au lait";
    let chars = char_counter(sample);
    let bytes = byte_counter(sample);
    println!("{}", chars);
    println!("{}", bytes);
}
#[cfg(test)]
mod tests {
    use super::*;
    // "Café" contains a multi-byte character, so char and byte counts
    // differ; the all-ASCII "Cafe" variants must agree.
    #[test]
    fn different_counts() {
        let s = "Café au lait";
        assert_ne!(char_counter(s), byte_counter(s));
    }
    #[test]
    fn same_counts() {
        let s = "Cafe au lait";
        assert_eq!(char_counter(s), byte_counter(s));
    }
    #[test]
    fn different_counts_using_string() {
        let s = String::from("Café au lait");
        assert_ne!(char_counter(s.clone()), byte_counter(s));
    }
    #[test]
    fn same_counts_using_string() {
        let s = String::from("Cafe au lait");
        assert_eq!(char_counter(s.clone()), byte_counter(s));
    }
}
use std::env;
use std::fs;
use std::string::String;
use std::process::exit;
// One-letter instruction codes from the puzzle input: compass moves
// (N/S/E/W), turns (L/R), forward (F), and a catch-all for bad input.
#[derive(Debug)]
#[derive(Clone)]
enum Direction {
    N,
    S,
    E,
    W,
    L,
    R,
    F,
    UNKNOWN,
}
// A parsed input line: an instruction code plus its integer argument
// (distance for moves, degrees for turns).
#[derive(Debug)]
#[derive(Clone)]
struct Instruction {
    direction: Direction,
    amount: i32,
}
/// Reads the instruction file named by the first CLI argument, simulates the
/// ship (Advent of Code 2020 day 12 part 1), and prints the Manhattan
/// distance from the origin.
///
/// Fix: restores the `&currentDirection` arguments that had been corrupted
/// into `¤tDirection` by HTML-entity mangling, which broke compilation.
fn main() {
    let args: Vec<String> = env::args().collect();
    let filename = &args[1];
    println!("Looking for ranges In file {}", filename);
    let contents = fs::read_to_string(filename).expect("Something went wrong reading the file");
    let split = contents.lines();
    // Parse each line: first character is the code, the rest is the amount.
    let instructions = split.map(|l| {
        let directionStr = &l[0..1];
        let amount = l[1..].parse::<i32>().unwrap();
        Instruction {
            direction: match directionStr {
                "N" => Direction::N,
                "S" => Direction::S,
                "E" => Direction::E,
                "W" => Direction::W,
                "L" => Direction::L,
                "R" => Direction::R,
                "F" => Direction::F,
                _ => Direction::UNKNOWN,
            },
            amount,
        }
    });
    // The ship starts at the origin facing east.
    let mut currentDirection = Direction::E;
    let mut delta: [i32; 2] = [1, 0];
    let mut x: i32 = 0;
    let mut y: i32 = 0;
    for i in instructions {
        match i.direction {
            Direction::N => {
                y += i.amount;
            }
            Direction::W => {
                x -= i.amount;
            }
            Direction::E => {
                x += i.amount;
            }
            Direction::S => {
                y -= i.amount;
            }
            // Turns update the heading and the cached forward step.
            Direction::L => {
                currentDirection = nextDirection(&currentDirection, i.direction, i.amount);
                delta = directionDelta(&currentDirection);
            }
            Direction::R => {
                currentDirection = nextDirection(&currentDirection, i.direction, i.amount);
                delta = directionDelta(&currentDirection);
            }
            Direction::F => {
                x += delta[0] * i.amount;
                y += delta[1] * i.amount;
            }
            Direction::UNKNOWN => {
                println!("WTF UNKNOWN {:?}", i);
                exit(1);
            }
        }
        println!("({},{}) {:?}, {:?}", x, y, currentDirection, delta);
    }
    println!("{} + {} = {}", x.abs(), y.abs(), x.abs() + y.abs());
}
/// Rotates `cur` one quarter-turn at a time in `turnDirection`; `amount`
/// is the turn angle in degrees and is assumed to be a multiple of 90.
///
/// Fix: the loop variable was an unused binding (`i`), which triggers an
/// unused-variable warning; it is now the anonymous `_`.
fn nextDirection(cur: &Direction, turnDirection: Direction, amount: i32) -> Direction {
    let mut nextDir = cur.clone();
    for _ in 0..amount / 90 {
        nextDir = nextDirectionHelper(&nextDir, &turnDirection);
    }
    nextDir
}
/// Performs a single 90-degree rotation of `cur` in the given turn
/// direction; logs and aborts the process on any non-compass input,
/// matching the original behavior.
fn nextDirectionHelper(cur: &Direction, turnDirection: &Direction) -> Direction {
    match (turnDirection, cur) {
        (Direction::L, Direction::N) => Direction::W,
        (Direction::L, Direction::W) => Direction::S,
        (Direction::L, Direction::S) => Direction::E,
        (Direction::L, Direction::E) => Direction::N,
        (Direction::R, Direction::N) => Direction::E,
        (Direction::R, Direction::E) => Direction::S,
        (Direction::R, Direction::S) => Direction::W,
        (Direction::R, Direction::W) => Direction::N,
        (Direction::L, other) => {
            println!("F Left {:?}", other);
            exit(1);
        }
        (Direction::R, other) => {
            println!("F Right {:?}", other);
            exit(1);
        }
        (other, _) => {
            println!("F {:?}", other);
            exit(1);
        }
    }
}
fn directionDelta(direction: &Direction) -> [i32; 2] {
return match direction {
Direction::W => [-1, 0],
Direction::E => [1, 0],
Direction::N => [0, 1],
Direction::S => [0, -1],
_ => {
println!("WTF directionDelta {:?}", direction);
exit(1);
}
};
} | 27.6 | 97 | 0.421981 |
use super::Robot;
#[cfg(test)]
mod tests {
    use super::*;
    // The majority of tests from https://github.com/seomoz/rep-cpp/blob/master/test/test-robots.cpp
    // are highly similar to those from reppy. A few are unique and worthy of inclusion however.
    // NOTE(review): this body is byte-identical to
    // `test_repcpp_well_formed_crawl_delay` below — presumably one of the two
    // was meant to include a `User-agent:` line; verify against rep-cpp.
    #[test]
    fn test_repcpp_no_leading_user_agent() {
        let txt = "Disallow: /path
Allow: /path/exception
Crawl-delay: 5.2";
        let r = Robot::new("Agent", txt.as_bytes()).unwrap();
        assert!(r.allowed("https://example.com/path/exception"));
        assert!(!r.allowed("https://example.com/path"));
        assert_eq!(r.delay, Some(5.2));
    }
    #[test]
    fn test_repcpp_well_formed_crawl_delay() {
        let txt = "Disallow: /path
Allow: /path/exception
Crawl-delay: 5.2";
        let r = Robot::new("Agent", txt.as_bytes()).unwrap();
        assert!(r.allowed("https://example.com/path/exception"));
        assert!(!r.allowed("https://example.com/path"));
        assert_eq!(r.delay, Some(5.2));
    }
    // A non-numeric crawl-delay must be ignored, not treated as an error.
    #[test]
    fn test_repcpp_malformed_crawl_delay() {
        let txt = "User-agent: *
Crawl-delay: word";
        let r = Robot::new("Agent", txt.as_bytes()).unwrap();
        assert_eq!(r.delay, None);
    }
    // An empty robots.txt allows everything.
    #[test]
    fn test_repcpp_empty() {
        let txt = "";
        let r = Robot::new("Agent", txt.as_bytes()).unwrap();
        assert!(r.allowed("/"));
    }
    // Matching must work against a full URL (scheme, userinfo, port,
    // params, query, fragment), not just a bare path.
    // NOTE(review): the URL literal appears redacted by an email-protection
    // filter ("[email protected]"); confirm the intended userinfo fixture.
    #[test]
    fn test_repcpp_accepts_full_url() {
        let txt = "User-Agent: agent
Disallow: /path;params?query";
        let r = Robot::new("Agent", txt.as_bytes()).unwrap();
        assert!(!r.allowed(
            "http://[email protected]:10/path;params?query#fragment"
        ));
    }
    // Leading wildcards (including repeated '*') must match any prefix.
    #[test]
    fn test_repcpp_leading_wildcard_allow() {
        let txt = "User-agent: meow
Disallow: /
Allow: ****/cats
Allow: */kangaroos";
        let r = Robot::new("meow", txt.as_bytes()).unwrap();
        assert!(!r.allowed("/kangaroo/zebra/cat/page.html"));
        assert!(r.allowed("/cats.html"));
        assert!(r.allowed("/cats/page.html"));
        assert!(r.allowed("/get/more/cats/page.html"));
        assert!(r.allowed("/kangaroos/page.html"));
        assert!(r.allowed("/heaps/of/kangaroos/page.html"));
        assert!(r.allowed("/kangaroosandkoalas/page.html"));
    }
    // Redundant but included for completeness (matching repcpp tests)
    #[test]
    fn test_repcpp_leading_wildcard_disallow() {
        let txt = "User-agent: meow
Allow: /
Disallow: ****/cats
Disallow: */kangaroos";
        let r = Robot::new("meow", txt.as_bytes()).unwrap();
        assert!(r.allowed("/kangaroo/zebra/cat/page.html"));
        assert!(!r.allowed("/cats.html"));
        assert!(!r.allowed("/cats/page.html"));
        assert!(!r.allowed("/get/more/cats/page.html"));
        assert!(!r.allowed("/kangaroos/page.html"));
        assert!(!r.allowed("/heaps/of/kangaroos/page.html"));
        assert!(!r.allowed("/kangaroosandkoalas/page.html"));
    }
}
| 32.393617 | 100 | 0.575041 |
use std::{ops::Deref, sync::Arc};
use crate::app::models::SongDescription;
use crate::app::state::SelectionState;
use crate::app::ActionDispatcher;
use crate::app::{models::PlaylistSummary, state::SelectionAction};
use crate::{api::SpotifyApiClient, app::AppAction};
/// Selection tools that need no extra payload.
#[derive(Debug, Clone, Copy)]
pub enum SimpleSelectionTool {
    MoveUp,
    MoveDown,
    Remove,
    SelectAll,
}
/// Selection tools that add the selected tracks somewhere.
#[derive(Debug, Clone)]
pub enum AddSelectionTool {
    AddToQueue,
    /// Adds to the playlist identified by the carried summary.
    AddToPlaylist(PlaylistSummary),
}
/// Any tool offered while a selection is active.
#[derive(Debug, Clone)]
pub enum SelectionTool {
    Add(AddSelectionTool),
    Simple(SimpleSelectionTool),
}
/// Model for views that expose tools operating on the current selection.
///
/// Implementors supply the dependencies (dispatcher, API client, selection
/// state) and the list of visible tools; default methods implement the
/// common tool behaviors.
pub trait SelectionToolsModel {
    // dependencies
    fn dispatcher(&self) -> Box<dyn ActionDispatcher>;
    fn spotify_client(&self) -> Arc<dyn SpotifyApiClient + Send + Sync>;
    fn selection(&self) -> Option<Box<dyn Deref<Target = SelectionState> + '_>>;
    /// The selection, but only while selection mode is active.
    fn enabled_selection(&self) -> Option<Box<dyn Deref<Target = SelectionState> + '_>> {
        self.selection().filter(|s| s.is_selection_enabled())
    }
    /// Tools to display for the given selection.
    fn tools_visible(&self, selection: &SelectionState) -> Vec<SelectionTool>;
    /// Entry point for tool activation; overridable, defaults to
    /// `default_handle_tool_activated`.
    fn handle_tool_activated(&self, selection: &SelectionState, tool: &SelectionTool) {
        self.default_handle_tool_activated(selection, tool)
    }
    /// Handles the tools shared by all models; other tools are ignored here.
    fn default_handle_tool_activated(&self, selection: &SelectionState, tool: &SelectionTool) {
        match tool {
            SelectionTool::Add(AddSelectionTool::AddToPlaylist(playlist)) => {
                self.handle_add_to_playlist_tool(selection, &playlist.id);
            }
            SelectionTool::Add(AddSelectionTool::AddToQueue) => {
                self.dispatcher().dispatch(AppAction::QueueSelection);
            }
            _ => {}
        }
    }
    // common tools implementations
    /// Toggles select-all: deselects when everything is selected, otherwise
    /// selects all of `songs`.
    /// NOTE(review): the explicit `'a` lifetimes on these two methods are
    /// elidable (clippy::needless_lifetimes).
    fn handle_select_all_tool<'a>(&self, selection: &SelectionState, songs: &'a [SongDescription]) {
        let all_selected = selection.all_selected(songs.iter().map(|s| &s.id));
        let action = if all_selected {
            SelectionAction::Deselect(songs.iter().map(|s| &s.id).cloned().collect())
        } else {
            SelectionAction::Select(songs.to_vec())
        };
        self.dispatcher().dispatch(action.into());
    }
    /// Same as `handle_select_all_tool`, for a slice of borrowed songs.
    fn handle_select_all_tool_borrowed<'a>(
        &self,
        selection: &SelectionState,
        songs: &'a [&'a SongDescription],
    ) {
        let all_selected = selection.all_selected(songs.iter().map(|s| &s.id));
        let action = if all_selected {
            SelectionAction::Deselect(songs.iter().map(|s| &s.id).cloned().collect())
        } else {
            SelectionAction::Select(songs.iter().map(|&s| s.clone()).collect())
        };
        self.dispatcher().dispatch(action.into());
    }
    /// Adds the selected tracks (by URI) to `playlist`, then clears the
    /// selection on success.
    fn handle_add_to_playlist_tool(&self, selection: &SelectionState, playlist: &str) {
        let api = self.spotify_client();
        let id = playlist.to_string();
        let uris: Vec<String> = selection
            .peek_selection()
            .iter()
            .map(|s| &s.uri)
            .cloned()
            .collect();
        self.dispatcher()
            .call_spotify_and_dispatch(move || async move {
                api.add_to_playlist(&id, uris).await?;
                Ok(SelectionAction::Clear.into())
            })
    }
}
| 33.131313 | 100 | 0.619512 |
//! The Mapper maps logical addresses to host device commands.
use std::net::IpAddr;
use std::collections::HashMap;
use crate::config::{self, Root};
use crate::host::{self, LightHost, LightCommand};
use crate::parser::{Command, CommandParser, ParserError};
/// A single RGB light's state in the mapper.
// dead_code: `name` and `ip` are recorded but not yet read anywhere visible
// — presumably kept for future status reporting; confirm before removing.
#[allow(dead_code)]
struct Light {
    /// Name to use for the light.
    name: String,
    /// Index into `Mapper::light_hosts` for the host this light is on.
    host_index: usize,
    /// Host-specific address for the light.
    address: usize,
    /// Last known red intensity.
    red: u8,
    /// Last known green intensity.
    green: u8,
    /// Last known blue intensity.
    blue: u8,
    /// Last IP address that set this.
    ip: Option<IpAddr>,
}
/// Mappers read commands and issue them to host devices.
pub struct Mapper {
    /// Configured lights, keyed by their logical (protocol) address.
    lights: HashMap<u8, Light>,
    /// Configured light effect hosts.
    light_hosts: Vec<Box<LightHost>>,
    /// Command parser/buffer (retains parsed commands between reads).
    parser: CommandParser,
}
/// Result type for various Mapper actions.
pub type MapperResult<T> = Result<T, MapperError>;
/// Various runtime errors for the Mapper.
#[derive(Debug)]
pub enum MapperError {
    /// Unknown command tag (is this caught by the parser?)
    UnknownTag(u8),
    /// Unknown logical address.
    UnknownAddr(u8),
    /// The parser couldn't understand the message.
    ParserError(ParserError),
    /// Some sort of I/O error occurred.
    IoError(std::io::Error),
}
/// Parser errors can become Mapper errors (enables `?` on parser calls).
impl From<ParserError> for MapperError {
    fn from(err: ParserError) -> MapperError {
        MapperError::ParserError(err)
    }
}
impl Mapper {
    /// Tries to set up a mapper and its host devices from a configuration.
    ///
    /// Fix: `host_index` was hard-coded to `0`, so every light was attached
    /// to the first host regardless of the `host` id it named in the config;
    /// the `light_hosts_lookup` table was built but never consulted. Lights
    /// now resolve their configured host id, falling back to host 0 (the
    /// old behavior) with a warning if the id is unknown.
    pub fn from_config(config: &Root) -> MapperResult<Mapper> {
        let mut lights: HashMap<u8, Light> = HashMap::new();
        let mut light_hosts: Vec<Box<LightHost>> = vec![];
        // Maps configured host ids to their index in `light_hosts`.
        let mut light_hosts_lookup: HashMap<String, usize> = HashMap::new();
        // Read host device information.
        for (id, host) in &config.hosts {
            light_hosts_lookup.insert(id.clone(), light_hosts.len());
            let host_device: Box<LightHost> = match host {
                config::Host::Enttec { path } => Box::new(
                    host::Enttec::new(path.as_ref()).expect("Unable to initialize Enttec device!"),
                ),
                config::Host::Proxy { addr } => Box::new(
                    host::UdpProxy::new(addr).expect("Unable to initialize Proxy device!")
                ),
            };
            light_hosts.push(host_device);
        }
        // Set up lights and their host device mapping.
        for (id, light) in &config.mapping.lights {
            match light {
                config::Light::Rgb {
                    host,
                    address,
                    name,
                } => {
                    // Resolve the configured host id to a device index.
                    let host_index = match light_hosts_lookup.get(host) {
                        Some(&index) => index,
                        None => {
                            eprintln!("Light {} references unknown host {:?}; using host 0", id, host);
                            0
                        }
                    };
                    lights.insert(
                        *id,
                        Light {
                            name: name.clone().unwrap_or_else(|| format!("{}-{}", host, id)),
                            host_index,
                            address: *address as usize,
                            red: 0,
                            green: 0,
                            blue: 0,
                            ip: None,
                        },
                    );
                }
            }
        }
        Ok(Mapper {
            lights,
            light_hosts,
            parser: CommandParser::new(),
        })
    }
    /// Reads a message from a buffer and issues the resulting commands to
    /// the host devices, flushing them afterwards. `ip` records who last
    /// touched each light.
    ///
    /// TODO: Should the messages be parsed by the servers themselves?
    /// Or would that move too much "business logic" into them?
    pub fn take_msg(&mut self, buf: &[u8], ip: Option<IpAddr>) -> MapperResult<()> {
        let mut reader = std::io::BufReader::new(buf);
        self.parser.read_from(&mut reader)?;
        // NOTE: currently recorded but not used by any command handler.
        let mut last_nick: Option<String> = None;
        for cmd in &self.parser.cmds {
            match cmd {
                Command::Nick { nick } => {
                    // ewww, clone
                    last_nick = Some(nick.clone())
                }
                Command::RgbLight {
                    id,
                    light_type,
                    red,
                    green,
                    blue,
                } => {
                    // Unknown ids are skipped rather than aborting the
                    // whole message.
                    let light = match self.lights.get_mut(id) {
                        Some(light) => light,
                        None => {
                            eprintln!("Unknown light id {}", id);
                            continue;
                        }
                    };
                    // Check that its type matches the command
                    // TODO: reject mismatched types instead of only logging.
                    if *light_type != 0 {
                        eprintln!("Unknown light type {}", light_type);
                    }
                    light.red = *red;
                    light.green = *green;
                    light.blue = *blue;
                    light.ip = ip;
                    // Issue a command to its host
                    let host = &mut self.light_hosts[light.host_index];
                    host.take_command(&LightCommand {
                        id: *id as usize,
                        address: light.address,
                        red: *red,
                        green: *green,
                        blue: *blue,
                    })
                    // And record that the host needs a flush
                    // TODO: Actually do that.
                }
            }
        }
        // TODO: Only flush the hosts that were used.
        for host in &mut self.light_hosts {
            host.flush();
        }
        Ok(())
    }
}
| 32.756757 | 99 | 0.481518 |
#[doc = "Reader of register SPECADDR3BOTTOM"]
// svd2rust-generated register API for SPECADDR3BOTTOM: reader/writer
// aliases, the reset value, and the ADDR field proxies.
pub type R = crate::R<u32, super::SPECADDR3BOTTOM>;
#[doc = "Writer for register SPECADDR3BOTTOM"]
pub type W = crate::W<u32, super::SPECADDR3BOTTOM>;
#[doc = "Register SPECADDR3BOTTOM `reset()`'s with value 0"]
impl crate::ResetValue for super::SPECADDR3BOTTOM {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type { 0 }
}
#[doc = "Reader of field `ADDR`"]
pub type ADDR_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `ADDR`"]
pub struct ADDR_W<'a> {
    w: &'a mut W,
}
impl<'a> ADDR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // The ADDR field spans all 32 bits, so the mask covers the whole word.
        self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:31 - Least significant 32 bits of the destination address"]
    #[inline(always)]
    pub fn addr(&self) -> ADDR_R { ADDR_R::new((self.bits & 0xffff_ffff) as u32) }
}
impl W {
    #[doc = "Bits 0:31 - Least significant 32 bits of the destination address"]
    #[inline(always)]
    pub fn addr(&mut self) -> ADDR_W { ADDR_W { w: self } }
}
| 33.542857 | 84 | 0.622658 |
// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
#![doc(
html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png"
)]
//! <p><fullname>AWS Service Catalog</fullname> <p> <a href="https://aws.amazon.com/servicecatalog/">AWS Service Catalog</a> enables organizations to create and manage catalogs of IT services that are approved for AWS. To get the most out of this documentation, you should be familiar with the terminology discussed in <a href="http://docs.aws.amazon.com/servicecatalog/latest/adminguide/what-is_concepts.html">AWS Service Catalog Concepts</a>.</p></p>
//!
//! If you're using the service, you're probably looking for [ServiceCatalogClient](struct.ServiceCatalogClient.html) and [ServiceCatalog](trait.ServiceCatalog.html).
mod custom;
mod generated;
pub use custom::*;
pub use generated::*;
| 52.913043 | 452 | 0.637634 |
b903df3d04790c2a36499970793652cfcc364f15 | 4,749 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use crate::Analysis;
use crate::Font;
use crate::Rectangle;
use glib::object::IsA;
use glib::translate::*;
use std::mem;
// Wrap the C `PangoGlyphString` in a boxed Rust type with value semantics:
// `Clone` goes through `pango_glyph_string_copy`, `Drop` through
// `pango_glyph_string_free`, and `type_` supplies the GType for GValue use.
glib::wrapper! {
    #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct GlyphString(Boxed<ffi::PangoGlyphString>);
    match fn {
        copy => |ptr| ffi::pango_glyph_string_copy(mut_override(ptr)),
        free => |ptr| ffi::pango_glyph_string_free(ptr),
        type_ => || ffi::pango_glyph_string_get_type(),
    }
}
// gir-generated FFI wrappers; each method forwards to the corresponding
// `pango_glyph_string_*` C function (see the `#[doc(alias = ...)]` names).
impl GlyphString {
    #[doc(alias = "pango_glyph_string_new")]
    pub fn new() -> GlyphString {
        unsafe { from_glib_full(ffi::pango_glyph_string_new()) }
    }
    // Ink and logical extents of the whole string, returned as
    // `(ink_rect, logical_rect)`.
    #[doc(alias = "pango_glyph_string_extents")]
    pub fn extents(&mut self, font: &impl IsA<Font>) -> (Rectangle, Rectangle) {
        unsafe {
            let mut ink_rect = Rectangle::uninitialized();
            let mut logical_rect = Rectangle::uninitialized();
            ffi::pango_glyph_string_extents(
                self.to_glib_none_mut().0,
                font.as_ref().to_glib_none().0,
                ink_rect.to_glib_none_mut().0,
                logical_rect.to_glib_none_mut().0,
            );
            (ink_rect, logical_rect)
        }
    }
    // Same as `extents`, restricted to the `start..end` range.
    #[doc(alias = "pango_glyph_string_extents_range")]
    pub fn extents_range(
        &mut self,
        start: i32,
        end: i32,
        font: &impl IsA<Font>,
    ) -> (Rectangle, Rectangle) {
        unsafe {
            let mut ink_rect = Rectangle::uninitialized();
            let mut logical_rect = Rectangle::uninitialized();
            ffi::pango_glyph_string_extents_range(
                self.to_glib_none_mut().0,
                start,
                end,
                font.as_ref().to_glib_none().0,
                ink_rect.to_glib_none_mut().0,
                logical_rect.to_glib_none_mut().0,
            );
            (ink_rect, logical_rect)
        }
    }
    //#[doc(alias = "pango_glyph_string_get_logical_widths")]
    //#[doc(alias = "get_logical_widths")]
    //pub fn logical_widths(&mut self, text: &str, embedding_level: i32, logical_widths: &[i32]) {
    //    unsafe { TODO: call ffi:pango_glyph_string_get_logical_widths() }
    //}
    #[doc(alias = "pango_glyph_string_get_width")]
    #[doc(alias = "get_width")]
    pub fn width(&mut self) -> i32 {
        unsafe { ffi::pango_glyph_string_get_width(self.to_glib_none_mut().0) }
    }
    // Map a byte index in `text` to an x position; `trailing` selects the
    // trailing rather than the leading edge of the grapheme.
    #[doc(alias = "pango_glyph_string_index_to_x")]
    pub fn index_to_x(
        &mut self,
        text: &str,
        analysis: &mut Analysis,
        index_: i32,
        trailing: bool,
    ) -> i32 {
        let length = text.len() as i32;
        unsafe {
            // Out-parameter filled in by the C call before it is read.
            let mut x_pos = mem::MaybeUninit::uninit();
            ffi::pango_glyph_string_index_to_x(
                self.to_glib_none_mut().0,
                text.to_glib_none().0,
                length,
                analysis.to_glib_none_mut().0,
                index_,
                trailing.into_glib(),
                x_pos.as_mut_ptr(),
            );
            let x_pos = x_pos.assume_init();
            x_pos
        }
    }
    //#[cfg(any(feature = "v1_50", feature = "dox"))]
    //#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_50")))]
    //#[doc(alias = "pango_glyph_string_index_to_x_full")]
    //pub fn index_to_x_full(&mut self, text: &str, analysis: &mut Analysis, attrs: /*Ignored*/Option<&mut LogAttr>, index_: i32, trailing: bool) -> i32 {
    //    unsafe { TODO: call ffi:pango_glyph_string_index_to_x_full() }
    //}
    // Resize the underlying glyph array.
    #[doc(alias = "pango_glyph_string_set_size")]
    pub fn set_size(&mut self, new_len: i32) {
        unsafe {
            ffi::pango_glyph_string_set_size(self.to_glib_none_mut().0, new_len);
        }
    }
    // Inverse of `index_to_x`: map an x position to `(byte_index, trailing)`.
    #[doc(alias = "pango_glyph_string_x_to_index")]
    pub fn x_to_index(&mut self, text: &str, analysis: &mut Analysis, x_pos: i32) -> (i32, i32) {
        let length = text.len() as i32;
        unsafe {
            let mut index_ = mem::MaybeUninit::uninit();
            let mut trailing = mem::MaybeUninit::uninit();
            ffi::pango_glyph_string_x_to_index(
                self.to_glib_none_mut().0,
                text.to_glib_none().0,
                length,
                analysis.to_glib_none_mut().0,
                x_pos,
                index_.as_mut_ptr(),
                trailing.as_mut_ptr(),
            );
            let index_ = index_.assume_init();
            let trailing = trailing.assume_init();
            (index_, trailing)
        }
    }
}
impl Default for GlyphString {
fn default() -> Self {
Self::new()
}
}
| 32.979167 | 154 | 0.563698 |
e8b5c01ca364855b76db8b3686621d6f6a53a94f | 18,250 | #![allow(dead_code, clippy::upper_case_acronyms)]
use crate::{
config::*,
eth::*,
grpc::sentry::{sentry_server::SentryServer, InboundMessage},
services::*,
};
use anyhow::{anyhow, Context};
use async_stream::stream;
use async_trait::async_trait;
use clap::Clap;
use devp2p::*;
use educe::Educe;
use futures::stream::BoxStream;
use grpc::sentry;
use maplit::btreemap;
use num_traits::{FromPrimitive, ToPrimitive};
use parking_lot::RwLock;
use secp256k1::{PublicKey, SecretKey, SECP256K1};
use std::{
collections::{btree_map::Entry, hash_map::Entry as HashMapEntry, BTreeMap, HashMap, HashSet},
convert::TryFrom,
fmt::Debug,
str::FromStr,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
time::Duration,
};
use task_group::TaskGroup;
use tokio::{
sync::{
broadcast::{channel as broadcast, Sender as BroadcastSender},
mpsc::{channel, Sender},
Mutex as AsyncMutex,
},
time::sleep,
};
use tokio_stream::{StreamExt, StreamMap};
use tonic::transport::Server;
use tracing::*;
use tracing_subscriber::{prelude::*, EnvFilter};
use trust_dns_resolver::{config::*, TokioAsyncResolver};
mod config;
mod eth;
mod grpc;
mod services;
mod types;
// Sending half used by the server to push events toward one peer.
type OutboundSender = Sender<OutboundEvent>;
// Receiving half, shared behind an async mutex so the devp2p driver can
// poll it from `CapabilityServer::next`.
type OutboundReceiver = Arc<AsyncMutex<BoxStream<'static, OutboundEvent>>>;
// Broadcast channel capacity multiplier (capacity = max_peers * this).
pub const BUFFERING_FACTOR: usize = 5;
/// Both halves of a peer's outbound-event channel.
#[derive(Clone)]
struct Pipes {
    sender: OutboundSender,
    receiver: OutboundReceiver,
}
/// Bidirectional index of peers and their latest known block numbers.
#[derive(Clone, Debug, Default)]
struct BlockTracker {
    // peer -> last block number reported for it
    block_by_peer: HashMap<PeerId, u64>,
    // block number -> peers currently at that block (kept in sync with the above)
    peers_by_block: BTreeMap<u64, HashSet<PeerId>>,
}
impl BlockTracker {
    /// Record `peer`'s latest known block number.
    ///
    /// An unknown peer is only added when `force_create` is set; otherwise
    /// the call is a no-op. Both indices are kept in sync: the peer is
    /// removed from the bucket of its previous block number and empty
    /// buckets are dropped.
    fn set_block_number(&mut self, peer: PeerId, block: u64, force_create: bool) {
        match self.block_by_peer.entry(peer) {
            HashMapEntry::Vacant(e) => {
                if force_create {
                    e.insert(block);
                } else {
                    return;
                }
            }
            HashMapEntry::Occupied(mut e) => {
                let old_block = std::mem::replace(e.get_mut(), block);
                if let Entry::Occupied(mut entry) = self.peers_by_block.entry(old_block) {
                    entry.get_mut().remove(&peer);
                    if entry.get().is_empty() {
                        entry.remove();
                    }
                }
            }
        }
        self.peers_by_block.entry(block).or_default().insert(peer);
    }
    /// Forget a peer entirely, dropping its per-block bucket if that
    /// bucket becomes empty.
    fn remove_peer(&mut self, peer: PeerId) {
        if let Some(block) = self.block_by_peer.remove(&peer) {
            if let Entry::Occupied(mut entry) = self.peers_by_block.entry(block) {
                entry.get_mut().remove(&peer);
                if entry.get().is_empty() {
                    entry.remove();
                }
            }
        }
    }
    /// All peers whose last reported block number is at least `block`.
    fn peers_with_min_block(&self, block: u64) -> HashSet<PeerId> {
        // `flat_map` instead of the original `map(..).flatten()`
        // (clippy::map_flatten); behavior is identical.
        self.peers_by_block
            .range(block..)
            .flat_map(|(_, peers)| peers)
            .copied()
            .collect()
    }
}
/// devp2p capability handler bridging the p2p swarm and the gRPC sentry API.
#[derive(Educe)]
#[educe(Debug)]
pub struct CapabilityServerImpl {
    // Per-peer outbound channel pairs; excluded from Debug output.
    #[educe(Debug(ignore))]
    peer_pipes: Arc<RwLock<HashMap<PeerId, Pipes>>>,
    // Best-block bookkeeping per peer (seeded on connect, cleared on teardown).
    block_tracker: Arc<RwLock<BlockTracker>>,
    // Status installed via `set_status`; `None` until the gRPC side provides one.
    status_message: Arc<RwLock<Option<FullStatusData>>>,
    // Peers that passed `Status` / fork-id validation.
    valid_peers: Arc<RwLock<HashSet<PeerId>>>,
    // Broadcast channels fanning inbound messages out to gRPC subscribers.
    data_sender: BroadcastSender<InboundMessage>,
    upload_requests_sender: BroadcastSender<InboundMessage>,
    tx_message_sender: BroadcastSender<InboundMessage>,
    // While `true`, the swarm should not accept new peers; cleared by `set_status`.
    no_new_peers: Arc<AtomicBool>,
}
impl CapabilityServerImpl {
    /// Register the channel pair for a freshly connected peer and seed the
    /// block tracker with block 0. Panics if the peer is already registered.
    fn setup_peer(&self, peer: PeerId, p: Pipes) {
        let mut pipes = self.peer_pipes.write();
        let mut block_tracker = self.block_tracker.write();
        assert!(pipes.insert(peer, p).is_none());
        block_tracker.set_block_number(peer, 0, true);
    }
    /// Both halves of the peer's channel pair, if the peer is known.
    fn get_pipes(&self, peer: PeerId) -> Option<Pipes> {
        self.peer_pipes.read().get(&peer).cloned()
    }
    /// Sending half of the peer's outbound channel, if the peer is known.
    pub fn sender(&self, peer: PeerId) -> Option<OutboundSender> {
        self.peer_pipes
            .read()
            .get(&peer)
            .map(|pipes| pipes.sender.clone())
    }
    /// Receiving half (outbound event stream) of the peer's channel.
    fn receiver(&self, peer: PeerId) -> Option<OutboundReceiver> {
        self.peer_pipes
            .read()
            .get(&peer)
            .map(|pipes| pipes.receiver.clone())
    }
    /// Drop all bookkeeping for a disconnected peer.
    fn teardown_peer(&self, peer: PeerId) {
        let mut pipes = self.peer_pipes.write();
        let mut block_tracker = self.block_tracker.write();
        let mut valid_peers = self.valid_peers.write();
        pipes.remove(&peer);
        block_tracker.remove_peer(peer);
        valid_peers.remove(&peer);
    }
    /// All currently connected peers, validated or not.
    pub fn all_peers(&self) -> HashSet<PeerId> {
        self.peer_pipes.read().keys().copied().collect()
    }
    /// Number of peers that passed `Status` validation.
    pub fn connected_peers(&self) -> usize {
        self.valid_peers.read().len()
    }
    /// Install the status handed down from the gRPC side and allow new peers.
    pub fn set_status(&self, message: FullStatusData) {
        *self.status_message.write() = Some(message);
        self.no_new_peers.store(false, Ordering::SeqCst);
    }
    /// Handle one inbound devp2p event for `peer`.
    ///
    /// Returns `Ok(Some(_))` when a reply should be sent, `Ok(None)` when
    /// there is nothing to send, and `Err(reason)` when the peer must be
    /// disconnected.
    #[instrument(skip(self))]
    async fn handle_event(
        &self,
        peer: PeerId,
        event: InboundEvent,
    ) -> Result<Option<Message>, DisconnectReason> {
        match event {
            InboundEvent::Disconnect { reason } => {
                debug!("Peer disconnect (reason: {:?}), tearing down peer.", reason);
                self.teardown_peer(peer);
            }
            InboundEvent::Message {
                message: Message { id, data },
                ..
            } => {
                let valid_peer = self.valid_peers.read().contains(&peer);
                let message_id = EthMessageId::from_usize(id);
                match message_id {
                    None => {
                        debug!("Unknown message");
                    }
                    // `Status` handshake: decode, validate the fork id
                    // against our filter, then mark the peer as valid.
                    Some(EthMessageId::Status) => {
                        let v = rlp::decode::<StatusMessage>(&data).map_err(|e| {
                            debug!("Failed to decode status message: {}! Kicking peer.", e);
                            DisconnectReason::ProtocolBreach
                        })?;
                        debug!("Decoded status message: {:?}", v);
                        let status_data = self.status_message.read();
                        let mut valid_peers = self.valid_peers.write();
                        if let Some(FullStatusData { fork_filter, .. }) = &*status_data {
                            fork_filter.validate(v.fork_id).map_err(|reason| {
                                debug!("Kicking peer with incompatible fork ID: {:?}", reason);
                                DisconnectReason::UselessPeer
                            })?;
                            valid_peers.insert(peer);
                        }
                    }
                    // Route messages from validated peers to the matching
                    // broadcast channel; a send error means no gRPC
                    // subscriber is attached, so drop the status and
                    // disconnect (transaction messages are currently not
                    // routed — see the commented-out arms).
                    Some(inbound_id) if valid_peer => {
                        if let Some(sender) = match inbound_id {
                            EthMessageId::BlockBodies
                            | EthMessageId::BlockHeaders
                            | EthMessageId::NodeData => Some(&self.data_sender),
                            EthMessageId::GetBlockBodies
                            | EthMessageId::GetBlockHeaders
                            | EthMessageId::GetNodeData => Some(&self.upload_requests_sender),
                            // EthMessageId::Transactions
                            // | EthMessageId::NewPooledTransactionHashes
                            // | EthMessageId::GetPooledTransactions
                            // | EthMessageId::PooledTransactions => Some(&self.tx_message_sender),
                            _ => None,
                        } {
                            if sender
                                .send(InboundMessage {
                                    id: sentry::MessageId::try_from(inbound_id).unwrap() as i32,
                                    data,
                                    peer_id: Some(peer.into()),
                                })
                                .is_err()
                            {
                                warn!("no connected sentry, dropping status and peer");
                                *self.status_message.write() = None;
                                return Err(DisconnectReason::ClientQuitting);
                            }
                        }
                    }
                    _ => {}
                }
            }
        }
        Ok(None)
    }
}
// devp2p hooks: greet each new peer, feed inbound events to `handle_event`,
// and stream queued outbound events back to the swarm.
#[async_trait]
impl CapabilityServer for CapabilityServerImpl {
    #[instrument(skip(self, peer), level = "debug", fields(peer=&*peer.to_string()))]
    fn on_peer_connect(&self, peer: PeerId, caps: HashMap<CapabilityName, CapabilityVersion>) {
        // If a status is available, greet the peer with our `Status`
        // message; otherwise ask it to disconnect right away.
        let first_events = if let Some(FullStatusData {
            status,
            fork_filter,
        }) = &*self.status_message.read()
        {
            let status_message = StatusMessage {
                protocol_version: *caps
                    .get(&capability_name())
                    .expect("peer without this cap would have been disconnected"),
                network_id: status.network_id,
                total_difficulty: status.total_difficulty,
                best_hash: status.best_hash,
                genesis_hash: status.fork_data.genesis,
                fork_id: fork_filter.current(),
            };
            vec![OutboundEvent::Message {
                capability_name: capability_name(),
                message: Message {
                    id: EthMessageId::Status.to_usize().unwrap(),
                    data: rlp::encode(&status_message).into(),
                },
            }]
        } else {
            vec![OutboundEvent::Disconnect {
                reason: DisconnectReason::DisconnectRequested,
            }]
        };
        // Wire up the peer's channel: the stream first yields the greeting
        // events, then everything sent through `sender`.
        let (sender, mut receiver) = channel(1);
        self.setup_peer(
            peer,
            Pipes {
                sender,
                receiver: Arc::new(AsyncMutex::new(Box::pin(stream! {
                    for event in first_events {
                        yield event;
                    }
                    while let Some(event) = receiver.recv().await {
                        yield event;
                    }
                }))),
            },
        );
    }
    #[instrument(skip(self, peer, event), level = "debug", fields(peer=&*peer.to_string(), event=&*event.to_string()))]
    async fn on_peer_event(&self, peer: PeerId, event: InboundEvent) {
        debug!("Received message");
        // Forward any reply or disconnect produced by `handle_event` to the
        // peer's outbound channel; a closed channel is silently ignored.
        if let Some(ev) = self.handle_event(peer, event).await.transpose() {
            let _ = self
                .sender(peer)
                .unwrap()
                .send(match ev {
                    Ok(message) => OutboundEvent::Message {
                        capability_name: capability_name(),
                        message,
                    },
                    Err(reason) => OutboundEvent::Disconnect { reason },
                })
                .await;
        }
    }
    #[instrument(skip(self, peer), level = "debug", fields(peer=&*peer.to_string()))]
    async fn next(&self, peer: PeerId) -> OutboundEvent {
        // When the peer's stream is exhausted, fall back to a disconnect.
        self.receiver(peer)
            .unwrap()
            .lock()
            .await
            .next()
            .await
            .unwrap_or(OutboundEvent::Disconnect {
                reason: DisconnectReason::DisconnectRequested,
            })
    }
}
// Entry point: parse options, set up logging, start discovery (DNS, discv4,
// optional discv5, optional static peers), spin up the RLPx swarm and the
// gRPC sentry server, then report peer counts forever.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let opts = Opts::parse();
    // Tracing setup: honor RUST_LOG if set, otherwise use a sane default.
    let filter = if std::env::var(EnvFilter::DEFAULT_ENV)
        .unwrap_or_default()
        .is_empty()
    {
        EnvFilter::new("ethereum_sentry=info,devp2p=info,discv4=info,discv5=info,dnsdisc=info")
    } else {
        EnvFilter::from_default_env()
    };
    let registry = tracing_subscriber::registry()
        // the `TasksLayer` can be used in combination with other `tracing` layers...
        .with(tracing_subscriber::fmt::layer());
    if opts.tokio_console {
        let (layer, server) = console_subscriber::TasksLayer::new();
        registry
            .with(filter.add_directive("tokio=trace".parse()?))
            .with(layer)
            .init();
        tokio::spawn(async move { server.serve().await.expect("server failed") });
    } else {
        registry.with(filter).init();
    }
    // Node identity: use the configured key or generate an ephemeral one.
    let secret_key;
    if let Some(data) = opts.node_key {
        secret_key = SecretKey::from_slice(&hex::decode(data)?)?;
        info!("Loaded node key from config");
    } else {
        secret_key = SecretKey::new(&mut secp256k1::rand::thread_rng());
        info!("Generated new node key: {}", secret_key);
    };
    let listen_addr = format!("0.0.0.0:{}", opts.listen_port);
    info!("Starting Ethereum sentry");
    info!(
        "Node ID: {}",
        hex::encode(
            devp2p::util::pk2id(&PublicKey::from_secret_key(SECP256K1, &secret_key)).as_bytes()
        )
    );
    if let Some(cidr_filter) = &opts.cidr {
        info!("Peers restricted to range {}", cidr_filter);
    }
    let mut discovery_tasks = StreamMap::new();
    // DNS-based discovery (EIP-1459 style tree at `dnsdisc_address`).
    info!("Starting DNS discovery fetch from {}", opts.dnsdisc_address);
    let dns_resolver = dnsdisc::Resolver::new(Arc::new(
        TokioAsyncResolver::tokio(ResolverConfig::default(), ResolverOpts::default())
            .context("Failed to start DNS resolver")?,
    ));
    discovery_tasks.insert(
        "dnsdisc".to_string(),
        Box::pin(DnsDiscovery::new(
            Arc::new(dns_resolver),
            opts.dnsdisc_address,
            None,
        )) as Discovery,
    );
    // discv4 with configured bootstrap nodes, falling back to the defaults.
    info!("Starting discv4 at port {}", opts.discv4_port);
    let mut bootstrap_nodes = opts
        .discv4_bootnodes
        .into_iter()
        .map(|Discv4NR(nr)| nr)
        .collect::<Vec<_>>();
    if bootstrap_nodes.is_empty() {
        bootstrap_nodes = BOOTNODES
            .iter()
            .map(|b| Ok(Discv4NR::from_str(b)?.0))
            .collect::<Result<Vec<_>, <Discv4NR as FromStr>::Err>>()?;
        info!("Using default discv4 bootstrap nodes");
    }
    discovery_tasks.insert(
        "discv4".to_string(),
        Box::pin(
            Discv4Builder::default()
                .with_cache(opts.discv4_cache)
                .with_concurrent_lookups(opts.discv4_concurrent_lookups)
                .build(
                    discv4::Node::new(
                        format!("0.0.0.0:{}", opts.discv4_port).parse().unwrap(),
                        secret_key,
                        bootstrap_nodes,
                        None,
                        true,
                        opts.listen_port,
                    )
                    .await
                    .unwrap(),
                ),
        ),
    );
    // Optional discv5: requires both a listen address and an ENR.
    if opts.discv5 {
        let addr = opts
            .discv5_addr
            .ok_or_else(|| anyhow!("no discv5 addr specified"))?;
        let enr = opts
            .discv5_enr
            .ok_or_else(|| anyhow!("discv5 ENR not specified"))?;
        let mut svc = discv5::Discv5::new(
            enr,
            discv5::enr::CombinedKey::Secp256k1(
                k256::ecdsa::SigningKey::from_bytes(secret_key.as_ref()).unwrap(),
            ),
            Default::default(),
        )
        .map_err(|e| anyhow!("{}", e))?;
        svc.start(addr.parse()?)
            .await
            .map_err(|e| anyhow!("{}", e))
            .context("Failed to start discv5")?;
        info!("Starting discv5 at {}", addr);
        for bootnode in opts.discv5_bootnodes {
            svc.add_enr(bootnode).unwrap();
        }
        discovery_tasks.insert("discv5".to_string(), Box::pin(Discv5::new(svc, 20)));
    }
    // Optional statically configured peers, re-announced on an interval.
    if !opts.static_peers.is_empty() {
        info!("Enabling static peers: {:?}", opts.static_peers);
        discovery_tasks.insert(
            "static peers".to_string(),
            Box::pin(StaticNodes::new(
                opts.static_peers
                    .iter()
                    .map(|&NR(NodeRecord { addr, id })| (addr, id))
                    .collect::<HashMap<_, _>>(),
                Duration::from_millis(opts.static_peers_interval),
            )),
        );
    }
    let tasks = Arc::new(TaskGroup::new());
    // Broadcast channels toward gRPC subscribers, sized by peer count.
    let data_sender = broadcast(opts.max_peers * BUFFERING_FACTOR).0;
    let upload_requests_sender = broadcast(opts.max_peers * BUFFERING_FACTOR).0;
    let tx_message_sender = broadcast(opts.max_peers * BUFFERING_FACTOR).0;
    // Start closed to new peers until a status arrives (see `set_status`).
    let no_new_peers = Arc::new(AtomicBool::new(true));
    let capability_server = Arc::new(CapabilityServerImpl {
        peer_pipes: Default::default(),
        block_tracker: Default::default(),
        status_message: Default::default(),
        valid_peers: Default::default(),
        data_sender,
        upload_requests_sender,
        tx_message_sender,
        no_new_peers: no_new_peers.clone(),
    });
    // RLPx swarm advertising eth/66 with 17 message ids.
    let swarm = Swarm::builder()
        .with_task_group(tasks.clone())
        .with_listen_options(ListenOptions {
            discovery_tasks,
            max_peers: opts.max_peers,
            addr: listen_addr.parse().unwrap(),
            cidr: opts.cidr,
            no_new_peers,
        })
        .with_client_version(format!("sentry/v{}", env!("CARGO_PKG_VERSION")))
        .build(
            btreemap! {
                CapabilityId { name: capability_name(), version: 66 } => 17,
            },
            capability_server.clone(),
            secret_key,
        )
        .await
        .context("Failed to start RLPx node")?;
    info!("RLPx node listening at {}", listen_addr);
    // gRPC sentry service runs on its own task.
    let sentry_addr = opts.sentry_addr.parse()?;
    tasks.spawn(async move {
        let svc = SentryServer::new(SentryService::new(capability_server));
        info!("Sentry gRPC server starting on {}", sentry_addr);
        Server::builder()
            .add_service(svc)
            .serve(sentry_addr)
            .await
            .unwrap();
    });
    // Periodic peer-count report; runs until the process is killed.
    loop {
        info!(
            "Peer info: {} active (+{} dialing) / {} max.",
            swarm.connected_peers(),
            swarm.dialing(),
            opts.max_peers
        );
        sleep(Duration::from_secs(5)).await;
    }
}
| 33.001808 | 119 | 0.522356 |
e984d7e694ad2b98e44f591e49999d1d33484c5e | 18,796 | // Copyright (c) 2017-2020 Fabian Schuiki
//! The simulation state.
#![allow(unused_imports)]
use crate::value::{TimeValue, Value};
use llhd::ir::Unit;
use num::zero;
use std::{
cmp::Ordering,
collections::{BTreeMap, BinaryHeap, HashMap, HashSet},
fmt,
ops::{Index, IndexMut},
sync::Mutex,
};
/// A simulation state.
pub struct State<'ll> {
    /// The LLHD module being simulated.
    pub module: &'ll llhd::ir::Module,
    /// The signals present in the simulation.
    pub signals: Vec<Signal>,
    /// The probed signals.
    pub probes: HashMap<SignalRef, Vec<String>>,
    /// The root scope of the simulation.
    pub scope: Scope,
    /// The process and entity instances in the simulation.
    // Each instance is individually locked so they can be stepped in parallel.
    pub insts: Vec<Mutex<Instance<'ll>>>,
    /// The current simulation time.
    pub time: TimeValue,
    /// The current state of the event queue.
    // time -> (value pointer -> value to apply at that time); ordered by time.
    pub events: BTreeMap<TimeValue, HashMap<ValuePointer, Value>>,
    /// The current wakeup queue for instances.
    // time -> set of instances to wake at that time; ordered by time.
    pub timed: BTreeMap<TimeValue, HashSet<InstanceRef>>,
}
impl<'ll> State<'ll> {
    /// Add a set of events to the schedule.
    ///
    /// Panics if any event is scheduled before the current simulation time.
    pub fn schedule_events<I>(&mut self, iter: I)
    where
        I: Iterator<Item = Event>,
    {
        // Cloned up front so the trace formatting below can read them while
        // `self.events` is mutably borrowed.
        let time = self.time.clone();
        let probes = self.probes.clone();
        for i in iter {
            assert!(i.time >= time);
            debug!(
                "Schedule {} <- {} [@ {}]",
                i.signal
                    .0
                    .iter()
                    .map(|s| {
                        let sig = s.target.unwrap_signal();
                        probes
                            .get(&sig)
                            .map(|n| n[0].clone())
                            .unwrap_or_else(|| format!("{:?}", sig))
                    })
                    .collect::<String>(),
                i.value,
                i.time,
            );
            // A later event for the same pointer at the same time replaces
            // the earlier one.
            self.events
                .entry(i.time)
                .or_default()
                .insert(i.signal, i.value);
        }
    }
    /// Add a set of timed instances to the schedule.
    ///
    /// Panics if any wake-up is scheduled before the current simulation time.
    pub fn schedule_timed<I>(&mut self, iter: I)
    where
        I: Iterator<Item = TimedInstance>,
    {
        let time = self.time.clone();
        for i in iter {
            assert!(i.time >= time);
            debug!("Schedule {:?} [@ {}]", i.inst, i.time);
            self.timed.entry(i.time).or_default().insert(i.inst);
        }
    }
    /// Dequeue all events due at the current time.
    pub fn take_next_events(&mut self) -> impl Iterator<Item = (ValuePointer, Value)> {
        self.events
            .remove(&self.time)
            .unwrap_or_default()
            .into_iter()
    }
    /// Dequeue all timed instances due at the current time.
    pub fn take_next_timed(&mut self) -> impl Iterator<Item = InstanceRef> {
        self.timed
            .remove(&self.time)
            .unwrap_or_default()
            .into_iter()
    }
    /// Determine the time of the next simulation step. This is the lowest time
    /// value of any event or wake up request in the schedule. If both the event
    /// and timed instances queue are empty, None is returned.
    pub fn next_time(&self) -> Option<TimeValue> {
        use std::cmp::min;
        // Both maps are ordered by time, so the first key is the minimum.
        match (self.events.keys().next(), self.timed.keys().next()) {
            (Some(e), Some(t)) => Some(min(e, t).clone()),
            (Some(e), None) => Some(e.clone()),
            (None, Some(t)) => Some(t.clone()),
            (None, None) => None,
        }
    }
}
// Direct indexing of a `State` by its reference types. These panic on an
// out-of-range reference, which would indicate an internal inconsistency.
impl Index<SignalRef> for State<'_> {
    type Output = Signal;
    fn index(&self, idx: SignalRef) -> &Self::Output {
        &self.signals[idx.0]
    }
}
impl IndexMut<SignalRef> for State<'_> {
    fn index_mut(&mut self, idx: SignalRef) -> &mut Self::Output {
        &mut self.signals[idx.0]
    }
}
impl<'ll> Index<InstanceRef> for State<'ll> {
    type Output = Mutex<Instance<'ll>>;
    fn index(&self, idx: InstanceRef) -> &Self::Output {
        &self.insts[idx.0]
    }
}
impl IndexMut<InstanceRef> for State<'_> {
    fn index_mut(&mut self, idx: InstanceRef) -> &mut Self::Output {
        &mut self.insts[idx.0]
    }
}
/// A unique handle to a signal in a simulation state.
// Thin newtype over the index into `State::signals`.
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct SignalRef(usize);
impl SignalRef {
    /// Create a new signal reference.
    pub fn new(id: usize) -> SignalRef {
        SignalRef(id)
    }
    /// Return the underlying index of this reference.
    pub fn as_usize(&self) -> usize {
        self.0
    }
}
// Compact debug form (`s3`) used in traces and probe fallback names.
impl fmt::Debug for SignalRef {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "s{}", self.0)
    }
}
/// A signal in a simulation state.
pub struct Signal {
    // The LLHD type of the signal's value.
    ty: llhd::Type,
    // The signal's current value.
    value: Value,
}
impl Signal {
    /// Create a new signal.
    pub fn new(ty: llhd::Type, value: Value) -> Signal {
        // Field-init shorthand instead of the redundant `ty: ty, value: value`.
        Signal { ty, value }
    }
    /// Get the signal's type.
    pub fn ty(&self) -> &llhd::Type {
        &self.ty
    }
    /// Get the signal's current value.
    pub fn value(&self) -> &Value {
        &self.value
    }
    /// Change the signal's current value.
    ///
    /// Returns `true` if the value actually changed, `false` if the new
    /// value was identical to the old one. (The original doc comment had
    /// this inverted.)
    pub fn set_value(&mut self, value: Value) -> bool {
        if self.value != value {
            self.value = value;
            true
        } else {
            false
        }
    }
}
/// An instance of a process or entity.
pub struct Instance<'ll> {
    // Per-IR-value slots holding the instance's current values.
    pub values: HashMap<llhd::ir::Value, ValueSlot>,
    // Whether this is a process or an entity, plus the associated IR unit.
    pub kind: InstanceKind<'ll>,
    // Current execution state (ready / waiting / done).
    pub state: InstanceState,
    // Signals this instance is connected to.
    pub signals: Vec<SignalRef>,
    // Reverse map from signal to the IR value it corresponds to.
    pub signal_values: HashMap<SignalRef, llhd::ir::Value>,
}
impl<'ll> Instance<'ll> {
    // pub fn new(
    //     values: HashMap<llhd::ir::Value, ValueSlot>,
    //     kind: InstanceKind<'ll>,
    //     inputs: Vec<SignalRef>,
    //     outputs: Vec<SignalRef>,
    // ) -> Instance<'ll> {
    //     Instance {
    //         values: values,
    //         kind: kind,
    //         state: InstanceState::Ready,
    //         inputs: inputs,
    //         outputs: outputs,
    //     }
    // }
    // /// Get the instance's current state.
    // pub fn state(&self) -> &InstanceState {
    //     &self.state
    // }
    // /// Change the instance's current state.
    // pub fn set_state(&mut self, state: InstanceState) {
    //     self.state = state;
    // }
    // pub fn kind(&self) -> &InstanceKind<'ll> {
    //     &self.kind
    // }
    // pub fn kind_mut(&mut self) -> &mut InstanceKind<'ll> {
    //     &mut self.kind
    // }
    // /// Get a reference to the value table of this instance.
    // pub fn values(&self) -> &HashMap<llhd::ir::Value, ValueSlot> {
    //     &self.values
    // }
    /// Access an entry in this instance's value table.
    // Panics if `id` has no slot; callers are expected to only pass values
    // that belong to this instance.
    pub fn value(&self, id: llhd::ir::Value) -> &ValueSlot {
        self.values.get(&id).unwrap()
    }
    /// Change an entry in this instance's value table.
    pub fn set_value(&mut self, id: llhd::ir::Value, value: ValueSlot) {
        self.values.insert(id, value);
    }
    // /// Get a slice of the instance's input signals.
    // pub fn inputs(&self) -> &[SignalRef] {
    //     &self.inputs
    // }
    // /// Get a slice of the instance's output signals.
    // pub fn outputs(&self) -> &[SignalRef] {
    //     &self.outputs
    // }
    /// Get the name of the entity or process.
    pub fn name(&self) -> String {
        match self.kind {
            InstanceKind::Process { prok, .. } => prok.name().to_string(),
            InstanceKind::Entity { entity, .. } => entity.name().to_string(),
        }
    }
}
/// A slot that carries a single value.
///
/// Slots are assigned to each entity in the LLHD graph that may carry a value.
/// Execution of instructions change the value slots.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ValueSlot {
    /// A signal.
    Signal(SignalRef),
    /// A variable with its current value.
    Variable(Value),
    /// A constant value.
    Const(Value),
    /// A pointer to a variable.
    VariablePointer(ValuePointer),
    /// A pointer to a signal.
    SignalPointer(ValuePointer),
}
// Display delegates to the payload; signals use their compact Debug form.
impl fmt::Display for ValueSlot {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ValueSlot::Signal(v) => fmt::Debug::fmt(v, f),
            ValueSlot::Variable(v) => fmt::Display::fmt(v, f),
            ValueSlot::Const(v) => fmt::Display::fmt(v, f),
            ValueSlot::VariablePointer(v) => fmt::Display::fmt(v, f),
            ValueSlot::SignalPointer(v) => fmt::Display::fmt(v, f),
        }
    }
}
/// A pointer to a value.
///
/// A `ValuePointer` represents a variable or signal that is either referenced
/// in its entirety, or by selecting a subset of its elements, bits, or fields.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ValuePointer(pub Vec<ValueSlice>);
// A single slice prints bare; multiple slices print bracketed, one per line
// in alternate (`{:#}`) mode.
impl fmt::Display for ValuePointer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let single = self.0.len() == 1;
        if !single {
            write!(f, "[")?;
        }
        let mut first = true;
        for s in &self.0 {
            if f.alternate() && !single {
                write!(f, "\n    ")?;
            } else if !first {
                write!(f, ", ")?;
            }
            write!(f, "{}", s)?;
            first = false;
        }
        if !single {
            if f.alternate() {
                write!(f, "\n]")
            } else {
                write!(f, "]")
            }
        } else {
            Ok(())
        }
    }
}
impl ValuePointer {
    /// Compute the width of the pointed at value.
    ///
    /// Returns 0 if it is a struct.
    pub fn width(&self) -> usize {
        self.0.iter().map(|s| s.width).sum()
    }
    /// Get an iterator over the slices which tracks slice offsets.
    // Yields `(bit offset of the slice within the pointer, slice)` pairs,
    // accumulating `width` as it goes.
    pub fn offset_slices(&self) -> impl Iterator<Item = (usize, &ValueSlice)> {
        let mut i = 0;
        self.0.iter().map(move |s| {
            let v = i;
            i += s.width;
            (v, s)
        })
    }
}
/// A slice of a pointer.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ValueSlice {
    /// The targeted value, variable, or signal.
    pub target: ValueTarget,
    /// The selection into the target.
    pub select: Vec<ValueSelect>,
    /// The width of this slice, or 0 if it is a struct.
    pub width: usize,
}
// Prints as the target followed by each selection, e.g. `$s0[4+:8].2`.
impl fmt::Display for ValueSlice {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.target)?;
        for s in &self.select {
            write!(f, "{}", s)?;
        }
        Ok(())
    }
}
/// A pointer target.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ValueTarget {
    Value(llhd::ir::Value),
    Variable(llhd::ir::Value),
    Signal(SignalRef),
}
// Sigils distinguish the variants: plain for values, `*` for variables,
// `$` for signals.
impl fmt::Display for ValueTarget {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ValueTarget::Value(v) => write!(f, "{}", v),
            ValueTarget::Variable(v) => write!(f, "*{}", v),
            ValueTarget::Signal(v) => write!(f, "${:?}", v),
        }
    }
}
// Accessors that assert the expected variant; each panics on a mismatch.
impl ValueTarget {
    /// Unwrap the underlying value, or panic.
    #[allow(dead_code)]
    pub fn unwrap_value(&self) -> llhd::ir::Value {
        match *self {
            ValueTarget::Value(v) => v,
            _ => panic!("value target is not a value"),
        }
    }
    /// Unwrap the underlying variable, or panic.
    pub fn unwrap_variable(&self) -> llhd::ir::Value {
        match *self {
            ValueTarget::Variable(v) => v,
            _ => panic!("value target is not a variable"),
        }
    }
    /// Unwrap the underlying signal, or panic.
    pub fn unwrap_signal(&self) -> SignalRef {
        match *self {
            ValueTarget::Signal(v) => v,
            _ => panic!("value target is not a signal"),
        }
    }
}
/// A selection of a part of a value.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ValueSelect {
    /// An individual array element or struct field.
    Field(usize),
    /// A slice of array elements or integer bits, given by `(offset, length)`.
    Slice(usize, usize),
}
// Verilog-like notation: `.N` for fields, `[off+:len]` for slices.
impl fmt::Display for ValueSelect {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ValueSelect::Field(i) => write!(f, ".{}", i),
            ValueSelect::Slice(o, l) => write!(f, "[{}+:{}]", o, l),
        }
    }
}
/// An instantiation.
pub enum InstanceKind<'ll> {
    Process {
        // The IR unit of the process.
        prok: llhd::ir::Unit<'ll>,
        // The next block to execute, or `None` when the process halts.
        next_block: Option<llhd::ir::Block>,
    },
    Entity {
        // The IR unit of the entity.
        entity: llhd::ir::Unit<'ll>,
    },
}
/// The state an instance can be in.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InstanceState {
    Ready,
    // Waiting for an optional timeout and/or a change on any listed signal.
    Wait(Option<TimeValue>, Vec<SignalRef>),
    Done,
}
/// A unique reference to an instance in the simulation.
// Thin newtype over the index into `State::insts`.
#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct InstanceRef(usize);
impl InstanceRef {
    /// Create a new instance reference.
    pub fn new(id: usize) -> InstanceRef {
        InstanceRef(id)
    }
}
/// An event that can be scheduled in a binary heap, forming an event queue. The
/// largest element, i.e. the one at the top of the heap, is the one with the
/// lowest time value.
#[derive(Debug, Eq, PartialEq)]
pub struct Event {
    pub time: TimeValue,
    pub signal: ValuePointer,
    pub value: Value,
}
// NOTE: the ordering on `time` is deliberately inverted (Greater maps to
// Less and vice versa) so that a max-heap pops the earliest event first;
// ties on time break on `signal` in normal (non-inverted) order.
impl Ord for Event {
    fn cmp(&self, rhs: &Event) -> Ordering {
        match self.time.cmp(&rhs.time) {
            Ordering::Equal => self.signal.cmp(&rhs.signal),
            Ordering::Greater => Ordering::Less,
            Ordering::Less => Ordering::Greater,
        }
    }
}
// Delegates to `Ord` so the two orderings cannot disagree.
impl PartialOrd for Event {
    fn partial_cmp(&self, rhs: &Event) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}
/// A notice that an instance is in a wait state and wants to be resumed once a
/// certain simulation time has been reached. TimedInstance objects can be
/// scheduled in a binary heap, which forms a wake up queue. The largest
/// element, i.e. the one at the top of the heap, is the one with the lowest
/// time value.
#[derive(Debug, Eq, PartialEq)]
pub struct TimedInstance {
    pub time: TimeValue,
    pub inst: InstanceRef,
}
// Same inverted-time ordering as `Event`: earliest time compares greatest,
// with ties broken on `inst` in normal order.
impl Ord for TimedInstance {
    fn cmp(&self, rhs: &TimedInstance) -> Ordering {
        match self.time.cmp(&rhs.time) {
            Ordering::Equal => self.inst.cmp(&rhs.inst),
            Ordering::Greater => Ordering::Less,
            Ordering::Less => Ordering::Greater,
        }
    }
}
// Delegates to `Ord` so the two orderings cannot disagree.
impl PartialOrd for TimedInstance {
    fn partial_cmp(&self, rhs: &TimedInstance) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}
/// A level of hierarchy.
///
/// The scope represents the hierarchy of a design. Each instantiation or
/// process creates a new subscope with its own set of probes.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Scope {
    /// The name of the scope.
    pub name: String,
    /// The probes in this scope.
    pub probes: HashMap<SignalRef, Vec<String>>,
    /// The subscopes.
    pub subscopes: Vec<Scope>,
}
impl Scope {
    /// Create a new empty scope with the given name.
    pub fn new(name: impl Into<String>) -> Scope {
        Scope {
            name: name.into(),
            probes: Default::default(),
            subscopes: vec![],
        }
    }
    /// Add a subscope.
    pub fn add_subscope(&mut self, scope: Scope) {
        self.subscopes.push(scope);
    }
    /// Add a probe name for `signal`; a signal may carry several names.
    pub fn add_probe(&mut self, signal: SignalRef, name: String) {
        // `or_default` instead of `or_insert(Vec::new())` (clippy::or_fun_call).
        self.probes.entry(signal).or_default().push(name);
    }
}
| 28.916923 | 87 | 0.533199 |
/// Heat added per shot, as a fraction of the maximum temperature.
const HEAT_PER_SHOT: f32 = 0.025;
/// Cool-down rate (fraction per second) while the gun is operating normally.
const NATURAL_COOL_DOWN_RATE: f32 = 0.2;
/// Cool-down rate (fraction per second) once the gun has overheated.
const OVERHEAT_COOL_DOWN_RATE: f32 = 0.4;

/// This is the player's gun: it slowly overheats with every shot so that the player can't just
/// spam shooting and ruin the gameplay
#[derive(Default)]
pub struct Gun {
    /// The current temperature of the gun - a percentage between 0 and 1
    pub temperature: f32,
    /// Whether or not the gun has overheated
    overheated: bool,
}

impl Gun {
    /// Create a cold, usable gun.
    pub fn new() -> Gun {
        Gun::default()
    }

    /// Called every tick with the elapsed time `dt`; slowly cools the gun.
    /// An overheated gun cools faster than one in normal operation, and only
    /// becomes usable again once it has cooled completely.
    pub fn cool_down(&mut self, dt: f32) {
        let rate = if self.overheated {
            OVERHEAT_COOL_DOWN_RATE
        } else {
            NATURAL_COOL_DOWN_RATE
        };
        self.temperature = (self.temperature - rate * dt).max(0.0);
        if self.overheated && self.temperature == 0.0 {
            self.overheated = false;
        }
    }

    /// Whenever the gun is fired it heats up; reaching the maximum
    /// temperature puts the gun into the overheated state.
    pub fn heat_up(&mut self) {
        self.temperature = (self.temperature + HEAT_PER_SHOT).min(1.0);
        // Exact comparison is safe here: `min` clamps to exactly 1.0.
        if self.temperature == 1.0 {
            self.overheated = true;
        }
    }

    /// Check if the gun is usable, i.e. it has not overheated.
    pub fn is_available(&self) -> bool {
        !self.overheated
    }

    /// Reset the gun's state back to its defaults (cold and usable).
    pub fn reset(&mut self) {
        *self = Gun::default();
    }
}
| 28.885246 | 95 | 0.611805 |
//! Semantic representations of types.
use crate::semantic::fresh::{Fresh, Fresher};
use crate::semantic::sub::{Substitutable, Substitution};
use derive_more::Display;
use std::fmt::Write;
use std::{
cmp,
collections::{BTreeMap, BTreeSet, HashMap},
fmt,
};
/// For use in generics where the specific type of map is not mentioned.
/// A `BTreeMap` is used so iteration order is deterministic (sorted by key).
pub type SemanticMap<K, V> = BTreeMap<K, V>;
/// Borrowing iterator over a [`SemanticMap`].
#[allow(missing_docs)]
pub type SemanticMapIter<'a, K, V> = std::collections::btree_map::Iter<'a, K, V>;
/// A type scheme that quantifies the free variables of a monotype,
/// i.e. a universally quantified ("forall") type.
#[derive(Debug, Clone)]
pub struct PolyType {
    /// List of the free variables within the monotypes.
    pub vars: Vec<Tvar>,
    /// The list of kind constraints on any of the free variables.
    pub cons: TvarKinds,
    /// The underlying monotype.
    pub expr: MonoType,
}
/// Map of identifier to a polytype that preserves a sorted order when iterating.
pub type PolyTypeMap = SemanticMap<String, PolyType>;
/// Nested map of polytypes that preserves a sorted order when iterating
pub type PolyTypeMapMap = SemanticMap<String, SemanticMap<String, PolyType>>;
/// Alias the maplit literal construction macro so we can specify the type here.
#[macro_export]
macro_rules! semantic_map {
    ( $($x:tt)* ) => ( maplit::btreemap!( $($x)* ) );
}
impl fmt::Display for PolyType {
    /// Formats as the bare monotype, or as `<expr> where <constraints>` when
    /// any kind constraints are present.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.cons.is_empty() {
            return self.expr.fmt(f);
        }
        write!(
            f,
            "{} where {}",
            self.expr,
            PolyType::display_constraints(&self.cons),
        )
    }
}
impl PartialEq for PolyType {
    // Polytypes are compared up to alpha-equivalence: both sides are
    // re-freshened from the same starting variable so differing variable
    // names do not affect equality; vars and kind lists are sorted to make
    // the comparison order-insensitive.
    fn eq(&self, poly: &Self) -> bool {
        let a: Tvar = self.max_tvar();
        let b: Tvar = poly.max_tvar();
        // Start freshening above the largest tvar on either side to avoid
        // collisions with existing variables.
        let max = if a > b { a.0 } else { b.0 };
        let mut f = Fresher::from(max + 1);
        let mut g = Fresher::from(max + 1);
        let mut a = self.clone().fresh(&mut f, &mut TvarMap::new());
        let mut b = poly.clone().fresh(&mut g, &mut TvarMap::new());
        a.vars.sort();
        b.vars.sort();
        for kinds in a.cons.values_mut() {
            kinds.sort();
        }
        for kinds in b.cons.values_mut() {
            kinds.sort();
        }
        a.vars == b.vars && a.cons == b.cons && a.expr == b.expr
    }
}
impl Substitutable for PolyType {
    // Only the monotype is substituted; quantified vars and constraints are
    // bound by this scheme and left untouched.
    fn apply(self, sub: &Substitution) -> Self {
        PolyType {
            vars: self.vars,
            cons: self.cons,
            expr: self.expr.apply(sub),
        }
    }
    // Free variables of a polytype are the monotype's free variables minus
    // the variables bound by the quantifier.
    fn free_vars(&self) -> Vec<Tvar> {
        minus(&self.vars, self.expr.free_vars())
    }
}
impl MaxTvar for [Tvar] {
    /// The largest type variable in the slice, or `Tvar(0)` when empty.
    fn max_tvar(&self) -> Tvar {
        self.iter().copied().max().unwrap_or(Tvar(0))
    }
}
impl MaxTvar for PolyType {
fn max_tvar(&self) -> Tvar {
[self.vars.max_tvar(), self.expr.max_tvar()].max_tvar()
}
}
impl PolyType {
    /// Renders the kind constraints as `tvar: Kind + Kind, ...`, sorted for
    /// deterministic output.
    fn display_constraints(cons: &TvarKinds) -> String {
        cons.iter()
            // A BTree produces a sorted iterator for
            // deterministic display output
            .collect::<BTreeMap<_, _>>()
            .iter()
            .map(|(&&tv, &kinds)| format!("{}: {}", tv, PolyType::display_kinds(kinds)))
            .collect::<Vec<_>>()
            .join(", ")
    }
    /// Renders a list of kinds as `Kind + Kind + ...`, sorted and deduplicated.
    fn display_kinds(kinds: &[Kind]) -> String {
        kinds
            .iter()
            // Sort kinds with BTree
            .collect::<BTreeSet<_>>()
            .iter()
            .map(|x| x.to_string())
            .collect::<Vec<_>>()
            .join(" + ")
    }
    /// Produces a `PolyType` where the type variables have been normalized to start at 0
    /// (i.e. A), instead of whatever type variables are present in the original.
    ///
    /// Useful for pretty printing the type in error messages.
    pub fn normal(&self) -> PolyType {
        self.clone()
            .fresh(&mut Fresher::from(0), &mut TvarMap::new())
    }
}
/// Helper function that concatenates two vectors into a single vector,
/// dropping any element of `with` that was already present in `vars`.
/// Duplicates that appear only within `with` are kept, matching the
/// retain-then-append behavior.
pub(crate) fn union<T: PartialEq>(mut vars: Vec<T>, with: Vec<T>) -> Vec<T> {
    let original_len = vars.len();
    for item in with {
        // Only compare against the original contents of `vars`.
        if !vars[..original_len].contains(&item) {
            vars.push(item);
        }
    }
    vars
}
/// Helper function that removes all elements in `vars` from `from`,
/// preserving the relative order of the remaining elements.
pub(crate) fn minus<T: PartialEq>(vars: &[T], from: Vec<T>) -> Vec<T> {
    from.into_iter().filter(|tv| !vars.contains(tv)).collect()
}
/// Errors that can be returned during type inference.
/// (Note that these error messages are read by end users.
/// This should be kept in mind when returning one of these errors.)
#[derive(Debug, PartialEq)]
#[allow(missing_docs)]
pub enum Error {
    // Two types that must be equal cannot be unified.
    CannotUnify {
        exp: MonoType,
        act: MonoType,
    },
    // A type does not satisfy a required kind (type class).
    CannotConstrain {
        exp: Kind,
        act: MonoType,
    },
    // A type variable occurs inside the type it unifies with (recursive type).
    OccursCheck(Tvar, MonoType),
    // A record is missing a label required by the expected type.
    MissingLabel(String),
    // A record carries a label the expected type does not have.
    ExtraLabel(String),
    // The same record label has incompatible types on each side.
    CannotUnifyLabel {
        lab: String,
        exp: MonoType,
        act: MonoType,
    },
    // Function-argument errors.
    MissingArgument(String),
    ExtraArgument(String),
    CannotUnifyArgument(String, Box<Error>),
    // Function return types do not unify.
    CannotUnifyReturn {
        exp: MonoType,
        act: MonoType,
    },
    // Pipe (`<-`) argument errors.
    MissingPipeArgument,
    MultiplePipeArguments {
        exp: String,
        act: String,
    },
}
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Freshen the types so messages print normalized variables (A, B, ...).
        let mut fresh = Fresher::from(0);
        match self {
            Error::CannotUnify { exp, act } => write!(
                f,
                "expected {} but found {}",
                exp.clone().fresh(&mut fresh, &mut TvarMap::new()),
                act.clone().fresh(&mut fresh, &mut TvarMap::new()),
            ),
            Error::CannotConstrain { exp, act } => write!(
                f,
                "{} is not {}",
                act.clone().fresh(&mut fresh, &mut TvarMap::new()),
                exp,
            ),
            Error::OccursCheck(tv, ty) => {
                write!(f, "recursive types not supported {} != {}", tv, ty)
            }
            Error::MissingLabel(a) => write!(f, "record is missing label {}", a),
            Error::ExtraLabel(a) => write!(f, "found unexpected label {}", a),
            Error::CannotUnifyLabel { lab, exp, act } => write!(
                f,
                "expected {} but found {} for label {}",
                exp.clone().fresh(&mut fresh, &mut TvarMap::new()),
                act.clone().fresh(&mut fresh, &mut TvarMap::new()),
                lab
            ),
            Error::MissingArgument(x) => write!(f, "missing required argument {}", x),
            Error::ExtraArgument(x) => write!(f, "found unexpected argument {}", x),
            Error::CannotUnifyArgument(x, e) => write!(f, "{} (argument {})", e, x),
            Error::CannotUnifyReturn { exp, act } => write!(
                f,
                "expected {} but found {} for return type",
                exp.clone().fresh(&mut fresh, &mut TvarMap::new()),
                act.clone().fresh(&mut fresh, &mut TvarMap::new())
            ),
            Error::MissingPipeArgument => write!(f, "missing pipe argument"),
            Error::MultiplePipeArguments { exp, act } => {
                write!(f, "expected pipe argument {} but found {}", exp, act)
            }
        }
    }
}
/// Represents a constraint on a type variable to a specific kind (*i.e.*, a type class).
#[derive(Debug, Display, Clone, Copy, PartialEq, Eq, Hash)]
#[allow(missing_docs)]
pub enum Kind {
    Addable,
    Subtractable,
    Divisible,
    Numeric,
    Comparable,
    Equatable,
    Nullable,
    Record,
    Negatable,
    Timeable,
    Stringable,
}
// Kinds are ordered by name so that polytypes are displayed deterministically
// NOTE: this comparison allocates two Strings per call (via the derived
// Display); acceptable because kind lists are tiny and only sorted for display.
impl cmp::Ord for Kind {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        self.to_string().cmp(&other.to_string())
    }
}
// Kinds are ordered by name so that polytypes are displayed deterministically
impl cmp::PartialOrd for Kind {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
/// Represents a Flux type. The type may be unknown, represented as a type variable,
/// or may be a known concrete type.
///
/// Composite types (arrays, dictionaries, records, functions) are boxed to
/// keep the enum itself small.
#[derive(Debug, Display, Clone, PartialEq, Serialize)]
#[allow(missing_docs)]
pub enum MonoType {
    #[display(fmt = "bool")]
    Bool,
    #[display(fmt = "int")]
    Int,
    #[display(fmt = "uint")]
    Uint,
    #[display(fmt = "float")]
    Float,
    #[display(fmt = "string")]
    String,
    #[display(fmt = "duration")]
    Duration,
    #[display(fmt = "time")]
    Time,
    #[display(fmt = "regexp")]
    Regexp,
    #[display(fmt = "bytes")]
    Bytes,
    #[display(fmt = "{}", _0)]
    Var(Tvar),
    #[display(fmt = "{}", _0)]
    Arr(Box<Array>),
    #[display(fmt = "{}", _0)]
    Dict(Box<Dictionary>),
    #[display(fmt = "{}", _0)]
    Record(Box<Record>),
    #[display(fmt = "{}", _0)]
    Fun(Box<Function>),
}
/// An ordered map of string identifiers to monotypes.
pub type MonoTypeMap = SemanticMap<String, MonoType>;
#[allow(missing_docs)]
pub type MonoTypeVecMap = SemanticMap<String, Vec<MonoType>>;
// Borrowed variant used internally when comparing records by label.
#[allow(missing_docs)]
type RefMonoTypeVecMap<'a> = HashMap<&'a String, Vec<&'a MonoType>>;
impl Substitutable for MonoType {
    // Applying a substitution replaces type variables and recurses into
    // composite types; primitive types are unaffected.
    fn apply(self, sub: &Substitution) -> Self {
        match self {
            MonoType::Bool
            | MonoType::Int
            | MonoType::Uint
            | MonoType::Float
            | MonoType::String
            | MonoType::Duration
            | MonoType::Time
            | MonoType::Regexp
            | MonoType::Bytes => self,
            MonoType::Var(tvr) => sub.apply(tvr),
            MonoType::Arr(arr) => MonoType::Arr(Box::new(arr.apply(sub))),
            MonoType::Dict(dict) => MonoType::Dict(Box::new(dict.apply(sub))),
            MonoType::Record(obj) => MonoType::Record(Box::new(obj.apply(sub))),
            MonoType::Fun(fun) => MonoType::Fun(Box::new(fun.apply(sub))),
        }
    }
    // The free variables of a monotype: none for primitives, the variable
    // itself for `Var`, and the union of nested free variables otherwise.
    fn free_vars(&self) -> Vec<Tvar> {
        match self {
            MonoType::Bool
            | MonoType::Int
            | MonoType::Uint
            | MonoType::Float
            | MonoType::String
            | MonoType::Duration
            | MonoType::Time
            | MonoType::Regexp
            | MonoType::Bytes => Vec::new(),
            MonoType::Var(tvr) => vec![*tvr],
            MonoType::Arr(arr) => arr.free_vars(),
            MonoType::Dict(dict) => dict.free_vars(),
            MonoType::Record(obj) => obj.free_vars(),
            MonoType::Fun(fun) => fun.free_vars(),
        }
    }
}
impl MaxTvar for MonoType {
    // The largest type variable occurring anywhere in the type; `Tvar(0)`
    // for primitives that contain no variables.
    fn max_tvar(&self) -> Tvar {
        match self {
            MonoType::Bool
            | MonoType::Int
            | MonoType::Uint
            | MonoType::Float
            | MonoType::String
            | MonoType::Duration
            | MonoType::Time
            | MonoType::Regexp
            | MonoType::Bytes => Tvar(0),
            MonoType::Var(tvr) => tvr.max_tvar(),
            MonoType::Arr(arr) => arr.max_tvar(),
            MonoType::Dict(dict) => dict.max_tvar(),
            MonoType::Record(obj) => obj.max_tvar(),
            MonoType::Fun(fun) => fun.max_tvar(),
        }
    }
}
impl From<Record> for MonoType {
fn from(r: Record) -> MonoType {
MonoType::Record(Box::new(r))
}
}
impl MonoType {
    /// Performs unification on the type with another type.
    /// If successful, results in a solution to the unification problem,
    /// in the form of a substitution. If there is no solution to the
    /// unification problem then unification fails and an error is reported.
    pub fn unify(
        self, // self represents the expected type
        actual: Self,
        cons: &mut TvarKinds,
        f: &mut Fresher,
    ) -> Result<Substitution, Error> {
        match (self, actual) {
            // Identical primitives unify trivially.
            (MonoType::Bool, MonoType::Bool)
            | (MonoType::Int, MonoType::Int)
            | (MonoType::Uint, MonoType::Uint)
            | (MonoType::Float, MonoType::Float)
            | (MonoType::String, MonoType::String)
            | (MonoType::Duration, MonoType::Duration)
            | (MonoType::Time, MonoType::Time)
            | (MonoType::Regexp, MonoType::Regexp)
            | (MonoType::Bytes, MonoType::Bytes) => Ok(Substitution::empty()),
            // A type variable on either side unifies with the other type.
            (MonoType::Var(tv), t) => tv.unify(t, cons),
            (t, MonoType::Var(tv)) => tv.unify(t, cons),
            // Composite types recurse into their respective unify impls.
            (MonoType::Arr(t), MonoType::Arr(s)) => t.unify(*s, cons, f),
            (MonoType::Dict(t), MonoType::Dict(s)) => t.unify(*s, cons, f),
            (MonoType::Record(t), MonoType::Record(s)) => t.unify(*s, cons, f),
            (MonoType::Fun(t), MonoType::Fun(s)) => t.unify(*s, cons, f),
            // Anything else is a type mismatch.
            (exp, act) => Err(Error::CannotUnify { exp, act }),
        }
    }
    /// Validates that the current type meets the constraints of the specified kind.
    pub fn constrain(self, with: Kind, cons: &mut TvarKinds) -> Result<Substitution, Error> {
        match self {
            MonoType::Bool => match with {
                Kind::Equatable | Kind::Nullable | Kind::Stringable => Ok(Substitution::empty()),
                _ => Err(Error::CannotConstrain {
                    act: self,
                    exp: with,
                }),
            },
            MonoType::Int => match with {
                Kind::Addable
                | Kind::Subtractable
                | Kind::Divisible
                | Kind::Numeric
                | Kind::Comparable
                | Kind::Equatable
                | Kind::Nullable
                | Kind::Stringable
                | Kind::Negatable => Ok(Substitution::empty()),
                _ => Err(Error::CannotConstrain {
                    act: self,
                    exp: with,
                }),
            },
            MonoType::Uint => match with {
                Kind::Addable
                | Kind::Subtractable
                | Kind::Divisible
                | Kind::Numeric
                | Kind::Comparable
                | Kind::Equatable
                | Kind::Nullable
                | Kind::Stringable
                | Kind::Negatable => Ok(Substitution::empty()),
                _ => Err(Error::CannotConstrain {
                    act: self,
                    exp: with,
                }),
            },
            MonoType::Float => match with {
                Kind::Addable
                | Kind::Subtractable
                | Kind::Divisible
                | Kind::Numeric
                | Kind::Comparable
                | Kind::Equatable
                | Kind::Nullable
                | Kind::Stringable
                | Kind::Negatable => Ok(Substitution::empty()),
                _ => Err(Error::CannotConstrain {
                    act: self,
                    exp: with,
                }),
            },
            MonoType::String => match with {
                Kind::Addable
                | Kind::Comparable
                | Kind::Equatable
                | Kind::Nullable
                | Kind::Stringable => Ok(Substitution::empty()),
                _ => Err(Error::CannotConstrain {
                    act: self,
                    exp: with,
                }),
            },
            MonoType::Duration => match with {
                Kind::Comparable
                | Kind::Equatable
                | Kind::Nullable
                | Kind::Negatable
                | Kind::Stringable
                | Kind::Timeable => Ok(Substitution::empty()),
                _ => Err(Error::CannotConstrain {
                    act: self,
                    exp: with,
                }),
            },
            MonoType::Time => match with {
                Kind::Comparable
                | Kind::Equatable
                | Kind::Nullable
                | Kind::Timeable
                | Kind::Stringable => Ok(Substitution::empty()),
                _ => Err(Error::CannotConstrain {
                    act: self,
                    exp: with,
                }),
            },
            // Regular expressions satisfy no kinds at all.
            MonoType::Regexp => Err(Error::CannotConstrain {
                act: self,
                exp: with,
            }),
            MonoType::Bytes => match with {
                Kind::Equatable => Ok(Substitution::empty()),
                _ => Err(Error::CannotConstrain {
                    act: self,
                    exp: with,
                }),
            },
            // For a type variable, record the constraint for later checking.
            MonoType::Var(tvr) => {
                tvr.constrain(with, cons);
                Ok(Substitution::empty())
            }
            MonoType::Arr(arr) => arr.constrain(with, cons),
            MonoType::Dict(dict) => dict.constrain(with, cons),
            MonoType::Record(obj) => obj.constrain(with, cons),
            MonoType::Fun(fun) => fun.constrain(with, cons),
        }
    }
    // Reports whether the type variable `tv` occurs anywhere in this type;
    // used by the occurs check to reject recursive types.
    fn contains(&self, tv: Tvar) -> bool {
        match self {
            MonoType::Bool
            | MonoType::Int
            | MonoType::Uint
            | MonoType::Float
            | MonoType::String
            | MonoType::Duration
            | MonoType::Time
            | MonoType::Regexp
            | MonoType::Bytes => false,
            MonoType::Var(tvr) => tv == *tvr,
            MonoType::Arr(arr) => arr.contains(tv),
            MonoType::Dict(dict) => dict.contains(tv),
            MonoType::Record(row) => row.contains(tv),
            MonoType::Fun(fun) => fun.contains(tv),
        }
    }
}
/// `Tvar` stands for *type variable*.
/// A type variable holds an unknown type, before type inference.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
pub struct Tvar(pub u64);
/// A map from type variables to their constraining kinds.
pub type TvarKinds = SemanticMap<Tvar, Vec<Kind>>;
/// A renaming of type variables, used when freshening types.
#[allow(missing_docs)]
pub type TvarMap = SemanticMap<Tvar, Tvar>;
/// The underlying map of a substitution: type variable to monotype.
#[allow(missing_docs)]
pub type SubstitutionMap = SemanticMap<Tvar, MonoType>;
impl fmt::Display for Tvar {
    /// The first ten variables display as the letters `A` through `J`; any
    /// larger variable displays as `t<n>`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.0 <= 9 {
            // 0 => 'A', 1 => 'B', ..., 9 => 'J'
            write!(f, "{}", (b'A' + self.0 as u8) as char)
        } else {
            write!(f, "t{}", self.0)
        }
    }
}
impl MaxTvar for Tvar {
fn max_tvar(&self) -> Tvar {
*self
}
}
impl Tvar {
    /// Unify this type variable with the type `with`, updating the kind
    /// constraints in `cons` and returning the substitution that makes the
    /// two equal.
    fn unify(self, with: MonoType, cons: &mut TvarKinds) -> Result<Substitution, Error> {
        match with {
            MonoType::Var(tv) => {
                if self == tv {
                    // The empty substitution will always
                    // unify a type variable with itself.
                    Ok(Substitution::empty())
                } else {
                    // Unify two distinct type variables.
                    // This will update the kind constraints
                    // associated with these type variables.
                    self.unify_with_tvar(tv, cons)
                }
            }
            _ => {
                if with.contains(self) {
                    // Invalid recursive type
                    Err(Error::OccursCheck(self, with))
                } else {
                    // Unify a type variable with a monotype.
                    // The monotype must satisfy any
                    // constraints placed on the type variable.
                    self.unify_with_type(with, cons)
                }
            }
        }
    }
    /// Unify two distinct type variables by merging their kind constraints
    /// onto `tv` and substituting `self` with `tv`.
    fn unify_with_tvar(self, tv: Tvar, cons: &mut TvarKinds) -> Result<Substitution, Error> {
        // Kind constraints for both type variables
        let kinds = union(
            cons.remove(&self).unwrap_or_default(),
            cons.remove(&tv).unwrap_or_default(),
        );
        if !kinds.is_empty() {
            cons.insert(tv, kinds);
        }
        Ok(Substitution::from(
            semantic_map! {self => MonoType::Var(tv)},
        ))
    }
    /// Unify this type variable with a concrete monotype. The monotype must
    /// satisfy every kind constraint previously placed on the variable.
    fn unify_with_type(self, t: MonoType, cons: &mut TvarKinds) -> Result<Substitution, Error> {
        let sub = Substitution::from(semantic_map! {self => t.clone()});
        match cons.remove(&self) {
            None => Ok(sub),
            Some(kinds) => Ok(sub.merge(kinds.into_iter().try_fold(
                Substitution::empty(),
                |sub, kind| {
                    // The monotype that is being unified with the
                    // tvar must be constrained with the same kinds
                    // as that of the tvar.
                    Ok(sub.merge(t.clone().constrain(kind, cons)?))
                },
            )?)),
        }
    }
    /// Record that this type variable must satisfy kind `with`, avoiding
    /// duplicate entries.
    fn constrain(self, with: Kind, cons: &mut TvarKinds) {
        // The entry API performs a single map lookup where the previous
        // `get_mut` + `insert` pattern needed two.
        let kinds = cons.entry(self).or_default();
        if !kinds.contains(&with) {
            kinds.push(with);
        }
    }
}
/// A homogeneous list type.
#[derive(Debug, Display, Clone, PartialEq, Serialize)]
#[display(fmt = "[{}]", _0)]
pub struct Array(pub MonoType);
impl Substitutable for Array {
    // Substitution and free variables simply delegate to the element type.
    fn apply(self, sub: &Substitution) -> Self {
        Array(self.0.apply(sub))
    }
    fn free_vars(&self) -> Vec<Tvar> {
        self.0.free_vars()
    }
}
impl MaxTvar for Array {
    fn max_tvar(&self) -> Tvar {
        self.0.max_tvar()
    }
}
impl Array {
    // Two arrays unify iff their element types unify.
    // self represents the expected type.
    fn unify(
        self,
        with: Self,
        cons: &mut TvarKinds,
        f: &mut Fresher,
    ) -> Result<Substitution, Error> {
        self.0.unify(with.0, cons, f)
    }
    // An array is Equatable iff its element type is; no other kind applies.
    fn constrain(self, with: Kind, cons: &mut TvarKinds) -> Result<Substitution, Error> {
        match with {
            Kind::Equatable => self.0.constrain(with, cons),
            _ => Err(Error::CannotConstrain {
                act: MonoType::Arr(Box::new(self)),
                exp: with,
            }),
        }
    }
    // Occurs check: delegates to the element type.
    fn contains(&self, tv: Tvar) -> bool {
        self.0.contains(tv)
    }
}
/// A key-value data structure.
#[derive(Debug, Display, Clone, PartialEq, Serialize)]
#[display(fmt = "[{}:{}]", key, val)]
pub struct Dictionary {
    /// Type of key.
    pub key: MonoType,
    /// Type of value.
    pub val: MonoType,
}
impl Substitutable for Dictionary {
    // Substitution applies to both key and value types.
    fn apply(self, sub: &Substitution) -> Self {
        Dictionary {
            key: self.key.apply(sub),
            val: self.val.apply(sub),
        }
    }
    fn free_vars(&self) -> Vec<Tvar> {
        union(self.key.free_vars(), self.val.free_vars())
    }
}
impl MaxTvar for Dictionary {
    fn max_tvar(&self) -> Tvar {
        [self.key.max_tvar(), self.val.max_tvar()].max_tvar()
    }
}
impl Dictionary {
    // Two dictionaries unify iff their keys unify and their values unify
    // (with the key substitution applied first).
    fn unify(
        self,
        actual: Self,
        cons: &mut TvarKinds,
        f: &mut Fresher,
    ) -> Result<Substitution, Error> {
        let sub = self.key.unify(actual.key, cons, f)?;
        apply_then_unify(self.val, actual.val, sub, cons, f)
    }
    // Dictionaries satisfy no kinds.
    fn constrain(self, with: Kind, _: &mut TvarKinds) -> Result<Substitution, Error> {
        Err(Error::CannotConstrain {
            act: MonoType::Dict(Box::new(self)),
            exp: with,
        })
    }
    // Occurs check over both key and value types.
    fn contains(&self, tv: Tvar) -> bool {
        self.key.contains(tv) || self.val.contains(tv)
    }
}
/// An extensible record type.
///
/// A record is either `Empty`, meaning it has no properties,
/// or it is an extension of a record.
///
/// A record may extend what is referred to as a *record
/// variable*. A record variable is a type variable that
/// represents an unknown record type.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type")]
pub enum Record {
    /// A record that has no properties.
    Empty,
    /// Extension of a record.
    Extension {
        /// The [`Property`] that extends the record type.
        head: Property,
        /// `tail` is the record variable.
        tail: MonoType,
    },
}
impl fmt::Display for Record {
    // Renders as `{props}` or `{Var with props}` when the record extends a
    // record variable.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("{")?;
        let mut s = String::new();
        let tvar = self.format(&mut s)?;
        if let Some(tv) = tvar {
            write!(f, "{} with ", tv)?;
        }
        if s.len() > 2 {
            // remove trailing ', ' delimiter
            s.truncate(s.len() - 2);
        }
        f.write_str(s.as_str())?;
        f.write_str("}")
    }
}
impl cmp::PartialEq for Record {
    // Records are compared structurally but label-order-insensitively:
    // each side is flattened into a map from label to the list of types
    // bound to that label (duplicates are significant and ordered), plus
    // the optional trailing record variable. Two records are equal iff the
    // maps and tail variables match.
    fn eq(mut self: &Self, mut other: &Self) -> bool {
        let mut a = RefMonoTypeVecMap::new();
        let t = loop {
            match self {
                Record::Empty => break None,
                Record::Extension {
                    head,
                    tail: MonoType::Record(o),
                } => {
                    a.entry(&head.k).or_insert_with(Vec::new).push(&head.v);
                    self = o;
                }
                Record::Extension {
                    head,
                    tail: MonoType::Var(t),
                } => {
                    a.entry(&head.k).or_insert_with(Vec::new).push(&head.v);
                    break Some(t);
                }
                // Any other tail shape is malformed; treat as unequal.
                _ => return false,
            }
        };
        let mut b = RefMonoTypeVecMap::new();
        let v = loop {
            match other {
                Record::Empty => break None,
                Record::Extension {
                    head,
                    tail: MonoType::Record(o),
                } => {
                    b.entry(&head.k).or_insert_with(Vec::new).push(&head.v);
                    other = o;
                }
                Record::Extension {
                    head,
                    tail: MonoType::Var(t),
                } => {
                    b.entry(&head.k).or_insert_with(Vec::new).push(&head.v);
                    break Some(t);
                }
                _ => return false,
            }
        };
        t == v && a == b
    }
}
impl Substitutable for Record {
    // Substitution recurses through the head property and the tail.
    fn apply(self, sub: &Substitution) -> Self {
        match self {
            Record::Empty => Record::Empty,
            Record::Extension { head, tail } => Record::Extension {
                head: head.apply(sub),
                tail: tail.apply(sub),
            },
        }
    }
    fn free_vars(&self) -> Vec<Tvar> {
        match self {
            Record::Empty => Vec::new(),
            Record::Extension { head, tail } => union(tail.free_vars(), head.v.free_vars()),
        }
    }
}
impl MaxTvar for Record {
    fn max_tvar(&self) -> Tvar {
        match self {
            Record::Empty => Tvar(0),
            Record::Extension { head, tail } => [head.max_tvar(), tail.max_tvar()].max_tvar(),
        }
    }
}
#[allow(clippy::many_single_char_names)]
impl Record {
// Below are the rules for record unification. In what follows monotypes
// are denoted using lowercase letters, and type variables are denoted
// by a lowercase letter preceded by an apostrophe `'`.
//
// `t = u` is read as:
//
// type t unifies with type u
//
// `t = u => a = b` is read as:
//
// if t unifies with u, then a must unify with b
//
// 1. Two empty records always unify, producing an empty substitution.
// 2. {a: t | 'r} = {b: u | 'r} => error
// 3. {a: t | 'r} = {a: u | 'r} => t = u
// 4. {a: t | r} = {a: u | s} => t = u, r = s
// 5. {a: t | r} = {b: u | s} => r = {b: u | 'v}, s = {a: t | 'v}
//
// Note rule 2. states that if two records extend the same type variable
// they must have the same property name otherwise they cannot unify.
//
// self represents the expected type.
//
fn unify(
self,
actual: Self,
cons: &mut TvarKinds,
f: &mut Fresher,
) -> Result<Substitution, Error> {
match (self.clone(), actual.clone()) {
(Record::Empty, Record::Empty) => Ok(Substitution::empty()),
(
Record::Extension {
head: Property { k: a, v: t },
tail: MonoType::Var(l),
},
Record::Extension {
head: Property { k: b, v: u },
tail: MonoType::Var(r),
},
) if a == b && l == r => match t.clone().unify(u.clone(), cons, f) {
Err(_) => Err(Error::CannotUnifyLabel {
lab: a,
exp: t,
act: u,
}),
Ok(sub) => Ok(sub),
},
(
Record::Extension {
head: Property { k: a, .. },
tail: MonoType::Var(l),
},
Record::Extension {
head: Property { k: b, .. },
tail: MonoType::Var(r),
},
) if a != b && l == r => Err(Error::CannotUnify {
exp: MonoType::Record(Box::new(self)),
act: MonoType::Record(Box::new(actual)),
}),
(
Record::Extension {
head: Property { k: a, v: t },
tail: l,
},
Record::Extension {
head: Property { k: b, v: u },
tail: r,
},
) if a == b => {
let sub = t.unify(u, cons, f)?;
apply_then_unify(l, r, sub, cons, f)
}
(
Record::Extension {
head: Property { k: a, v: t },
tail: l,
},
Record::Extension {
head: Property { k: b, v: u },
tail: r,
},
) if a != b => {
let var = f.fresh();
let exp = MonoType::from(Record::Extension {
head: Property { k: a, v: t },
tail: MonoType::Var(var),
});
let act = MonoType::from(Record::Extension {
head: Property { k: b, v: u },
tail: MonoType::Var(var),
});
let sub = l.unify(act, cons, f)?;
apply_then_unify(exp, r, sub, cons, f)
}
// If we are expecting {a: u | r} but find {}, label `a` is missing.
(
Record::Extension {
head: Property { k: a, .. },
..
},
Record::Empty,
) => Err(Error::MissingLabel(a)),
// If we are expecting {} but find {a: u | r}, label `a` is extra.
(
Record::Empty,
Record::Extension {
head: Property { k: a, .. },
..
},
) => Err(Error::ExtraLabel(a)),
_ => Err(Error::CannotUnify {
exp: MonoType::Record(Box::new(self)),
act: MonoType::Record(Box::new(actual)),
}),
}
}
fn constrain(self, with: Kind, cons: &mut TvarKinds) -> Result<Substitution, Error> {
match with {
Kind::Record => Ok(Substitution::empty()),
Kind::Equatable => match self {
Record::Empty => Ok(Substitution::empty()),
Record::Extension { head, tail } => {
let sub = head.v.constrain(with, cons)?;
Ok(sub.merge(tail.constrain(with, cons)?))
}
},
_ => Err(Error::CannotConstrain {
act: MonoType::Record(Box::new(self)),
exp: with,
}),
}
}
fn contains(&self, tv: Tvar) -> bool {
match self {
Record::Empty => false,
Record::Extension { head, tail } => head.v.contains(tv) && tail.contains(tv),
}
}
fn format(&self, f: &mut String) -> Result<Option<Tvar>, fmt::Error> {
match self {
Record::Empty => Ok(None),
Record::Extension { head, tail } => match tail {
MonoType::Var(tv) => {
write!(f, "{}, ", head)?;
Ok(Some(*tv))
}
MonoType::Record(obj) => {
write!(f, "{}, ", head)?;
obj.format(f)
}
_ => Err(fmt::Error),
},
}
}
}
/// Unification requires that the current substitution be applied
/// to both sides of a constraint before unifying.
///
/// This helper applies `sub` to `exp` and `act` before unifying them; the
/// substitution produced by that unification is merged with the input
/// substitution before being returned.
fn apply_then_unify(
    exp: MonoType,
    act: MonoType,
    sub: Substitution,
    cons: &mut TvarKinds,
    f: &mut Fresher,
) -> Result<Substitution, Error> {
    let unified = exp.apply(&sub).unify(act.apply(&sub), cons, f)?;
    Ok(sub.merge(unified))
}
/// A key-value pair representing a property type in a record.
#[derive(Debug, Display, Clone, PartialEq, Serialize)]
#[display(fmt = "{}:{}", k, v)]
#[allow(missing_docs)]
pub struct Property {
    // The property's label.
    pub k: String,
    // The property's type.
    pub v: MonoType,
}
impl Substitutable for Property {
    // Substitution applies to the value type only; the label is untouched.
    fn apply(self, sub: &Substitution) -> Self {
        Property {
            k: self.k,
            v: self.v.apply(sub),
        }
    }
    fn free_vars(&self) -> Vec<Tvar> {
        self.v.free_vars()
    }
}
impl MaxTvar for Property {
    fn max_tvar(&self) -> Tvar {
        self.v.max_tvar()
    }
}
/// Represents a function type.
///
/// A function type is defined by a set of required arguments,
/// a set of optional arguments, an optional pipe argument, and
/// a required return type.
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct Function {
    /// Required arguments to a function.
    pub req: MonoTypeMap,
    /// Optional arguments to a function.
    pub opt: MonoTypeMap,
    /// An optional pipe argument.
    pub pipe: Option<Property>,
    /// Required return type.
    pub retn: MonoType,
}
impl fmt::Display for Function {
    // Renders as `(<-pipe, req, ?opt) => retn`; optional args are prefixed
    // with `?` and the pipe arg with `<-`, each group sorted by name.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let required = self
            .req
            .iter()
            // Sort args with BTree
            .collect::<BTreeMap<_, _>>()
            .iter()
            .map(|(&k, &v)| Property {
                k: k.clone(),
                v: v.clone(),
            })
            .collect::<Vec<_>>();
        let optional = self
            .opt
            .iter()
            // Sort args with BTree
            .collect::<BTreeMap<_, _>>()
            .iter()
            .map(|(&k, &v)| Property {
                k: String::from("?") + k,
                v: v.clone(),
            })
            .collect::<Vec<_>>();
        let pipe = match &self.pipe {
            Some(pipe) => {
                // An unnamed pipe arg already displays as `<-`; a named one
                // gets the `<-` prefix prepended to its name.
                if pipe.k == "<-" {
                    vec![pipe.clone()]
                } else {
                    vec![Property {
                        k: String::from("<-") + &pipe.k,
                        v: pipe.v.clone(),
                    }]
                }
            }
            None => vec![],
        };
        write!(
            f,
            "({}) => {}",
            pipe.iter()
                .chain(required.iter().chain(optional.iter()))
                .map(|x| x.to_string())
                .collect::<Vec<_>>()
                .join(", "),
            self.retn
        )
    }
}
#[allow(clippy::implicit_hasher)]
impl<T: Substitutable> Substitutable for SemanticMap<String, T> {
    // Substitution applies to every value; keys are untouched.
    fn apply(self, sub: &Substitution) -> Self {
        self.into_iter().map(|(k, v)| (k, v.apply(sub))).collect()
    }
    fn free_vars(&self) -> Vec<Tvar> {
        self.values()
            .fold(Vec::new(), |vars, t| union(vars, t.free_vars()))
    }
}
impl<T: Substitutable> Substitutable for Option<T> {
    // `None` is unaffected; `Some` applies to the inner value.
    fn apply(self, sub: &Substitution) -> Self {
        self.map(|t| t.apply(sub))
    }
    fn free_vars(&self) -> Vec<Tvar> {
        match self {
            Some(t) => t.free_vars(),
            None => Vec::new(),
        }
    }
}
impl Substitutable for Function {
    // Substitution recurses through all argument groups and the return type.
    fn apply(self, sub: &Substitution) -> Self {
        Function {
            req: self.req.apply(sub),
            opt: self.opt.apply(sub),
            pipe: self.pipe.apply(sub),
            retn: self.retn.apply(sub),
        }
    }
    fn free_vars(&self) -> Vec<Tvar> {
        union(
            self.req.free_vars(),
            union(
                self.opt.free_vars(),
                union(self.pipe.free_vars(), self.retn.free_vars()),
            ),
        )
    }
}
impl<U, T: MaxTvar> MaxTvar for SemanticMap<U, T> {
    // Largest tvar over all values; Tvar(0) for an empty map.
    fn max_tvar(&self) -> Tvar {
        self.iter()
            .map(|(_, t)| t.max_tvar())
            .fold(Tvar(0), |max, tv| if tv > max { tv } else { max })
    }
}
impl<T: MaxTvar> MaxTvar for Option<T> {
    fn max_tvar(&self) -> Tvar {
        match self {
            None => Tvar(0),
            Some(t) => t.max_tvar(),
        }
    }
}
impl MaxTvar for Function {
    fn max_tvar(&self) -> Tvar {
        [
            self.req.max_tvar(),
            self.opt.max_tvar(),
            self.pipe.max_tvar(),
            self.retn.max_tvar(),
        ]
        .max_tvar()
    }
}
impl Function {
/// Given two function types f and g, the process for unifying their arguments is as follows:
/// 1. If a required arg of f is not present in the arguments of g,
/// otherwise unify both argument types.
/// 2. If an optional arg of f is not present in the arguments of g, continue,
/// otherwise unify both argument types (repeat for g).
/// 3. Lastly unify pipe args. Note that pipe arguments are optional.
/// However if a pipe arg was used in a calling context, i.e it's an un-named pipe arg,
/// then the other type must specify a pipe arg too, otherwise unification fails.
///
/// For pipe arguments, it becomes quite tricky. Take these statements:
///
/// 1. f = (a=<-, b) => {...}
/// 2. 0 |> f(b: 1)
/// 3. f(a: 0, b: 1)
/// 4. f = (d=<-, b, c=0) => {...}
///
/// 2 and 3 are two equivalent ways of invoking 1, and they should both unify.
/// `a` is the named pipe argument in 1. In 2, the pipe argument is unnamed.
///
/// Unify 1 and 2: one of the required arguments of 1 will not be in its call,
/// so, we should check for the pipe argument and succeed. If we do the other way around (unify
/// 2 with 1), the unnamed pipe argument unifies with the other pipe argument.
///
/// Unify 1 and 3: no problem, required arguments are satisfied. Take care that, if you unify
/// 3 with 1, you will find `a` in 1's pipe argument.
///
/// Unify 1 and 4: should fail because `d` != `a`.
///
/// Unify 2 and 3: should fail because `a` is not in the arguments of 2.
///
/// Unify 2 and 4: should succeed, the same as 1 and 2.
///
/// Unify 3 and 4: should fail because `a` is not in the arguments of 4.
///
/// self represents the expected type.
fn unify(
self,
actual: Self,
cons: &mut TvarKinds,
fresh: &mut Fresher,
) -> Result<Substitution, Error> {
// Some aliasing for coherence with the doc.
let mut f = self;
let mut g = actual;
// Fix pipe arguments:
// Make them required arguments with the correct name.
match (f.pipe, g.pipe) {
// Both functions have pipe arguments.
(Some(fp), Some(gp)) => {
if fp.k != "<-" && gp.k != "<-" && fp.k != gp.k {
// Both are named and the name differs, fail unification.
return Err(Error::MultiplePipeArguments {
exp: fp.k,
act: gp.k,
});
} else {
// At least one is unnamed or they are both named with the same name.
// This means they should match. Enforce this condition by inserting
// the pipe argument into the required ones with the same key.
f.req.insert(fp.k.clone(), fp.v);
g.req.insert(fp.k, gp.v);
}
}
// F has a pipe argument and g does not.
(Some(fp), None) => {
if fp.k == "<-" {
// The pipe argument is unnamed and g does not have one.
// Fail unification.
return Err(Error::MissingPipeArgument);
} else {
// This is a named argument, simply put it into the required ones.
f.req.insert(fp.k, fp.v);
}
}
// G has a pipe argument and f does not.
(None, Some(gp)) => {
if gp.k == "<-" {
// The pipe argument is unnamed and f does not have one.
// Fail unification.
return Err(Error::MissingPipeArgument);
} else {
// This is a named argument, simply put it into the required ones.
g.req.insert(gp.k, gp.v);
}
}
// Nothing to do.
(None, None) => (),
}
// Now that f has not been consumed yet, check that every required argument in g is in f too.
for (name, _) in g.req.iter() {
if !f.req.contains_key(name) && !f.opt.contains_key(name) {
return Err(Error::ExtraArgument(String::from(name)));
}
}
let mut sub = Substitution::empty();
// Unify f's required arguments.
for (name, exp) in f.req.into_iter() {
if let Some(act) = g.req.remove(&name) {
// The required argument is in g's required arguments.
sub = match apply_then_unify(exp.clone(), act.clone(), sub, cons, fresh) {
Err(e) => Err(Error::CannotUnifyArgument(name, Box::new(e))),
Ok(sub) => Ok(sub),
}?;
} else if let Some(act) = g.opt.remove(&name) {
// The required argument is in g's optional arguments.
sub = match apply_then_unify(exp.clone(), act.clone(), sub, cons, fresh) {
Err(e) => Err(Error::CannotUnifyArgument(name, Box::new(e))),
Ok(sub) => Ok(sub),
}?;
} else {
return Err(Error::MissingArgument(name));
}
}
// Unify f's optional arguments.
for (name, exp) in f.opt.into_iter() {
if let Some(act) = g.req.remove(&name) {
sub = match apply_then_unify(exp.clone(), act.clone(), sub, cons, fresh) {
Err(e) => Err(Error::CannotUnifyArgument(name, Box::new(e))),
Ok(sub) => Ok(sub),
}?;
} else if let Some(act) = g.opt.remove(&name) {
sub = match apply_then_unify(exp.clone(), act.clone(), sub, cons, fresh) {
Err(e) => Err(Error::CannotUnifyArgument(name, Box::new(e))),
Ok(sub) => Ok(sub),
}?;
}
}
// Unify return types.
match apply_then_unify(f.retn.clone(), g.retn.clone(), sub, cons, fresh) {
Err(_) => Err(Error::CannotUnifyReturn {
exp: f.retn,
act: g.retn,
}),
Ok(sub) => Ok(sub),
}
}
fn constrain(self, with: Kind, _: &mut TvarKinds) -> Result<Substitution, Error> {
Err(Error::CannotConstrain {
act: MonoType::Fun(Box::new(self)),
exp: with,
})
}
fn contains(&self, tv: Tvar) -> bool {
if let Some(pipe) = &self.pipe {
self.req.values().any(|t| t.contains(tv))
|| self.opt.values().any(|t| t.contains(tv))
|| pipe.v.contains(tv)
|| self.retn.contains(tv)
} else {
self.req.values().any(|t| t.contains(tv))
|| self.opt.values().any(|t| t.contains(tv))
|| self.retn.contains(tv)
}
}
}
/// Trait for returning the maximum type variable of a type.
pub trait MaxTvar {
    /// Return the maximum type variable of a type.
    ///
    /// NOTE(review): implementations presumably scan all type variables
    /// occurring within the type — confirm against the individual impls.
    fn max_tvar(&self) -> Tvar;
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ast::get_err_type_expression;
use crate::parser;
use crate::semantic::convert::convert_polytype;
/// `polytype` is a utility method that returns a `PolyType` from a string.
pub fn polytype(typ: &str) -> PolyType {
let mut p = parser::Parser::new(typ);
let typ_expr = p.parse_type_expression();
let err = get_err_type_expression(typ_expr.clone());
if err != "" {
panic!("TypeExpression parsing failed for {}. {:?}", typ, err);
}
convert_polytype(typ_expr, &mut Fresher::default()).unwrap()
}
#[test]
fn display_kind_addable() {
assert!(Kind::Addable.to_string() == "Addable");
}
#[test]
fn display_kind_subtractable() {
assert!(Kind::Subtractable.to_string() == "Subtractable");
}
#[test]
fn display_kind_divisible() {
assert!(Kind::Divisible.to_string() == "Divisible");
}
#[test]
fn display_kind_numeric() {
assert!(Kind::Numeric.to_string() == "Numeric");
}
#[test]
fn display_kind_comparable() {
assert!(Kind::Comparable.to_string() == "Comparable");
}
#[test]
fn display_kind_equatable() {
assert!(Kind::Equatable.to_string() == "Equatable");
}
#[test]
fn display_kind_nullable() {
assert!(Kind::Nullable.to_string() == "Nullable");
}
#[test]
fn display_kind_row() {
assert!(Kind::Record.to_string() == "Record");
}
#[test]
fn display_kind_stringable() {
assert!(Kind::Stringable.to_string() == "Stringable");
}
#[test]
fn display_type_bool() {
assert_eq!("bool", MonoType::Bool.to_string());
}
#[test]
fn display_type_int() {
assert_eq!("int", MonoType::Int.to_string());
}
#[test]
fn display_type_uint() {
assert_eq!("uint", MonoType::Uint.to_string());
}
#[test]
fn display_type_float() {
assert_eq!("float", MonoType::Float.to_string());
}
#[test]
fn display_type_string() {
assert_eq!("string", MonoType::String.to_string());
}
#[test]
fn display_type_duration() {
assert_eq!("duration", MonoType::Duration.to_string());
}
#[test]
fn display_type_time() {
assert_eq!("time", MonoType::Time.to_string());
}
#[test]
fn display_type_regexp() {
assert_eq!("regexp", MonoType::Regexp.to_string());
}
#[test]
fn display_type_bytes() {
assert_eq!("bytes", MonoType::Bytes.to_string());
}
#[test]
fn display_type_tvar() {
assert_eq!("t10", MonoType::Var(Tvar(10)).to_string());
}
#[test]
fn display_type_array() {
assert_eq!(
"[int]",
MonoType::Arr(Box::new(Array(MonoType::Int))).to_string()
);
}
#[test]
fn display_type_record() {
assert_eq!(
"{A with a:int, b:string}",
Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Var(Tvar(0)),
})),
}
.to_string()
);
assert_eq!(
"{a:int, b:string}",
Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Record(Box::new(Record::Empty)),
})),
}
.to_string()
);
}
#[test]
fn display_type_function() {
assert_eq!(
"() => int",
Function {
req: MonoTypeMap::new(),
opt: MonoTypeMap::new(),
pipe: None,
retn: MonoType::Int,
}
.to_string()
);
assert_eq!(
"(<-:int) => int",
Function {
req: MonoTypeMap::new(),
opt: MonoTypeMap::new(),
pipe: Some(Property {
k: String::from("<-"),
v: MonoType::Int,
}),
retn: MonoType::Int,
}
.to_string()
);
assert_eq!(
"(<-a:int) => int",
Function {
req: MonoTypeMap::new(),
opt: MonoTypeMap::new(),
pipe: Some(Property {
k: String::from("a"),
v: MonoType::Int,
}),
retn: MonoType::Int,
}
.to_string()
);
assert_eq!(
"(<-:int, a:int, b:int) => int",
Function {
req: semantic_map! {
String::from("a") => MonoType::Int,
String::from("b") => MonoType::Int,
},
opt: MonoTypeMap::new(),
pipe: Some(Property {
k: String::from("<-"),
v: MonoType::Int,
}),
retn: MonoType::Int,
}
.to_string()
);
assert_eq!(
"(<-:int, ?a:int, ?b:int) => int",
Function {
req: MonoTypeMap::new(),
opt: semantic_map! {
String::from("a") => MonoType::Int,
String::from("b") => MonoType::Int,
},
pipe: Some(Property {
k: String::from("<-"),
v: MonoType::Int,
}),
retn: MonoType::Int,
}
.to_string()
);
assert_eq!(
"(<-:int, a:int, b:int, ?c:int, ?d:int) => int",
Function {
req: semantic_map! {
String::from("a") => MonoType::Int,
String::from("b") => MonoType::Int,
},
opt: semantic_map! {
String::from("c") => MonoType::Int,
String::from("d") => MonoType::Int,
},
pipe: Some(Property {
k: String::from("<-"),
v: MonoType::Int,
}),
retn: MonoType::Int,
}
.to_string()
);
assert_eq!(
"(a:int, ?b:bool) => int",
Function {
req: semantic_map! {
String::from("a") => MonoType::Int,
},
opt: semantic_map! {
String::from("b") => MonoType::Bool,
},
pipe: None,
retn: MonoType::Int,
}
.to_string()
);
assert_eq!(
"(<-a:int, b:int, c:int, ?d:bool) => int",
Function {
req: semantic_map! {
String::from("b") => MonoType::Int,
String::from("c") => MonoType::Int,
},
opt: semantic_map! {
String::from("d") => MonoType::Bool,
},
pipe: Some(Property {
k: String::from("a"),
v: MonoType::Int,
}),
retn: MonoType::Int,
}
.to_string()
);
}
#[test]
fn display_polytype() {
assert_eq!(
"int",
PolyType {
vars: Vec::new(),
cons: TvarKinds::new(),
expr: MonoType::Int,
}
.to_string(),
);
assert_eq!(
"(x:A) => A",
PolyType {
vars: vec![Tvar(0)],
cons: TvarKinds::new(),
expr: MonoType::Fun(Box::new(Function {
req: semantic_map! {
String::from("x") => MonoType::Var(Tvar(0)),
},
opt: MonoTypeMap::new(),
pipe: None,
retn: MonoType::Var(Tvar(0)),
})),
}
.to_string(),
);
assert_eq!(
"(x:A, y:B) => {x:A, y:B}",
PolyType {
vars: vec![Tvar(0), Tvar(1)],
cons: TvarKinds::new(),
expr: MonoType::Fun(Box::new(Function {
req: semantic_map! {
String::from("x") => MonoType::Var(Tvar(0)),
String::from("y") => MonoType::Var(Tvar(1)),
},
opt: MonoTypeMap::new(),
pipe: None,
retn: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("x"),
v: MonoType::Var(Tvar(0)),
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("y"),
v: MonoType::Var(Tvar(1)),
},
tail: MonoType::Record(Box::new(Record::Empty)),
})),
})),
})),
}
.to_string(),
);
assert_eq!(
"(a:A, b:A) => A where A: Addable",
PolyType {
vars: vec![Tvar(0)],
cons: semantic_map! {Tvar(0) => vec![Kind::Addable]},
expr: MonoType::Fun(Box::new(Function {
req: semantic_map! {
String::from("a") => MonoType::Var(Tvar(0)),
String::from("b") => MonoType::Var(Tvar(0)),
},
opt: MonoTypeMap::new(),
pipe: None,
retn: MonoType::Var(Tvar(0)),
})),
}
.to_string(),
);
assert_eq!(
"(x:A, y:B) => {x:A, y:B} where A: Addable, B: Divisible",
PolyType {
vars: vec![Tvar(0), Tvar(1)],
cons: semantic_map! {
Tvar(0) => vec![Kind::Addable],
Tvar(1) => vec![Kind::Divisible],
},
expr: MonoType::Fun(Box::new(Function {
req: semantic_map! {
String::from("x") => MonoType::Var(Tvar(0)),
String::from("y") => MonoType::Var(Tvar(1)),
},
opt: MonoTypeMap::new(),
pipe: None,
retn: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("x"),
v: MonoType::Var(Tvar(0)),
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("y"),
v: MonoType::Var(Tvar(1)),
},
tail: MonoType::Record(Box::new(Record::Empty)),
})),
})),
})),
}
.to_string(),
);
assert_eq!(
"(x:A, y:B) => {x:A, y:B} where A: Comparable + Equatable, B: Addable + Divisible",
PolyType {
vars: vec![Tvar(0), Tvar(1)],
cons: semantic_map! {
Tvar(0) => vec![Kind::Comparable, Kind::Equatable],
Tvar(1) => vec![Kind::Addable, Kind::Divisible],
},
expr: MonoType::Fun(Box::new(Function {
req: semantic_map! {
String::from("x") => MonoType::Var(Tvar(0)),
String::from("y") => MonoType::Var(Tvar(1)),
},
opt: MonoTypeMap::new(),
pipe: None,
retn: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("x"),
v: MonoType::Var(Tvar(0)),
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("y"),
v: MonoType::Var(Tvar(1)),
},
tail: MonoType::Record(Box::new(Record::Empty)),
})),
})),
})),
}
.to_string(),
);
}
#[test]
fn compare_records() {
assert_eq!(
// {A with a:int, b:string}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Var(Tvar(0)),
})),
})),
// {A with b:string, a:int}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Var(Tvar(0)),
})),
})),
);
assert_eq!(
// {A with a:int, b:string, b:int, c:float}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("c"),
v: MonoType::Float,
},
tail: MonoType::Var(Tvar(0)),
})),
})),
})),
})),
// {A with c:float, b:string, b:int, a:int}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("c"),
v: MonoType::Float,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Var(Tvar(0)),
})),
})),
})),
})),
);
assert_ne!(
// {A with a:int, b:string, b:int, c:float}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("c"),
v: MonoType::Float,
},
tail: MonoType::Var(Tvar(0)),
})),
})),
})),
})),
// {A with a:int, b:int, b:string, c:float}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("c"),
v: MonoType::Float,
},
tail: MonoType::Var(Tvar(0)),
})),
})),
})),
})),
);
assert_ne!(
// {a:int, b:string}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::String,
},
tail: MonoType::Record(Box::new(Record::Empty)),
})),
})),
// {b:int, a:int}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("b"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Empty)),
})),
})),
);
assert_ne!(
// {a:int}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Record(Box::new(Record::Empty)),
})),
// {A with a:int}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Var(Tvar(0)),
})),
);
assert_ne!(
// {A with a:int}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Var(Tvar(0)),
})),
// {B with a:int}
MonoType::Record(Box::new(Record::Extension {
head: Property {
k: String::from("a"),
v: MonoType::Int,
},
tail: MonoType::Var(Tvar(1)),
})),
);
}
#[test]
fn unify_ints() {
let sub = MonoType::Int
.unify(
MonoType::Int,
&mut TvarKinds::new(),
&mut Fresher::default(),
)
.unwrap();
assert_eq!(sub, Substitution::empty());
}
#[test]
fn constrain_ints() {
let allowable_cons = vec![
Kind::Addable,
Kind::Subtractable,
Kind::Divisible,
Kind::Numeric,
Kind::Comparable,
Kind::Equatable,
Kind::Nullable,
Kind::Stringable,
];
for c in allowable_cons {
let sub = MonoType::Int.constrain(c, &mut TvarKinds::new());
assert_eq!(Ok(Substitution::empty()), sub);
}
let sub = MonoType::Int.constrain(Kind::Record, &mut TvarKinds::new());
assert_eq!(
Err(Error::CannotConstrain {
act: MonoType::Int,
exp: Kind::Record
}),
sub
);
}
#[test]
fn constrain_rows() {
let sub = Record::Empty.constrain(Kind::Record, &mut TvarKinds::new());
assert_eq!(Ok(Substitution::empty()), sub);
let unallowable_cons = vec![
Kind::Addable,
Kind::Subtractable,
Kind::Divisible,
Kind::Numeric,
Kind::Comparable,
Kind::Nullable,
];
for c in unallowable_cons {
let sub = Record::Empty.constrain(c, &mut TvarKinds::new());
assert_eq!(
Err(Error::CannotConstrain {
act: MonoType::Record(Box::new(Record::Empty)),
exp: c
}),
sub
);
}
}
#[test]
fn unify_error() {
let err = MonoType::Int
.unify(
MonoType::String,
&mut TvarKinds::new(),
&mut Fresher::default(),
)
.unwrap_err();
assert_eq!(
err.to_string(),
String::from("expected int but found string"),
);
}
#[test]
fn unify_tvars() {
let sub = MonoType::Var(Tvar(0))
.unify(
MonoType::Var(Tvar(1)),
&mut TvarKinds::new(),
&mut Fresher::default(),
)
.unwrap();
assert_eq!(
sub,
Substitution::from(semantic_map! {Tvar(0) => MonoType::Var(Tvar(1))}),
);
}
#[test]
fn unify_constrained_tvars() {
let mut cons = semantic_map! {Tvar(0) => vec![Kind::Addable, Kind::Divisible]};
let sub = MonoType::Var(Tvar(0))
.unify(MonoType::Var(Tvar(1)), &mut cons, &mut Fresher::default())
.unwrap();
assert_eq!(
sub,
Substitution::from(semantic_map! {Tvar(0) => MonoType::Var(Tvar(1))})
);
assert_eq!(
cons,
semantic_map! {Tvar(1) => vec![Kind::Addable, Kind::Divisible]},
);
}
#[test]
fn cannot_unify_functions() {
// g-required and g-optional arguments do not contain a f-required argument (and viceversa).
let f = polytype("(a: A, b: A, ?c: B) => A where A: Addable, B: Divisible ");
let g = polytype("(d: C, ?e: C) => C where C: Addable ");
if let (
PolyType {
vars: _,
cons: f_cons,
expr: MonoType::Fun(f),
},
PolyType {
vars: _,
cons: g_cons,
expr: MonoType::Fun(g),
},
) = (f, g)
{
// this extends the first map with the second by generating a new one.
let mut cons = f_cons.into_iter().chain(g_cons).collect();
let res = f
.clone()
.unify(*g.clone(), &mut cons, &mut Fresher::default());
assert!(res.is_err());
let res = g
.clone()
.unify(*f.clone(), &mut cons, &mut Fresher::default());
assert!(res.is_err());
} else {
panic!("the monotypes under examination are not functions");
}
// f has a pipe argument, but g does not (and viceversa).
let f = polytype("(<-pip:A, a: B) => A where A: Addable, B: Divisible ");
let g = polytype("(a: C) => C where C: Addable ");
if let (
PolyType {
vars: _,
cons: f_cons,
expr: MonoType::Fun(f),
},
PolyType {
vars: _,
cons: g_cons,
expr: MonoType::Fun(g),
},
) = (f, g)
{
let mut cons = f_cons.into_iter().chain(g_cons).collect();
let res = f
.clone()
.unify(*g.clone(), &mut cons, &mut Fresher::default());
assert!(res.is_err());
let res = g
.clone()
.unify(*f.clone(), &mut cons, &mut Fresher::default());
assert!(res.is_err());
} else {
panic!("the monotypes under examination are not functions");
}
}
#[test]
fn unify_function_with_function_call() {
let fn_type = polytype("(a: A, b: A, ?c: B) => A where A: Addable, B: Divisible ");
// (a: int, b: int) => int
let call_type = Function {
// all arguments are required in a function call.
req: semantic_map! {
"a".to_string() => MonoType::Int,
"b".to_string() => MonoType::Int,
},
opt: semantic_map! {},
pipe: None,
retn: MonoType::Int,
};
if let PolyType {
vars: _,
mut cons,
expr: MonoType::Fun(f),
} = fn_type
{
let sub = f
.unify(call_type, &mut cons, &mut Fresher::default())
.unwrap();
assert_eq!(
sub,
Substitution::from(semantic_map! {Tvar(0) => MonoType::Int})
);
// the constraint on A gets removed.
assert_eq!(cons, semantic_map! {Tvar(1) => vec![Kind::Divisible]});
} else {
panic!("the monotype under examination is not a function");
}
}
#[test]
fn unify_higher_order_functions() {
let f = polytype(
"(a: A, b: A, ?c: (a: A) => B) => (d: string) => A where A: Addable, B: Divisible ",
);
let g = polytype("(a: int, b: int, c: (a: int) => float) => (d: string) => int");
if let (
PolyType {
vars: _,
cons: f_cons,
expr: MonoType::Fun(f),
},
PolyType {
vars: _,
cons: g_cons,
expr: MonoType::Fun(g),
},
) = (f, g)
{
// this extends the first map with the second by generating a new one.
let mut cons = f_cons.into_iter().chain(g_cons).collect();
let sub = f.unify(*g, &mut cons, &mut Fresher::default()).unwrap();
assert_eq!(
sub,
Substitution::from(semantic_map! {
Tvar(0) => MonoType::Int,
Tvar(1) => MonoType::Float,
})
);
// we know everything about tvars, there is no constraint.
assert_eq!(cons, semantic_map! {});
} else {
panic!("the monotypes under examination are not functions");
}
}
}
| 33.402533 | 101 | 0.446522 |
3ab9615cd1264f90c5d3cffb0dce17f5195c8566 | 276 | mod character;
mod dice;
mod fight;
mod item;
mod stuff;
pub mod prelude {
pub use crate::{character::*, dice::*, fight::*, item::*, stuff::*};
}
#[cfg(test)]
mod tests {
    /// Sanity check that the test harness is wired up.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
| 14.526316 | 72 | 0.539855 |
ede2a0839a1b3d25e266607f8caa1cabac1bb2ce | 33,220 | /*
This tool is part of the WhiteboxTools geospatial analysis library.
Authors: Dr. John Lindsay
Created: 28/04/2020
Last Modified: 28/04/2020
License: MIT
THIS TOOL CURRENTLY RUNS BUT THE CONTOURS THAT IT GENERATES ARE BROKEN FOR SOME REASON. SOME DEBUGGING WILL BE NECESSARY
BEFORE IT IS RELEASED.
*/
use whitebox_lidar::*;
use whitebox_common::algorithms::triangulate;
use whitebox_common::structures::{Point2D, Point3D};
use crate::tools::*;
use whitebox_vector::ShapefileGeometry;
use whitebox_vector::*;
use std::env;
use std::f64;
use std::io::{Error, ErrorKind};
use std::path;
use kdtree::distance::squared_euclidean;
use kdtree::KdTree;
const EPSILON: f64 = std::f64::EPSILON;
/// This tool creates a contour coverage from a set of input points (`--input`). The user must specify the contour
/// interval (`--interval`) and optionally, the base contour value (`--base`). The degree to which contours are
/// smoothed is controlled by the **Smoothing Filter Size** parameter (`--smooth`). This value, which determines
/// the size of a mean filter applied to the x-y position of vertices in each contour, should be an odd integer value, e.g.
/// 3, 5, 7, 9, 11, etc. Larger values will result in smoother contour lines.
///
/// # See Also
/// `ContoursFromPoints`, `ContoursFromRaster`
pub struct ContourLidar {
    // Tool name (returned by `get_tool_name`) and used in the example usage string.
    name: String,
    // One-sentence description (returned by `get_tool_description`).
    description: String,
    // Toolbox this tool belongs to (returned by `get_toolbox`).
    toolbox: String,
    // Declared parameters; serialized to JSON by `get_tool_parameters`.
    parameters: Vec<ToolParameter>,
    // Example command line (returned by `get_example_usage`).
    example_usage: String,
}
impl ContourLidar {
    /// Creates a new `ContourLidar` tool, declaring its parameters and
    /// building an example usage string from the running executable's name.
    pub fn new() -> ContourLidar {
        // public constructor
        let name = "ContourLidar".to_string();
        let toolbox = "LiDAR Tools".to_string();
        let description =
            "Creates a contour coverage from a set of input points.".to_string();
        let mut parameters = vec![];
        parameters.push(ToolParameter {
            name: "Input File".to_owned(),
            flags: vec!["-i".to_owned(), "--input".to_owned()],
            description: "Input LiDAR file (including extension).".to_owned(),
            parameter_type: ParameterType::ExistingFile(ParameterFileType::Lidar),
            default_value: None,
            optional: true,
        });
        parameters.push(ToolParameter {
            name: "Output Vector Lines File".to_owned(),
            flags: vec!["-o".to_owned(), "--output".to_owned()],
            description: "Output vector lines file.".to_owned(),
            parameter_type: ParameterType::NewFile(ParameterFileType::Vector(
                VectorGeometryType::Line,
            )),
            default_value: None,
            optional: false,
        });
        parameters.push(ToolParameter {
            name: "Maximum Triangle Edge Length (optional)".to_owned(),
            flags: vec!["--max_triangle_edge_length".to_owned()],
            description: "Optional maximum triangle edge length; triangles larger than this size will not be gridded.".to_owned(),
            parameter_type: ParameterType::Float,
            default_value: None,
            optional: true,
        });
        parameters.push(ToolParameter {
            name: "Contour Interval".to_owned(),
            flags: vec!["--interval".to_owned()],
            description: "Contour interval.".to_owned(),
            parameter_type: ParameterType::Float,
            default_value: Some("10.0".to_owned()),
            optional: false,
        });
        parameters.push(ToolParameter {
            name: "Base Contour".to_owned(),
            flags: vec!["--base".to_owned()],
            description: "Base contour height.".to_owned(),
            parameter_type: ParameterType::Float,
            default_value: Some("0.0".to_owned()),
            optional: true,
        });
        parameters.push(ToolParameter {
            name: "Smoothing Filter Size".to_owned(),
            flags: vec!["--smooth".to_owned()],
            description: "Smoothing filter size (in num. points), e.g. 3, 5, 7, 9, 11..."
                .to_owned(),
            parameter_type: ParameterType::Integer,
            default_value: Some("5".to_owned()),
            optional: true,
        });
        // Build a short executable name (relative to the current directory,
        // with separators and dots stripped) for the example usage string.
        let sep: String = path::MAIN_SEPARATOR.to_string();
        let p = format!("{}", env::current_dir().unwrap().display());
        let e = format!("{}", env::current_exe().unwrap().display());
        let mut short_exe = e
            .replace(&p, "")
            .replace(".exe", "")
            .replace(".", "")
            .replace(&sep, "");
        if e.contains(".exe") {
            short_exe += ".exe";
        }
        let usage = format!(
            ">>.*{0} -r={1} -v --wd=\"*path*to*data*\" -i=file.las -o=contours.shp --max_triangle_edge_length=100.0 --interval=100.0 --base=0.0 --smooth=11",
            short_exe, name
        )
        .replace("*", &sep);
        // Field-init shorthand replaces the former redundant `name: name` style.
        ContourLidar {
            name,
            description,
            toolbox,
            parameters,
            example_usage: usage,
        }
    }
}
impl WhiteboxTool for ContourLidar {
    /// Returns the path of the source file defining this tool.
    fn get_source_file(&self) -> String {
        String::from(file!())
    }
    /// Returns the tool's name, as set in `ContourLidar::new`.
    fn get_tool_name(&self) -> String {
        self.name.clone()
    }
    /// Returns the tool's one-line description.
    fn get_tool_description(&self) -> String {
        self.description.clone()
    }
fn get_tool_parameters(&self) -> String {
let mut s = String::from("{\"parameters\": [");
for i in 0..self.parameters.len() {
if i < self.parameters.len() - 1 {
s.push_str(&(self.parameters[i].to_string()));
s.push_str(",");
} else {
s.push_str(&(self.parameters[i].to_string()));
}
}
s.push_str("]}");
s
}
    /// Returns the example command line built in `ContourLidar::new`.
    fn get_example_usage(&self) -> String {
        self.example_usage.clone()
    }
    /// Returns the toolbox this tool belongs to ("LiDAR Tools").
    fn get_toolbox(&self) -> String {
        self.toolbox.clone()
    }
fn run<'a>(
&self,
args: Vec<String>,
working_directory: &'a str,
verbose: bool,
) -> Result<(), Error> {
let mut input_file: String = "".to_string();
let mut output_file: String = "".to_string();
let mut max_triangle_edge_length = f64::INFINITY;
let mut contour_interval = 10f64;
let mut base_contour = 0f64;
let mut filter_size = 5;
// read the arguments
if args.len() == 0 {
return Err(Error::new(
ErrorKind::InvalidInput,
"Tool run with no parameters.",
));
}
for i in 0..args.len() {
let mut arg = args[i].replace("\"", "");
arg = arg.replace("\'", "");
let cmd = arg.split("="); // in case an equals sign was used
let vec = cmd.collect::<Vec<&str>>();
let mut keyval = false;
if vec.len() > 1 {
keyval = true;
}
let flag_val = vec[0].to_lowercase().replace("--", "-");
if flag_val == "-i" || flag_val == "-input" {
input_file = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if flag_val == "-o" || flag_val == "-output" {
output_file = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if flag_val == "-max_triangle_edge_length" {
max_triangle_edge_length = if keyval {
vec[1]
.to_string()
.parse::<f64>()
.expect(&format!("Error parsing {}", flag_val))
} else {
args[i + 1]
.to_string()
.parse::<f64>()
.expect(&format!("Error parsing {}", flag_val))
};
max_triangle_edge_length *= max_triangle_edge_length; // actually squared distance
} else if flag_val == "-interval" {
contour_interval = if keyval {
vec[1]
.to_string()
.parse::<f64>()
.expect(&format!("Error parsing {}", flag_val))
} else {
args[i + 1]
.to_string()
.parse::<f64>()
.expect(&format!("Error parsing {}", flag_val))
};
} else if flag_val == "-base" {
base_contour = if keyval {
vec[1]
.to_string()
.parse::<f64>()
.expect(&format!("Error parsing {}", flag_val))
} else {
args[i + 1]
.to_string()
.parse::<f64>()
.expect(&format!("Error parsing {}", flag_val))
};
} else if flag_val == "-smooth" {
filter_size = if keyval {
vec[1]
.to_string()
.parse::<usize>()
.expect(&format!("Error parsing {}", flag_val))
} else {
args[i + 1]
.to_string()
.parse::<usize>()
.expect(&format!("Error parsing {}", flag_val))
};
if filter_size > 21 {
filter_size = 21;
}
if filter_size > 0 && filter_size % 2 == 0 {
// it must be odd.
filter_size += 1;
}
}
}
let sep: String = path::MAIN_SEPARATOR.to_string();
let mut progress: usize;
let mut old_progress: usize = 1;
let precision = EPSILON * 10f64;
let filter_radius = filter_size as isize / 2isize;
let start = Instant::now();
if verbose {
let tool_name = self.get_tool_name();
let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28);
// 28 = length of the 'Powered by' by statement.
println!("{}", "*".repeat(welcome_len));
println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len()));
println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28));
println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23));
println!("{}", "*".repeat(welcome_len));
}
if !input_file.contains(path::MAIN_SEPARATOR) && !input_file.contains("/") {
input_file = format!("{}{}", working_directory, input_file);
}
if !output_file.contains(&sep) && !output_file.contains("/") {
output_file = format!("{}{}", working_directory, output_file);
}
let mut input: LasFile = match LasFile::new(&input_file, "r") {
Ok(lf) => lf,
Err(_) => {
return Err(Error::new(
ErrorKind::NotFound,
format!("No such file or directory ({})", input_file),
))
}
};
// create output file
let mut output = Shapefile::new(&output_file, ShapeType::PolyLine)?;
// set the projection information
output.projection = input.get_wkt().clone();
// add the attributes
output.attributes.add_field(&AttributeField::new(
"FID",
FieldDataType::Int,
6u8,
0u8,
));
output.attributes.add_field(&AttributeField::new(
"ELEV",
FieldDataType::Real,
10u8,
4u8,
));
let mut points: Vec<Point2D> = vec![];
let mut z_values: Vec<f64> = vec![];
let num_points = input.header.number_of_points as f64 - 1.0;
// let mut pd: PointData;
let mut pd: Point3D;
for i in 0..input.header.number_of_points as usize {
// pd = input.get_point_info(i);
pd = input.get_transformed_coords(i);
points.push(Point2D::new(pd.x, pd.y));
z_values.push(pd.z);
if verbose {
progress = (100.0_f64 * i as f64 / num_points) as usize;
if progress != old_progress {
println!("Progress: {}%", progress);
old_progress = progress;
}
}
}
drop(input);
if points.len() <= 3 {
return Err(Error::new(
ErrorKind::InvalidInput,
"There are too few input points.",
));
}
if verbose {
println!("Performing triangulation...");
}
// this is where the heavy-lifting is
let result = triangulate(&points).expect("No triangulation exists.");
let (mut p1, mut p2, mut p3): (usize, usize, usize);
let (mut min_val, mut max_val): (f64, f64);
let (mut lower_interval, mut upper_interval): (usize, usize);
let mut contour_z: f64;
let dimensions = 2;
let capacity_per_node = 64;
let mut tree = KdTree::with_capacity(dimensions, capacity_per_node);
let mut node = 0usize;
let mut contour_points: Vec<(Point2D, f64)> = vec![];
let (mut x, mut y, mut fraction): (f64, f64, f64);
let mut fid = 1;
let (mut pt1, mut pt2, mut pt3): (Point2D, Point2D, Point2D);
let mut num_intersections: usize;
let (mut intersect1, mut intersect2, mut intersect3): (bool, bool, bool);
for i in (0..result.triangles.len()).step_by(3) {
p1 = result.triangles[i + 2];
p2 = result.triangles[i + 1];
p3 = result.triangles[i];
if max_distance_squared(
points[p1],
points[p2],
points[p3],
z_values[p1],
z_values[p2],
z_values[p3],
) < max_triangle_edge_length
{
min_val = z_values[p1].min(z_values[p2].min(z_values[p3]));
max_val = z_values[p1].max(z_values[p2].max(z_values[p3]));
lower_interval = ((min_val - base_contour) / contour_interval).ceil() as usize;
upper_interval = ((max_val - base_contour) / contour_interval).floor() as usize;
for a in lower_interval..=upper_interval {
contour_z = base_contour + a as f64 * contour_interval;
pt1 = Point2D::new(0f64, 0f64);
pt2 = Point2D::new(0f64, 0f64);
pt3 = Point2D::new(0f64, 0f64);
num_intersections = 0;
intersect1 = false;
intersect2 = false;
intersect3 = false;
if contour_z >= z_values[p1].min(z_values[p2]) && contour_z <= z_values[p1].max(z_values[p2]) {
num_intersections += 1;
intersect1 = true;
fraction = if z_values[p1] != z_values[p2] {
(contour_z - z_values[p1]) / (z_values[p2] - z_values[p1])
} else {
0f64
};
x = points[p1].x + fraction * (points[p2].x - points[p1].x);
y = points[p1].y + fraction * (points[p2].y - points[p1].y);
pt1 = Point2D::new(x, y);
}
if contour_z >= z_values[p2].min(z_values[p3]) && contour_z <= z_values[p2].max(z_values[p3]) {
num_intersections += 1;
intersect2 = true;
fraction = if z_values[p2] != z_values[p3] {
(contour_z - z_values[p2]) / (z_values[p3] - z_values[p2])
} else {
0f64
};
x = points[p2].x + fraction * (points[p3].x - points[p2].x);
y = points[p2].y + fraction * (points[p3].y - points[p2].y);
pt2 = Point2D::new(x, y);
}
if contour_z >= z_values[p1].min(z_values[p3]) && contour_z <= z_values[p1].max(z_values[p3]) {
num_intersections += 1;
intersect3 = true;
fraction = if z_values[p1] != z_values[p3] {
(contour_z - z_values[p1]) / (z_values[p3] - z_values[p1])
} else {
0f64
};
x = points[p1].x + fraction * (points[p3].x - points[p1].x);
y = points[p1].y + fraction * (points[p3].y - points[p1].y);
pt3 = Point2D::new(x, y);
}
if num_intersections == 3 {
// The contour intersects one of the vertices and two of these three points are the same.
// Remove one of the two identical points.
if pt1.distance(&pt2) < precision {
intersect2 = false;
num_intersections -= 1;
}
if pt1.distance(&pt3) < precision {
intersect3 = false;
num_intersections -= 1;
}
if pt2.distance(&pt3) < precision {
intersect3 = false;
num_intersections -= 1;
}
}
if num_intersections != 2 && verbose {
println!("Warning: An error occurred during the contouring operation.");
}
if intersect2 && intersect3 {
pt1 = pt2;
pt2 = pt3;
} else if intersect1 && intersect3 {
pt2 = pt3;
}
// The contour may only intersect a triangle at one of the triangle's vertices.
// We don't want to record this segment.
if pt1.distance(&pt2) > precision {
contour_points.push((pt1, contour_z));
tree.add([pt1.x, pt1.y], node).unwrap();
node += 1;
contour_points.push((pt2, contour_z));
tree.add([pt2.x, pt2.y], node).unwrap();
node += 1;
}
}
}
if verbose {
progress = (100.0_f64 * i as f64 / (result.triangles.len() - 1) as f64) as usize;
if progress != old_progress {
println!("Progress (Loop 1 of 3): {}%", progress);
old_progress = progress;
}
}
}
let num_points = contour_points.len();
let mut unvisited = vec![true; num_points];
let mut num_neighbours: usize;
let mut flag: bool;
let mut found_node: bool;
let mut other_node: usize;
for i in 0..num_points {
if unvisited[i] {
contour_z = contour_points[i].1;
// is it an endnode?
let ret = tree
.within(&[contour_points[i].0.x, contour_points[i].0.y], precision, &squared_euclidean)
.unwrap();
num_neighbours = 0;
for a in 0..ret.len() {
node = *ret[a].1;
if contour_points[node].1 == contour_z {
num_neighbours += 1;
}
}
if num_neighbours == 1 {
let mut line_points = vec![];
node = i;
line_points.push(contour_points[node].0);
unvisited[node] = false;
flag = true;
while flag {
// get the other side of this line segment
other_node = if node % 2 == 0 {
node + 1
} else {
node - 1
};
if unvisited[other_node] {
if filter_size > 0 {
// Add a mid-point
x = (contour_points[node].0.x + contour_points[other_node].0.x) / 2f64;
y = (contour_points[node].0.y + contour_points[other_node].0.y) / 2f64;
line_points.push(Point2D::new(x, y));
}
node = other_node;
line_points.push(contour_points[node].0);
unvisited[node] = false;
} else {
found_node = false;
let ret = tree
.within(&[contour_points[node].0.x, contour_points[node].0.y], precision, &squared_euclidean)
.unwrap();
for a in 0..ret.len() {
other_node = *ret[a].1;
if other_node != node && contour_points[other_node].1 == contour_z && unvisited[other_node] {
node = other_node;
line_points.push(contour_points[node].0);
unvisited[node] = false;
found_node = true;
break;
}
}
if !found_node {
// we've located the other end of the line.
flag = false;
}
}
}
// remove the duplicate points
for a in (1..line_points.len()).rev() {
if line_points[a] == line_points[a-1] {
line_points.remove(a);
}
}
if line_points.len() > 1 {
// Smooth the points
if line_points.len() > filter_size && filter_size > 0 {
for a in 0..line_points.len() {
x = 0f64;
y = 0f64;
for p in -filter_radius..=filter_radius {
let mut point_id: isize = a as isize + p;
if point_id < 0 {
point_id = 0;
}
if point_id >= line_points.len() as isize {
point_id = line_points.len() as isize - 1;
}
x += line_points[point_id as usize].x;
y += line_points[point_id as usize].y;
}
x /= filter_size as f64;
y /= filter_size as f64;
line_points[a].x = x;
line_points[a].y = y;
}
for a in (0..line_points.len()).rev() {
x = 0f64;
y = 0f64;
for p in -filter_radius..=filter_radius {
let mut point_id: isize = a as isize + p;
if point_id < 0 {
point_id = 0;
}
if point_id >= line_points.len() as isize {
point_id = line_points.len() as isize - 1;
}
x += line_points[point_id as usize].x;
y += line_points[point_id as usize].y;
}
x /= filter_size as f64;
y /= filter_size as f64;
line_points[a].x = x;
line_points[a].y = y;
}
}
let mut sfg = ShapefileGeometry::new(ShapeType::PolyLine);
sfg.add_part(&line_points);
output.add_record(sfg);
output.attributes.add_record(
vec![
FieldData::Int(fid as i32 + 1),
FieldData::Real(contour_z),
],
false,
);
fid += 1;
}
}
}
if verbose {
progress = (100.0_f64 * i as f64 / (num_points - 1) as f64) as usize;
if progress != old_progress {
println!("Progress (Loop 2 of 3): {}%", progress);
old_progress = progress;
}
}
}
// Closed contours
let mut num_line_points: usize;
for i in 0..num_points {
if unvisited[i] {
contour_z = contour_points[i].1;
let mut line_points = vec![];
node = i;
line_points.push(contour_points[node].0);
unvisited[node] = false;
flag = true;
while flag {
// get the other side of this line segment
other_node = if node % 2 == 0 {
node + 1
} else {
node - 1
};
if unvisited[other_node] {
if filter_size > 0 {
// Add a mid-point
x = (contour_points[node].0.x + contour_points[other_node].0.x) / 2f64;
y = (contour_points[node].0.y + contour_points[other_node].0.y) / 2f64;
line_points.push(Point2D::new(x, y));
}
node = other_node;
line_points.push(contour_points[node].0);
unvisited[node] = false;
} else {
found_node = false;
let ret = tree
.within(&[contour_points[node].0.x, contour_points[node].0.y], precision, &squared_euclidean)
.unwrap();
for a in 0..ret.len() {
other_node = *ret[a].1;
if other_node != node && contour_points[other_node].1 == contour_z && unvisited[other_node] {
node = other_node;
line_points.push(contour_points[node].0);
unvisited[node] = false;
found_node = true;
}
}
if !found_node {
// we've located the other end of the line.
flag = false;
}
}
}
// remove the duplicate points
for a in (1..line_points.len()).rev() {
if line_points[a] == line_points[a-1] {
line_points.remove(a);
}
}
num_line_points = line_points.len();
if num_line_points > 1 {
if num_line_points > filter_size && filter_size > 0 {
for a in 0..num_line_points {
x = 0f64;
y = 0f64;
for p in -filter_radius..=filter_radius {
let mut point_id: isize = a as isize + p;
if point_id < 0 {
point_id += num_line_points as isize - 1;
}
if point_id >= num_line_points as isize {
point_id -= num_line_points as isize - 1;
}
x += line_points[point_id as usize].x;
y += line_points[point_id as usize].y;
}
x /= filter_size as f64;
y /= filter_size as f64;
line_points[a].x = x;
line_points[a].y = y;
}
// set the final point position to the same as the first to close the loop
line_points[num_line_points - 1].x = line_points[0].x;
line_points[num_line_points - 1].y = line_points[0].y;
for a in (0..num_line_points).rev() {
x = 0f64;
y = 0f64;
for p in -filter_radius..=filter_radius {
let mut point_id: isize = a as isize + p;
if point_id < 0 {
point_id += num_line_points as isize - 1;
}
if point_id >= num_line_points as isize {
point_id -= num_line_points as isize - 1;
}
x += line_points[point_id as usize].x;
y += line_points[point_id as usize].y;
}
x /= filter_size as f64;
y /= filter_size as f64;
line_points[a].x = x;
line_points[a].y = y;
}
// set the final point position to the same as the first to close the loop
line_points[num_line_points - 1].x = line_points[0].x;
line_points[num_line_points - 1].y = line_points[0].y;
}
let mut sfg = ShapefileGeometry::new(ShapeType::PolyLine);
sfg.add_part(&line_points);
output.add_record(sfg);
output.attributes.add_record(
vec![
FieldData::Int(fid as i32 + 1),
FieldData::Real(contour_z),
],
false,
);
fid += 1;
}
}
if verbose {
progress = (100.0_f64 * i as f64 / (num_points - 1) as f64) as usize;
if progress != old_progress {
println!("Progress (Loop 3 of 3): {}%", progress);
old_progress = progress;
}
}
}
if verbose {
println!("Saving data...")
};
let _ = match output.write() {
Ok(_) => {
if verbose {
println!("Output file written")
}
}
Err(e) => return Err(e),
};
let elapsed_time = get_formatted_elapsed_time(start);
if verbose {
println!("{}", &format!("Elapsed Time: {}", elapsed_time));
}
Ok(())
}
}
/// Calculate squared Euclidean distance between the point and another.
pub fn max_distance_squared(
p1: Point2D,
p2: Point2D,
p3: Point2D,
z1: f64,
z2: f64,
z3: f64,
) -> f64 {
let mut dx = p1.x - p2.x;
let mut dy = p1.y - p2.y;
let mut dz = z1 - z2;
let mut max_dist = dx * dx + dy * dy + dz * dz;
dx = p1.x - p3.x;
dy = p1.y - p3.y;
dz = z1 - z3;
let mut dist = dx * dx + dy * dy + dz * dz;
if dist > max_dist {
max_dist = dist
}
dx = p2.x - p3.x;
dy = p2.y - p3.y;
dz = z2 - z3;
dist = dx * dx + dy * dy + dz * dz;
if dist > max_dist {
max_dist = dist
}
max_dist
} | 40.36452 | 157 | 0.42357 |
use crate::callable::{Callable, WasmtimeFn};
use crate::runtime::Store;
use crate::trap::Trap;
use crate::types::{ExternType, FuncType, GlobalType, MemoryType, TableType, ValType};
use crate::values::Val;
use std::cell::RefCell;
use std::rc::Rc;
use std::result::Result;
use crate::trampoline::{generate_func_export, generate_global_export, generate_memory_export};
use wasmtime_runtime::InstanceHandle;
// Externals

/// An external value that can be imported into or exported from a wasm
/// instance: a function, global, table, or memory.
///
/// Variants use `Rc<RefCell<_>>` so the same item can be shared between
/// multiple holders on a single thread.
pub enum Extern {
    /// A wasm or host function.
    Func(Rc<RefCell<Func>>),
    /// A wasm global variable.
    Global(Rc<RefCell<Global>>),
    /// A wasm table.
    Table(Rc<RefCell<Table>>),
    /// A wasm linear memory.
    Memory(Rc<RefCell<Memory>>),
}
impl Extern {
    /// Returns the underlying function.
    ///
    /// # Panics
    ///
    /// Panics if this extern is not the `Func` variant.
    pub fn func(&self) -> &Rc<RefCell<Func>> {
        match self {
            Extern::Func(func) => func,
            _ => panic!("Extern::Func expected"),
        }
    }
    /// Returns the underlying global.
    ///
    /// # Panics
    ///
    /// Panics if this extern is not the `Global` variant.
    pub fn global(&self) -> &Rc<RefCell<Global>> {
        match self {
            Extern::Global(global) => global,
            _ => panic!("Extern::Global expected"),
        }
    }
    /// Returns the underlying table.
    ///
    /// # Panics
    ///
    /// Panics if this extern is not the `Table` variant.
    pub fn table(&self) -> &Rc<RefCell<Table>> {
        match self {
            Extern::Table(table) => table,
            _ => panic!("Extern::Table expected"),
        }
    }
    /// Returns the underlying memory.
    ///
    /// # Panics
    ///
    /// Panics if this extern is not the `Memory` variant.
    pub fn memory(&self) -> &Rc<RefCell<Memory>> {
        match self {
            Extern::Memory(memory) => memory,
            _ => panic!("Extern::Memory expected"),
        }
    }
    /// Returns the type (signature, global type, table type, or memory
    /// type) of this external value.
    pub fn r#type(&self) -> ExternType {
        match self {
            Extern::Func(ft) => ExternType::ExternFunc(ft.borrow().r#type().clone()),
            Extern::Memory(ft) => ExternType::ExternMemory(ft.borrow().r#type().clone()),
            Extern::Table(tt) => ExternType::ExternTable(tt.borrow().r#type().clone()),
            Extern::Global(gt) => ExternType::ExternGlobal(gt.borrow().r#type().clone()),
        }
    }
    /// Returns the low-level runtime export backing this value. For
    /// functions that have not yet been anchored to an instance, an export
    /// is generated on demand (side effect: sets `anchor`).
    ///
    /// # Panics
    ///
    /// `Table` externs are not supported yet (`unimplemented!`).
    pub(crate) fn get_wasmtime_export(&mut self) -> wasmtime_runtime::Export {
        match self {
            Extern::Func(f) => {
                if f.borrow().anchor.is_none() {
                    generate_func_export(&f).expect("generate_func_export");
                }
                f.borrow().anchor.as_ref().unwrap().1.clone()
            }
            Extern::Global(g) => g.borrow().wasmtime_export().clone(),
            Extern::Memory(m) => m.borrow().wasmtime_export().clone(),
            _ => unimplemented!("get_wasmtime_export"),
        }
    }
    /// Wraps a low-level `wasmtime_runtime::Export` (together with the
    /// instance handle that owns it) in the appropriate `Extern` variant.
    pub(crate) fn from_wasmtime_export(
        store: Rc<RefCell<Store>>,
        instance_handle: InstanceHandle,
        export: wasmtime_runtime::Export,
    ) -> Extern {
        match export {
            wasmtime_runtime::Export::Function {
                address,
                vmctx,
                ref signature,
            } => {
                // Wrap the raw code pointer in a callable and anchor the
                // resulting Func to its owning instance/export.
                let ty = FuncType::from_cranelift_signature(signature.clone());
                let callable = WasmtimeFn::new(store.clone(), signature.clone(), address, vmctx);
                let mut f = Func::new(store, ty, Rc::new(callable));
                f.anchor = Some((instance_handle, export.clone()));
                Extern::Func(Rc::new(RefCell::new(f)))
            }
            wasmtime_runtime::Export::Memory { .. } => Extern::Memory(Rc::new(RefCell::new(
                Memory::from_wasmtime_memory(export, store, instance_handle),
            ))),
            wasmtime_runtime::Export::Global { .. } => Extern::Global(Rc::new(RefCell::new(
                Global::from_wasmtime_global(export, store),
            ))),
            wasmtime_runtime::Export::Table {
                definition: _,
                vmctx: _,
                table,
            } => {
                let ty = TableType::from_cranelift_table(table.table.clone());
                Extern::Table(Rc::new(RefCell::new(Table::new(store, ty))))
            }
        }
    }
}
/// A callable wasm or host function together with its signature.
pub struct Func {
    // Keeps the owning store alive; not otherwise read here.
    _store: Rc<RefCell<Store>>,
    // The underlying implementation (host closure or wasm code).
    callable: Rc<dyn Callable + 'static>,
    // The function's wasm signature.
    r#type: FuncType,
    // Set once the function has been exported from an instance; pairs the
    // owning instance handle with its runtime export.
    pub(crate) anchor: Option<(InstanceHandle, wasmtime_runtime::Export)>,
}
impl Func {
    /// Creates a new function of the given signature, backed by `callable`.
    pub fn new(
        store: Rc<RefCell<Store>>,
        r#type: FuncType,
        callable: Rc<dyn Callable + 'static>,
    ) -> Func {
        Func { _store: store, callable, r#type, anchor: None }
    }

    /// Returns this function's wasm signature.
    pub fn r#type(&self) -> &FuncType {
        &self.r#type
    }

    /// Number of parameters the function accepts.
    pub fn param_arity(&self) -> usize {
        self.r#type.params().len()
    }

    /// Number of results the function produces.
    pub fn result_arity(&self) -> usize {
        self.r#type.results().len()
    }

    /// Returns the underlying callable implementation.
    pub fn callable(&self) -> &(dyn Callable + 'static) {
        self.callable.as_ref()
    }

    /// Invokes the function on `params`, returning its results, or the
    /// trap raised during execution.
    pub fn call(&self, params: &[Val]) -> Result<Box<[Val]>, Rc<RefCell<Trap>>> {
        // The callable writes its results into a pre-sized output buffer.
        let mut outputs = vec![Val::default(); self.result_arity()];
        self.callable.call(params, &mut outputs)?;
        Ok(outputs.into_boxed_slice())
    }
}
/// A wasm global variable backed by runtime storage.
pub struct Global {
    // Keeps the owning store alive; not otherwise read here.
    _store: Rc<RefCell<Store>>,
    // Content type and mutability of the global.
    r#type: GlobalType,
    // Runtime export that owns the global's storage.
    wasmtime_export: wasmtime_runtime::Export,
    // Set for host-created globals (see `new`); `None` when wrapping an
    // existing wasmtime export (see `from_wasmtime_global`).
    #[allow(dead_code)]
    wasmtime_state: Option<crate::trampoline::GlobalState>,
}
impl Global {
    /// Creates a host-defined global of the given type, initialized to `val`.
    pub fn new(store: Rc<RefCell<Store>>, r#type: GlobalType, val: Val) -> Global {
        let (wasmtime_export, wasmtime_state) =
            generate_global_export(&r#type, val).expect("generated global");
        Global {
            _store: store,
            r#type,
            wasmtime_export,
            wasmtime_state: Some(wasmtime_state),
        }
    }
    /// Returns the type of this global.
    pub fn r#type(&self) -> &GlobalType {
        &self.r#type
    }
    /// Raw pointer to the global's runtime storage.
    ///
    /// # Panics
    ///
    /// Panics if the stored export is not a `Global`.
    fn wasmtime_global_definition(&self) -> *mut wasmtime_runtime::VMGlobalDefinition {
        match self.wasmtime_export {
            wasmtime_runtime::Export::Global { definition, .. } => definition,
            _ => panic!("global definition not found"),
        }
    }
    /// Reads the current value of the global.
    ///
    /// F32/F64 contents are read as raw bit patterns and rehydrated via
    /// `from_f32_bits`/`from_f64_bits`.
    // SAFETY(review): assumes the definition pointer stays valid for the
    // lifetime of `self` (owned by the export) — confirm.
    pub fn get(&self) -> Val {
        let definition = unsafe { &mut *self.wasmtime_global_definition() };
        unsafe {
            match self.r#type().content() {
                ValType::I32 => Val::from(*definition.as_i32()),
                ValType::I64 => Val::from(*definition.as_i64()),
                ValType::F32 => Val::from_f32_bits(*definition.as_u32()),
                ValType::F64 => Val::from_f64_bits(*definition.as_u64()),
                _ => unimplemented!("Global::get for {:?}", self.r#type().content()),
            }
        }
    }
    /// Writes a new value into the global. Float variants carry raw bit
    /// patterns (`u32`/`u64`), mirroring `get`.
    ///
    /// # Panics
    ///
    /// Panics if `val`'s type does not match the global's content type.
    pub fn set(&mut self, val: Val) {
        if val.r#type() != *self.r#type().content() {
            panic!(
                "global of type {:?} cannot be set to {:?}",
                self.r#type().content(),
                val.r#type()
            );
        }
        let definition = unsafe { &mut *self.wasmtime_global_definition() };
        unsafe {
            match val {
                Val::I32(i) => *definition.as_i32_mut() = i,
                Val::I64(i) => *definition.as_i64_mut() = i,
                Val::F32(f) => *definition.as_u32_mut() = f,
                Val::F64(f) => *definition.as_u64_mut() = f,
                _ => unimplemented!("Global::set for {:?}", val.r#type()),
            }
        }
    }
    /// Returns the low-level runtime export backing this global.
    pub(crate) fn wasmtime_export(&self) -> &wasmtime_runtime::Export {
        &self.wasmtime_export
    }
    /// Wraps an existing runtime `Global` export.
    ///
    /// # Panics
    ///
    /// Panics if `export` is not the `Global` variant.
    pub(crate) fn from_wasmtime_global(
        export: wasmtime_runtime::Export,
        store: Rc<RefCell<Store>>,
    ) -> Global {
        let global = if let wasmtime_runtime::Export::Global { ref global, .. } = export {
            global
        } else {
            panic!("wasmtime export is not memory")
        };
        let ty = GlobalType::from_cranelift_global(global.clone());
        Global {
            _store: store,
            r#type: ty,
            wasmtime_export: export,
            wasmtime_state: None,
        }
    }
}
/// A wasm table. Element access is not yet implemented (see `impl Table`).
pub struct Table {
    // Keeps the owning store alive; not otherwise read here.
    _store: Rc<RefCell<Store>>,
    // Element type and limits of the table.
    r#type: TableType,
}
impl Table {
    /// Creates a new table descriptor (element storage not yet implemented).
    pub fn new(store: Rc<RefCell<Store>>, r#type: TableType) -> Table {
        Table {
            _store: store,
            r#type,
        }
    }
    /// Returns the type of this table.
    pub fn r#type(&self) -> &TableType {
        &self.r#type
    }
    /// Reads the element at `_index`. Not yet implemented.
    pub fn get(&self, _index: u32) -> Val {
        unimplemented!("Table::get")
    }
    /// Writes `_val` at `_index`. Not yet implemented.
    pub fn set(&self, _index: u32, _val: &Val) -> usize {
        unimplemented!("Table::set")
    }
    /// Returns the current element count. Not yet implemented.
    pub fn size(&self) -> u32 {
        unimplemented!("Table::size")
    }
    /// Grows the table by `_delta` elements. Not yet implemented.
    pub fn grow(&mut self, _delta: u32) -> bool {
        unimplemented!("Table::grow")
    }
}
/// A wasm linear memory backed by a runtime instance.
pub struct Memory {
    // Keeps the owning store alive; not otherwise read here.
    _store: Rc<RefCell<Store>>,
    // The memory's type (limits).
    r#type: MemoryType,
    // Instance that owns the memory; needed to grow it (see `grow`).
    wasmtime_handle: InstanceHandle,
    // Runtime export pointing at the memory's definition.
    wasmtime_export: wasmtime_runtime::Export,
}
impl Memory {
    /// Creates a host-defined linear memory of the given type.
    pub fn new(store: Rc<RefCell<Store>>, r#type: MemoryType) -> Memory {
        let (wasmtime_handle, wasmtime_export) =
            generate_memory_export(&r#type).expect("generated memory");
        Memory {
            _store: store,
            r#type,
            wasmtime_handle,
            wasmtime_export,
        }
    }
    /// Returns the type of this memory.
    pub fn r#type(&self) -> &MemoryType {
        &self.r#type
    }
    /// Raw pointer to the memory's runtime definition.
    ///
    /// # Panics
    ///
    /// Panics if the stored export is not a `Memory`.
    fn wasmtime_memory_definition(&self) -> *mut wasmtime_runtime::VMMemoryDefinition {
        match self.wasmtime_export {
            wasmtime_runtime::Export::Memory { definition, .. } => definition,
            _ => panic!("memory definition not found"),
        }
    }
    /// Base pointer of the memory's current allocation.
    // SAFETY(review): assumes the definition pointer stays valid for the
    // lifetime of `self`; the returned pointer may dangle after `grow`.
    pub fn data(&self) -> *mut u8 {
        unsafe { (*self.wasmtime_memory_definition()).base }
    }
    /// Current size of the memory in bytes.
    pub fn data_size(&self) -> usize {
        unsafe { (*self.wasmtime_memory_definition()).current_length }
    }
    /// Current size of the memory in wasm pages.
    pub fn size(&self) -> u32 {
        (self.data_size() / wasmtime_environ::WASM_PAGE_SIZE as usize) as u32
    }
    /// Grows the memory by `delta` wasm pages; returns `false` if the grow
    /// failed (e.g. the maximum was exceeded).
    pub fn grow(&mut self, delta: u32) -> bool {
        match self.wasmtime_export {
            wasmtime_runtime::Export::Memory { definition, .. } => {
                let definition = unsafe { &(*definition) };
                // Resolve which memory of the owning instance this is, then
                // grow through the instance handle.
                let index = self.wasmtime_handle.memory_index(definition);
                self.wasmtime_handle.memory_grow(index, delta).is_some()
            }
            _ => panic!("memory definition not found"),
        }
    }
    /// Returns the low-level runtime export backing this memory.
    pub(crate) fn wasmtime_export(&self) -> &wasmtime_runtime::Export {
        &self.wasmtime_export
    }
    /// Wraps an existing runtime `Memory` export owned by `instance_handle`.
    ///
    /// # Panics
    ///
    /// Panics if `export` is not the `Memory` variant.
    pub(crate) fn from_wasmtime_memory(
        export: wasmtime_runtime::Export,
        store: Rc<RefCell<Store>>,
        instance_handle: wasmtime_runtime::InstanceHandle,
    ) -> Memory {
        let memory = if let wasmtime_runtime::Export::Memory { ref memory, .. } = export {
            memory
        } else {
            panic!("wasmtime export is not memory")
        };
        let ty = MemoryType::from_cranelift_memory(memory.memory.clone());
        Memory {
            _store: store,
            r#type: ty,
            wasmtime_handle: instance_handle,
            wasmtime_export: export,
        }
    }
}
use amethyst_core::{
math::{zero, Quaternion, RealField, Unit, Vector3},
Transform,
};
use num_traits::NumCast;
use serde::{Deserialize, Serialize};
use crate::{
resources::{AnimationSampling, ApplyData, BlendMethod},
util::SamplerPrimitive,
};
/// Channels that can be animated on `Transform`.
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub enum TransformChannel {
    /// The 3-dimensional Cartesian translation of an entity.
    Translation,
    /// The rotation of an entity in 3-dimensional space.
    Rotation,
    /// The scale of an entity, i.e. how big it is.
    Scale,
}
// Sampling a `Transform` requires no auxiliary per-entity data, so the
// apply data is the unit type.
impl<'a, N: RealField> ApplyData<'a> for Transform<N> {
    type ApplyData = ();
}
impl<N: RealField + NumCast> AnimationSampling for Transform<N> {
    type Primitive = SamplerPrimitive<N>;
    type Channel = TransformChannel;
    /// Writes a sampled primitive into the matching transform channel.
    ///
    /// Rotation samples are `Vec4` values in `(w, x, y, z)` component order
    /// (the order `current_sample` emits); the quaternion is re-normalized
    /// on apply. Panics on an invalid channel/primitive pairing.
    fn apply_sample(&mut self, channel: &Self::Channel, data: &SamplerPrimitive<N>, _: &()) {
        use crate::util::SamplerPrimitive::*;
        use self::TransformChannel::*;
        match (channel, *data) {
            (&Translation, Vec3(ref d)) => {
                self.set_translation_xyz(d[0], d[1], d[2]);
            }
            (&Rotation, Vec4(ref d)) => {
                // `Quaternion::new` takes (w, i, j, k); normalize to guard
                // against drift introduced by interpolation.
                *self.rotation_mut() = Unit::new_normalize(Quaternion::new(d[0], d[1], d[2], d[3]));
            }
            (&Scale, Vec3(ref d)) => {
                self.set_scale(Vector3::new(d[0], d[1], d[2]));
            }
            _ => panic!("Attempt to apply invalid sample to Transform"),
        }
    }
    /// Reads the current value of a channel as a sampler primitive.
    fn current_sample(&self, channel: &Self::Channel, _: &()) -> SamplerPrimitive<N> {
        use self::TransformChannel::*;
        match channel {
            Translation => SamplerPrimitive::Vec3((*self.translation()).into()),
            Rotation => SamplerPrimitive::Vec4({
                // Emit (w, x, y, z), matching the argument order that
                // `apply_sample` feeds to `Quaternion::new`.
                let c = self.rotation().as_ref().coords;
                [c.w, c.x, c.y, c.z]
            }),
            Scale => SamplerPrimitive::Vec3((*self.scale()).into()),
        }
    }
    /// Zero-filled primitive of the right arity for the given channel.
    fn default_primitive(channel: &Self::Channel) -> Self::Primitive {
        use self::TransformChannel::*;
        match channel {
            Translation => SamplerPrimitive::Vec3([zero(); 3]),
            Rotation => SamplerPrimitive::Vec4([zero(); 4]),
            Scale => SamplerPrimitive::Vec3([zero(); 3]),
        }
    }
    /// All transform channels blend linearly.
    fn blend_method(&self, _: &Self::Channel) -> Option<BlendMethod> {
        Some(BlendMethod::Linear)
    }
}
use super::{BinarySerializer, Encoding, Error, Result, StructuredSerializer};
use crate::event::{EventBinarySerializer, EventStructuredSerializer};
use crate::Event;
/// Deserializer trait for a Message that can be encoded as structured mode.
pub trait StructuredDeserializer
where
    Self: Sized,
{
    /// Deserialize the message to [`StructuredSerializer`].
    fn deserialize_structured<R: Sized, V: StructuredSerializer<R>>(
        self,
        serializer: V,
    ) -> Result<R>;
    /// Convert this Message to [`Event`].
    ///
    /// Provided implementation: builds the event by feeding an
    /// `EventStructuredSerializer` to `deserialize_structured`.
    fn into_event(self) -> Result<Event> {
        self.deserialize_structured(EventStructuredSerializer {})
    }
}
/// Deserializer trait for a Message that can be encoded as binary mode.
pub trait BinaryDeserializer
where
    Self: Sized,
{
    /// Deserialize the message to [`BinarySerializer`].
    fn deserialize_binary<R: Sized, V: BinarySerializer<R>>(self, serializer: V) -> Result<R>;
    /// Convert this Message to [`Event`].
    ///
    /// Provided implementation: builds the event by feeding an
    /// `EventBinarySerializer` to `deserialize_binary`.
    fn into_event(self) -> Result<Event> {
        self.deserialize_binary(EventBinarySerializer::new())
    }
}
/// Deserializer trait for a Message that can be encoded both in structured mode or binary mode.
pub trait MessageDeserializer
where
    Self: StructuredDeserializer + BinaryDeserializer + Sized,
{
    /// Get this message [`Encoding`].
    fn encoding(&self) -> Encoding;
    /// Convert this Message to [`Event`].
    ///
    /// Fails with [`Error::WrongEncoding`] if the encoding is unknown.
    fn into_event(self) -> Result<Event> {
        match self.encoding() {
            Encoding::BINARY => BinaryDeserializer::into_event(self),
            Encoding::STRUCTURED => StructuredDeserializer::into_event(self),
            _ => Err(Error::WrongEncoding {}),
        }
    }
    /// Deserialize the message to [`BinarySerializer`].
    ///
    /// A message that is not already binary-encoded is first converted to an
    /// [`Event`] and then re-serialized in binary mode.
    // Note: rewritten as if/else expressions; the original used trailing
    // `return` statements (clippy `needless_return`).
    fn deserialize_to_binary<R: Sized, T: BinarySerializer<R>>(self, serializer: T) -> Result<R> {
        if self.encoding() == Encoding::BINARY {
            self.deserialize_binary(serializer)
        } else {
            MessageDeserializer::into_event(self)?.deserialize_binary(serializer)
        }
    }
    /// Deserialize the message to [`StructuredSerializer`].
    ///
    /// A message that is not already structured-encoded is first converted
    /// to an [`Event`] and then re-serialized in structured mode.
    fn deserialize_to_structured<R: Sized, T: StructuredSerializer<R>>(
        self,
        serializer: T,
    ) -> Result<R> {
        if self.encoding() == Encoding::STRUCTURED {
            self.deserialize_structured(serializer)
        } else {
            MessageDeserializer::into_event(self)?.deserialize_structured(serializer)
        }
    }
    /// Deserialize the message to a serializer, depending on the message encoding.
    /// You can use this method to transcode this message directly to another serializer, without going through [`Event`].
    fn deserialize_to<R: Sized, T: BinarySerializer<R> + StructuredSerializer<R>>(
        self,
        serializer: T,
    ) -> Result<R> {
        if self.encoding() == Encoding::STRUCTURED {
            self.deserialize_structured(serializer)
        } else {
            self.deserialize_binary(serializer)
        }
    }
}
use std::collections::HashMap;
use std::cell::RefCell;
use std::default::Default;
use std::collections::BTreeMap;
use serde_json as json;
use std::io;
use std::fs;
use std::mem;
use std::thread::sleep;
use crate::client;
// ##############
// UTILITIES ###
// ############
// ########
// HUB ###
// ######
/// Central instance to access all Partners related resource activities
///
/// # Examples
///
/// Instantiate a new hub
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
/// use partners2::api::CompanyRelation;
/// use partners2::{Result, Error};
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// // Get an ApplicationSecret instance by some means. It contains the `client_id` and
/// // `client_secret`, among other things.
/// let secret: oauth2::ApplicationSecret = Default::default();
/// // Instantiate the authenticator. It will choose a suitable authentication flow for you,
/// // unless you replace `None` with the desired Flow.
/// // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about
/// // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and
/// // retrieve them from storage.
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // As the method needs a request, you would usually fill it with the desired information
/// // into the respective structure. Some of the parts shown here might not be applicable !
/// // Values shown here are possibly random and not representative !
/// let mut req = CompanyRelation::default();
///
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.users().create_company_relation(req, "userId")
/// .request_metadata_user_overrides_user_id("dolor")
/// .request_metadata_user_overrides_ip_address("ea")
/// .request_metadata_traffic_source_traffic_sub_id("ipsum")
/// .request_metadata_traffic_source_traffic_source_id("invidunt")
/// .request_metadata_partners_session_id("amet")
/// .request_metadata_locale("duo")
/// .add_request_metadata_experiment_ids("ipsum")
/// .doit().await;
///
/// match result {
/// Err(e) => match e {
/// // The Error enum provides details about what exactly happened.
/// // You can also just use its `Debug`, `Display` or `Error` traits
/// Error::HttpError(_)
/// |Error::Io(_)
/// |Error::MissingAPIKey
/// |Error::MissingToken(_)
/// |Error::Cancelled
/// |Error::UploadSizeLimitExceeded(_, _)
/// |Error::Failure(_)
/// |Error::BadRequest(_)
/// |Error::FieldClash(_)
/// |Error::JsonDecodeError(_, _) => println!("{}", e),
/// },
/// Ok(res) => println!("Success: {:?}", res),
/// }
/// # }
/// ```
pub struct Partners<> {
    // HTTP client used to execute all API requests.
    client: hyper::Client<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>, hyper::body::Body>,
    // OAuth2 authenticator that supplies access tokens for requests.
    auth: oauth2::authenticator::Authenticator<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>>,
    // User-agent header value; see `user_agent()` for the default.
    _user_agent: String,
    // Base URL prepended to endpoint paths; see `base_url()`.
    _base_url: String,
    // Root URL of the service; see `root_url()`.
    _root_url: String,
}
// Marker impl identifying `Partners` as an API hub.
impl<'a, > client::Hub for Partners<> {}
impl<'a, > Partners<> {

    /// Creates a hub from an executing HTTP `client` and an OAuth2 `authenticator`.
    pub fn new(client: hyper::Client<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>, hyper::body::Body>, authenticator: oauth2::authenticator::Authenticator<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>>) -> Partners<> {
        Partners {
            client,
            auth: authenticator,
            _user_agent: "google-api-rust-client/2.0.4".to_string(),
            _base_url: "https://partners.googleapis.com/".to_string(),
            _root_url: "https://partners.googleapis.com/".to_string(),
        }
    }

    /// Gives access to the analytics-related methods.
    pub fn analytics(&'a self) -> AnalyticMethods<'a> {
        AnalyticMethods { hub: &self }
    }
    /// Gives access to the client-message-related methods.
    pub fn client_messages(&'a self) -> ClientMessageMethods<'a> {
        ClientMessageMethods { hub: &self }
    }
    /// Gives access to the company-related methods.
    pub fn companies(&'a self) -> CompanyMethods<'a> {
        CompanyMethods { hub: &self }
    }
    /// Gives access to the lead-related methods.
    pub fn leads(&'a self) -> LeadMethods<'a> {
        LeadMethods { hub: &self }
    }
    /// Gives access to top-level (non-resource) API methods.
    pub fn methods(&'a self) -> MethodMethods<'a> {
        MethodMethods { hub: &self }
    }
    /// Gives access to the offer-related methods.
    pub fn offers(&'a self) -> OfferMethods<'a> {
        OfferMethods { hub: &self }
    }
    /// Gives access to the user-event-related methods.
    pub fn user_events(&'a self) -> UserEventMethods<'a> {
        UserEventMethods { hub: &self }
    }
    /// Gives access to the user-state-related methods.
    pub fn user_states(&'a self) -> UserStateMethods<'a> {
        UserStateMethods { hub: &self }
    }
    /// Gives access to the user-related methods.
    pub fn users(&'a self) -> UserMethods<'a> {
        UserMethods { hub: &self }
    }

    /// Set the user-agent header field to use in all requests to the server.
    /// It defaults to `google-api-rust-client/2.0.4`.
    ///
    /// Returns the previously set user-agent.
    pub fn user_agent(&mut self, agent_name: String) -> String {
        mem::replace(&mut self._user_agent, agent_name)
    }

    /// Set the base url to use in all requests to the server.
    /// It defaults to `https://partners.googleapis.com/`.
    ///
    /// Returns the previously set base url.
    pub fn base_url(&mut self, new_base_url: String) -> String {
        mem::replace(&mut self._base_url, new_base_url)
    }

    /// Set the root url to use in all requests to the server.
    /// It defaults to `https://partners.googleapis.com/`.
    ///
    /// Returns the previously set root url.
    pub fn root_url(&mut self, new_root_url: String) -> String {
        mem::replace(&mut self._root_url, new_root_url)
    }
}
// ############
// SCHEMAS ###
// ##########
/// A generic empty message that you can re-use to avoid defining duplicated
/// empty messages in your APIs. A typical example is to use it as the request
/// or the response type of an API method. For instance:
///
/// ````text
/// service Foo {
/// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
/// }
/// ````
///
/// The JSON representation for `Empty` is empty JSON object `{}`.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [delete company relation users](UserDeleteCompanyRelationCall) (response)
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
// `_never_set` is a private placeholder; presumably it keeps the struct
// non-unit for serde while still serializing to `{}` — TODO confirm.
pub struct Empty { _never_set: Option<bool> }
// Marker trait impl: `Empty` can be returned as a call's response.
impl client::ResponseResult for Empty {}
/// Source of traffic for the current request.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
// Field names are renamed to the API's camelCase JSON keys via serde attributes.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct TrafficSource {
    /// Identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    #[serde(rename="trafficSourceId")]
    pub traffic_source_id: Option<String>,
    /// Second level identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    #[serde(rename="trafficSubId")]
    pub traffic_sub_id: Option<String>,
}
// Marker trait impl: this schema can be embedded in other request/response types.
impl client::Part for TrafficSource {}
/// Common data that is in each API request.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
// Field names are renamed to the API's camelCase JSON keys via serde attributes.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct RequestMetadata {
    /// Locale to use for the current request.
    pub locale: Option<String>,
    /// Values to use instead of the user's respective defaults for the current
    /// request. These are only honored by whitelisted products.
    #[serde(rename="userOverrides")]
    pub user_overrides: Option<UserOverrides>,
    /// Google Partners session ID.
    #[serde(rename="partnersSessionId")]
    pub partners_session_id: Option<String>,
    /// Experiment IDs the current request belongs to.
    #[serde(rename="experimentIds")]
    pub experiment_ids: Option<Vec<String>>,
    /// Source of traffic for the current request.
    #[serde(rename="trafficSource")]
    pub traffic_source: Option<TrafficSource>,
}
// Marker trait impl: this schema can be embedded in other request/response types.
impl client::Part for RequestMetadata {}
/// Request message for CreateLead.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [leads create companies](CompanyLeadCreateCall) (request)
///
// Field names are renamed to the API's camelCase JSON keys via serde attributes.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct CreateLeadRequest {
    /// Current request metadata.
    #[serde(rename="requestMetadata")]
    pub request_metadata: Option<RequestMetadata>,
    /// The lead resource. The `LeadType` must not be `LEAD_TYPE_UNSPECIFIED`
    /// and either `email` or `phone_number` must be provided.
    pub lead: Option<Lead>,
    /// <a href="https://www.google.com/recaptcha/">reCaptcha</a> challenge info.
    #[serde(rename="recaptchaChallenge")]
    pub recaptcha_challenge: Option<RecaptchaChallenge>,
}
// Marker trait impl: this schema is sent as a request body.
impl client::RequestValue for CreateLeadRequest {}
/// Key value data pair for an event.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct EventData {
    /// Data type.
    pub key: Option<String>,
    /// Data values.
    pub values: Option<Vec<String>>,
}
// Marker trait impl: this schema can be embedded in other request/response types.
impl client::Part for EventData {}
/// A user's information on a specific exam.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
// Field names are renamed to the API's camelCase JSON keys via serde attributes.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ExamStatus {
    /// Whether this exam is in the state of warning.
    pub warning: Option<bool>,
    /// Date this exam is due to expire.
    pub expiration: Option<String>,
    /// The date the user last passed this exam.
    #[serde(rename="lastPassed")]
    pub last_passed: Option<String>,
    /// The type of the exam.
    #[serde(rename="examType")]
    pub exam_type: Option<String>,
    /// Whether this exam has been passed and not expired.
    pub passed: Option<bool>,
    /// The date the user last taken this exam.
    pub taken: Option<String>,
}
// Marker trait impl: this schema can be embedded in other request/response types.
impl client::Part for ExamStatus {}
/// Response for ListOffer.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [list offers](OfferListCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ListOffersResponse {
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
/// Reason why no Offers are available.
#[serde(rename="noOfferReason")]
pub no_offer_reason: Option<String>,
/// Available Offers to be distributed.
#[serde(rename="availableOffers")]
pub available_offers: Option<Vec<AvailableOffer>>,
}
impl client::ResponseResult for ListOffersResponse {}
/// Offer info by country.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct CountryOfferInfo {
/// Country code for which offer codes may be requested.
#[serde(rename="offerCountryCode")]
pub offer_country_code: Option<String>,
/// (localized) Spend X amount for that country's offer.
#[serde(rename="spendXAmount")]
pub spend_x_amount: Option<String>,
/// Type of offer country is eligible for.
#[serde(rename="offerType")]
pub offer_type: Option<String>,
/// (localized) Get Y amount for that country's offer.
#[serde(rename="getYAmount")]
pub get_y_amount: Option<String>,
}
impl client::Part for CountryOfferInfo {}
/// Response message for
/// ListCompanies.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [list companies](CompanyListCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ListCompaniesResponse {
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
/// The list of companies.
pub companies: Option<Vec<Company>>,
/// A token to retrieve next page of results.
/// Pass this value in the `ListCompaniesRequest.page_token` field in the
/// subsequent call to
/// ListCompanies to retrieve the
/// next page of results.
#[serde(rename="nextPageToken")]
pub next_page_token: Option<String>,
}
impl client::ResponseResult for ListCompaniesResponse {}
/// Customers qualified for an offer.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct OfferCustomer {
/// URL to the customer's AdWords page.
#[serde(rename="adwordsUrl")]
pub adwords_url: Option<String>,
/// Type of the offer.
#[serde(rename="offerType")]
pub offer_type: Option<String>,
/// External CID for the customer.
#[serde(rename="externalCid")]
pub external_cid: Option<String>,
/// Country code of the customer.
#[serde(rename="countryCode")]
pub country_code: Option<String>,
/// Time the customer was created.
#[serde(rename="creationTime")]
pub creation_time: Option<String>,
/// Days the customer is still eligible.
#[serde(rename="eligibilityDaysLeft")]
pub eligibility_days_left: Option<i32>,
/// Formatted Get Y amount with currency code.
#[serde(rename="getYAmount")]
pub get_y_amount: Option<String>,
/// Name of the customer.
pub name: Option<String>,
/// Formatted Spend X amount with currency code.
#[serde(rename="spendXAmount")]
pub spend_x_amount: Option<String>,
}
impl client::Part for OfferCustomer {}
/// Google Partners certification status.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct CertificationStatus {
/// The type of the certification.
#[serde(rename="type")]
// Named `type_` because `type` is a reserved keyword in Rust.
pub type_: Option<String>,
/// Number of people who are certified.
#[serde(rename="userCount")]
pub user_count: Option<i32>,
/// Whether certification is passing.
#[serde(rename="isCertified")]
pub is_certified: Option<bool>,
/// List of certification exam statuses.
#[serde(rename="examStatuses")]
pub exam_statuses: Option<Vec<CertificationExamStatus>>,
}
impl client::Part for CertificationStatus {}
/// The localized company information.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct LocalizedCompanyInfo {
/// Language code of the localized company info, as defined by
/// <a href="https://tools.ietf.org/html/bcp47">BCP 47</a>
/// (IETF BCP 47, "Tags for Identifying Languages").
#[serde(rename="languageCode")]
pub language_code: Option<String>,
/// List of country codes for the localized company info.
#[serde(rename="countryCodes")]
pub country_codes: Option<Vec<String>>,
/// Localized brief description that the company uses to advertise themselves.
pub overview: Option<String>,
/// Localized display name.
#[serde(rename="displayName")]
pub display_name: Option<String>,
}
impl client::Part for LocalizedCompanyInfo {}
/// Response message for
/// LogUserEvent.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [log user events](UserEventLogCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct LogUserEventResponse {
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
}
impl client::ResponseResult for LogUserEventResponse {}
/// Response for ListOfferHistory.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [history list offers](OfferHistoryListCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ListOffersHistoryResponse {
/// True if the user has the option to show entire company history.
#[serde(rename="canShowEntireCompany")]
pub can_show_entire_company: Option<bool>,
/// Number of results across all pages.
#[serde(rename="totalResults")]
pub total_results: Option<i32>,
/// True if this response is showing entire company history.
#[serde(rename="showingEntireCompany")]
pub showing_entire_company: Option<bool>,
/// Historical offers meeting request.
pub offers: Option<Vec<HistoricalOffer>>,
/// Supply this token in a ListOffersHistoryRequest to retrieve the next page.
#[serde(rename="nextPageToken")]
pub next_page_token: Option<String>,
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
}
impl client::ResponseResult for ListOffersHistoryResponse {}
/// Response message for
/// LogClientMessage.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [log client messages](ClientMessageLogCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct LogMessageResponse {
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
}
impl client::ResponseResult for LogMessageResponse {}
/// Agency specialization status.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct SpecializationStatus {
/// The specialization this status is for.
#[serde(rename="badgeSpecialization")]
pub badge_specialization: Option<String>,
/// State of agency specialization.
#[serde(rename="badgeSpecializationState")]
pub badge_specialization_state: Option<String>,
}
impl client::Part for SpecializationStatus {}
/// A user's information on a specific certification.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct Certification {
/// The date the user last achieved certification.
#[serde(rename="lastAchieved")]
pub last_achieved: Option<String>,
/// Whether this certification has been achieved.
pub achieved: Option<bool>,
/// Date this certification is due to expire.
pub expiration: Option<String>,
/// Whether this certification is in the state of warning.
pub warning: Option<bool>,
/// The type of certification, the area of expertise.
#[serde(rename="certificationType")]
pub certification_type: Option<String>,
}
impl client::Part for Certification {}
/// A resource representing a user of the Partners platform.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [update profile users](UserUpdateProfileCall) (none)
/// * [create company relation users](UserCreateCompanyRelationCall) (none)
/// * [delete company relation users](UserDeleteCompanyRelationCall) (none)
/// * [get users](UserGetCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct User {
/// The profile information of a Partners user, contains all the directly
/// editable user information.
pub profile: Option<UserProfile>,
/// This is the list of AdWords Manager Accounts the user has edit access to.
/// If the user has edit access to multiple accounts, the user can choose the
/// preferred account and we use this when a personal account is needed. Can
/// be empty meaning the user has access to no accounts.
/// @OutputOnly
#[serde(rename="availableAdwordsManagerAccounts")]
pub available_adwords_manager_accounts: Option<Vec<AdWordsManagerAccountInfo>>,
/// The internal user ID.
/// Only available for a whitelisted set of api clients.
#[serde(rename="internalId")]
pub internal_id: Option<String>,
/// The list of exams the user has ever taken. For each type of exam, only one
/// entry is listed.
#[serde(rename="examStatus")]
pub exam_status: Option<Vec<ExamStatus>>,
/// The ID of the user.
pub id: Option<String>,
/// Information about a user's external public profile outside Google Partners.
#[serde(rename="publicProfile")]
pub public_profile: Option<PublicProfile>,
/// The email address used by the user used for company verification.
/// @OutputOnly
#[serde(rename="companyVerificationEmail")]
pub company_verification_email: Option<String>,
/// The company that the user is associated with.
/// If not present, the user is not associated with any company.
pub company: Option<CompanyRelation>,
/// The most recent time the user interacted with the Partners site.
/// @OutputOnly
#[serde(rename="lastAccessTime")]
pub last_access_time: Option<String>,
/// The list of emails the user has access to/can select as primary.
/// @OutputOnly
#[serde(rename="primaryEmails")]
pub primary_emails: Option<Vec<String>>,
/// The list of achieved certifications. These are calculated based on exam
/// results and other requirements.
/// @OutputOnly
#[serde(rename="certificationStatus")]
pub certification_status: Option<Vec<Certification>>,
/// Whether or not the user has opted to share their Academy for Ads info with
/// Google Partners.
#[serde(rename="afaInfoShared")]
pub afa_info_shared: Option<bool>,
}
impl client::Resource for User {}
impl client::ResponseResult for User {}
/// Response message for
/// ListAnalytics.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [list analytics](AnalyticListCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ListAnalyticsResponse {
/// A token to retrieve next page of results.
/// Pass this value in the `ListAnalyticsRequest.page_token` field in the
/// subsequent call to
/// ListAnalytics to retrieve the
/// next page of results.
#[serde(rename="nextPageToken")]
pub next_page_token: Option<String>,
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
/// Aggregated information across the response's
/// analytics.
#[serde(rename="analyticsSummary")]
pub analytics_summary: Option<AnalyticsSummary>,
/// The list of analytics.
/// Sorted in ascending order of
/// Analytics.event_date.
pub analytics: Option<Vec<Analytics>>,
}
impl client::ResponseResult for ListAnalyticsResponse {}
/// Response message for ListLeads.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [list leads](LeadListCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ListLeadsResponse {
/// A token to retrieve next page of results.
/// Pass this value in the `ListLeadsRequest.page_token` field in the
/// subsequent call to
/// ListLeads to retrieve the
/// next page of results.
#[serde(rename="nextPageToken")]
pub next_page_token: Option<String>,
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
/// The total count of leads for the given company.
#[serde(rename="totalSize")]
pub total_size: Option<i32>,
/// The list of leads.
pub leads: Option<Vec<Lead>>,
}
impl client::ResponseResult for ListLeadsResponse {}
/// A company resource in the Google Partners API. Once certified, it qualifies
/// for being searched by advertisers.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [update companies](MethodUpdateCompanyCall) (request|response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct Company {
/// The public viewability status of the company's profile.
#[serde(rename="profileStatus")]
pub profile_status: Option<String>,
/// The primary language code of the company, as defined by
/// <a href="https://tools.ietf.org/html/bcp47">BCP 47</a>
/// (IETF BCP 47, "Tags for Identifying Languages").
#[serde(rename="primaryLanguageCode")]
pub primary_language_code: Option<String>,
/// The list of all company locations.
/// If set, must include the
/// primary_location
/// in the list.
pub locations: Option<Vec<Location>>,
/// The minimum monthly budget that the company accepts for partner business,
/// converted to the requested currency code.
#[serde(rename="convertedMinMonthlyBudget")]
pub converted_min_monthly_budget: Option<Money>,
/// Industries the company can help with.
pub industries: Option<Vec<String>>,
/// URL of the company's website.
#[serde(rename="websiteUrl")]
pub website_url: Option<String>,
/// URL of the company's additional websites used to verify the dynamic badges.
/// These are stored as full URLs as entered by the user, but only the TLD will
/// be used for the actual verification.
#[serde(rename="additionalWebsites")]
pub additional_websites: Option<Vec<String>>,
/// The Primary AdWords Manager Account id.
#[serde(rename="primaryAdwordsManagerAccountId")]
pub primary_adwords_manager_account_id: Option<String>,
/// Whether the company's badge authority is in AWN.
#[serde(rename="badgeAuthorityInAwn")]
pub badge_authority_in_awn: Option<bool>,
/// The name of the company.
pub name: Option<String>,
/// The list of localized info for the company.
#[serde(rename="localizedInfos")]
pub localized_infos: Option<Vec<LocalizedCompanyInfo>>,
/// The list of Google Partners certification statuses for the company.
#[serde(rename="certificationStatuses")]
pub certification_statuses: Option<Vec<CertificationStatus>>,
/// The ID of the company.
pub id: Option<String>,
/// Basic information from the company's public profile.
#[serde(rename="publicProfile")]
pub public_profile: Option<PublicProfile>,
/// The unconverted minimum monthly budget that the company accepts for partner
/// business.
#[serde(rename="originalMinMonthlyBudget")]
pub original_min_monthly_budget: Option<Money>,
/// Services the company can help with.
pub services: Option<Vec<String>>,
/// The primary location of the company.
#[serde(rename="primaryLocation")]
pub primary_location: Option<Location>,
/// Information related to the ranking of the company within the list of
/// companies.
pub ranks: Option<Vec<Rank>>,
/// The list of Google Partners specialization statuses for the company.
#[serde(rename="specializationStatus")]
pub specialization_status: Option<Vec<SpecializationStatus>>,
/// Partner badge tier.
#[serde(rename="badgeTier")]
pub badge_tier: Option<String>,
/// Email domains that allow users with a matching email address to get
/// auto-approved for associating with this company.
#[serde(rename="autoApprovalEmailDomains")]
pub auto_approval_email_domains: Option<Vec<String>>,
/// Company type labels listed on the company's profile.
#[serde(rename="companyTypes")]
pub company_types: Option<Vec<String>>,
}
impl client::RequestValue for Company {}
impl client::ResponseResult for Company {}
/// Response message for CreateLead.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [leads create companies](CompanyLeadCreateCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct CreateLeadResponse {
/// Lead that was created depending on the outcome of
/// <a href="https://www.google.com/recaptcha/">reCaptcha</a> validation.
pub lead: Option<Lead>,
/// The outcome of <a href="https://www.google.com/recaptcha/">reCaptcha</a>
/// validation.
#[serde(rename="recaptchaStatus")]
pub recaptcha_status: Option<String>,
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
}
impl client::ResponseResult for CreateLeadResponse {}
/// Response message for GetCompany.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [get companies](CompanyGetCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct GetCompanyResponse {
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
/// The company.
pub company: Option<Company>,
}
impl client::ResponseResult for GetCompanyResponse {}
/// A location with address and geographic coordinates. May optionally contain a
/// detailed (multi-field) version of the address.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct Location {
/// Top-level administrative subdivision of this country.
#[serde(rename="administrativeArea")]
pub administrative_area: Option<String>,
/// Generally refers to the city/town portion of an address.
pub locality: Option<String>,
/// The latitude and longitude of the location, in degrees.
#[serde(rename="latLng")]
pub lat_lng: Option<LatLng>,
/// CLDR (Common Locale Data Repository) region code.
#[serde(rename="regionCode")]
pub region_code: Option<String>,
/// Dependent locality or sublocality. Used for UK dependent localities, or
/// neighborhoods or boroughs in other locations.
#[serde(rename="dependentLocality")]
pub dependent_locality: Option<String>,
/// The single string version of the address.
pub address: Option<String>,
/// Values are frequently alphanumeric.
#[serde(rename="postalCode")]
pub postal_code: Option<String>,
/// Use of this code is very country-specific, but will refer to a secondary
/// classification code for sorting mail.
#[serde(rename="sortingCode")]
pub sorting_code: Option<String>,
/// Language code of the address. Should be in BCP 47 format.
#[serde(rename="languageCode")]
pub language_code: Option<String>,
/// The following address lines represent the most specific part of any
/// address.
#[serde(rename="addressLine")]
pub address_line: Option<Vec<String>>,
}
impl client::Part for Location {}
/// Status for a Google Partners certification exam.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct CertificationExamStatus {
/// The number of people who have passed the certification exam.
#[serde(rename="numberUsersPass")]
pub number_users_pass: Option<i32>,
/// The type of certification exam.
#[serde(rename="type")]
// Named `type_` because `type` is a reserved keyword in Rust.
pub type_: Option<String>,
}
impl client::Part for CertificationExamStatus {}
/// A set of opt-ins for a user.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct OptIns {
/// An opt-in about receiving email from Partners marketing teams. Includes
/// member-only events and special promotional offers for Google products.
#[serde(rename="marketComm")]
pub market_comm: Option<bool>,
/// An opt-in about receiving email regarding new features and products.
#[serde(rename="specialOffers")]
pub special_offers: Option<bool>,
/// An opt-in about receiving email with customized AdWords campaign management
/// tips.
#[serde(rename="performanceSuggestions")]
pub performance_suggestions: Option<bool>,
/// An opt-in to receive special promotional gifts and material in the mail.
#[serde(rename="physicalMail")]
pub physical_mail: Option<bool>,
/// An opt-in to allow receiving phone calls about their Partners account.
#[serde(rename="phoneContact")]
pub phone_contact: Option<bool>,
}
impl client::Part for OptIns {}
/// Information related to ranking of results.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct Rank {
/// The numerical value of the rank.
pub value: Option<f64>,
/// The type of rank.
#[serde(rename="type")]
// Named `type_` because `type` is a reserved keyword in Rust.
pub type_: Option<String>,
}
impl client::Part for Rank {}
/// Response message for
/// GetPartnersStatus.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [get partnersstatus](MethodGetPartnersstatuCall) (response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct GetPartnersStatusResponse {
/// Current response metadata.
#[serde(rename="responseMetadata")]
pub response_metadata: Option<ResponseMetadata>,
}
impl client::ResponseResult for GetPartnersStatusResponse {}
/// The profile information of a Partners user.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [update profile users](UserUpdateProfileCall) (request|response)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct UserProfile {
/// A list of ids representing which channels the user selected they were in.
pub channels: Option<Vec<String>>,
/// Whether the user's public profile is visible to anyone with the URL.
#[serde(rename="profilePublic")]
pub profile_public: Option<bool>,
/// A list of ids representing which job categories the user selected.
#[serde(rename="jobFunctions")]
pub job_functions: Option<Vec<String>>,
/// The user's given name.
#[serde(rename="givenName")]
pub given_name: Option<String>,
/// The user's mailing address, contains multiple fields.
pub address: Option<Location>,
/// A list of ids representing which industries the user selected.
pub industries: Option<Vec<String>>,
/// The list of opt-ins for the user, related to communication preferences.
#[serde(rename="emailOptIns")]
pub email_opt_ins: Option<OptIns>,
/// The user's family name.
#[serde(rename="familyName")]
pub family_name: Option<String>,
/// The list of languages this user understands.
pub languages: Option<Vec<String>>,
/// A list of ids representing which markets the user was interested in.
pub markets: Option<Vec<String>>,
/// Whether or not to migrate the user's exam data to Academy for Ads.
#[serde(rename="migrateToAfa")]
pub migrate_to_afa: Option<bool>,
/// If the user has edit access to multiple accounts, the user can choose the
/// preferred account and it is used when a personal account is needed. Can
/// be empty.
#[serde(rename="adwordsManagerAccount")]
pub adwords_manager_account: Option<String>,
/// The user's phone number.
#[serde(rename="phoneNumber")]
pub phone_number: Option<String>,
/// The user's primary country, an ISO 2-character code.
#[serde(rename="primaryCountryCode")]
pub primary_country_code: Option<String>,
/// The email address the user has selected on the Partners site as primary.
#[serde(rename="emailAddress")]
pub email_address: Option<String>,
}
impl client::RequestValue for UserProfile {}
impl client::ResponseResult for UserProfile {}
/// Historical information about a Google Partners Offer.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct HistoricalOffer {
/// Time offer was first created.
#[serde(rename="creationTime")]
pub creation_time: Option<String>,
/// Status of the offer.
pub status: Option<String>,
/// Email address for client.
#[serde(rename="clientEmail")]
pub client_email: Option<String>,
/// ID of client.
#[serde(rename="clientId")]
pub client_id: Option<String>,
/// Name of the client.
#[serde(rename="clientName")]
pub client_name: Option<String>,
/// Time last action was taken.
#[serde(rename="lastModifiedTime")]
pub last_modified_time: Option<String>,
/// Client's AdWords page URL.
#[serde(rename="adwordsUrl")]
pub adwords_url: Option<String>,
/// Type of offer.
#[serde(rename="offerType")]
pub offer_type: Option<String>,
/// Name (First + Last) of the partners user to whom the incentive is allocated.
#[serde(rename="senderName")]
pub sender_name: Option<String>,
/// Country Code for the offer country.
#[serde(rename="offerCountryCode")]
pub offer_country_code: Option<String>,
/// Time this offer expires.
#[serde(rename="expirationTime")]
pub expiration_time: Option<String>,
/// Offer code.
#[serde(rename="offerCode")]
pub offer_code: Option<String>,
}
impl client::Part for HistoricalOffer {}
/// Request message for
/// LogUserEvent.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [log user events](UserEventLogCall) (request)
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct LogUserEventRequest {
/// The URL where the event occurred.
pub url: Option<String>,
/// Current request metadata.
#[serde(rename="requestMetadata")]
pub request_metadata: Option<RequestMetadata>,
/// List of event data for the event.
#[serde(rename="eventDatas")]
pub event_datas: Option<Vec<EventData>>,
/// The scope of the event.
#[serde(rename="eventScope")]
pub event_scope: Option<String>,
/// The category the action belongs to.
#[serde(rename="eventCategory")]
pub event_category: Option<String>,
/// Advertiser lead information.
pub lead: Option<Lead>,
/// The action that occurred.
#[serde(rename="eventAction")]
pub event_action: Option<String>,
}
impl client::RequestValue for LogUserEventRequest {}
/// Values to use instead of the user's respective defaults. These are only
/// honored by whitelisted products.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct UserOverrides {
/// IP address to use instead of the user's geo-located IP address.
#[serde(rename="ipAddress")]
pub ip_address: Option<String>,
/// Logged-in user ID to impersonate instead of the user's ID.
#[serde(rename="userId")]
pub user_id: Option<String>,
}
impl client::Part for UserOverrides {}
/// Details of the analytics events for a `Company` within a single day.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct AnalyticsDataPoint {
/// Location information of where these events occurred.
#[serde(rename="eventLocations")]
pub event_locations: Option<Vec<LatLng>>,
/// Number of times the type of event occurred.
/// Meaning depends on context (e.g. profile views, contacts, etc.).
#[serde(rename="eventCount")]
pub event_count: Option<i32>,
}
impl client::Part for AnalyticsDataPoint {}
/// Analytics data for a `Company` within a single day.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct Analytics {
/// Date on which these events occurred.
#[serde(rename="eventDate")]
pub event_date: Option<Date>,
/// Instances of users viewing the `Company` profile
/// on the specified date.
#[serde(rename="profileViews")]
pub profile_views: Option<AnalyticsDataPoint>,
/// Instances of users seeing the `Company` in Google Partners Search results
/// on the specified date.
#[serde(rename="searchViews")]
pub search_views: Option<AnalyticsDataPoint>,
/// Instances of users contacting the `Company`
/// on the specified date.
pub contacts: Option<AnalyticsDataPoint>,
}
impl client::Part for Analytics {}
/// Information about a particular AdWords Manager Account.
/// Read more at https://support.google.com/adwords/answer/6139186
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct AdWordsManagerAccountInfo {
/// The AdWords Manager Account id.
pub id: Option<String>,
/// Name of the customer this account represents.
#[serde(rename="customerName")]
pub customer_name: Option<String>,
}
impl client::Part for AdWordsManagerAccountInfo {}
/// Basic information from a public profile.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct PublicProfile {
/// The URL to the main profile image of the public profile.
#[serde(rename="profileImage")]
pub profile_image: Option<String>,
/// The display name of the public profile.
#[serde(rename="displayName")]
pub display_name: Option<String>,
/// The URL to the main display image of the public profile. Being deprecated.
#[serde(rename="displayImageUrl")]
pub display_image_url: Option<String>,
/// The ID which can be used to retrieve more details about the public profile.
pub id: Option<String>,
/// The URL of the public profile.
pub url: Option<String>,
}
impl client::Part for PublicProfile {}
/// Common data that is in each API response.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ResponseMetadata {
/// Debug information about this request.
#[serde(rename="debugInfo")]
pub debug_info: Option<DebugInfo>,
}
impl client::Part for ResponseMetadata {}
/// <a href="https://www.google.com/recaptcha/">reCaptcha</a> challenge info.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct RecaptchaChallenge {
    /// The ID of the reCaptcha challenge.
    pub id: Option<String>,
    /// The response to the reCaptcha challenge.
    pub response: Option<String>,
}
impl client::Part for RecaptchaChallenge {}
/// Available Offers to be distributed.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct AvailableOffer {
    /// Level of this offer.
    #[serde(rename="offerLevel")]
    pub offer_level: Option<String>,
    /// Name of the offer.
    pub name: Option<String>,
    /// ID of this offer.
    pub id: Option<String>,
    /// Whether or not the list of qualified customers is definitely complete.
    #[serde(rename="qualifiedCustomersComplete")]
    pub qualified_customers_complete: Option<bool>,
    /// Offer info by country.
    #[serde(rename="countryOfferInfos")]
    pub country_offer_infos: Option<Vec<CountryOfferInfo>>,
    /// Type of offer.
    #[serde(rename="offerType")]
    pub offer_type: Option<String>,
    /// The maximum age of an account [in days] to be eligible.
    #[serde(rename="maxAccountAge")]
    pub max_account_age: Option<i32>,
    /// Customers who qualify for this offer.
    #[serde(rename="qualifiedCustomer")]
    pub qualified_customer: Option<Vec<OfferCustomer>>,
    /// Terms of the offer.
    pub terms: Option<String>,
    /// Should special text be shown on the offers page.
    #[serde(rename="showSpecialOfferCopy")]
    pub show_special_offer_copy: Option<bool>,
    /// The number of codes for this offer that are available for distribution.
    pub available: Option<i32>,
    /// Description of the offer.
    pub description: Option<String>,
}
impl client::Part for AvailableOffer {}
/// An object representing a latitude/longitude pair. This is expressed as a pair
/// of doubles representing degrees latitude and degrees longitude. Unless
/// specified otherwise, this must conform to the
/// <a href="http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf">WGS84
/// standard</a>. Values must be within normalized ranges.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct LatLng {
    /// The latitude in degrees. It must be in the range [-90.0, +90.0].
    pub latitude: Option<f64>,
    /// The longitude in degrees. It must be in the range [-180.0, +180.0].
    pub longitude: Option<f64>,
}
impl client::Part for LatLng {}
/// Represents an amount of money with its currency type.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct Money {
    /// The 3-letter currency code defined in ISO 4217.
    #[serde(rename="currencyCode")]
    pub currency_code: Option<String>,
    /// Number of nano (10^-9) units of the amount.
    /// The value must be between -999,999,999 and +999,999,999 inclusive.
    /// If `units` is positive, `nanos` must be positive or zero.
    /// If `units` is zero, `nanos` can be positive, zero, or negative.
    /// If `units` is negative, `nanos` must be negative or zero.
    /// For example $-1.75 is represented as `units`=-1 and `nanos`=-750,000,000.
    pub nanos: Option<i32>,
    /// The whole units of the amount.
    /// For example if `currencyCode` is `"USD"`, then 1 unit is one US dollar.
    /// (Transported as a string, not an integer, in the wire format.)
    pub units: Option<String>,
}
impl client::Part for Money {}
/// Analytics aggregated data for a `Company` for a given date range.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct AnalyticsSummary {
    /// Aggregated number of profile views for the `Company` for given date range.
    #[serde(rename="profileViewsCount")]
    pub profile_views_count: Option<i32>,
    /// Aggregated number of times users saw the `Company`
    /// in Google Partners Search results for given date range.
    #[serde(rename="searchViewsCount")]
    pub search_views_count: Option<i32>,
    /// Aggregated number of times users contacted the `Company`
    /// for given date range.
    #[serde(rename="contactsCount")]
    pub contacts_count: Option<i32>,
}
impl client::Part for AnalyticsSummary {}
/// Request message for
/// LogClientMessage.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [log client messages](ClientMessageLogCall) (request)
///
/// All fields are `Option`s; unset fields are omitted from the serialized request.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct LogMessageRequest {
    /// Map of client info, such as URL, browser navigator, browser platform, etc.
    #[serde(rename="clientInfo")]
    pub client_info: Option<HashMap<String, String>>,
    /// Current request metadata.
    #[serde(rename="requestMetadata")]
    pub request_metadata: Option<RequestMetadata>,
    /// Message level of client message.
    pub level: Option<String>,
    /// Details about the client message.
    pub details: Option<String>,
}
impl client::RequestValue for LogMessageRequest {}
/// A lead resource that represents an advertiser contact for a `Company`. These
/// are usually generated via Google Partner Search (the advertiser portal).
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [list leads](LeadListCall) (none)
/// * [update leads](MethodUpdateLeadCall) (request|response)
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct Lead {
    /// The minimum monthly budget lead source is willing to spend.
    #[serde(rename="minMonthlyBudget")]
    pub min_monthly_budget: Option<Money>,
    /// First name of lead source.
    #[serde(rename="givenName")]
    pub given_name: Option<String>,
    /// Language code of the lead's language preference, as defined by
    /// <a href="https://tools.ietf.org/html/bcp47">BCP 47</a>
    /// (IETF BCP 47, "Tags for Identifying Languages").
    #[serde(rename="languageCode")]
    pub language_code: Option<String>,
    /// Website URL of lead source.
    #[serde(rename="websiteUrl")]
    pub website_url: Option<String>,
    /// The lead's state in relation to the company.
    pub state: Option<String>,
    /// List of reasons for using Google Partner Search and creating a lead.
    #[serde(rename="gpsMotivations")]
    pub gps_motivations: Option<Vec<String>>,
    /// Email address of lead source.
    pub email: Option<String>,
    /// Last name of lead source.
    #[serde(rename="familyName")]
    pub family_name: Option<String>,
    /// ID of the lead.
    pub id: Option<String>,
    /// Comments lead source gave.
    pub comments: Option<String>,
    /// Phone number of lead source.
    #[serde(rename="phoneNumber")]
    pub phone_number: Option<String>,
    /// The AdWords Customer ID of the lead.
    #[serde(rename="adwordsCustomerId")]
    pub adwords_customer_id: Option<String>,
    /// Timestamp of when this lead was created.
    #[serde(rename="createTime")]
    pub create_time: Option<String>,
    /// Whether or not the lead signed up for marketing emails.
    #[serde(rename="marketingOptIn")]
    pub marketing_opt_in: Option<bool>,
    /// Type of lead.
    /// (Named `type_` because `type` is a Rust keyword; serialized as `type`.)
    #[serde(rename="type")]
    pub type_: Option<String>,
}
impl client::RequestValue for Lead {}
impl client::Resource for Lead {}
impl client::ResponseResult for Lead {}
/// Debug information about this request.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct DebugInfo {
    /// Info about the server that serviced this request.
    #[serde(rename="serverInfo")]
    pub server_info: Option<String>,
    /// Server-side debug stack trace.
    #[serde(rename="serverTraceInfo")]
    pub server_trace_info: Option<String>,
    /// URL of the service that handled this request.
    #[serde(rename="serviceUrl")]
    pub service_url: Option<String>,
}
impl client::Part for DebugInfo {}
/// Response message for
/// ListUserStates.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [list user states](UserStateListCall) (response)
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ListUserStatesResponse {
    /// Current response metadata.
    #[serde(rename="responseMetadata")]
    pub response_metadata: Option<ResponseMetadata>,
    /// User's states.
    #[serde(rename="userStates")]
    pub user_states: Option<Vec<String>>,
}
impl client::ResponseResult for ListUserStatesResponse {}
/// A CompanyRelation resource representing information about a user's
/// affiliation and standing with a company in Partners.
///
/// # Activities
///
/// This type is used in activities, which are methods you may call on this type or where this type is involved in.
/// The list links the activity name, along with information about where it is used (one of *request* and *response*).
///
/// * [create company relation users](UserCreateCompanyRelationCall) (request|response)
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct CompanyRelation {
    /// Indicates if the user is an admin for this company.
    #[serde(rename="companyAdmin")]
    pub company_admin: Option<bool>,
    /// The primary address for this company.
    pub address: Option<String>,
    /// The flag that indicates if the company is pending verification.
    #[serde(rename="isPending")]
    pub is_pending: Option<bool>,
    /// The timestamp of when affiliation was requested.
    /// @OutputOnly
    #[serde(rename="creationTime")]
    pub creation_time: Option<String>,
    /// The primary location of the company.
    #[serde(rename="primaryAddress")]
    pub primary_address: Option<Location>,
    /// The state of relationship, in terms of approvals.
    pub state: Option<String>,
    /// The name (in the company's primary language) for the company.
    pub name: Option<String>,
    /// The AdWords manager account number associated with this company.
    #[serde(rename="managerAccount")]
    pub manager_account: Option<String>,
    /// The segment the company is classified as.
    pub segment: Option<Vec<String>>,
    /// The internal company ID.
    /// Only available for a whitelisted set of api clients.
    #[serde(rename="internalCompanyId")]
    pub internal_company_id: Option<String>,
    /// Whether the company is a Partner.
    /// NOTE(review): despite the boolean-sounding description, the value is a
    /// badge-tier string — confirm semantics against the API discovery document.
    #[serde(rename="badgeTier")]
    pub badge_tier: Option<String>,
    /// The list of Google Partners specialization statuses for the company.
    #[serde(rename="specializationStatus")]
    pub specialization_status: Option<Vec<SpecializationStatus>>,
    /// The phone number for the company's primary address.
    #[serde(rename="phoneNumber")]
    pub phone_number: Option<String>,
    /// The website URL for this company.
    pub website: Option<String>,
    /// The primary country code of the company.
    #[serde(rename="primaryCountryCode")]
    pub primary_country_code: Option<String>,
    /// The ID of the company. There may be no id if this is a
    /// pending company.
    #[serde(rename="companyId")]
    pub company_id: Option<String>,
    /// The primary language code of the company.
    #[serde(rename="primaryLanguageCode")]
    pub primary_language_code: Option<String>,
    /// A URL to a profile photo, e.g. a G+ profile photo.
    #[serde(rename="logoUrl")]
    pub logo_url: Option<String>,
    /// The timestamp when the user was approved.
    /// @OutputOnly
    #[serde(rename="resolvedTimestamp")]
    pub resolved_timestamp: Option<String>,
}
impl client::RequestValue for CompanyRelation {}
impl client::ResponseResult for CompanyRelation {}
/// Represents a whole or partial calendar date, e.g. a birthday. The time of day
/// and time zone are either specified elsewhere or are not significant. The date
/// is relative to the Proleptic Gregorian Calendar. This can represent:
///
/// * A full date, with non-zero year, month and day values
/// * A month and day value, with a zero year, e.g. an anniversary
/// * A year on its own, with zero month and day values
/// * A year and month value, with a zero day, e.g. a credit card expiration date
///
/// Related types are google.type.TimeOfDay and `google.protobuf.Timestamp`.
///
/// This type is not used in any activity, and only used as *part* of another schema.
///
/// All fields are `Option`s; a value absent from the API payload deserializes to `None`.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct Date {
    /// Year of date. Must be from 1 to 9999, or 0 if specifying a date without
    /// a year.
    pub year: Option<i32>,
    /// Day of month. Must be from 1 to 31 and valid for the year and month, or 0
    /// if specifying a year by itself or a year and month where the day is not
    /// significant.
    pub day: Option<i32>,
    /// Month of year. Must be from 1 to 12, or 0 if specifying a year without a
    /// month and day.
    pub month: Option<i32>,
}
impl client::Part for Date {}
// ###################
// MethodBuilders ###
// #################
/// A builder providing access to all methods supported on *userEvent* resources.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `log(...)`
/// // to build up your call.
/// let rb = hub.user_events();
/// # }
/// ```
pub struct UserEventMethods<'a> {
    /// Reference back to the `Partners` hub this builder was created from.
    hub: &'a Partners<>,
}
impl<'a> client::MethodsBuilder for UserEventMethods<'a> {}
impl<'a> UserEventMethods<'a> {
    /// Create a builder to help you perform the following task:
    ///
    /// Logs a user event.
    ///
    /// # Arguments
    ///
    /// * `request` - No description provided.
    pub fn log(&self, request: LogUserEventRequest) -> UserEventLogCall<'a> {
        UserEventLogCall {
            hub: self.hub,
            _request: request,
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
}
/// A builder providing access to all methods supported on *clientMessage* resources.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `log(...)`
/// // to build up your call.
/// let rb = hub.client_messages();
/// # }
/// ```
pub struct ClientMessageMethods<'a> {
    /// Reference back to the `Partners` hub this builder was created from.
    hub: &'a Partners<>,
}
impl<'a> client::MethodsBuilder for ClientMessageMethods<'a> {}
impl<'a> ClientMessageMethods<'a> {
    /// Create a builder to help you perform the following task:
    ///
    /// Logs a generic message from the client, such as
    /// `Failed to render component`, `Profile page is running slow`,
    /// `More than 500 users have accessed this result.`, etc.
    ///
    /// # Arguments
    ///
    /// * `request` - No description provided.
    pub fn log(&self, request: LogMessageRequest) -> ClientMessageLogCall<'a> {
        ClientMessageLogCall {
            hub: self.hub,
            _request: request,
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
}
/// A builder providing access to all methods supported on *lead* resources.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `list(...)`
/// // to build up your call.
/// let rb = hub.leads();
/// # }
/// ```
pub struct LeadMethods<'a> {
    /// Reference back to the `Partners` hub this builder was created from.
    hub: &'a Partners<>,
}
impl<'a> client::MethodsBuilder for LeadMethods<'a> {}
impl<'a> LeadMethods<'a> {
    /// Create a builder to help you perform the following task:
    ///
    /// Lists advertiser leads for a user's associated company.
    /// Should only be called within the context of an authorized logged in user.
    pub fn list(&self) -> LeadListCall<'a> {
        LeadListCall {
            hub: self.hub,
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _page_token: Default::default(),
            _page_size: Default::default(),
            _order_by: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
}
/// A builder providing access to all methods supported on *offer* resources.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `history_list(...)` and `list(...)`
/// // to build up your call.
/// let rb = hub.offers();
/// # }
/// ```
pub struct OfferMethods<'a> {
    /// Reference back to the `Partners` hub this builder was created from.
    hub: &'a Partners<>,
}
impl<'a> client::MethodsBuilder for OfferMethods<'a> {}
impl<'a> OfferMethods<'a> {
    /// Create a builder to help you perform the following task:
    ///
    /// Lists the Historical Offers for the current user (or user's entire company)
    pub fn history_list(&self) -> OfferHistoryListCall<'a> {
        OfferHistoryListCall {
            hub: self.hub,
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _page_token: Default::default(),
            _page_size: Default::default(),
            _order_by: Default::default(),
            _entire_company: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
    /// Create a builder to help you perform the following task:
    ///
    /// Lists the Offers available for the current user
    pub fn list(&self) -> OfferListCall<'a> {
        OfferListCall {
            hub: self.hub,
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
}
/// A builder providing access to all methods supported on *analytic* resources.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `list(...)`
/// // to build up your call.
/// let rb = hub.analytics();
/// # }
/// ```
pub struct AnalyticMethods<'a> {
    /// Reference back to the `Partners` hub this builder was created from.
    hub: &'a Partners<>,
}
impl<'a> client::MethodsBuilder for AnalyticMethods<'a> {}
impl<'a> AnalyticMethods<'a> {
    /// Create a builder to help you perform the following task:
    ///
    /// Lists analytics data for a user's associated company.
    /// Should only be called within the context of an authorized logged in user.
    pub fn list(&self) -> AnalyticListCall<'a> {
        AnalyticListCall {
            hub: self.hub,
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _page_token: Default::default(),
            _page_size: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
}
/// A builder providing access to all methods supported on *userState* resources.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `list(...)`
/// // to build up your call.
/// let rb = hub.user_states();
/// # }
/// ```
pub struct UserStateMethods<'a> {
    /// Reference back to the `Partners` hub this builder was created from.
    hub: &'a Partners<>,
}
impl<'a> client::MethodsBuilder for UserStateMethods<'a> {}
impl<'a> UserStateMethods<'a> {
    /// Create a builder to help you perform the following task:
    ///
    /// Lists states for current user.
    pub fn list(&self) -> UserStateListCall<'a> {
        UserStateListCall {
            hub: self.hub,
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
}
/// A builder providing access to all free methods, which are not associated with a particular resource.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `get_partnersstatus(...)`, `update_companies(...)` and `update_leads(...)`
/// // to build up your call.
/// let rb = hub.methods();
/// # }
/// ```
pub struct MethodMethods<'a> {
    /// Reference back to the `Partners` hub this builder was created from.
    hub: &'a Partners<>,
}
impl<'a> client::MethodsBuilder for MethodMethods<'a> {}
impl<'a> MethodMethods<'a> {
    /// Create a builder to help you perform the following task:
    ///
    /// Updates the specified lead.
    ///
    /// # Arguments
    ///
    /// * `request` - No description provided.
    pub fn update_leads(&self, request: Lead) -> MethodUpdateLeadCall<'a> {
        MethodUpdateLeadCall {
            hub: self.hub,
            _request: request,
            _update_mask: Default::default(),
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
    /// Create a builder to help you perform the following task:
    ///
    /// Update company.
    /// Should only be called within the context of an authorized logged in user.
    ///
    /// # Arguments
    ///
    /// * `request` - No description provided.
    pub fn update_companies(&self, request: Company) -> MethodUpdateCompanyCall<'a> {
        MethodUpdateCompanyCall {
            hub: self.hub,
            _request: request,
            _update_mask: Default::default(),
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
    /// Create a builder to help you perform the following task:
    ///
    /// Gets Partners Status of the logged in user's agency.
    /// Should only be called if the logged in user is the admin of the agency.
    pub fn get_partnersstatus(&self) -> MethodGetPartnersstatuCall<'a> {
        MethodGetPartnersstatuCall {
            hub: self.hub,
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
}
/// A builder providing access to all methods supported on *company* resources.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `get(...)`, `leads_create(...)` and `list(...)`
/// // to build up your call.
/// let rb = hub.companies();
/// # }
/// ```
pub struct CompanyMethods<'a> {
    /// Reference back to the `Partners` hub this builder was created from.
    hub: &'a Partners<>,
}
impl<'a> client::MethodsBuilder for CompanyMethods<'a> {}
impl<'a> CompanyMethods<'a> {
    /// Create a builder to help you perform the following task:
    ///
    /// Creates an advertiser lead for the given company ID.
    ///
    /// # Arguments
    ///
    /// * `request` - No description provided.
    /// * `companyId` - The ID of the company to contact.
    pub fn leads_create(&self, request: CreateLeadRequest, company_id: &str) -> CompanyLeadCreateCall<'a> {
        CompanyLeadCreateCall {
            hub: self.hub,
            _request: request,
            _company_id: company_id.to_string(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
    /// Create a builder to help you perform the following task:
    ///
    /// Gets a company.
    ///
    /// # Arguments
    ///
    /// * `companyId` - The ID of the company to retrieve.
    pub fn get(&self, company_id: &str) -> CompanyGetCall<'a> {
        CompanyGetCall {
            hub: self.hub,
            _company_id: company_id.to_string(),
            _view: Default::default(),
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _order_by: Default::default(),
            _currency_code: Default::default(),
            _address: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
    /// Create a builder to help you perform the following task:
    ///
    /// Lists companies.
    pub fn list(&self) -> CompanyListCall<'a> {
        CompanyListCall {
            hub: self.hub,
            _website_url: Default::default(),
            _view: Default::default(),
            _specializations: Default::default(),
            _services: Default::default(),
            _request_metadata_user_overrides_user_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _page_token: Default::default(),
            _page_size: Default::default(),
            _order_by: Default::default(),
            _min_monthly_budget_units: Default::default(),
            _min_monthly_budget_nanos: Default::default(),
            _min_monthly_budget_currency_code: Default::default(),
            _max_monthly_budget_units: Default::default(),
            _max_monthly_budget_nanos: Default::default(),
            _max_monthly_budget_currency_code: Default::default(),
            _language_codes: Default::default(),
            _industries: Default::default(),
            _gps_motivations: Default::default(),
            _company_name: Default::default(),
            _address: Default::default(),
            _delegate: Default::default(),
            _additional_params: Default::default(),
        }
    }
}
/// A builder providing access to all methods supported on *user* resources.
/// It is not used directly, but through the `Partners` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
/// ```test_harness,no_run
/// extern crate hyper;
/// extern crate hyper_rustls;
/// extern crate yup_oauth2 as oauth2;
/// extern crate google_partners2 as partners2;
///
/// # async fn dox() {
/// use std::default::Default;
/// use oauth2;
/// use partners2::Partners;
///
/// let secret: oauth2::ApplicationSecret = Default::default();
/// let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// secret,
/// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// ).build().await.unwrap();
/// let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
/// // like `create_company_relation(...)`, `delete_company_relation(...)`, `get(...)` and `update_profile(...)`
/// // to build up your call.
/// let rb = hub.users();
/// # }
/// ```
// Restyled: the generator's empty `where` clause has been dropped.
pub struct UserMethods<'a> {
    hub: &'a Partners<>, // shared hub backing every call builder created here
}

impl<'a> client::MethodsBuilder for UserMethods<'a> {}
impl<'a> UserMethods<'a> {
    /// Builds a call that updates a user's profile. A user can only update
    /// their own profile and this should only be called within the context
    /// of a logged in user.
    ///
    /// # Arguments
    ///
    /// * `request` - No description provided.
    pub fn update_profile(&self, request: UserProfile) -> UserUpdateProfileCall<'a> {
        // Every optional query parameter starts out unset.
        UserUpdateProfileCall {
            hub: self.hub,
            _request: request,
            _additional_params: Default::default(),
            _delegate: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_user_overrides_user_id: Default::default(),
        }
    }

    /// Builds a call that creates a user's company relation, affiliating the
    /// user to a company.
    ///
    /// # Arguments
    ///
    /// * `request` - No description provided.
    /// * `userId` - The ID of the user. Can be set to <code>me</code> to mean
    ///              the currently authenticated user.
    pub fn create_company_relation(&self, request: CompanyRelation, user_id: &str) -> UserCreateCompanyRelationCall<'a> {
        UserCreateCompanyRelationCall {
            hub: self.hub,
            _request: request,
            _user_id: user_id.to_owned(),
            _additional_params: Default::default(),
            _delegate: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_user_overrides_user_id: Default::default(),
        }
    }

    /// Builds a call that deletes a user's company relation, unaffiliating the
    /// user from a company.
    ///
    /// # Arguments
    ///
    /// * `userId` - The ID of the user. Can be set to <code>me</code> to mean
    ///              the currently authenticated user.
    pub fn delete_company_relation(&self, user_id: &str) -> UserDeleteCompanyRelationCall<'a> {
        UserDeleteCompanyRelationCall {
            hub: self.hub,
            _user_id: user_id.to_owned(),
            _additional_params: Default::default(),
            _delegate: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_user_overrides_user_id: Default::default(),
        }
    }

    /// Builds a call that gets a user.
    ///
    /// # Arguments
    ///
    /// * `userId` - Identifier of the user. Can be set to <code>me</code> to
    ///              mean the currently authenticated user.
    pub fn get(&self, user_id: &str) -> UserGetCall<'a> {
        UserGetCall {
            hub: self.hub,
            _user_id: user_id.to_owned(),
            _user_view: Default::default(),
            _additional_params: Default::default(),
            _delegate: Default::default(),
            _request_metadata_experiment_ids: Default::default(),
            _request_metadata_locale: Default::default(),
            _request_metadata_partners_session_id: Default::default(),
            _request_metadata_traffic_source_traffic_source_id: Default::default(),
            _request_metadata_traffic_source_traffic_sub_id: Default::default(),
            _request_metadata_user_overrides_ip_address: Default::default(),
            _request_metadata_user_overrides_user_id: Default::default(),
        }
    }
}
// ###################
// CallBuilders ###
// #################
/// Logs a user event.
///
/// A builder for the *log* method supported by a *userEvent* resource.
/// It is not used directly, but through a `UserEventMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// use partners2::api::LogUserEventRequest;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // As the method needs a request, you would usually fill it with the desired information
/// // into the respective structure. Some of the parts shown here might not be applicable !
/// // Values shown here are possibly random and not representative !
/// let mut req = LogUserEventRequest::default();
///
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.user_events().log(req)
/// .doit().await;
/// # }
/// ```
// Restyled: the generator's empty `where` clause has been dropped.
pub struct UserEventLogCall<'a> {
    hub: &'a Partners<>,                             // shared hub (HTTP client + auth)
    _request: LogUserEventRequest,                   // JSON body sent with the call
    _delegate: Option<&'a mut dyn client::Delegate>, // optional progress/retry hook
    _additional_params: HashMap<String, String>,     // free-form extra query parameters
}

impl<'a> client::CallBuilder for UserEventLogCall<'a> {}
impl<'a> UserEventLogCall<'a> {
/// Perform the operation you have build so far.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, LogUserEventResponse)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.userEvents.log",
http_method: hyper::Method::POST });
let mut params: Vec<(&str, String)> = Vec::with_capacity(3 + self._additional_params.len());
for &field in ["alt"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/userEvents:log";
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
let mut json_mime_type: mime::Mime = "application/json".parse().unwrap();
let mut request_value_reader =
{
let mut value = json::value::to_value(&self._request).expect("serde to work");
client::remove_json_null_values(&mut value);
let mut dst = io::Cursor::new(Vec::with_capacity(128));
json::to_writer(&mut dst, &value).unwrap();
dst
};
let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap();
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
loop {
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::POST).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.header(CONTENT_TYPE, format!("{}", json_mime_type.to_string()))
.header(CONTENT_LENGTH, request_size as u64)
.body(hyper::body::Body::from(request_value_reader.get_ref().clone()));
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
///
/// Sets the *request* property to the given value.
///
/// Even though the property as already been set when instantiating this call,
/// we provide this method for API completeness.
pub fn request(mut self, new_value: LogUserEventRequest) -> UserEventLogCall<'a> {
self._request = new_value;
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> UserEventLogCall<'a> {
self._delegate = Some(new_value);
self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> UserEventLogCall<'a>
where T: AsRef<str> {
self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
}
/// Logs a generic message from the client, such as
/// `Failed to render component`, `Profile page is running slow`,
/// `More than 500 users have accessed this result.`, etc.
///
/// A builder for the *log* method supported by a *clientMessage* resource.
/// It is not used directly, but through a `ClientMessageMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// use partners2::api::LogMessageRequest;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // As the method needs a request, you would usually fill it with the desired information
/// // into the respective structure. Some of the parts shown here might not be applicable !
/// // Values shown here are possibly random and not representative !
/// let mut req = LogMessageRequest::default();
///
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.client_messages().log(req)
/// .doit().await;
/// # }
/// ```
// Restyled: the generator's empty `where` clause has been dropped.
pub struct ClientMessageLogCall<'a> {
    hub: &'a Partners<>,                             // shared hub (HTTP client + auth)
    _request: LogMessageRequest,                     // JSON body sent with the call
    _delegate: Option<&'a mut dyn client::Delegate>, // optional progress/retry hook
    _additional_params: HashMap<String, String>,     // free-form extra query parameters
}

impl<'a> client::CallBuilder for ClientMessageLogCall<'a> {}
impl<'a> ClientMessageLogCall<'a> {
/// Perform the operation you have build so far.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, LogMessageResponse)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.clientMessages.log",
http_method: hyper::Method::POST });
let mut params: Vec<(&str, String)> = Vec::with_capacity(3 + self._additional_params.len());
for &field in ["alt"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/clientMessages:log";
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
let mut json_mime_type: mime::Mime = "application/json".parse().unwrap();
let mut request_value_reader =
{
let mut value = json::value::to_value(&self._request).expect("serde to work");
client::remove_json_null_values(&mut value);
let mut dst = io::Cursor::new(Vec::with_capacity(128));
json::to_writer(&mut dst, &value).unwrap();
dst
};
let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap();
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
loop {
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::POST).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.header(CONTENT_TYPE, format!("{}", json_mime_type.to_string()))
.header(CONTENT_LENGTH, request_size as u64)
.body(hyper::body::Body::from(request_value_reader.get_ref().clone()));
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
///
/// Sets the *request* property to the given value.
///
/// Even though the property as already been set when instantiating this call,
/// we provide this method for API completeness.
pub fn request(mut self, new_value: LogMessageRequest) -> ClientMessageLogCall<'a> {
self._request = new_value;
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> ClientMessageLogCall<'a> {
self._delegate = Some(new_value);
self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> ClientMessageLogCall<'a>
where T: AsRef<str> {
self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
}
/// Lists advertiser leads for a user's associated company.
/// Should only be called within the context of an authorized logged in user.
///
/// A builder for the *list* method supported by a *lead* resource.
/// It is not used directly, but through a `LeadMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.leads().list()
/// .request_metadata_user_overrides_user_id("sed")
/// .request_metadata_user_overrides_ip_address("ut")
/// .request_metadata_traffic_source_traffic_sub_id("gubergren")
/// .request_metadata_traffic_source_traffic_source_id("rebum.")
/// .request_metadata_partners_session_id("est")
/// .request_metadata_locale("ipsum")
/// .add_request_metadata_experiment_ids("ipsum")
/// .page_token("est")
/// .page_size(-62)
/// .order_by("ea")
/// .doit().await;
/// # }
/// ```
// Restyled: the generator's empty `where` clause has been dropped.
pub struct LeadListCall<'a> {
    hub: &'a Partners<>, // shared hub (HTTP client + auth)
    // Optional query parameters; each maps onto one URL query key in doit().
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    _request_metadata_experiment_ids: Vec<String>, // repeated query parameter
    _page_token: Option<String>,
    _page_size: Option<i32>,
    _order_by: Option<String>,
    _delegate: Option<&'a mut dyn client::Delegate>, // optional progress/retry hook
    _additional_params: HashMap<String, String>,     // free-form extra query parameters
}

impl<'a> client::CallBuilder for LeadListCall<'a> {}
impl<'a> LeadListCall<'a> {
    /// Perform the operation you have built so far.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, ListLeadsResponse)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.leads.list",
http_method: hyper::Method::GET });
let mut params: Vec<(&str, String)> = Vec::with_capacity(12 + self._additional_params.len());
if let Some(value) = self._request_metadata_user_overrides_user_id {
params.push(("requestMetadata.userOverrides.userId", value.to_string()));
}
if let Some(value) = self._request_metadata_user_overrides_ip_address {
params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
}
if let Some(value) = self._request_metadata_partners_session_id {
params.push(("requestMetadata.partnersSessionId", value.to_string()));
}
if let Some(value) = self._request_metadata_locale {
params.push(("requestMetadata.locale", value.to_string()));
}
if self._request_metadata_experiment_ids.len() > 0 {
for f in self._request_metadata_experiment_ids.iter() {
params.push(("requestMetadata.experimentIds", f.to_string()));
}
}
if let Some(value) = self._page_token {
params.push(("pageToken", value.to_string()));
}
if let Some(value) = self._page_size {
params.push(("pageSize", value.to_string()));
}
if let Some(value) = self._order_by {
params.push(("orderBy", value.to_string()));
}
for &field in ["alt", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds", "pageToken", "pageSize", "orderBy"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/leads";
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
loop {
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.body(hyper::body::Body::empty());
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
/// Logged-in user ID to impersonate instead of the user's ID.
///
/// Sets the *request metadata.user overrides.user id* query property to the given value.
pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> LeadListCall<'a> {
self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
self
}
/// IP address to use instead of the user's geo-located IP address.
///
/// Sets the *request metadata.user overrides.ip address* query property to the given value.
pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> LeadListCall<'a> {
self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
self
}
/// Second level identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> LeadListCall<'a> {
self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
self
}
/// Identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> LeadListCall<'a> {
self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
self
}
/// Google Partners session ID.
///
/// Sets the *request metadata.partners session id* query property to the given value.
pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> LeadListCall<'a> {
self._request_metadata_partners_session_id = Some(new_value.to_string());
self
}
/// Locale to use for the current request.
///
/// Sets the *request metadata.locale* query property to the given value.
pub fn request_metadata_locale(mut self, new_value: &str) -> LeadListCall<'a> {
self._request_metadata_locale = Some(new_value.to_string());
self
}
/// Experiment IDs the current request belongs to.
///
    /// Append the given value to the *request metadata.experiment ids* query property.
    /// Each appended value retains its original ordering and is emitted as its own repeated `requestMetadata.experimentIds` query parameter in the URL.
pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> LeadListCall<'a> {
self._request_metadata_experiment_ids.push(new_value.to_string());
self
}
/// A token identifying a page of results that the server returns.
/// Typically, this is the value of `ListLeadsResponse.next_page_token`
/// returned from the previous call to
/// ListLeads.
///
/// Sets the *page token* query property to the given value.
pub fn page_token(mut self, new_value: &str) -> LeadListCall<'a> {
self._page_token = Some(new_value.to_string());
self
}
/// Requested page size. Server may return fewer leads than requested.
/// If unspecified, server picks an appropriate default.
///
/// Sets the *page size* query property to the given value.
pub fn page_size(mut self, new_value: i32) -> LeadListCall<'a> {
self._page_size = Some(new_value);
self
}
/// How to order Leads. Currently, only `create_time`
/// and `create_time desc` are supported
///
/// Sets the *order by* query property to the given value.
pub fn order_by(mut self, new_value: &str) -> LeadListCall<'a> {
self._order_by = Some(new_value.to_string());
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> LeadListCall<'a> {
self._delegate = Some(new_value);
self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> LeadListCall<'a>
where T: AsRef<str> {
self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
}
/// Lists the Historical Offers for the current user (or user's entire company)
///
/// A builder for the *history.list* method supported by a *offer* resource.
/// It is not used directly, but through a `OfferMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.offers().history_list()
/// .request_metadata_user_overrides_user_id("dolor")
/// .request_metadata_user_overrides_ip_address("Lorem")
/// .request_metadata_traffic_source_traffic_sub_id("eos")
/// .request_metadata_traffic_source_traffic_source_id("labore")
/// .request_metadata_partners_session_id("sed")
/// .request_metadata_locale("duo")
/// .add_request_metadata_experiment_ids("sed")
/// .page_token("no")
/// .page_size(-15)
/// .order_by("kasd")
/// .entire_company(true)
/// .doit().await;
/// # }
/// ```
pub struct OfferHistoryListCall<'a> {
    // Hub this call borrows its HTTP client, base URL, and user agent from.
    hub: &'a Partners<>,
    // Typed query parameters: `None` / empty Vec means "not sent".
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    _request_metadata_experiment_ids: Vec<String>,
    _page_token: Option<String>,
    _page_size: Option<i32>,
    _order_by: Option<String>,
    _entire_company: Option<bool>,
    // Progress/error callback; a no-op default is substituted when absent.
    _delegate: Option<&'a mut dyn client::Delegate>,
    // Free-form query parameters added via `param()`.
    _additional_params: HashMap<String, String>,
}
// Marker impl: tags this builder as a call builder for the client plumbing.
impl<'a> client::CallBuilder for OfferHistoryListCall<'a> {}
impl<'a> OfferHistoryListCall<'a> {

    /// Perform the operation you have built so far.
    ///
    /// Serializes the configured query parameters, issues a GET request to
    /// `v2/offers/history`, and decodes the JSON response into
    /// `ListOffersHistoryResponse`. Transport and HTTP failures are reported
    /// to the delegate, which may request a retry.
    pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, ListOffersHistoryResponse)> {
        use std::io::{Read, Seek};
        use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
        use client::ToParts;
        // Fall back to a no-op delegate when the caller did not install one.
        let mut dd = client::DefaultDelegate;
        let mut dlg: &mut dyn client::Delegate = match self._delegate {
            Some(d) => d,
            None => &mut dd
        };
        dlg.begin(client::MethodInfo { id: "partners.offers.history.list",
                               http_method: hyper::Method::GET });
        // Serialize only the options that were actually set.
        let mut params: Vec<(&str, String)> = Vec::with_capacity(13 + self._additional_params.len());
        if let Some(value) = self._request_metadata_user_overrides_user_id {
            params.push(("requestMetadata.userOverrides.userId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_ip_address {
            params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
            params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
            params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_partners_session_id {
            params.push(("requestMetadata.partnersSessionId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_locale {
            params.push(("requestMetadata.locale", value.to_string()));
        }
        // Experiment IDs are repeated query parameters, one entry per value.
        if self._request_metadata_experiment_ids.len() > 0 {
            for f in self._request_metadata_experiment_ids.iter() {
                params.push(("requestMetadata.experimentIds", f.to_string()));
            }
        }
        if let Some(value) = self._page_token {
            params.push(("pageToken", value.to_string()));
        }
        if let Some(value) = self._page_size {
            params.push(("pageSize", value.to_string()));
        }
        if let Some(value) = self._order_by {
            params.push(("orderBy", value.to_string()));
        }
        if let Some(value) = self._entire_company {
            params.push(("entireCompany", value.to_string()));
        }
        // Reject additional params that clash with the typed setters above.
        for &field in ["alt", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds", "pageToken", "pageSize", "orderBy", "entireCompany"].iter() {
            if self._additional_params.contains_key(field) {
                dlg.finished(false);
                return Err(client::Error::FieldClash(field));
            }
        }
        for (name, value) in self._additional_params.iter() {
            params.push((&name, value.clone()));
        }

        // The response is always requested as JSON.
        params.push(("alt", "json".to_string()));

        let mut url = self.hub._base_url.clone() + "v2/offers/history";

        // An API key is mandatory for this call; the delegate supplies it.
        let key = dlg.api_key();
        match key {
            Some(value) => params.push(("key", value)),
            None => {
                dlg.finished(false);
                return Err(client::Error::MissingAPIKey)
            }
        }

        let url = url::Url::parse_with_params(&url, params).unwrap();

        // Request/retry loop: the delegate decides via `Retry::After` whether
        // to retry after a transport error or an unsuccessful HTTP status.
        loop {
            let mut req_result = {
                let client = &self.hub.client;
                dlg.pre_request();
                let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
                        .header(USER_AGENT, self.hub._user_agent.clone());

                let request = req_builder
                        .body(hyper::body::Body::empty());

                client.request(request.unwrap()).await
            };

            match req_result {
                Err(err) => {
                    if let client::Retry::After(d) = dlg.http_error(&err) {
                        // NOTE(review): `sleep` is defined outside this view — presumably a
                        // blocking thread sleep honoring the retry delay; confirm executor impact.
                        sleep(d);
                        continue;
                    }
                    dlg.finished(false);
                    return Err(client::Error::HttpError(err))
                }
                Ok(mut res) => {
                    if !res.status().is_success() {
                        // Try several server error shapes before giving up.
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
                        let server_error = json::from_str::<client::ServerError>(&res_body_string)
                            .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
                            .ok();

                        if let client::Retry::After(d) = dlg.http_failure(&res,
                                                              json_server_error,
                                                              server_error) {
                            sleep(d);
                            continue;
                        }
                        dlg.finished(false);
                        return match json::from_str::<client::ErrorResponse>(&res_body_string){
                            Err(_) => Err(client::Error::Failure(res)),
                            Ok(serr) => Err(client::Error::BadRequest(serr))
                        }
                    }
                    // Success: decode the JSON body into the typed response,
                    // reporting decode errors to the delegate.
                    let result_value = {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;

                        match json::from_str(&res_body_string) {
                            Ok(decoded) => (res, decoded),
                            Err(err) => {
                                dlg.response_json_decode_error(&res_body_string, &err);
                                return Err(client::Error::JsonDecodeError(res_body_string, err));
                            }
                        }
                    };

                    dlg.finished(true);
                    return Ok(result_value)
                }
            }
        }
    }

    /// Logged-in user ID to impersonate instead of the user's ID.
    ///
    /// Sets the *request metadata.user overrides.user id* query property to the given value.
    pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
        self
    }
    /// IP address to use instead of the user's geo-located IP address.
    ///
    /// Sets the *request metadata.user overrides.ip address* query property to the given value.
    pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
        self
    }
    /// Second level identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
        self
    }
    /// Identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
        self
    }
    /// Google Partners session ID.
    ///
    /// Sets the *request metadata.partners session id* query property to the given value.
    pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._request_metadata_partners_session_id = Some(new_value.to_string());
        self
    }
    /// Locale to use for the current request.
    ///
    /// Sets the *request metadata.locale* query property to the given value.
    pub fn request_metadata_locale(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._request_metadata_locale = Some(new_value.to_string());
        self
    }
    /// Experiment IDs the current request belongs to.
    ///
    /// Append the given value to the *request metadata.experiment ids* query property.
    /// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
    pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._request_metadata_experiment_ids.push(new_value.to_string());
        self
    }
    /// Token to retrieve a specific page.
    ///
    /// Sets the *page token* query property to the given value.
    pub fn page_token(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._page_token = Some(new_value.to_string());
        self
    }
    /// Maximum number of rows to return per page.
    ///
    /// Sets the *page size* query property to the given value.
    pub fn page_size(mut self, new_value: i32) -> OfferHistoryListCall<'a> {
        self._page_size = Some(new_value);
        self
    }
    /// Comma-separated list of fields to order by, e.g.: "foo,bar,baz".
    /// Use "foo desc" to sort descending.
    /// List of valid field names is: name, offer_code, expiration_time, status,
    /// last_modified_time, sender_name, creation_time, country_code,
    /// offer_type.
    ///
    /// Sets the *order by* query property to the given value.
    pub fn order_by(mut self, new_value: &str) -> OfferHistoryListCall<'a> {
        self._order_by = Some(new_value.to_string());
        self
    }
    /// if true, show history for the entire company. Requires user to be admin.
    ///
    /// Sets the *entire company* query property to the given value.
    pub fn entire_company(mut self, new_value: bool) -> OfferHistoryListCall<'a> {
        self._entire_company = Some(new_value);
        self
    }
    /// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
    /// while executing the actual API request.
    ///
    /// It should be used to handle progress information, and to implement a certain level of resilience.
    ///
    /// Sets the *delegate* property to the given value.
    pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> OfferHistoryListCall<'a> {
        self._delegate = Some(new_value);
        self
    }
    /// Set any additional parameter of the query string used in the request.
    /// It should be used to set parameters which are not yet available through their own
    /// setters.
    ///
    /// Please note that this method must not be used to set any of the known parameters
    /// which have their own setter method. If done anyway, the request will fail.
    ///
    /// # Additional Parameters
    ///
    /// * *alt* (query-string) - Data format for response.
    /// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    /// * *access_token* (query-string) - OAuth access token.
    /// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
    /// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
    /// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    /// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
    /// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
    /// * *$.xgafv* (query-string) - V1 error format.
    /// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
    /// * *callback* (query-string) - JSONP
    pub fn param<T>(mut self, name: T, value: T) -> OfferHistoryListCall<'a>
                                                      where T: AsRef<str> {
        self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
        self
    }

}
/// Lists the Offers available for the current user
///
/// A builder for the *list* method supported by a *offer* resource.
/// It is not used directly, but through a `OfferMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.offers().list()
/// .request_metadata_user_overrides_user_id("et")
/// .request_metadata_user_overrides_ip_address("et")
/// .request_metadata_traffic_source_traffic_sub_id("vero")
/// .request_metadata_traffic_source_traffic_source_id("erat")
/// .request_metadata_partners_session_id("sed")
/// .request_metadata_locale("duo")
/// .add_request_metadata_experiment_ids("dolore")
/// .doit().await;
/// # }
/// ```
pub struct OfferListCall<'a> {
    // Hub this call borrows its HTTP client, base URL, and user agent from.
    hub: &'a Partners<>,
    // Typed query parameters: `None` / empty Vec means "not sent".
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    _request_metadata_experiment_ids: Vec<String>,
    // Progress/error callback; a no-op default is substituted when absent.
    _delegate: Option<&'a mut dyn client::Delegate>,
    // Free-form query parameters added via `param()`.
    _additional_params: HashMap<String, String>,
}
// Marker impl: tags this builder as a call builder for the client plumbing.
impl<'a> client::CallBuilder for OfferListCall<'a> {}
impl<'a> OfferListCall<'a> {

    /// Perform the operation you have built so far.
    ///
    /// Serializes the configured query parameters, issues a GET request to
    /// `v2/offers`, and decodes the JSON response into `ListOffersResponse`.
    /// Transport and HTTP failures are reported to the delegate, which may
    /// request a retry.
    pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, ListOffersResponse)> {
        use std::io::{Read, Seek};
        use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
        use client::ToParts;
        // Fall back to a no-op delegate when the caller did not install one.
        let mut dd = client::DefaultDelegate;
        let mut dlg: &mut dyn client::Delegate = match self._delegate {
            Some(d) => d,
            None => &mut dd
        };
        dlg.begin(client::MethodInfo { id: "partners.offers.list",
                               http_method: hyper::Method::GET });
        // Serialize only the options that were actually set.
        let mut params: Vec<(&str, String)> = Vec::with_capacity(9 + self._additional_params.len());
        if let Some(value) = self._request_metadata_user_overrides_user_id {
            params.push(("requestMetadata.userOverrides.userId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_ip_address {
            params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
            params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
            params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_partners_session_id {
            params.push(("requestMetadata.partnersSessionId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_locale {
            params.push(("requestMetadata.locale", value.to_string()));
        }
        // Experiment IDs are repeated query parameters, one entry per value.
        if self._request_metadata_experiment_ids.len() > 0 {
            for f in self._request_metadata_experiment_ids.iter() {
                params.push(("requestMetadata.experimentIds", f.to_string()));
            }
        }
        // Reject additional params that clash with the typed setters above.
        for &field in ["alt", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
            if self._additional_params.contains_key(field) {
                dlg.finished(false);
                return Err(client::Error::FieldClash(field));
            }
        }
        for (name, value) in self._additional_params.iter() {
            params.push((&name, value.clone()));
        }

        // The response is always requested as JSON.
        params.push(("alt", "json".to_string()));

        let mut url = self.hub._base_url.clone() + "v2/offers";

        // An API key is mandatory for this call; the delegate supplies it.
        let key = dlg.api_key();
        match key {
            Some(value) => params.push(("key", value)),
            None => {
                dlg.finished(false);
                return Err(client::Error::MissingAPIKey)
            }
        }

        let url = url::Url::parse_with_params(&url, params).unwrap();

        // Request/retry loop: the delegate decides via `Retry::After` whether
        // to retry after a transport error or an unsuccessful HTTP status.
        loop {
            let mut req_result = {
                let client = &self.hub.client;
                dlg.pre_request();
                let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
                        .header(USER_AGENT, self.hub._user_agent.clone());

                let request = req_builder
                        .body(hyper::body::Body::empty());

                client.request(request.unwrap()).await
            };

            match req_result {
                Err(err) => {
                    if let client::Retry::After(d) = dlg.http_error(&err) {
                        // NOTE(review): `sleep` is defined outside this view — presumably a
                        // blocking thread sleep honoring the retry delay; confirm executor impact.
                        sleep(d);
                        continue;
                    }
                    dlg.finished(false);
                    return Err(client::Error::HttpError(err))
                }
                Ok(mut res) => {
                    if !res.status().is_success() {
                        // Try several server error shapes before giving up.
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
                        let server_error = json::from_str::<client::ServerError>(&res_body_string)
                            .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
                            .ok();

                        if let client::Retry::After(d) = dlg.http_failure(&res,
                                                              json_server_error,
                                                              server_error) {
                            sleep(d);
                            continue;
                        }
                        dlg.finished(false);
                        return match json::from_str::<client::ErrorResponse>(&res_body_string){
                            Err(_) => Err(client::Error::Failure(res)),
                            Ok(serr) => Err(client::Error::BadRequest(serr))
                        }
                    }
                    // Success: decode the JSON body into the typed response,
                    // reporting decode errors to the delegate.
                    let result_value = {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;

                        match json::from_str(&res_body_string) {
                            Ok(decoded) => (res, decoded),
                            Err(err) => {
                                dlg.response_json_decode_error(&res_body_string, &err);
                                return Err(client::Error::JsonDecodeError(res_body_string, err));
                            }
                        }
                    };

                    dlg.finished(true);
                    return Ok(result_value)
                }
            }
        }
    }

    /// Logged-in user ID to impersonate instead of the user's ID.
    ///
    /// Sets the *request metadata.user overrides.user id* query property to the given value.
    pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> OfferListCall<'a> {
        self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
        self
    }
    /// IP address to use instead of the user's geo-located IP address.
    ///
    /// Sets the *request metadata.user overrides.ip address* query property to the given value.
    pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> OfferListCall<'a> {
        self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
        self
    }
    /// Second level identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> OfferListCall<'a> {
        self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
        self
    }
    /// Identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> OfferListCall<'a> {
        self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
        self
    }
    /// Google Partners session ID.
    ///
    /// Sets the *request metadata.partners session id* query property to the given value.
    pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> OfferListCall<'a> {
        self._request_metadata_partners_session_id = Some(new_value.to_string());
        self
    }
    /// Locale to use for the current request.
    ///
    /// Sets the *request metadata.locale* query property to the given value.
    pub fn request_metadata_locale(mut self, new_value: &str) -> OfferListCall<'a> {
        self._request_metadata_locale = Some(new_value.to_string());
        self
    }
    /// Experiment IDs the current request belongs to.
    ///
    /// Append the given value to the *request metadata.experiment ids* query property.
    /// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
    pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> OfferListCall<'a> {
        self._request_metadata_experiment_ids.push(new_value.to_string());
        self
    }
    /// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
    /// while executing the actual API request.
    ///
    /// It should be used to handle progress information, and to implement a certain level of resilience.
    ///
    /// Sets the *delegate* property to the given value.
    pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> OfferListCall<'a> {
        self._delegate = Some(new_value);
        self
    }
    /// Set any additional parameter of the query string used in the request.
    /// It should be used to set parameters which are not yet available through their own
    /// setters.
    ///
    /// Please note that this method must not be used to set any of the known parameters
    /// which have their own setter method. If done anyway, the request will fail.
    ///
    /// # Additional Parameters
    ///
    /// * *alt* (query-string) - Data format for response.
    /// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    /// * *access_token* (query-string) - OAuth access token.
    /// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
    /// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
    /// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    /// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
    /// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
    /// * *$.xgafv* (query-string) - V1 error format.
    /// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
    /// * *callback* (query-string) - JSONP
    pub fn param<T>(mut self, name: T, value: T) -> OfferListCall<'a>
                                                      where T: AsRef<str> {
        self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
        self
    }

}
/// Lists analytics data for a user's associated company.
/// Should only be called within the context of an authorized logged in user.
///
/// A builder for the *list* method supported by a *analytic* resource.
/// It is not used directly, but through a `AnalyticMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.analytics().list()
/// .request_metadata_user_overrides_user_id("et")
/// .request_metadata_user_overrides_ip_address("voluptua.")
/// .request_metadata_traffic_source_traffic_sub_id("amet.")
/// .request_metadata_traffic_source_traffic_source_id("consetetur")
/// .request_metadata_partners_session_id("diam")
/// .request_metadata_locale("dolor")
/// .add_request_metadata_experiment_ids("et")
/// .page_token("et")
/// .page_size(-95)
/// .doit().await;
/// # }
/// ```
pub struct AnalyticListCall<'a> {
    // Hub this call borrows its HTTP client, base URL, and user agent from.
    hub: &'a Partners<>,
    // Typed query parameters: `None` / empty Vec means "not sent".
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    _request_metadata_experiment_ids: Vec<String>,
    _page_token: Option<String>,
    _page_size: Option<i32>,
    // Progress/error callback; a no-op default is substituted when absent.
    _delegate: Option<&'a mut dyn client::Delegate>,
    // Free-form query parameters added via `param()`.
    _additional_params: HashMap<String, String>,
}
// Marker impl: tags this builder as a call builder for the client plumbing.
impl<'a> client::CallBuilder for AnalyticListCall<'a> {}
impl<'a> AnalyticListCall<'a> {
    /// Perform the operation you have built so far.
    ///
    /// Serializes the configured query parameters, issues a GET request to
    /// `v2/analytics`, and decodes the JSON response into
    /// `ListAnalyticsResponse`. Transport and HTTP failures are reported to
    /// the delegate, which may request a retry.
    pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, ListAnalyticsResponse)> {
        use std::io::{Read, Seek};
        use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
        use client::ToParts;
        // Fall back to a no-op delegate when the caller did not install one.
        let mut dd = client::DefaultDelegate;
        let mut dlg: &mut dyn client::Delegate = match self._delegate {
            Some(d) => d,
            None => &mut dd
        };
        dlg.begin(client::MethodInfo { id: "partners.analytics.list",
                               http_method: hyper::Method::GET });
        // Serialize only the options that were actually set.
        let mut params: Vec<(&str, String)> = Vec::with_capacity(11 + self._additional_params.len());
        if let Some(value) = self._request_metadata_user_overrides_user_id {
            params.push(("requestMetadata.userOverrides.userId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_ip_address {
            params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
            params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
            params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_partners_session_id {
            params.push(("requestMetadata.partnersSessionId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_locale {
            params.push(("requestMetadata.locale", value.to_string()));
        }
        // Experiment IDs are repeated query parameters, one entry per value.
        if self._request_metadata_experiment_ids.len() > 0 {
            for f in self._request_metadata_experiment_ids.iter() {
                params.push(("requestMetadata.experimentIds", f.to_string()));
            }
        }
        if let Some(value) = self._page_token {
            params.push(("pageToken", value.to_string()));
        }
        if let Some(value) = self._page_size {
            params.push(("pageSize", value.to_string()));
        }
        // Reject additional params that clash with the typed setters above.
        for &field in ["alt", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds", "pageToken", "pageSize"].iter() {
            if self._additional_params.contains_key(field) {
                dlg.finished(false);
                return Err(client::Error::FieldClash(field));
            }
        }
        for (name, value) in self._additional_params.iter() {
            params.push((&name, value.clone()));
        }

        // The response is always requested as JSON.
        params.push(("alt", "json".to_string()));

        let mut url = self.hub._base_url.clone() + "v2/analytics";

        // An API key is mandatory for this call; the delegate supplies it.
        let key = dlg.api_key();
        match key {
            Some(value) => params.push(("key", value)),
            None => {
                dlg.finished(false);
                return Err(client::Error::MissingAPIKey)
            }
        }

        let url = url::Url::parse_with_params(&url, params).unwrap();

        // Request/retry loop: the delegate decides via `Retry::After` whether
        // to retry after a transport error or an unsuccessful HTTP status.
        loop {
            let mut req_result = {
                let client = &self.hub.client;
                dlg.pre_request();
                let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
                        .header(USER_AGENT, self.hub._user_agent.clone());

                let request = req_builder
                        .body(hyper::body::Body::empty());

                client.request(request.unwrap()).await
            };

            match req_result {
                Err(err) => {
                    if let client::Retry::After(d) = dlg.http_error(&err) {
                        // NOTE(review): `sleep` is defined outside this view — presumably a
                        // blocking thread sleep honoring the retry delay; confirm executor impact.
                        sleep(d);
                        continue;
                    }
                    dlg.finished(false);
                    return Err(client::Error::HttpError(err))
                }
                Ok(mut res) => {
                    if !res.status().is_success() {
                        // Try several server error shapes before giving up.
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
                        let server_error = json::from_str::<client::ServerError>(&res_body_string)
                            .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
                            .ok();

                        if let client::Retry::After(d) = dlg.http_failure(&res,
                                                              json_server_error,
                                                              server_error) {
                            sleep(d);
                            continue;
                        }
                        dlg.finished(false);
                        return match json::from_str::<client::ErrorResponse>(&res_body_string){
                            Err(_) => Err(client::Error::Failure(res)),
                            Ok(serr) => Err(client::Error::BadRequest(serr))
                        }
                    }
                    // Success: decode the JSON body into the typed response,
                    // reporting decode errors to the delegate.
                    let result_value = {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;

                        match json::from_str(&res_body_string) {
                            Ok(decoded) => (res, decoded),
                            Err(err) => {
                                dlg.response_json_decode_error(&res_body_string, &err);
                                return Err(client::Error::JsonDecodeError(res_body_string, err));
                            }
                        }
                    };

                    dlg.finished(true);
                    return Ok(result_value)
                }
            }
        }
    }
/// Logged-in user ID to impersonate instead of the user's ID.
///
/// Sets the *request metadata.user overrides.user id* query property to the given value.
pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> AnalyticListCall<'a> {
    let owned = String::from(new_value);
    self._request_metadata_user_overrides_user_id = Some(owned);
    self
}
/// IP address to use instead of the user's geo-located IP address.
///
/// Sets the *request metadata.user overrides.ip address* query property to the given value.
pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> AnalyticListCall<'a> {
    let owned = String::from(new_value);
    self._request_metadata_user_overrides_ip_address = Some(owned);
    self
}
/// Second level identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> AnalyticListCall<'a> {
    let owned = String::from(new_value);
    self._request_metadata_traffic_source_traffic_sub_id = Some(owned);
    self
}
/// Identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> AnalyticListCall<'a> {
    let owned = String::from(new_value);
    self._request_metadata_traffic_source_traffic_source_id = Some(owned);
    self
}
/// Google Partners session ID.
///
/// Sets the *request metadata.partners session id* query property to the given value.
pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> AnalyticListCall<'a> {
    let owned = String::from(new_value);
    self._request_metadata_partners_session_id = Some(owned);
    self
}
/// Locale to use for the current request.
///
/// Sets the *request metadata.locale* query property to the given value.
pub fn request_metadata_locale(mut self, new_value: &str) -> AnalyticListCall<'a> {
    let owned = String::from(new_value);
    self._request_metadata_locale = Some(owned);
    self
}
/// Experiment IDs the current request belongs to.
///
/// Append the given value to the *request metadata.experiment ids* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> AnalyticListCall<'a> {
    self._request_metadata_experiment_ids.push(String::from(new_value));
    self
}
/// A token identifying a page of results that the server returns.
/// Typically, this is the value of `ListAnalyticsResponse.next_page_token`
/// returned from the previous call to
/// ListAnalytics.
/// Will be a date string in `YYYY-MM-DD` format representing the end date
/// of the date range of results to return.
/// If unspecified or set to "", default value is the current date.
///
/// Sets the *page token* query property to the given value.
pub fn page_token(mut self, new_value: &str) -> AnalyticListCall<'a> {
    let token = String::from(new_value);
    self._page_token = Some(token);
    self
}
/// Requested page size. Server may return fewer analytics than requested.
/// If unspecified or set to 0, default value is 30.
/// Specifies the number of days in the date range when querying analytics.
/// The `page_token` represents the end date of the date range
/// and the start date is calculated using the `page_size` as the number
/// of days BEFORE the end date.
/// Must be a non-negative integer.
///
/// Sets the *page size* query property to the given value.
pub fn page_size(mut self, new_value: i32) -> AnalyticListCall<'a> {
    self._page_size.replace(new_value);
    self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> AnalyticListCall<'a> {
    self._delegate.replace(new_value);
    self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> AnalyticListCall<'a>
where T: AsRef<str> {
    let key = name.as_ref().to_owned();
    let val = value.as_ref().to_owned();
    self._additional_params.insert(key, val);
    self
}
}
/// Lists states for current user.
///
/// A builder for the *list* method supported by a *userState* resource.
/// It is not used directly, but through a `UserStateMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.user_states().list()
/// .request_metadata_user_overrides_user_id("Stet")
/// .request_metadata_user_overrides_ip_address("dolor")
/// .request_metadata_traffic_source_traffic_sub_id("duo")
/// .request_metadata_traffic_source_traffic_source_id("vero")
/// .request_metadata_partners_session_id("vero")
/// .request_metadata_locale("invidunt")
/// .add_request_metadata_experiment_ids("Stet")
/// .doit().await;
/// # }
/// ```
pub struct UserStateListCall<'a>
where {
// Shared hub: holds the HTTP client, base URL and user agent used by `doit()`.
hub: &'a Partners<>,
// Optional query properties; only values that are `Some` are serialized into the request URL.
_request_metadata_user_overrides_user_id: Option<String>,
_request_metadata_user_overrides_ip_address: Option<String>,
_request_metadata_traffic_source_traffic_sub_id: Option<String>,
_request_metadata_traffic_source_traffic_source_id: Option<String>,
_request_metadata_partners_session_id: Option<String>,
_request_metadata_locale: Option<String>,
// Repeated query property: each entry becomes its own `requestMetadata.experimentIds` parameter.
_request_metadata_experiment_ids: Vec<String>,
// Optional caller-supplied delegate for progress reporting and retry decisions.
_delegate: Option<&'a mut dyn client::Delegate>,
// Free-form extra query parameters; checked against reserved names before the request is sent.
_additional_params: HashMap<String, String>,
}
// Marker-trait impl: tags this type as a request builder for the client framework.
impl<'a> client::CallBuilder for UserStateListCall<'a> {}
impl<'a> UserStateListCall<'a> {
/// Perform the operation you have build so far.
///
/// Issues a GET to `v2/userStates`, serializing every query property that was set,
/// and decodes the JSON response into a `ListUserStatesResponse`. Returns the raw
/// response together with the decoded value.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, ListUserStatesResponse)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
// Fall back to a default (no-op) delegate when the caller did not supply one.
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.userStates.list",
http_method: hyper::Method::GET });
// Serialize all set query properties into URL parameters.
let mut params: Vec<(&str, String)> = Vec::with_capacity(9 + self._additional_params.len());
if let Some(value) = self._request_metadata_user_overrides_user_id {
params.push(("requestMetadata.userOverrides.userId", value.to_string()));
}
if let Some(value) = self._request_metadata_user_overrides_ip_address {
params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
}
if let Some(value) = self._request_metadata_partners_session_id {
params.push(("requestMetadata.partnersSessionId", value.to_string()));
}
if let Some(value) = self._request_metadata_locale {
params.push(("requestMetadata.locale", value.to_string()));
}
// Repeated property: one URL parameter per experiment id, preserving order.
if self._request_metadata_experiment_ids.len() > 0 {
for f in self._request_metadata_experiment_ids.iter() {
params.push(("requestMetadata.experimentIds", f.to_string()));
}
}
// Reject additional params that collide with first-class parameter names.
for &field in ["alt", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/userStates";
// This call is keyed (no OAuth scope): an API key from the delegate is mandatory.
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
// Request/retry loop: the delegate may request a retry after transient failures.
loop {
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.body(hyper::body::Body::empty());
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
// Transport-level failure: the delegate decides whether to retry after a delay.
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
// Non-success status: try to decode a structured server error for the delegate,
// which may still request a retry; otherwise surface the failure to the caller.
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
// Success: decode the JSON body into the typed result.
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
/// Logged-in user ID to impersonate instead of the user's ID.
///
/// Sets the *request metadata.user overrides.user id* query property to the given value.
pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> UserStateListCall<'a> {
self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
self
}
/// IP address to use instead of the user's geo-located IP address.
///
/// Sets the *request metadata.user overrides.ip address* query property to the given value.
pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> UserStateListCall<'a> {
self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
self
}
/// Second level identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> UserStateListCall<'a> {
self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
self
}
/// Identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> UserStateListCall<'a> {
self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
self
}
/// Google Partners session ID.
///
/// Sets the *request metadata.partners session id* query property to the given value.
pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> UserStateListCall<'a> {
self._request_metadata_partners_session_id = Some(new_value.to_string());
self
}
/// Locale to use for the current request.
///
/// Sets the *request metadata.locale* query property to the given value.
pub fn request_metadata_locale(mut self, new_value: &str) -> UserStateListCall<'a> {
self._request_metadata_locale = Some(new_value.to_string());
self
}
/// Experiment IDs the current request belongs to.
///
/// Append the given value to the *request metadata.experiment ids* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> UserStateListCall<'a> {
self._request_metadata_experiment_ids.push(new_value.to_string());
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> UserStateListCall<'a> {
self._delegate = Some(new_value);
self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> UserStateListCall<'a>
where T: AsRef<str> {
self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
}
/// Updates the specified lead.
///
/// A builder for the *updateLeads* method.
/// It is not used directly, but through a `MethodMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// use partners2::api::Lead;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // As the method needs a request, you would usually fill it with the desired information
/// // into the respective structure. Some of the parts shown here might not be applicable !
/// // Values shown here are possibly random and not representative !
/// let mut req = Lead::default();
///
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.methods().update_leads(req)
/// .update_mask("vero")
/// .request_metadata_user_overrides_user_id("elitr")
/// .request_metadata_user_overrides_ip_address("Lorem")
/// .request_metadata_traffic_source_traffic_sub_id("diam")
/// .request_metadata_traffic_source_traffic_source_id("no")
/// .request_metadata_partners_session_id("ipsum")
/// .request_metadata_locale("accusam")
/// .add_request_metadata_experiment_ids("takimata")
/// .doit().await;
/// # }
/// ```
pub struct MethodUpdateLeadCall<'a>
where {
// Shared hub: holds the HTTP client, base URL and user agent used by `doit()`.
hub: &'a Partners<>,
// The `Lead` payload that is JSON-serialized into the PATCH request body.
_request: Lead,
// Field mask selecting which lead fields the server should update.
_update_mask: Option<String>,
// Optional query properties; only values that are `Some` are serialized into the request URL.
_request_metadata_user_overrides_user_id: Option<String>,
_request_metadata_user_overrides_ip_address: Option<String>,
_request_metadata_traffic_source_traffic_sub_id: Option<String>,
_request_metadata_traffic_source_traffic_source_id: Option<String>,
_request_metadata_partners_session_id: Option<String>,
_request_metadata_locale: Option<String>,
// Repeated query property: each entry becomes its own `requestMetadata.experimentIds` parameter.
_request_metadata_experiment_ids: Vec<String>,
// Optional caller-supplied delegate for progress reporting and retry decisions.
_delegate: Option<&'a mut dyn client::Delegate>,
// Free-form extra query parameters; checked against reserved names before the request is sent.
_additional_params: HashMap<String, String>,
}
// Marker-trait impl: tags this type as a request builder for the client framework.
impl<'a> client::CallBuilder for MethodUpdateLeadCall<'a> {}
impl<'a> MethodUpdateLeadCall<'a> {
/// Perform the operation you have build so far.
///
/// Issues a PATCH to `v2/leads` with the JSON-serialized `Lead` request body and all
/// set query properties, and decodes the JSON response into a `Lead`. Returns the raw
/// response together with the decoded value.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, Lead)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
// Fall back to a default (no-op) delegate when the caller did not supply one.
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.updateLeads",
http_method: hyper::Method::PATCH });
// Serialize all set query properties into URL parameters.
let mut params: Vec<(&str, String)> = Vec::with_capacity(11 + self._additional_params.len());
if let Some(value) = self._update_mask {
params.push(("updateMask", value.to_string()));
}
if let Some(value) = self._request_metadata_user_overrides_user_id {
params.push(("requestMetadata.userOverrides.userId", value.to_string()));
}
if let Some(value) = self._request_metadata_user_overrides_ip_address {
params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
}
if let Some(value) = self._request_metadata_partners_session_id {
params.push(("requestMetadata.partnersSessionId", value.to_string()));
}
if let Some(value) = self._request_metadata_locale {
params.push(("requestMetadata.locale", value.to_string()));
}
// Repeated property: one URL parameter per experiment id, preserving order.
if self._request_metadata_experiment_ids.len() > 0 {
for f in self._request_metadata_experiment_ids.iter() {
params.push(("requestMetadata.experimentIds", f.to_string()));
}
}
// Reject additional params that collide with first-class parameter names.
for &field in ["alt", "updateMask", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/leads";
// This call is keyed (no OAuth scope): an API key from the delegate is mandatory.
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
let mut json_mime_type: mime::Mime = "application/json".parse().unwrap();
// JSON-encode the request body once (nulls stripped); the cursor is rewound and
// replayed on every retry iteration instead of re-serializing.
let mut request_value_reader =
{
let mut value = json::value::to_value(&self._request).expect("serde to work");
client::remove_json_null_values(&mut value);
let mut dst = io::Cursor::new(Vec::with_capacity(128));
json::to_writer(&mut dst, &value).unwrap();
dst
};
// Seek to the end to learn the body length, then rewind for the first send.
let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap();
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
// Request/retry loop: the delegate may request a retry after transient failures.
loop {
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::PATCH).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.header(CONTENT_TYPE, format!("{}", json_mime_type.to_string()))
.header(CONTENT_LENGTH, request_size as u64)
.body(hyper::body::Body::from(request_value_reader.get_ref().clone()));
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
// Transport-level failure: the delegate decides whether to retry after a delay.
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
// Non-success status: try to decode a structured server error for the delegate,
// which may still request a retry; otherwise surface the failure to the caller.
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
// Success: decode the JSON body into the typed result.
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
///
/// Sets the *request* property to the given value.
///
/// Even though the property as already been set when instantiating this call,
/// we provide this method for API completeness.
pub fn request(mut self, new_value: Lead) -> MethodUpdateLeadCall<'a> {
self._request = new_value;
self
}
/// Standard field mask for the set of fields to be updated.
/// Required with at least 1 value in FieldMask's paths.
/// Only `state` and `adwords_customer_id` are currently supported.
///
/// Sets the *update mask* query property to the given value.
pub fn update_mask(mut self, new_value: &str) -> MethodUpdateLeadCall<'a> {
self._update_mask = Some(new_value.to_string());
self
}
/// Logged-in user ID to impersonate instead of the user's ID.
///
/// Sets the *request metadata.user overrides.user id* query property to the given value.
pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> MethodUpdateLeadCall<'a> {
self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
self
}
/// IP address to use instead of the user's geo-located IP address.
///
/// Sets the *request metadata.user overrides.ip address* query property to the given value.
pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> MethodUpdateLeadCall<'a> {
self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
self
}
/// Second level identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> MethodUpdateLeadCall<'a> {
self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
self
}
/// Identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> MethodUpdateLeadCall<'a> {
self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
self
}
/// Google Partners session ID.
///
/// Sets the *request metadata.partners session id* query property to the given value.
pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> MethodUpdateLeadCall<'a> {
self._request_metadata_partners_session_id = Some(new_value.to_string());
self
}
/// Locale to use for the current request.
///
/// Sets the *request metadata.locale* query property to the given value.
pub fn request_metadata_locale(mut self, new_value: &str) -> MethodUpdateLeadCall<'a> {
self._request_metadata_locale = Some(new_value.to_string());
self
}
/// Experiment IDs the current request belongs to.
///
/// Append the given value to the *request metadata.experiment ids* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> MethodUpdateLeadCall<'a> {
self._request_metadata_experiment_ids.push(new_value.to_string());
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> MethodUpdateLeadCall<'a> {
self._delegate = Some(new_value);
self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> MethodUpdateLeadCall<'a>
where T: AsRef<str> {
self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
}
/// Update company.
/// Should only be called within the context of an authorized logged in user.
///
/// A builder for the *updateCompanies* method.
/// It is not used directly, but through a `MethodMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// use partners2::api::Company;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // As the method needs a request, you would usually fill it with the desired information
/// // into the respective structure. Some of the parts shown here might not be applicable !
/// // Values shown here are possibly random and not representative !
/// let mut req = Company::default();
///
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.methods().update_companies(req)
/// .update_mask("consetetur")
/// .request_metadata_user_overrides_user_id("voluptua.")
/// .request_metadata_user_overrides_ip_address("et")
/// .request_metadata_traffic_source_traffic_sub_id("erat")
/// .request_metadata_traffic_source_traffic_source_id("consetetur")
/// .request_metadata_partners_session_id("amet.")
/// .request_metadata_locale("sed")
/// .add_request_metadata_experiment_ids("takimata")
/// .doit().await;
/// # }
/// ```
pub struct MethodUpdateCompanyCall<'a>
    where {
    // Shared hub: HTTP client, user agent, and base URL for all calls.
    hub: &'a Partners<>,
    // Request body, serialized to JSON for the PATCH request.
    _request: Company,
    // Optional query parameters; each maps 1:1 to a setter method below.
    _update_mask: Option<String>,
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    // Repeated query parameter: one URL entry per element.
    _request_metadata_experiment_ids: Vec<String>,
    // Caller-supplied delegate for progress reporting and retry decisions.
    _delegate: Option<&'a mut dyn client::Delegate>,
    // Extra query parameters set via `param()`; checked for clashes in doit().
    _additional_params: HashMap<String, String>,
}
// Marker trait impl: lets generic client helpers treat this type as a call builder.
impl<'a> client::CallBuilder for MethodUpdateCompanyCall<'a> {}
impl<'a> MethodUpdateCompanyCall<'a> {
    /// Perform the operation you have built so far.
    ///
    /// Issues `PATCH {base_url}v2/companies` with the configured query
    /// parameters and the serialized `Company` body, retrying when the
    /// delegate requests it, and decodes the JSON response into a `Company`.
    pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, Company)> {
        use std::io::{Read, Seek};
        use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
        use client::ToParts;
        // Use the caller-installed delegate if present, else a silent default.
        let mut dd = client::DefaultDelegate;
        let mut dlg: &mut dyn client::Delegate = match self._delegate {
            Some(d) => d,
            None => &mut dd
        };
        dlg.begin(client::MethodInfo { id: "partners.updateCompanies",
                                       http_method: hyper::Method::PATCH });
        // Collect query parameters; capacity covers the known params plus extras.
        let mut params: Vec<(&str, String)> = Vec::with_capacity(11 + self._additional_params.len());
        if let Some(value) = self._update_mask {
            params.push(("updateMask", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_user_id {
            params.push(("requestMetadata.userOverrides.userId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_ip_address {
            params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
            params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
            params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_partners_session_id {
            params.push(("requestMetadata.partnersSessionId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_locale {
            params.push(("requestMetadata.locale", value.to_string()));
        }
        // Repeated parameter: one query entry per experiment id, in order.
        if self._request_metadata_experiment_ids.len() > 0 {
            for f in self._request_metadata_experiment_ids.iter() {
                params.push(("requestMetadata.experimentIds", f.to_string()));
            }
        }
        // Reject additional params that clash with parameters owned by setters.
        for &field in ["alt", "updateMask", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
            if self._additional_params.contains_key(field) {
                dlg.finished(false);
                return Err(client::Error::FieldClash(field));
            }
        }
        for (name, value) in self._additional_params.iter() {
            params.push((&name, value.clone()));
        }
        // Always request a JSON-encoded response.
        params.push(("alt", "json".to_string()));
        let mut url = self.hub._base_url.clone() + "v2/companies";
        // An API key (from the delegate) is mandatory for this call.
        let key = dlg.api_key();
        match key {
            Some(value) => params.push(("key", value)),
            None => {
                dlg.finished(false);
                return Err(client::Error::MissingAPIKey)
            }
        }
        let url = url::Url::parse_with_params(&url, params).unwrap();
        let mut json_mime_type: mime::Mime = "application/json".parse().unwrap();
        // Serialize the request body once (with JSON nulls stripped) into a
        // seekable buffer so it can be re-sent unchanged on retries.
        let mut request_value_reader =
            {
                let mut value = json::value::to_value(&self._request).expect("serde to work");
                client::remove_json_null_values(&mut value);
                let mut dst = io::Cursor::new(Vec::with_capacity(128));
                json::to_writer(&mut dst, &value).unwrap();
                dst
            };
        let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap();
        request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
        // Retry loop: the delegate decides (via Retry::After) whether transport
        // errors or HTTP failures are retried after sleeping.
        loop {
            // Rewind the body buffer before every (re-)attempt.
            request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
            let mut req_result = {
                let client = &self.hub.client;
                dlg.pre_request();
                let mut req_builder = hyper::Request::builder().method(hyper::Method::PATCH).uri(url.clone().into_string())
                    .header(USER_AGENT, self.hub._user_agent.clone());
                let request = req_builder
                    .header(CONTENT_TYPE, format!("{}", json_mime_type.to_string()))
                    .header(CONTENT_LENGTH, request_size as u64)
                    .body(hyper::body::Body::from(request_value_reader.get_ref().clone()));
                client.request(request.unwrap()).await
            };
            match req_result {
                Err(err) => {
                    // Transport-level error: consult the delegate for a retry.
                    if let client::Retry::After(d) = dlg.http_error(&err) {
                        sleep(d);
                        continue;
                    }
                    dlg.finished(false);
                    return Err(client::Error::HttpError(err))
                }
                Ok(mut res) => {
                    if !res.status().is_success() {
                        // Non-2xx: try to decode structured server errors for
                        // the delegate before deciding on retry vs. failure.
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
                        let server_error = json::from_str::<client::ServerError>(&res_body_string)
                            .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
                            .ok();
                        if let client::Retry::After(d) = dlg.http_failure(&res,
                                                                          json_server_error,
                                                                          server_error) {
                            sleep(d);
                            continue;
                        }
                        dlg.finished(false);
                        return match json::from_str::<client::ErrorResponse>(&res_body_string){
                            Err(_) => Err(client::Error::Failure(res)),
                            Ok(serr) => Err(client::Error::BadRequest(serr))
                        }
                    }
                    // Success: decode the JSON body into the typed response,
                    // returning both the raw response and the decoded value.
                    let result_value = {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        match json::from_str(&res_body_string) {
                            Ok(decoded) => (res, decoded),
                            Err(err) => {
                                dlg.response_json_decode_error(&res_body_string, &err);
                                return Err(client::Error::JsonDecodeError(res_body_string, err));
                            }
                        }
                    };
                    dlg.finished(true);
                    return Ok(result_value)
                }
            }
        }
    }
    ///
    /// Sets the *request* property to the given value.
    ///
    /// Even though the property has already been set when instantiating this call,
    /// we provide this method for API completeness.
    pub fn request(mut self, new_value: Company) -> MethodUpdateCompanyCall<'a> {
        self._request = new_value;
        self
    }
    /// Standard field mask for the set of fields to be updated.
    /// Required with at least 1 value in FieldMask's paths.
    ///
    /// Sets the *update mask* query property to the given value.
    pub fn update_mask(mut self, new_value: &str) -> MethodUpdateCompanyCall<'a> {
        self._update_mask = Some(new_value.to_string());
        self
    }
    /// Logged-in user ID to impersonate instead of the user's ID.
    ///
    /// Sets the *request metadata.user overrides.user id* query property to the given value.
    pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> MethodUpdateCompanyCall<'a> {
        self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
        self
    }
    /// IP address to use instead of the user's geo-located IP address.
    ///
    /// Sets the *request metadata.user overrides.ip address* query property to the given value.
    pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> MethodUpdateCompanyCall<'a> {
        self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
        self
    }
    /// Second level identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> MethodUpdateCompanyCall<'a> {
        self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
        self
    }
    /// Identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> MethodUpdateCompanyCall<'a> {
        self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
        self
    }
    /// Google Partners session ID.
    ///
    /// Sets the *request metadata.partners session id* query property to the given value.
    pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> MethodUpdateCompanyCall<'a> {
        self._request_metadata_partners_session_id = Some(new_value.to_string());
        self
    }
    /// Locale to use for the current request.
    ///
    /// Sets the *request metadata.locale* query property to the given value.
    pub fn request_metadata_locale(mut self, new_value: &str) -> MethodUpdateCompanyCall<'a> {
        self._request_metadata_locale = Some(new_value.to_string());
        self
    }
    /// Experiment IDs the current request belongs to.
    ///
    /// Append the given value to the *request metadata.experiment ids* query property.
    /// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
    pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> MethodUpdateCompanyCall<'a> {
        self._request_metadata_experiment_ids.push(new_value.to_string());
        self
    }
    /// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
    /// while executing the actual API request.
    ///
    /// It should be used to handle progress information, and to implement a certain level of resilience.
    ///
    /// Sets the *delegate* property to the given value.
    pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> MethodUpdateCompanyCall<'a> {
        self._delegate = Some(new_value);
        self
    }
    /// Set any additional parameter of the query string used in the request.
    /// It should be used to set parameters which are not yet available through their own
    /// setters.
    ///
    /// Please note that this method must not be used to set any of the known parameters
    /// which have their own setter method. If done anyway, the request will fail.
    ///
    /// # Additional Parameters
    ///
    /// * *alt* (query-string) - Data format for response.
    /// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    /// * *access_token* (query-string) - OAuth access token.
    /// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
    /// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
    /// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    /// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
    /// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
    /// * *$.xgafv* (query-string) - V1 error format.
    /// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
    /// * *callback* (query-string) - JSONP
    pub fn param<T>(mut self, name: T, value: T) -> MethodUpdateCompanyCall<'a>
        where T: AsRef<str> {
        self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
        self
    }
}
/// Gets Partners Status of the logged in user's agency.
/// Should only be called if the logged in user is the admin of the agency.
///
/// A builder for the *getPartnersstatus* method.
/// It is not used directly, but through a `MethodMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.methods().get_partnersstatus()
/// .request_metadata_user_overrides_user_id("dolores")
/// .request_metadata_user_overrides_ip_address("gubergren")
/// .request_metadata_traffic_source_traffic_sub_id("et")
/// .request_metadata_traffic_source_traffic_source_id("accusam")
/// .request_metadata_partners_session_id("voluptua.")
/// .request_metadata_locale("dolore")
/// .add_request_metadata_experiment_ids("dolore")
/// .doit().await;
/// # }
/// ```
pub struct MethodGetPartnersstatuCall<'a>
    where {
    // Shared hub: HTTP client, user agent, and base URL for all calls.
    hub: &'a Partners<>,
    // Optional query parameters; each maps 1:1 to a setter method below.
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    // Repeated query parameter: one URL entry per element.
    _request_metadata_experiment_ids: Vec<String>,
    // Caller-supplied delegate for progress reporting and retry decisions.
    _delegate: Option<&'a mut dyn client::Delegate>,
    // Extra query parameters set via `param()`; checked for clashes in doit().
    _additional_params: HashMap<String, String>,
}
// Marker trait impl: lets generic client helpers treat this type as a call builder.
impl<'a> client::CallBuilder for MethodGetPartnersstatuCall<'a> {}
impl<'a> MethodGetPartnersstatuCall<'a> {
    /// Perform the operation you have built so far.
    ///
    /// Issues `GET {base_url}v2/partnersstatus` with the configured query
    /// parameters (no request body), retrying when the delegate requests it,
    /// and decodes the JSON response into a `GetPartnersStatusResponse`.
    pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, GetPartnersStatusResponse)> {
        use std::io::{Read, Seek};
        use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
        use client::ToParts;
        // Use the caller-installed delegate if present, else a silent default.
        let mut dd = client::DefaultDelegate;
        let mut dlg: &mut dyn client::Delegate = match self._delegate {
            Some(d) => d,
            None => &mut dd
        };
        dlg.begin(client::MethodInfo { id: "partners.getPartnersstatus",
                                       http_method: hyper::Method::GET });
        // Collect query parameters; capacity covers the known params plus extras.
        let mut params: Vec<(&str, String)> = Vec::with_capacity(9 + self._additional_params.len());
        if let Some(value) = self._request_metadata_user_overrides_user_id {
            params.push(("requestMetadata.userOverrides.userId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_ip_address {
            params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
            params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
            params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_partners_session_id {
            params.push(("requestMetadata.partnersSessionId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_locale {
            params.push(("requestMetadata.locale", value.to_string()));
        }
        // Repeated parameter: one query entry per experiment id, in order.
        if self._request_metadata_experiment_ids.len() > 0 {
            for f in self._request_metadata_experiment_ids.iter() {
                params.push(("requestMetadata.experimentIds", f.to_string()));
            }
        }
        // Reject additional params that clash with parameters owned by setters.
        for &field in ["alt", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
            if self._additional_params.contains_key(field) {
                dlg.finished(false);
                return Err(client::Error::FieldClash(field));
            }
        }
        for (name, value) in self._additional_params.iter() {
            params.push((&name, value.clone()));
        }
        // Always request a JSON-encoded response.
        params.push(("alt", "json".to_string()));
        let mut url = self.hub._base_url.clone() + "v2/partnersstatus";
        // An API key (from the delegate) is mandatory for this call.
        let key = dlg.api_key();
        match key {
            Some(value) => params.push(("key", value)),
            None => {
                dlg.finished(false);
                return Err(client::Error::MissingAPIKey)
            }
        }
        let url = url::Url::parse_with_params(&url, params).unwrap();
        // Retry loop: the delegate decides (via Retry::After) whether transport
        // errors or HTTP failures are retried after sleeping.
        loop {
            let mut req_result = {
                let client = &self.hub.client;
                dlg.pre_request();
                let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
                    .header(USER_AGENT, self.hub._user_agent.clone());
                // GET request: no body is sent.
                let request = req_builder
                    .body(hyper::body::Body::empty());
                client.request(request.unwrap()).await
            };
            match req_result {
                Err(err) => {
                    // Transport-level error: consult the delegate for a retry.
                    if let client::Retry::After(d) = dlg.http_error(&err) {
                        sleep(d);
                        continue;
                    }
                    dlg.finished(false);
                    return Err(client::Error::HttpError(err))
                }
                Ok(mut res) => {
                    if !res.status().is_success() {
                        // Non-2xx: try to decode structured server errors for
                        // the delegate before deciding on retry vs. failure.
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
                        let server_error = json::from_str::<client::ServerError>(&res_body_string)
                            .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
                            .ok();
                        if let client::Retry::After(d) = dlg.http_failure(&res,
                                                                          json_server_error,
                                                                          server_error) {
                            sleep(d);
                            continue;
                        }
                        dlg.finished(false);
                        return match json::from_str::<client::ErrorResponse>(&res_body_string){
                            Err(_) => Err(client::Error::Failure(res)),
                            Ok(serr) => Err(client::Error::BadRequest(serr))
                        }
                    }
                    // Success: decode the JSON body into the typed response,
                    // returning both the raw response and the decoded value.
                    let result_value = {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        match json::from_str(&res_body_string) {
                            Ok(decoded) => (res, decoded),
                            Err(err) => {
                                dlg.response_json_decode_error(&res_body_string, &err);
                                return Err(client::Error::JsonDecodeError(res_body_string, err));
                            }
                        }
                    };
                    dlg.finished(true);
                    return Ok(result_value)
                }
            }
        }
    }
    /// Logged-in user ID to impersonate instead of the user's ID.
    ///
    /// Sets the *request metadata.user overrides.user id* query property to the given value.
    pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> MethodGetPartnersstatuCall<'a> {
        self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
        self
    }
    /// IP address to use instead of the user's geo-located IP address.
    ///
    /// Sets the *request metadata.user overrides.ip address* query property to the given value.
    pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> MethodGetPartnersstatuCall<'a> {
        self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
        self
    }
    /// Second level identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> MethodGetPartnersstatuCall<'a> {
        self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
        self
    }
    /// Identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> MethodGetPartnersstatuCall<'a> {
        self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
        self
    }
    /// Google Partners session ID.
    ///
    /// Sets the *request metadata.partners session id* query property to the given value.
    pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> MethodGetPartnersstatuCall<'a> {
        self._request_metadata_partners_session_id = Some(new_value.to_string());
        self
    }
    /// Locale to use for the current request.
    ///
    /// Sets the *request metadata.locale* query property to the given value.
    pub fn request_metadata_locale(mut self, new_value: &str) -> MethodGetPartnersstatuCall<'a> {
        self._request_metadata_locale = Some(new_value.to_string());
        self
    }
    /// Experiment IDs the current request belongs to.
    ///
    /// Append the given value to the *request metadata.experiment ids* query property.
    /// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
    pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> MethodGetPartnersstatuCall<'a> {
        self._request_metadata_experiment_ids.push(new_value.to_string());
        self
    }
    /// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
    /// while executing the actual API request.
    ///
    /// It should be used to handle progress information, and to implement a certain level of resilience.
    ///
    /// Sets the *delegate* property to the given value.
    pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> MethodGetPartnersstatuCall<'a> {
        self._delegate = Some(new_value);
        self
    }
    /// Set any additional parameter of the query string used in the request.
    /// It should be used to set parameters which are not yet available through their own
    /// setters.
    ///
    /// Please note that this method must not be used to set any of the known parameters
    /// which have their own setter method. If done anyway, the request will fail.
    ///
    /// # Additional Parameters
    ///
    /// * *alt* (query-string) - Data format for response.
    /// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    /// * *access_token* (query-string) - OAuth access token.
    /// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
    /// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
    /// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    /// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
    /// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
    /// * *$.xgafv* (query-string) - V1 error format.
    /// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
    /// * *callback* (query-string) - JSONP
    pub fn param<T>(mut self, name: T, value: T) -> MethodGetPartnersstatuCall<'a>
        where T: AsRef<str> {
        self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
        self
    }
}
/// Creates an advertiser lead for the given company ID.
///
/// A builder for the *leads.create* method supported by a *company* resource.
/// It is not used directly, but through a `CompanyMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// use partners2::api::CreateLeadRequest;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // As the method needs a request, you would usually fill it with the desired information
/// // into the respective structure. Some of the parts shown here might not be applicable !
/// // Values shown here are possibly random and not representative !
/// let mut req = CreateLeadRequest::default();
///
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.companies().leads_create(req, "companyId")
/// .doit().await;
/// # }
/// ```
pub struct CompanyLeadCreateCall<'a>
    where {
    // Shared hub: HTTP client, user agent, and base URL for all calls.
    hub: &'a Partners<>,
    // Request body, serialized to JSON for the POST request.
    _request: CreateLeadRequest,
    // Required path parameter, substituted into `v2/companies/{companyId}/leads`.
    _company_id: String,
    // Caller-supplied delegate for progress reporting and retry decisions.
    _delegate: Option<&'a mut dyn client::Delegate>,
    // Extra query parameters set via `param()`; checked for clashes in doit().
    _additional_params: HashMap<String, String>,
}
// Marker trait impl: lets generic client helpers treat this type as a call builder.
impl<'a> client::CallBuilder for CompanyLeadCreateCall<'a> {}
impl<'a> CompanyLeadCreateCall<'a> {
    /// Perform the operation you have built so far.
    ///
    /// Issues `POST {base_url}v2/companies/{companyId}/leads` with the
    /// serialized `CreateLeadRequest` body, retrying when the delegate
    /// requests it, and decodes the JSON response into a `CreateLeadResponse`.
    pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, CreateLeadResponse)> {
        use std::io::{Read, Seek};
        use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
        use client::ToParts;
        // Use the caller-installed delegate if present, else a silent default.
        let mut dd = client::DefaultDelegate;
        let mut dlg: &mut dyn client::Delegate = match self._delegate {
            Some(d) => d,
            None => &mut dd
        };
        dlg.begin(client::MethodInfo { id: "partners.companies.leads.create",
                                       http_method: hyper::Method::POST });
        // Collect parameters; `companyId` starts as a param but is later moved
        // into the URL path and removed from the query string.
        let mut params: Vec<(&str, String)> = Vec::with_capacity(4 + self._additional_params.len());
        params.push(("companyId", self._company_id.to_string()));
        // Reject additional params that clash with parameters owned by setters.
        for &field in ["alt", "companyId"].iter() {
            if self._additional_params.contains_key(field) {
                dlg.finished(false);
                return Err(client::Error::FieldClash(field));
            }
        }
        for (name, value) in self._additional_params.iter() {
            params.push((&name, value.clone()));
        }
        // Always request a JSON-encoded response.
        params.push(("alt", "json".to_string()));
        let mut url = self.hub._base_url.clone() + "v2/companies/{companyId}/leads";
        // An API key (from the delegate) is mandatory for this call.
        let key = dlg.api_key();
        match key {
            Some(value) => params.push(("key", value)),
            None => {
                dlg.finished(false);
                return Err(client::Error::MissingAPIKey)
            }
        }
        // Substitute each path template placeholder with its parameter value.
        for &(find_this, param_name) in [("{companyId}", "companyId")].iter() {
            let mut replace_with: Option<&str> = None;
            for &(name, ref value) in params.iter() {
                if name == param_name {
                    replace_with = Some(value);
                    break;
                }
            }
            url = url.replace(find_this, replace_with.expect("to find substitution value in params"));
        }
        // Drop path parameters from the query list now that they are in the
        // URL. (Removing by ascending index is safe here: only one parameter.)
        {
            let mut indices_for_removal: Vec<usize> = Vec::with_capacity(1);
            for param_name in ["companyId"].iter() {
                if let Some(index) = params.iter().position(|t| &t.0 == param_name) {
                    indices_for_removal.push(index);
                }
            }
            for &index in indices_for_removal.iter() {
                params.remove(index);
            }
        }
        let url = url::Url::parse_with_params(&url, params).unwrap();
        let mut json_mime_type: mime::Mime = "application/json".parse().unwrap();
        // Serialize the request body once (with JSON nulls stripped) into a
        // seekable buffer so it can be re-sent unchanged on retries.
        let mut request_value_reader =
            {
                let mut value = json::value::to_value(&self._request).expect("serde to work");
                client::remove_json_null_values(&mut value);
                let mut dst = io::Cursor::new(Vec::with_capacity(128));
                json::to_writer(&mut dst, &value).unwrap();
                dst
            };
        let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap();
        request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
        // Retry loop: the delegate decides (via Retry::After) whether transport
        // errors or HTTP failures are retried after sleeping.
        loop {
            // Rewind the body buffer before every (re-)attempt.
            request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
            let mut req_result = {
                let client = &self.hub.client;
                dlg.pre_request();
                let mut req_builder = hyper::Request::builder().method(hyper::Method::POST).uri(url.clone().into_string())
                    .header(USER_AGENT, self.hub._user_agent.clone());
                let request = req_builder
                    .header(CONTENT_TYPE, format!("{}", json_mime_type.to_string()))
                    .header(CONTENT_LENGTH, request_size as u64)
                    .body(hyper::body::Body::from(request_value_reader.get_ref().clone()));
                client.request(request.unwrap()).await
            };
            match req_result {
                Err(err) => {
                    // Transport-level error: consult the delegate for a retry.
                    if let client::Retry::After(d) = dlg.http_error(&err) {
                        sleep(d);
                        continue;
                    }
                    dlg.finished(false);
                    return Err(client::Error::HttpError(err))
                }
                Ok(mut res) => {
                    if !res.status().is_success() {
                        // Non-2xx: try to decode structured server errors for
                        // the delegate before deciding on retry vs. failure.
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
                        let server_error = json::from_str::<client::ServerError>(&res_body_string)
                            .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
                            .ok();
                        if let client::Retry::After(d) = dlg.http_failure(&res,
                                                                          json_server_error,
                                                                          server_error) {
                            sleep(d);
                            continue;
                        }
                        dlg.finished(false);
                        return match json::from_str::<client::ErrorResponse>(&res_body_string){
                            Err(_) => Err(client::Error::Failure(res)),
                            Ok(serr) => Err(client::Error::BadRequest(serr))
                        }
                    }
                    // Success: decode the JSON body into the typed response,
                    // returning both the raw response and the decoded value.
                    let result_value = {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        match json::from_str(&res_body_string) {
                            Ok(decoded) => (res, decoded),
                            Err(err) => {
                                dlg.response_json_decode_error(&res_body_string, &err);
                                return Err(client::Error::JsonDecodeError(res_body_string, err));
                            }
                        }
                    };
                    dlg.finished(true);
                    return Ok(result_value)
                }
            }
        }
    }
    ///
    /// Sets the *request* property to the given value.
    ///
    /// Even though the property has already been set when instantiating this call,
    /// we provide this method for API completeness.
    pub fn request(mut self, new_value: CreateLeadRequest) -> CompanyLeadCreateCall<'a> {
        self._request = new_value;
        self
    }
    /// The ID of the company to contact.
    ///
    /// Sets the *company id* path property to the given value.
    ///
    /// Even though the property has already been set when instantiating this call,
    /// we provide this method for API completeness.
    pub fn company_id(mut self, new_value: &str) -> CompanyLeadCreateCall<'a> {
        self._company_id = new_value.to_string();
        self
    }
    /// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
    /// while executing the actual API request.
    ///
    /// It should be used to handle progress information, and to implement a certain level of resilience.
    ///
    /// Sets the *delegate* property to the given value.
    pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> CompanyLeadCreateCall<'a> {
        self._delegate = Some(new_value);
        self
    }
    /// Set any additional parameter of the query string used in the request.
    /// It should be used to set parameters which are not yet available through their own
    /// setters.
    ///
    /// Please note that this method must not be used to set any of the known parameters
    /// which have their own setter method. If done anyway, the request will fail.
    ///
    /// # Additional Parameters
    ///
    /// * *alt* (query-string) - Data format for response.
    /// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    /// * *access_token* (query-string) - OAuth access token.
    /// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
    /// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
    /// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    /// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
    /// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
    /// * *$.xgafv* (query-string) - V1 error format.
    /// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
    /// * *callback* (query-string) - JSONP
    pub fn param<T>(mut self, name: T, value: T) -> CompanyLeadCreateCall<'a>
        where T: AsRef<str> {
        self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
        self
    }
}
/// Gets a company.
///
/// A builder for the *get* method supported by a *company* resource.
/// It is not used directly, but through a `CompanyMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.companies().get("companyId")
/// .view("amet.")
/// .request_metadata_user_overrides_user_id("ea")
/// .request_metadata_user_overrides_ip_address("sadipscing")
/// .request_metadata_traffic_source_traffic_sub_id("Lorem")
/// .request_metadata_traffic_source_traffic_source_id("invidunt")
/// .request_metadata_partners_session_id("no")
/// .request_metadata_locale("est")
/// .add_request_metadata_experiment_ids("At")
/// .order_by("sed")
/// .currency_code("sit")
/// .address("et")
/// .doit().await;
/// # }
/// ```
// Builder state for `partners.companies.get`. All fields are private and are
// populated through the fluent setter methods on the matching `impl` block.
pub struct CompanyGetCall<'a>
    where {
    hub: &'a Partners<>,  // shared hub holding the HTTP client, base URL and user agent
    _company_id: String,  // required `companyId` path parameter
    _view: Option<String>,  // optional query parameters below: `None` means "not sent"
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    _request_metadata_experiment_ids: Vec<String>,  // repeated query parameter; one pair pushed per entry
    _order_by: Option<String>,
    _currency_code: Option<String>,
    _address: Option<String>,
    _delegate: Option<&'a mut dyn client::Delegate>,  // optional hook for progress/retry decisions
    _additional_params: HashMap<String, String>,  // free-form extra query parameters set via `param()`
}
impl<'a> client::CallBuilder for CompanyGetCall<'a> {}
impl<'a> CompanyGetCall<'a> {
    /// Perform the operation you have built so far.
    ///
    /// Assembles the query string from all configured fields, substitutes the
    /// `{companyId}` path parameter, and issues the GET request in a retry loop
    /// governed by the delegate. Returns the raw response together with the
    /// JSON-decoded `GetCompanyResponse`.
    pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, GetCompanyResponse)> {
        // NOTE(review): `Read`/`Seek` appear unused for this GET call; the code
        // generator emits them uniformly — confirm before removing.
        use std::io::{Read, Seek};
        use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
        use client::ToParts;
        // Fall back to a no-op delegate when the caller did not supply one.
        let mut dd = client::DefaultDelegate;
        let mut dlg: &mut dyn client::Delegate = match self._delegate {
            Some(d) => d,
            None => &mut dd
        };
        dlg.begin(client::MethodInfo { id: "partners.companies.get",
                                       http_method: hyper::Method::GET });
        // Collect name/value pairs; unset optional fields are simply omitted.
        let mut params: Vec<(&str, String)> = Vec::with_capacity(14 + self._additional_params.len());
        params.push(("companyId", self._company_id.to_string()));
        if let Some(value) = self._view {
            params.push(("view", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_user_id {
            params.push(("requestMetadata.userOverrides.userId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_ip_address {
            params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
            params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
            params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_partners_session_id {
            params.push(("requestMetadata.partnersSessionId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_locale {
            params.push(("requestMetadata.locale", value.to_string()));
        }
        // Repeated parameter: one pair per collected experiment id.
        if self._request_metadata_experiment_ids.len() > 0 {
            for f in self._request_metadata_experiment_ids.iter() {
                params.push(("requestMetadata.experimentIds", f.to_string()));
            }
        }
        if let Some(value) = self._order_by {
            params.push(("orderBy", value.to_string()));
        }
        if let Some(value) = self._currency_code {
            params.push(("currencyCode", value.to_string()));
        }
        if let Some(value) = self._address {
            params.push(("address", value.to_string()));
        }
        // Reject additional params that would clash with reserved/known names;
        // those must be set through their dedicated setters instead.
        for &field in ["alt", "companyId", "view", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds", "orderBy", "currencyCode", "address"].iter() {
            if self._additional_params.contains_key(field) {
                dlg.finished(false);
                return Err(client::Error::FieldClash(field));
            }
        }
        for (name, value) in self._additional_params.iter() {
            params.push((&name, value.clone()));
        }
        params.push(("alt", "json".to_string()));
        let mut url = self.hub._base_url.clone() + "v2/companies/{companyId}";
        // The delegate supplies the API key; this endpoint requires one.
        let key = dlg.api_key();
        match key {
            Some(value) => params.push(("key", value)),
            None => {
                dlg.finished(false);
                return Err(client::Error::MissingAPIKey)
            }
        }
        // Expand `{companyId}` in the URL template from the collected params.
        for &(find_this, param_name) in [("{companyId}", "companyId")].iter() {
            let mut replace_with: Option<&str> = None;
            for &(name, ref value) in params.iter() {
                if name == param_name {
                    replace_with = Some(value);
                    break;
                }
            }
            url = url.replace(find_this, replace_with.expect("to find substitution value in params"));
        }
        // Path parameters were consumed by the template above; drop them from
        // the query so they are not sent twice.
        {
            let mut indices_for_removal: Vec<usize> = Vec::with_capacity(1);
            for param_name in ["companyId"].iter() {
                if let Some(index) = params.iter().position(|t| &t.0 == param_name) {
                    indices_for_removal.push(index);
                }
            }
            for &index in indices_for_removal.iter() {
                params.remove(index);
            }
        }
        let url = url::Url::parse_with_params(&url, params).unwrap();
        // Request/retry loop: the delegate decides whether transient failures
        // are retried (via `Retry::After`) or surfaced to the caller.
        loop {
            let mut req_result = {
                let client = &self.hub.client;
                dlg.pre_request();
                // NOTE(review): `Url::into_string` is deprecated in url 2.2+;
                // `String::from(url)` is the modern spelling — confirm crate version.
                let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
                        .header(USER_AGENT, self.hub._user_agent.clone());
                let request = req_builder
                        .body(hyper::body::Body::empty());
                client.request(request.unwrap()).await
            };
            match req_result {
                Err(err) => {
                    // Transport-level failure (connect, TLS, ...): retry or bail.
                    if let client::Retry::After(d) = dlg.http_error(&err) {
                        sleep(d);
                        continue;
                    }
                    dlg.finished(false);
                    return Err(client::Error::HttpError(err))
                }
                Ok(mut res) => {
                    if !res.status().is_success() {
                        // Non-2xx: try to decode a structured server error for the
                        // delegate, then map the body to BadRequest/Failure.
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
                        let server_error = json::from_str::<client::ServerError>(&res_body_string)
                            .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
                            .ok();
                        if let client::Retry::After(d) = dlg.http_failure(&res,
                                                              json_server_error,
                                                              server_error) {
                            sleep(d);
                            continue;
                        }
                        dlg.finished(false);
                        return match json::from_str::<client::ErrorResponse>(&res_body_string){
                            Err(_) => Err(client::Error::Failure(res)),
                            Ok(serr) => Err(client::Error::BadRequest(serr))
                        }
                    }
                    // Success: decode the body into the typed response struct.
                    let result_value = {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        match json::from_str(&res_body_string) {
                            Ok(decoded) => (res, decoded),
                            Err(err) => {
                                dlg.response_json_decode_error(&res_body_string, &err);
                                return Err(client::Error::JsonDecodeError(res_body_string, err));
                            }
                        }
                    };
                    dlg.finished(true);
                    return Ok(result_value)
                }
            }
        }
    }
    /// The ID of the company to retrieve.
    ///
    /// Sets the *company id* path property to the given value.
    ///
    /// Even though the property as already been set when instantiating this call,
    /// we provide this method for API completeness.
    pub fn company_id(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._company_id = new_value.to_string();
        self
    }
    /// The view of `Company` resource to be returned. This must not be
    /// `COMPANY_VIEW_UNSPECIFIED`.
    ///
    /// Sets the *view* query property to the given value.
    pub fn view(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._view = Some(new_value.to_string());
        self
    }
    /// Logged-in user ID to impersonate instead of the user's ID.
    ///
    /// Sets the *request metadata.user overrides.user id* query property to the given value.
    pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
        self
    }
    /// IP address to use instead of the user's geo-located IP address.
    ///
    /// Sets the *request metadata.user overrides.ip address* query property to the given value.
    pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
        self
    }
    /// Second level identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
        self
    }
    /// Identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
        self
    }
    /// Google Partners session ID.
    ///
    /// Sets the *request metadata.partners session id* query property to the given value.
    pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._request_metadata_partners_session_id = Some(new_value.to_string());
        self
    }
    /// Locale to use for the current request.
    ///
    /// Sets the *request metadata.locale* query property to the given value.
    pub fn request_metadata_locale(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._request_metadata_locale = Some(new_value.to_string());
        self
    }
    /// Experiment IDs the current request belongs to.
    ///
    /// Append the given value to the *request metadata.experiment ids* query property.
    /// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
    pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._request_metadata_experiment_ids.push(new_value.to_string());
        self
    }
    /// How to order addresses within the returned company. Currently, only
    /// `address` and `address desc` are supported, which will sort by closest to
    /// farthest in distance from given address and farthest to closest distance
    /// from given address respectively.
    ///
    /// Sets the *order by* query property to the given value.
    pub fn order_by(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._order_by = Some(new_value.to_string());
        self
    }
    /// If the company's budget is in a different currency code than this one, then
    /// the converted budget is converted to this currency code.
    ///
    /// Sets the *currency code* query property to the given value.
    pub fn currency_code(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._currency_code = Some(new_value.to_string());
        self
    }
    /// The address to use for sorting the company's addresses by proximity.
    /// If not given, the geo-located address of the request is used.
    /// Used when order_by is set.
    ///
    /// Sets the *address* query property to the given value.
    pub fn address(mut self, new_value: &str) -> CompanyGetCall<'a> {
        self._address = Some(new_value.to_string());
        self
    }
    /// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
    /// while executing the actual API request.
    ///
    /// It should be used to handle progress information, and to implement a certain level of resilience.
    ///
    /// Sets the *delegate* property to the given value.
    pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> CompanyGetCall<'a> {
        self._delegate = Some(new_value);
        self
    }
    /// Set any additional parameter of the query string used in the request.
    /// It should be used to set parameters which are not yet available through their own
    /// setters.
    ///
    /// Please note that this method must not be used to set any of the known parameters
    /// which have their own setter method. If done anyway, the request will fail.
    ///
    /// # Additional Parameters
    ///
    /// * *alt* (query-string) - Data format for response.
    /// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    /// * *access_token* (query-string) - OAuth access token.
    /// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
    /// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
    /// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    /// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
    /// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
    /// * *$.xgafv* (query-string) - V1 error format.
    /// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
    /// * *callback* (query-string) - JSONP
    pub fn param<T>(mut self, name: T, value: T) -> CompanyGetCall<'a>
                                                        where T: AsRef<str> {
        self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
        self
    }
}
/// Lists companies.
///
/// A builder for the *list* method supported by a *company* resource.
/// It is not used directly, but through a `CompanyMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.companies().list()
/// .website_url("tempor")
/// .view("aliquyam")
/// .add_specializations("ipsum")
/// .add_services("et")
/// .request_metadata_user_overrides_user_id("sanctus")
/// .request_metadata_user_overrides_ip_address("Lorem")
/// .request_metadata_traffic_source_traffic_sub_id("est")
/// .request_metadata_traffic_source_traffic_source_id("sed")
/// .request_metadata_partners_session_id("diam")
/// .request_metadata_locale("dolores")
/// .add_request_metadata_experiment_ids("dolores")
/// .page_token("et")
/// .page_size(-93)
/// .order_by("no")
/// .min_monthly_budget_units("et")
/// .min_monthly_budget_nanos(-94)
/// .min_monthly_budget_currency_code("sed")
/// .max_monthly_budget_units("no")
/// .max_monthly_budget_nanos(-91)
/// .max_monthly_budget_currency_code("At")
/// .add_language_codes("sadipscing")
/// .add_industries("aliquyam")
/// .add_gps_motivations("dolores")
/// .company_name("sadipscing")
/// .address("erat")
/// .doit().await;
/// # }
/// ```
// Builder state for `partners.companies.list`. All fields are private and are
// populated through the fluent setter methods on the matching `impl` block.
pub struct CompanyListCall<'a>
    where {
    hub: &'a Partners<>,  // shared hub holding the HTTP client, base URL and user agent
    _website_url: Option<String>,  // optional query parameters: `None` means "not sent"
    _view: Option<String>,
    _specializations: Vec<String>,  // repeated query parameters; one pair pushed per entry
    _services: Vec<String>,
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    _request_metadata_experiment_ids: Vec<String>,
    _page_token: Option<String>,  // pagination cursor from a previous response
    _page_size: Option<i32>,
    _order_by: Option<String>,
    _min_monthly_budget_units: Option<String>,
    _min_monthly_budget_nanos: Option<i32>,
    _min_monthly_budget_currency_code: Option<String>,
    _max_monthly_budget_units: Option<String>,
    _max_monthly_budget_nanos: Option<i32>,
    _max_monthly_budget_currency_code: Option<String>,
    _language_codes: Vec<String>,
    _industries: Vec<String>,
    _gps_motivations: Vec<String>,
    _company_name: Option<String>,
    _address: Option<String>,
    _delegate: Option<&'a mut dyn client::Delegate>,  // optional hook for progress/retry decisions
    _additional_params: HashMap<String, String>,  // free-form extra query parameters set via `param()`
}
impl<'a> client::CallBuilder for CompanyListCall<'a> {}
impl<'a> CompanyListCall<'a> {
/// Perform the operation you have build so far.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, ListCompaniesResponse)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.companies.list",
http_method: hyper::Method::GET });
let mut params: Vec<(&str, String)> = Vec::with_capacity(27 + self._additional_params.len());
if let Some(value) = self._website_url {
params.push(("websiteUrl", value.to_string()));
}
if let Some(value) = self._view {
params.push(("view", value.to_string()));
}
if self._specializations.len() > 0 {
for f in self._specializations.iter() {
params.push(("specializations", f.to_string()));
}
}
if self._services.len() > 0 {
for f in self._services.iter() {
params.push(("services", f.to_string()));
}
}
if let Some(value) = self._request_metadata_user_overrides_user_id {
params.push(("requestMetadata.userOverrides.userId", value.to_string()));
}
if let Some(value) = self._request_metadata_user_overrides_ip_address {
params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
}
if let Some(value) = self._request_metadata_partners_session_id {
params.push(("requestMetadata.partnersSessionId", value.to_string()));
}
if let Some(value) = self._request_metadata_locale {
params.push(("requestMetadata.locale", value.to_string()));
}
if self._request_metadata_experiment_ids.len() > 0 {
for f in self._request_metadata_experiment_ids.iter() {
params.push(("requestMetadata.experimentIds", f.to_string()));
}
}
if let Some(value) = self._page_token {
params.push(("pageToken", value.to_string()));
}
if let Some(value) = self._page_size {
params.push(("pageSize", value.to_string()));
}
if let Some(value) = self._order_by {
params.push(("orderBy", value.to_string()));
}
if let Some(value) = self._min_monthly_budget_units {
params.push(("minMonthlyBudget.units", value.to_string()));
}
if let Some(value) = self._min_monthly_budget_nanos {
params.push(("minMonthlyBudget.nanos", value.to_string()));
}
if let Some(value) = self._min_monthly_budget_currency_code {
params.push(("minMonthlyBudget.currencyCode", value.to_string()));
}
if let Some(value) = self._max_monthly_budget_units {
params.push(("maxMonthlyBudget.units", value.to_string()));
}
if let Some(value) = self._max_monthly_budget_nanos {
params.push(("maxMonthlyBudget.nanos", value.to_string()));
}
if let Some(value) = self._max_monthly_budget_currency_code {
params.push(("maxMonthlyBudget.currencyCode", value.to_string()));
}
if self._language_codes.len() > 0 {
for f in self._language_codes.iter() {
params.push(("languageCodes", f.to_string()));
}
}
if self._industries.len() > 0 {
for f in self._industries.iter() {
params.push(("industries", f.to_string()));
}
}
if self._gps_motivations.len() > 0 {
for f in self._gps_motivations.iter() {
params.push(("gpsMotivations", f.to_string()));
}
}
if let Some(value) = self._company_name {
params.push(("companyName", value.to_string()));
}
if let Some(value) = self._address {
params.push(("address", value.to_string()));
}
for &field in ["alt", "websiteUrl", "view", "specializations", "services", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds", "pageToken", "pageSize", "orderBy", "minMonthlyBudget.units", "minMonthlyBudget.nanos", "minMonthlyBudget.currencyCode", "maxMonthlyBudget.units", "maxMonthlyBudget.nanos", "maxMonthlyBudget.currencyCode", "languageCodes", "industries", "gpsMotivations", "companyName", "address"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/companies";
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
loop {
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.body(hyper::body::Body::empty());
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
/// Website URL that will help to find a better matched company.
/// .
///
/// Sets the *website url* query property to the given value.
pub fn website_url(mut self, new_value: &str) -> CompanyListCall<'a> {
self._website_url = Some(new_value.to_string());
self
}
/// The view of the `Company` resource to be returned. This must not be
/// `COMPANY_VIEW_UNSPECIFIED`.
///
/// Sets the *view* query property to the given value.
pub fn view(mut self, new_value: &str) -> CompanyListCall<'a> {
self._view = Some(new_value.to_string());
self
}
/// List of specializations that the returned agencies should provide. If this
/// is not empty, any returned agency must have at least one of these
/// specializations, or one of the services in the "services" field.
///
/// Append the given value to the *specializations* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_specializations(mut self, new_value: &str) -> CompanyListCall<'a> {
self._specializations.push(new_value.to_string());
self
}
/// List of services that the returned agencies should provide. If this is
/// not empty, any returned agency must have at least one of these services,
/// or one of the specializations in the "specializations" field.
///
/// Append the given value to the *services* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_services(mut self, new_value: &str) -> CompanyListCall<'a> {
self._services.push(new_value.to_string());
self
}
/// Logged-in user ID to impersonate instead of the user's ID.
///
/// Sets the *request metadata.user overrides.user id* query property to the given value.
pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> CompanyListCall<'a> {
self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
self
}
/// IP address to use instead of the user's geo-located IP address.
///
/// Sets the *request metadata.user overrides.ip address* query property to the given value.
pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> CompanyListCall<'a> {
self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
self
}
/// Second level identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> CompanyListCall<'a> {
self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
self
}
/// Identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> CompanyListCall<'a> {
self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
self
}
/// Google Partners session ID.
///
/// Sets the *request metadata.partners session id* query property to the given value.
pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> CompanyListCall<'a> {
self._request_metadata_partners_session_id = Some(new_value.to_string());
self
}
/// Locale to use for the current request.
///
/// Sets the *request metadata.locale* query property to the given value.
pub fn request_metadata_locale(mut self, new_value: &str) -> CompanyListCall<'a> {
self._request_metadata_locale = Some(new_value.to_string());
self
}
/// Experiment IDs the current request belongs to.
///
/// Append the given value to the *request metadata.experiment ids* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> CompanyListCall<'a> {
self._request_metadata_experiment_ids.push(new_value.to_string());
self
}
/// A token identifying a page of results that the server returns.
/// Typically, this is the value of `ListCompaniesResponse.next_page_token`
/// returned from the previous call to
/// ListCompanies.
///
/// Sets the *page token* query property to the given value.
pub fn page_token(mut self, new_value: &str) -> CompanyListCall<'a> {
self._page_token = Some(new_value.to_string());
self
}
/// Requested page size. Server may return fewer companies than requested.
/// If unspecified, server picks an appropriate default.
///
/// Sets the *page size* query property to the given value.
pub fn page_size(mut self, new_value: i32) -> CompanyListCall<'a> {
self._page_size = Some(new_value);
self
}
/// How to order addresses within the returned companies. Currently, only
/// `address` and `address desc` is supported which will sorted by closest to
/// farthest in distance from given address and farthest to closest distance
/// from given address respectively.
///
/// Sets the *order by* query property to the given value.
pub fn order_by(mut self, new_value: &str) -> CompanyListCall<'a> {
self._order_by = Some(new_value.to_string());
self
}
/// The whole units of the amount.
/// For example if `currencyCode` is `"USD"`, then 1 unit is one US dollar.
///
/// Sets the *min monthly budget.units* query property to the given value.
pub fn min_monthly_budget_units(mut self, new_value: &str) -> CompanyListCall<'a> {
self._min_monthly_budget_units = Some(new_value.to_string());
self
}
/// Number of nano (10^-9) units of the amount.
/// The value must be between -999,999,999 and +999,999,999 inclusive.
/// If `units` is positive, `nanos` must be positive or zero.
/// If `units` is zero, `nanos` can be positive, zero, or negative.
/// If `units` is negative, `nanos` must be negative or zero.
/// For example $-1.75 is represented as `units`=-1 and `nanos`=-750,000,000.
///
/// Sets the *min monthly budget.nanos* query property to the given value.
pub fn min_monthly_budget_nanos(mut self, new_value: i32) -> CompanyListCall<'a> {
self._min_monthly_budget_nanos = Some(new_value);
self
}
/// The 3-letter currency code defined in ISO 4217.
///
/// Sets the *min monthly budget.currency code* query property to the given value.
pub fn min_monthly_budget_currency_code(mut self, new_value: &str) -> CompanyListCall<'a> {
self._min_monthly_budget_currency_code = Some(new_value.to_string());
self
}
/// The whole units of the amount.
/// For example if `currencyCode` is `"USD"`, then 1 unit is one US dollar.
///
/// Sets the *max monthly budget.units* query property to the given value.
pub fn max_monthly_budget_units(mut self, new_value: &str) -> CompanyListCall<'a> {
self._max_monthly_budget_units = Some(new_value.to_string());
self
}
/// Number of nano (10^-9) units of the amount.
/// The value must be between -999,999,999 and +999,999,999 inclusive.
/// If `units` is positive, `nanos` must be positive or zero.
/// If `units` is zero, `nanos` can be positive, zero, or negative.
/// If `units` is negative, `nanos` must be negative or zero.
/// For example $-1.75 is represented as `units`=-1 and `nanos`=-750,000,000.
///
/// Sets the *max monthly budget.nanos* query property to the given value.
pub fn max_monthly_budget_nanos(mut self, new_value: i32) -> CompanyListCall<'a> {
self._max_monthly_budget_nanos = Some(new_value);
self
}
/// The 3-letter currency code defined in ISO 4217.
///
/// Sets the *max monthly budget.currency code* query property to the given value.
pub fn max_monthly_budget_currency_code(mut self, new_value: &str) -> CompanyListCall<'a> {
self._max_monthly_budget_currency_code = Some(new_value.to_string());
self
}
/// List of language codes that company can support. Only primary language
/// subtags are accepted as defined by
/// <a href="https://tools.ietf.org/html/bcp47">BCP 47</a>
/// (IETF BCP 47, "Tags for Identifying Languages").
///
/// Append the given value to the *language codes* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_language_codes(mut self, new_value: &str) -> CompanyListCall<'a> {
self._language_codes.push(new_value.to_string());
self
}
/// List of industries the company can help with.
///
/// Append the given value to the *industries* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_industries(mut self, new_value: &str) -> CompanyListCall<'a> {
self._industries.push(new_value.to_string());
self
}
/// List of reasons for using Google Partner Search to get companies.
///
/// Append the given value to the *gps motivations* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_gps_motivations(mut self, new_value: &str) -> CompanyListCall<'a> {
self._gps_motivations.push(new_value.to_string());
self
}
/// Company name to search for.
///
/// Sets the *company name* query property to the given value.
pub fn company_name(mut self, new_value: &str) -> CompanyListCall<'a> {
self._company_name = Some(new_value.to_string());
self
}
/// The address to use when searching for companies.
/// If not given, the geo-located address of the request is used.
///
/// Sets the *address* query property to the given value.
pub fn address(mut self, new_value: &str) -> CompanyListCall<'a> {
self._address = Some(new_value.to_string());
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> CompanyListCall<'a> {
    // `replace` returns any previously configured delegate, which we discard.
    let _ = self._delegate.replace(new_value);
    self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> CompanyListCall<'a>
where T: AsRef<str> {
    // Normalize both key and value to owned strings before insertion.
    let (k, v) = (name.as_ref().to_owned(), value.as_ref().to_owned());
    self._additional_params.insert(k, v);
    self
}
}
/// Updates a user's profile. A user can only update their own profile and
/// should only be called within the context of a logged in user.
///
/// A builder for the *updateProfile* method supported by a *user* resource.
/// It is not used directly, but through a `UserMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// use partners2::api::UserProfile;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // As the method needs a request, you would usually fill it with the desired information
/// // into the respective structure. Some of the parts shown here might not be applicable !
/// // Values shown here are possibly random and not representative !
/// let mut req = UserProfile::default();
///
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.users().update_profile(req)
/// .request_metadata_user_overrides_user_id("aliquyam")
/// .request_metadata_user_overrides_ip_address("amet")
/// .request_metadata_traffic_source_traffic_sub_id("est")
/// .request_metadata_traffic_source_traffic_source_id("et")
/// .request_metadata_partners_session_id("sea")
/// .request_metadata_locale("consetetur")
/// .add_request_metadata_experiment_ids("consetetur")
/// .doit().await;
/// # }
/// ```
pub struct UserUpdateProfileCall<'a>
where {
/// Hub providing the HTTP client, base URL and user agent for this call.
hub: &'a Partners<>,
/// The `UserProfile` payload sent as the JSON request body.
_request: UserProfile,
/// Optional `requestMetadata.userOverrides.userId` query parameter.
_request_metadata_user_overrides_user_id: Option<String>,
/// Optional `requestMetadata.userOverrides.ipAddress` query parameter.
_request_metadata_user_overrides_ip_address: Option<String>,
/// Optional `requestMetadata.trafficSource.trafficSubId` query parameter.
_request_metadata_traffic_source_traffic_sub_id: Option<String>,
/// Optional `requestMetadata.trafficSource.trafficSourceId` query parameter.
_request_metadata_traffic_source_traffic_source_id: Option<String>,
/// Optional `requestMetadata.partnersSessionId` query parameter.
_request_metadata_partners_session_id: Option<String>,
/// Optional `requestMetadata.locale` query parameter.
_request_metadata_locale: Option<String>,
/// Repeated `requestMetadata.experimentIds` query parameter values.
_request_metadata_experiment_ids: Vec<String>,
/// Optional caller-supplied delegate for progress/retry decisions.
_delegate: Option<&'a mut dyn client::Delegate>,
/// Extra query parameters not covered by a dedicated setter.
_additional_params: HashMap<String, String>,
}
// Marker trait implementation: identifies this type as an API request builder.
impl<'a> client::CallBuilder for UserUpdateProfileCall<'a> {}
impl<'a> UserUpdateProfileCall<'a> {
/// Perform the operation you have built so far.
///
/// Serializes `_request` as JSON, issues an HTTP PATCH to `v2/users/profile`
/// with all configured query parameters, and decodes the JSON response into a
/// `UserProfile`. The delegate (or a default one) is consulted for the API key
/// and for retry decisions on transport or server errors.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, UserProfile)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
// Use the caller-supplied delegate if present, otherwise a default one.
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.users.updateProfile",
http_method: hyper::Method::PATCH });
// Assemble query parameters; capacity covers the known fields plus extras.
let mut params: Vec<(&str, String)> = Vec::with_capacity(10 + self._additional_params.len());
if let Some(value) = self._request_metadata_user_overrides_user_id {
params.push(("requestMetadata.userOverrides.userId", value.to_string()));
}
if let Some(value) = self._request_metadata_user_overrides_ip_address {
params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
}
if let Some(value) = self._request_metadata_partners_session_id {
params.push(("requestMetadata.partnersSessionId", value.to_string()));
}
if let Some(value) = self._request_metadata_locale {
params.push(("requestMetadata.locale", value.to_string()));
}
// Repeated parameter: one query entry per experiment id, order preserved.
if self._request_metadata_experiment_ids.len() > 0 {
for f in self._request_metadata_experiment_ids.iter() {
params.push(("requestMetadata.experimentIds", f.to_string()));
}
}
// Reject additional params that clash with parameters owned by setters.
for &field in ["alt", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
// Responses are always requested in JSON format.
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/users/profile";
// The API key comes from the delegate; without one the call cannot proceed.
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
let mut json_mime_type: mime::Mime = "application/json".parse().unwrap();
// Serialize the request body once into an in-memory cursor so it can be
// re-read on each retry; nulls are stripped to keep the payload minimal.
let mut request_value_reader =
{
let mut value = json::value::to_value(&self._request).expect("serde to work");
client::remove_json_null_values(&mut value);
let mut dst = io::Cursor::new(Vec::with_capacity(128));
json::to_writer(&mut dst, &value).unwrap();
dst
};
// Seek to the end to learn the body size, then rewind for reading.
let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap();
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
// Retry loop: repeats the request when the delegate asks for a retry.
loop {
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::PATCH).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.header(CONTENT_TYPE, format!("{}", json_mime_type.to_string()))
.header(CONTENT_LENGTH, request_size as u64)
.body(hyper::body::Body::from(request_value_reader.get_ref().clone()));
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
// Transport-level failure: the delegate may request a delayed retry.
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
// Non-2xx: try to decode a structured server error for the delegate.
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
// Success: decode the response body into the typed result.
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
///
/// Sets the *request* property to the given value.
///
/// Even though the property has already been set when instantiating this call,
/// we provide this method for API completeness.
pub fn request(mut self, new_value: UserProfile) -> UserUpdateProfileCall<'a> {
self._request = new_value;
self
}
/// Logged-in user ID to impersonate instead of the user's ID.
///
/// Sets the *request metadata.user overrides.user id* query property to the given value.
pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> UserUpdateProfileCall<'a> {
self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
self
}
/// IP address to use instead of the user's geo-located IP address.
///
/// Sets the *request metadata.user overrides.ip address* query property to the given value.
pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> UserUpdateProfileCall<'a> {
self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
self
}
/// Second level identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> UserUpdateProfileCall<'a> {
self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
self
}
/// Identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> UserUpdateProfileCall<'a> {
self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
self
}
/// Google Partners session ID.
///
/// Sets the *request metadata.partners session id* query property to the given value.
pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> UserUpdateProfileCall<'a> {
self._request_metadata_partners_session_id = Some(new_value.to_string());
self
}
/// Locale to use for the current request.
///
/// Sets the *request metadata.locale* query property to the given value.
pub fn request_metadata_locale(mut self, new_value: &str) -> UserUpdateProfileCall<'a> {
self._request_metadata_locale = Some(new_value.to_string());
self
}
/// Experiment IDs the current request belongs to.
///
/// Append the given value to the *request metadata.experiment ids* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> UserUpdateProfileCall<'a> {
self._request_metadata_experiment_ids.push(new_value.to_string());
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> UserUpdateProfileCall<'a> {
self._delegate = Some(new_value);
self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> UserUpdateProfileCall<'a>
where T: AsRef<str> {
self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
}
/// Creates a user's company relation. Affiliates the user to a company.
///
/// A builder for the *createCompanyRelation* method supported by a *user* resource.
/// It is not used directly, but through a `UserMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// use partners2::api::CompanyRelation;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // As the method needs a request, you would usually fill it with the desired information
/// // into the respective structure. Some of the parts shown here might not be applicable !
/// // Values shown here are possibly random and not representative !
/// let mut req = CompanyRelation::default();
///
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.users().create_company_relation(req, "userId")
/// .request_metadata_user_overrides_user_id("est")
/// .request_metadata_user_overrides_ip_address("aliquyam")
/// .request_metadata_traffic_source_traffic_sub_id("elitr")
/// .request_metadata_traffic_source_traffic_source_id("duo")
/// .request_metadata_partners_session_id("diam")
/// .request_metadata_locale("est")
/// .add_request_metadata_experiment_ids("sit")
/// .doit().await;
/// # }
/// ```
pub struct UserCreateCompanyRelationCall<'a>
where {
/// Hub providing the HTTP client, base URL and user agent for this call.
hub: &'a Partners<>,
/// The `CompanyRelation` payload sent as the JSON request body.
_request: CompanyRelation,
/// Required `{userId}` path parameter.
_user_id: String,
/// Optional `requestMetadata.userOverrides.userId` query parameter.
_request_metadata_user_overrides_user_id: Option<String>,
/// Optional `requestMetadata.userOverrides.ipAddress` query parameter.
_request_metadata_user_overrides_ip_address: Option<String>,
/// Optional `requestMetadata.trafficSource.trafficSubId` query parameter.
_request_metadata_traffic_source_traffic_sub_id: Option<String>,
/// Optional `requestMetadata.trafficSource.trafficSourceId` query parameter.
_request_metadata_traffic_source_traffic_source_id: Option<String>,
/// Optional `requestMetadata.partnersSessionId` query parameter.
_request_metadata_partners_session_id: Option<String>,
/// Optional `requestMetadata.locale` query parameter.
_request_metadata_locale: Option<String>,
/// Repeated `requestMetadata.experimentIds` query parameter values.
_request_metadata_experiment_ids: Vec<String>,
/// Optional caller-supplied delegate for progress/retry decisions.
_delegate: Option<&'a mut dyn client::Delegate>,
/// Extra query parameters not covered by a dedicated setter.
_additional_params: HashMap<String, String>,
}
// Marker trait implementation: identifies this type as an API request builder.
impl<'a> client::CallBuilder for UserCreateCompanyRelationCall<'a> {}
impl<'a> UserCreateCompanyRelationCall<'a> {
/// Perform the operation you have built so far.
///
/// Serializes `_request` as JSON, substitutes `{userId}` into the path
/// `v2/users/{userId}/companyRelation`, issues an HTTP PUT with all configured
/// query parameters, and decodes the JSON response into a `CompanyRelation`.
/// The delegate (or a default one) is consulted for the API key and retries.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, CompanyRelation)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
// Use the caller-supplied delegate if present, otherwise a default one.
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.users.createCompanyRelation",
http_method: hyper::Method::PUT });
// Assemble query parameters; capacity covers the known fields plus extras.
let mut params: Vec<(&str, String)> = Vec::with_capacity(11 + self._additional_params.len());
// The path parameter rides in `params` until substituted into the URL below.
params.push(("userId", self._user_id.to_string()));
if let Some(value) = self._request_metadata_user_overrides_user_id {
params.push(("requestMetadata.userOverrides.userId", value.to_string()));
}
if let Some(value) = self._request_metadata_user_overrides_ip_address {
params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
}
if let Some(value) = self._request_metadata_partners_session_id {
params.push(("requestMetadata.partnersSessionId", value.to_string()));
}
if let Some(value) = self._request_metadata_locale {
params.push(("requestMetadata.locale", value.to_string()));
}
// Repeated parameter: one query entry per experiment id, order preserved.
if self._request_metadata_experiment_ids.len() > 0 {
for f in self._request_metadata_experiment_ids.iter() {
params.push(("requestMetadata.experimentIds", f.to_string()));
}
}
// Reject additional params that clash with parameters owned by setters.
for &field in ["alt", "userId", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
// Responses are always requested in JSON format.
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/users/{userId}/companyRelation";
// The API key comes from the delegate; without one the call cannot proceed.
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
// Substitute the `{userId}` placeholder with its value from `params`.
for &(find_this, param_name) in [("{userId}", "userId")].iter() {
let mut replace_with: Option<&str> = None;
for &(name, ref value) in params.iter() {
if name == param_name {
replace_with = Some(value);
break;
}
}
url = url.replace(find_this, replace_with.expect("to find substitution value in params"));
}
// Path parameters must not also appear in the query string: drop them.
{
let mut indices_for_removal: Vec<usize> = Vec::with_capacity(1);
for param_name in ["userId"].iter() {
if let Some(index) = params.iter().position(|t| &t.0 == param_name) {
indices_for_removal.push(index);
}
}
for &index in indices_for_removal.iter() {
params.remove(index);
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
let mut json_mime_type: mime::Mime = "application/json".parse().unwrap();
// Serialize the request body once into an in-memory cursor so it can be
// re-read on each retry; nulls are stripped to keep the payload minimal.
let mut request_value_reader =
{
let mut value = json::value::to_value(&self._request).expect("serde to work");
client::remove_json_null_values(&mut value);
let mut dst = io::Cursor::new(Vec::with_capacity(128));
json::to_writer(&mut dst, &value).unwrap();
dst
};
// Seek to the end to learn the body size, then rewind for reading.
let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap();
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
// Retry loop: repeats the request when the delegate asks for a retry.
loop {
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::PUT).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.header(CONTENT_TYPE, format!("{}", json_mime_type.to_string()))
.header(CONTENT_LENGTH, request_size as u64)
.body(hyper::body::Body::from(request_value_reader.get_ref().clone()));
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
// Transport-level failure: the delegate may request a delayed retry.
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
// Non-2xx: try to decode a structured server error for the delegate.
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
// Success: decode the response body into the typed result.
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
///
/// Sets the *request* property to the given value.
///
/// Even though the property has already been set when instantiating this call,
/// we provide this method for API completeness.
pub fn request(mut self, new_value: CompanyRelation) -> UserCreateCompanyRelationCall<'a> {
self._request = new_value;
self
}
/// The ID of the user. Can be set to <code>me</code> to mean
/// the currently authenticated user.
///
/// Sets the *user id* path property to the given value.
///
/// Even though the property has already been set when instantiating this call,
/// we provide this method for API completeness.
pub fn user_id(mut self, new_value: &str) -> UserCreateCompanyRelationCall<'a> {
self._user_id = new_value.to_string();
self
}
/// Logged-in user ID to impersonate instead of the user's ID.
///
/// Sets the *request metadata.user overrides.user id* query property to the given value.
pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> UserCreateCompanyRelationCall<'a> {
self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
self
}
/// IP address to use instead of the user's geo-located IP address.
///
/// Sets the *request metadata.user overrides.ip address* query property to the given value.
pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> UserCreateCompanyRelationCall<'a> {
self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
self
}
/// Second level identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> UserCreateCompanyRelationCall<'a> {
self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
self
}
/// Identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> UserCreateCompanyRelationCall<'a> {
self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
self
}
/// Google Partners session ID.
///
/// Sets the *request metadata.partners session id* query property to the given value.
pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> UserCreateCompanyRelationCall<'a> {
self._request_metadata_partners_session_id = Some(new_value.to_string());
self
}
/// Locale to use for the current request.
///
/// Sets the *request metadata.locale* query property to the given value.
pub fn request_metadata_locale(mut self, new_value: &str) -> UserCreateCompanyRelationCall<'a> {
self._request_metadata_locale = Some(new_value.to_string());
self
}
/// Experiment IDs the current request belongs to.
///
/// Append the given value to the *request metadata.experiment ids* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> UserCreateCompanyRelationCall<'a> {
self._request_metadata_experiment_ids.push(new_value.to_string());
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> UserCreateCompanyRelationCall<'a> {
self._delegate = Some(new_value);
self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> UserCreateCompanyRelationCall<'a>
where T: AsRef<str> {
self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
}
/// Deletes a user's company relation. Unaffiliates the user from a company.
///
/// A builder for the *deleteCompanyRelation* method supported by a *user* resource.
/// It is not used directly, but through a `UserMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.users().delete_company_relation("userId")
/// .request_metadata_user_overrides_user_id("eos")
/// .request_metadata_user_overrides_ip_address("Lorem")
/// .request_metadata_traffic_source_traffic_sub_id("ea")
/// .request_metadata_traffic_source_traffic_source_id("Stet")
/// .request_metadata_partners_session_id("dolores")
/// .request_metadata_locale("eos")
/// .add_request_metadata_experiment_ids("et")
/// .doit().await;
/// # }
/// ```
pub struct UserDeleteCompanyRelationCall<'a>
where {
/// Hub providing the HTTP client, base URL and user agent for this call.
hub: &'a Partners<>,
/// Required `{userId}` path parameter.
_user_id: String,
/// Optional `requestMetadata.userOverrides.userId` query parameter.
_request_metadata_user_overrides_user_id: Option<String>,
/// Optional `requestMetadata.userOverrides.ipAddress` query parameter.
_request_metadata_user_overrides_ip_address: Option<String>,
/// Optional `requestMetadata.trafficSource.trafficSubId` query parameter.
_request_metadata_traffic_source_traffic_sub_id: Option<String>,
/// Optional `requestMetadata.trafficSource.trafficSourceId` query parameter.
_request_metadata_traffic_source_traffic_source_id: Option<String>,
/// Optional `requestMetadata.partnersSessionId` query parameter.
_request_metadata_partners_session_id: Option<String>,
/// Optional `requestMetadata.locale` query parameter.
_request_metadata_locale: Option<String>,
/// Repeated `requestMetadata.experimentIds` query parameter values.
_request_metadata_experiment_ids: Vec<String>,
/// Optional caller-supplied delegate for progress/retry decisions.
_delegate: Option<&'a mut dyn client::Delegate>,
/// Extra query parameters not covered by a dedicated setter.
_additional_params: HashMap<String, String>,
}
// Marker trait implementation: identifies this type as an API request builder.
impl<'a> client::CallBuilder for UserDeleteCompanyRelationCall<'a> {}
impl<'a> UserDeleteCompanyRelationCall<'a> {
/// Perform the operation you have built so far.
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, Empty)> {
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match self._delegate {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "partners.users.deleteCompanyRelation",
http_method: hyper::Method::DELETE });
let mut params: Vec<(&str, String)> = Vec::with_capacity(10 + self._additional_params.len());
params.push(("userId", self._user_id.to_string()));
if let Some(value) = self._request_metadata_user_overrides_user_id {
params.push(("requestMetadata.userOverrides.userId", value.to_string()));
}
if let Some(value) = self._request_metadata_user_overrides_ip_address {
params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
}
if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
}
if let Some(value) = self._request_metadata_partners_session_id {
params.push(("requestMetadata.partnersSessionId", value.to_string()));
}
if let Some(value) = self._request_metadata_locale {
params.push(("requestMetadata.locale", value.to_string()));
}
if self._request_metadata_experiment_ids.len() > 0 {
for f in self._request_metadata_experiment_ids.iter() {
params.push(("requestMetadata.experimentIds", f.to_string()));
}
}
for &field in ["alt", "userId", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
if self._additional_params.contains_key(field) {
dlg.finished(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in self._additional_params.iter() {
params.push((&name, value.clone()));
}
params.push(("alt", "json".to_string()));
let mut url = self.hub._base_url.clone() + "v2/users/{userId}/companyRelation";
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
dlg.finished(false);
return Err(client::Error::MissingAPIKey)
}
}
for &(find_this, param_name) in [("{userId}", "userId")].iter() {
let mut replace_with: Option<&str> = None;
for &(name, ref value) in params.iter() {
if name == param_name {
replace_with = Some(value);
break;
}
}
url = url.replace(find_this, replace_with.expect("to find substitution value in params"));
}
{
let mut indices_for_removal: Vec<usize> = Vec::with_capacity(1);
for param_name in ["userId"].iter() {
if let Some(index) = params.iter().position(|t| &t.0 == param_name) {
indices_for_removal.push(index);
}
}
for &index in indices_for_removal.iter() {
params.remove(index);
}
}
let url = url::Url::parse_with_params(&url, params).unwrap();
loop {
let mut req_result = {
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(hyper::Method::DELETE).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone());
let request = req_builder
.body(hyper::body::Body::empty());
client.request(request.unwrap()).await
};
match req_result {
Err(err) => {
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
dlg.finished(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
let server_error = json::from_str::<client::ServerError>(&res_body_string)
.or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
.ok();
if let client::Retry::After(d) = dlg.http_failure(&res,
json_server_error,
server_error) {
sleep(d);
continue;
}
dlg.finished(false);
return match json::from_str::<client::ErrorResponse>(&res_body_string){
Err(_) => Err(client::Error::Failure(res)),
Ok(serr) => Err(client::Error::BadRequest(serr))
}
}
let result_value = {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
};
dlg.finished(true);
return Ok(result_value)
}
}
}
}
/// The ID of the user. Can be set to <code>me</code> to mean
/// the currently authenticated user.
///
/// Sets the *user id* path property to the given value.
///
/// Even though the property as already been set when instantiating this call,
/// we provide this method for API completeness.
pub fn user_id(mut self, new_value: &str) -> UserDeleteCompanyRelationCall<'a> {
self._user_id = new_value.to_string();
self
}
/// Logged-in user ID to impersonate instead of the user's ID.
///
/// Sets the *request metadata.user overrides.user id* query property to the given value.
pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> UserDeleteCompanyRelationCall<'a> {
self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
self
}
/// IP address to use instead of the user's geo-located IP address.
///
/// Sets the *request metadata.user overrides.ip address* query property to the given value.
pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> UserDeleteCompanyRelationCall<'a> {
self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
self
}
/// Second level identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> UserDeleteCompanyRelationCall<'a> {
self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
self
}
/// Identifier to indicate where the traffic comes from.
/// An identifier has multiple letters created by a team which redirected the
/// traffic to us.
///
/// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> UserDeleteCompanyRelationCall<'a> {
self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
self
}
/// Google Partners session ID.
///
/// Sets the *request metadata.partners session id* query property to the given value.
pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> UserDeleteCompanyRelationCall<'a> {
self._request_metadata_partners_session_id = Some(new_value.to_string());
self
}
/// Locale to use for the current request.
///
/// Sets the *request metadata.locale* query property to the given value.
pub fn request_metadata_locale(mut self, new_value: &str) -> UserDeleteCompanyRelationCall<'a> {
self._request_metadata_locale = Some(new_value.to_string());
self
}
/// Experiment IDs the current request belongs to.
///
/// Append the given value to the *request metadata.experiment ids* query property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> UserDeleteCompanyRelationCall<'a> {
self._request_metadata_experiment_ids.push(new_value.to_string());
self
}
/// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
/// while executing the actual API request.
///
/// It should be used to handle progress information, and to implement a certain level of resilience.
///
/// Sets the *delegate* property to the given value.
pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> UserDeleteCompanyRelationCall<'a> {
self._delegate = Some(new_value);
self
}
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
///
/// # Additional Parameters
///
/// * *alt* (query-string) - Data format for response.
/// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
/// * *access_token* (query-string) - OAuth access token.
/// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
/// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
/// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
/// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
/// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
/// * *$.xgafv* (query-string) - V1 error format.
/// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
/// * *callback* (query-string) - JSONP
pub fn param<T>(mut self, name: T, value: T) -> UserDeleteCompanyRelationCall<'a>
where T: AsRef<str> {
self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
}
/// Gets a user.
///
/// A builder for the *get* method supported by a *user* resource.
/// It is not used directly, but through a `UserMethods` instance.
///
/// # Example
///
/// Instantiate a resource method builder
///
/// ```test_harness,no_run
/// # extern crate hyper;
/// # extern crate hyper_rustls;
/// # extern crate yup_oauth2 as oauth2;
/// # extern crate google_partners2 as partners2;
/// # async fn dox() {
/// # use std::default::Default;
/// # use oauth2;
/// # use partners2::Partners;
///
/// # let secret: oauth2::ApplicationSecret = Default::default();
/// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
/// # secret,
/// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
/// # ).build().await.unwrap();
/// # let mut hub = Partners::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
/// // You can configure optional parameters by calling the respective setters at will, and
/// // execute the final call using `doit()`.
/// // Values shown here are possibly random and not representative !
/// let result = hub.users().get("userId")
/// .user_view("et")
/// .request_metadata_user_overrides_user_id("At")
/// .request_metadata_user_overrides_ip_address("dolore")
/// .request_metadata_traffic_source_traffic_sub_id("eirmod")
/// .request_metadata_traffic_source_traffic_source_id("Lorem")
/// .request_metadata_partners_session_id("accusam")
/// .request_metadata_locale("amet")
/// .add_request_metadata_experiment_ids("erat")
/// .doit().await;
/// # }
/// ```
// Builder for the `partners.users.get` call; fields mirror the method's
// path/query parameters, where `None`/empty means "not set".
pub struct UserGetCall<'a>
    where {
    // Hub holding the shared HTTP client, user agent and base URL.
    hub: &'a Partners<>,
    // Required `userId` path parameter.
    _user_id: String,
    // Optional query parameters, filled in by the corresponding setters.
    _user_view: Option<String>,
    _request_metadata_user_overrides_user_id: Option<String>,
    _request_metadata_user_overrides_ip_address: Option<String>,
    _request_metadata_traffic_source_traffic_sub_id: Option<String>,
    _request_metadata_traffic_source_traffic_source_id: Option<String>,
    _request_metadata_partners_session_id: Option<String>,
    _request_metadata_locale: Option<String>,
    // Repeated `requestMetadata.experimentIds` query parameter.
    _request_metadata_experiment_ids: Vec<String>,
    // Optional delegate consulted while the request executes.
    _delegate: Option<&'a mut dyn client::Delegate>,
    // Free-form extra query parameters (see `param`).
    _additional_params: HashMap<String, String>,
}
// Marker-style trait impl identifying this type as a call builder.
impl<'a> client::CallBuilder for UserGetCall<'a> {}
impl<'a> UserGetCall<'a> {
    /// Perform the operation you have built so far.
    pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, User)> {
        use std::io::{Read, Seek};
        use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
        use client::ToParts;
        // Fall back to a default delegate when the caller did not install one.
        let mut dd = client::DefaultDelegate;
        let mut dlg: &mut dyn client::Delegate = match self._delegate {
            Some(d) => d,
            None => &mut dd
        };
        dlg.begin(client::MethodInfo { id: "partners.users.get",
                                       http_method: hyper::Method::GET });
        // Collect every parameter that was set on this builder.
        let mut params: Vec<(&str, String)> = Vec::with_capacity(11 + self._additional_params.len());
        params.push(("userId", self._user_id.to_string()));
        if let Some(value) = self._user_view {
            params.push(("userView", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_user_id {
            params.push(("requestMetadata.userOverrides.userId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_user_overrides_ip_address {
            params.push(("requestMetadata.userOverrides.ipAddress", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_sub_id {
            params.push(("requestMetadata.trafficSource.trafficSubId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_traffic_source_traffic_source_id {
            params.push(("requestMetadata.trafficSource.trafficSourceId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_partners_session_id {
            params.push(("requestMetadata.partnersSessionId", value.to_string()));
        }
        if let Some(value) = self._request_metadata_locale {
            params.push(("requestMetadata.locale", value.to_string()));
        }
        // Repeated query parameter: one entry per experiment id.
        if self._request_metadata_experiment_ids.len() > 0 {
            for f in self._request_metadata_experiment_ids.iter() {
                params.push(("requestMetadata.experimentIds", f.to_string()));
            }
        }
        // Additional params must not clash with any parameter handled above.
        for &field in ["alt", "userId", "userView", "requestMetadata.userOverrides.userId", "requestMetadata.userOverrides.ipAddress", "requestMetadata.trafficSource.trafficSubId", "requestMetadata.trafficSource.trafficSourceId", "requestMetadata.partnersSessionId", "requestMetadata.locale", "requestMetadata.experimentIds"].iter() {
            if self._additional_params.contains_key(field) {
                dlg.finished(false);
                return Err(client::Error::FieldClash(field));
            }
        }
        for (name, value) in self._additional_params.iter() {
            params.push((&name, value.clone()));
        }
        params.push(("alt", "json".to_string()));
        let mut url = self.hub._base_url.clone() + "v2/users/{userId}";
        // The API key is obtained via the delegate; without one the call fails.
        let key = dlg.api_key();
        match key {
            Some(value) => params.push(("key", value)),
            None => {
                dlg.finished(false);
                return Err(client::Error::MissingAPIKey)
            }
        }
        // Substitute the `{userId}` placeholder in the URL template with the
        // matching value from `params`.
        for &(find_this, param_name) in [("{userId}", "userId")].iter() {
            let mut replace_with: Option<&str> = None;
            for &(name, ref value) in params.iter() {
                if name == param_name {
                    replace_with = Some(value);
                    break;
                }
            }
            url = url.replace(find_this, replace_with.expect("to find substitution value in params"));
        }
        // The path parameter was consumed by the substitution above; remove it
        // so it does not also appear as a query parameter.
        {
            let mut indices_for_removal: Vec<usize> = Vec::with_capacity(1);
            for param_name in ["userId"].iter() {
                if let Some(index) = params.iter().position(|t| &t.0 == param_name) {
                    indices_for_removal.push(index);
                }
            }
            for &index in indices_for_removal.iter() {
                params.remove(index);
            }
        }
        let url = url::Url::parse_with_params(&url, params).unwrap();
        // Issue the request; the delegate may ask for a retry after a delay.
        loop {
            let mut req_result = {
                let client = &self.hub.client;
                dlg.pre_request();
                let mut req_builder = hyper::Request::builder().method(hyper::Method::GET).uri(url.clone().into_string())
                        .header(USER_AGENT, self.hub._user_agent.clone());
                let request = req_builder
                        .body(hyper::body::Body::empty());
                client.request(request.unwrap()).await
            };
            match req_result {
                Err(err) => {
                    if let client::Retry::After(d) = dlg.http_error(&err) {
                        sleep(d);
                        continue;
                    }
                    dlg.finished(false);
                    return Err(client::Error::HttpError(err))
                }
                Ok(mut res) => {
                    // Non-success statuses are reported to the delegate, which
                    // may request a retry; otherwise they become errors.
                    if !res.status().is_success() {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        let json_server_error = json::from_str::<client::JsonServerError>(&res_body_string).ok();
                        let server_error = json::from_str::<client::ServerError>(&res_body_string)
                            .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error))
                            .ok();
                        if let client::Retry::After(d) = dlg.http_failure(&res,
                                                                          json_server_error,
                                                                          server_error) {
                            sleep(d);
                            continue;
                        }
                        dlg.finished(false);
                        return match json::from_str::<client::ErrorResponse>(&res_body_string){
                            Err(_) => Err(client::Error::Failure(res)),
                            Ok(serr) => Err(client::Error::BadRequest(serr))
                        }
                    }
                    // Decode the JSON body, handing back both the raw response
                    // and the decoded value.
                    let result_value = {
                        let res_body_string = client::get_body_as_string(res.body_mut()).await;
                        match json::from_str(&res_body_string) {
                            Ok(decoded) => (res, decoded),
                            Err(err) => {
                                dlg.response_json_decode_error(&res_body_string, &err);
                                return Err(client::Error::JsonDecodeError(res_body_string, err));
                            }
                        }
                    };
                    dlg.finished(true);
                    return Ok(result_value)
                }
            }
        }
    }
    /// Identifier of the user. Can be set to <code>me</code> to mean the currently
    /// authenticated user.
    ///
    /// Sets the *user id* path property to the given value.
    ///
    /// Even though the property has already been set when instantiating this call,
    /// we provide this method for API completeness.
    pub fn user_id(mut self, new_value: &str) -> UserGetCall<'a> {
        self._user_id = new_value.to_string();
        self
    }
    /// Specifies what parts of the user information to return.
    ///
    /// Sets the *user view* query property to the given value.
    pub fn user_view(mut self, new_value: &str) -> UserGetCall<'a> {
        self._user_view = Some(new_value.to_string());
        self
    }
    /// Logged-in user ID to impersonate instead of the user's ID.
    ///
    /// Sets the *request metadata.user overrides.user id* query property to the given value.
    pub fn request_metadata_user_overrides_user_id(mut self, new_value: &str) -> UserGetCall<'a> {
        self._request_metadata_user_overrides_user_id = Some(new_value.to_string());
        self
    }
    /// IP address to use instead of the user's geo-located IP address.
    ///
    /// Sets the *request metadata.user overrides.ip address* query property to the given value.
    pub fn request_metadata_user_overrides_ip_address(mut self, new_value: &str) -> UserGetCall<'a> {
        self._request_metadata_user_overrides_ip_address = Some(new_value.to_string());
        self
    }
    /// Second level identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic sub id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_sub_id(mut self, new_value: &str) -> UserGetCall<'a> {
        self._request_metadata_traffic_source_traffic_sub_id = Some(new_value.to_string());
        self
    }
    /// Identifier to indicate where the traffic comes from.
    /// An identifier has multiple letters created by a team which redirected the
    /// traffic to us.
    ///
    /// Sets the *request metadata.traffic source.traffic source id* query property to the given value.
    pub fn request_metadata_traffic_source_traffic_source_id(mut self, new_value: &str) -> UserGetCall<'a> {
        self._request_metadata_traffic_source_traffic_source_id = Some(new_value.to_string());
        self
    }
    /// Google Partners session ID.
    ///
    /// Sets the *request metadata.partners session id* query property to the given value.
    pub fn request_metadata_partners_session_id(mut self, new_value: &str) -> UserGetCall<'a> {
        self._request_metadata_partners_session_id = Some(new_value.to_string());
        self
    }
    /// Locale to use for the current request.
    ///
    /// Sets the *request metadata.locale* query property to the given value.
    pub fn request_metadata_locale(mut self, new_value: &str) -> UserGetCall<'a> {
        self._request_metadata_locale = Some(new_value.to_string());
        self
    }
    /// Experiment IDs the current request belongs to.
    ///
    /// Append the given value to the *request metadata.experiment ids* query property.
    /// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
    pub fn add_request_metadata_experiment_ids(mut self, new_value: &str) -> UserGetCall<'a> {
        self._request_metadata_experiment_ids.push(new_value.to_string());
        self
    }
    /// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong
    /// while executing the actual API request.
    ///
    /// It should be used to handle progress information, and to implement a certain level of resilience.
    ///
    /// Sets the *delegate* property to the given value.
    pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> UserGetCall<'a> {
        self._delegate = Some(new_value);
        self
    }
    /// Set any additional parameter of the query string used in the request.
    /// It should be used to set parameters which are not yet available through their own
    /// setters.
    ///
    /// Please note that this method must not be used to set any of the known parameters
    /// which have their own setter method. If done anyway, the request will fail.
    ///
    /// # Additional Parameters
    ///
    /// * *alt* (query-string) - Data format for response.
    /// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    /// * *access_token* (query-string) - OAuth access token.
    /// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart").
    /// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks.
    /// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    /// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. "media", "multipart").
    /// * *fields* (query-string) - Selector specifying which fields to include in a partial response.
    /// * *$.xgafv* (query-string) - V1 error format.
    /// * *oauth_token* (query-string) - OAuth 2.0 token for the current user.
    /// * *callback* (query-string) - JSONP
    pub fn param<T>(mut self, name: T, value: T) -> UserGetCall<'a>
        where T: AsRef<str> {
        self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string());
        self
    }
}
| 43.978093 | 646 | 0.622299 |
64443afc0026733d46070a0036a4cee45e98ecaa | 4,887 | //! APIs to read from and write to Arrow's IPC format.
//!
//! Inter-process communication is a method through which different processes
//! share and pass data between them. Its use-cases include parallel
//! processing of chunks of data across different CPU cores, transferring
//! data between different Apache Arrow implementations in other languages and
//! more. Under the hood Apache Arrow uses [FlatBuffers](https://google.github.io/flatbuffers/)
//! as its binary protocol, so every Arrow-centered streaming or serialization
//! problem that could be solved using FlatBuffers could probably be solved
//! using the more integrated approach that is exposed in this module.
//!
//! [Arrow's IPC protocol](https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc)
//! allows only batch or dictionary columns to be passed
//! around due to its reliance on a pre-defined data scheme. This constraint
//! provides a large performance gain because serialized data will always have a
//! known structure, i.e. the same fields and datatypes, with the only variance
//! being the number of rows and the actual data inside the Batch. This dramatically
//! increases the deserialization rate, as the bytes in the file or stream are already
//! structured "correctly".
//!
//! Reading and writing IPC messages is done using one of two variants - either
//! [`FileReader`](read::FileReader) <-> [`FileWriter`](struct@write::FileWriter) or
//! [`StreamReader`](read::StreamReader) <-> [`StreamWriter`](struct@write::StreamWriter).
//! These two variants wrap a type `T` that implements [`Read`](std::io::Read), and in
//! the case of the `File` variant it also implements [`Seek`](std::io::Seek). In
//! practice it means that `File`s can be arbitrarily accessed while `Stream`s are only
//! read in certain order - the one they were written in (first in, first out).
//!
//! # Examples
//! Read and write to a file:
//! ```
//! use arrow2::io::ipc::{{read::{FileReader, read_file_metadata}}, {write::{FileWriter, WriteOptions}}};
//! # use std::fs::File;
//! # use std::sync::Arc;
//! # use arrow2::datatypes::{Field, Schema, DataType};
//! # use arrow2::array::{Int32Array, Array};
//! # use arrow2::chunk::Chunk;
//! # use arrow2::error::ArrowError;
//! // Setup the writer
//! let path = "example.arrow".to_string();
//! let mut file = File::create(&path)?;
//! let x_coord = Field::new("x", DataType::Int32, false);
//! let y_coord = Field::new("y", DataType::Int32, false);
//! let schema = Schema::from(vec![x_coord, y_coord]);
//! let options = WriteOptions {compression: None};
//! let mut writer = FileWriter::try_new(file, &schema, None, options)?;
//!
//! // Setup the data
//! let x_data = Int32Array::from_slice([-1i32, 1]);
//! let y_data = Int32Array::from_slice([1i32, -1]);
//! let chunk = Chunk::try_new(
//! vec![Arc::new(x_data) as Arc<dyn Array>, Arc::new(y_data)]
//! )?;
//!
//! // Write the messages and finalize the stream
//! for _ in 0..5 {
//! writer.write(&chunk, None);
//! }
//! writer.finish();
//!
//! // Fetch some of the data and get the reader back
//! let mut reader = File::open(&path)?;
//! let metadata = read_file_metadata(&mut reader)?;
//! let mut filereader = FileReader::new(reader, metadata, None);
//! let row1 = filereader.next().unwrap(); // [[-1, 1], [1, -1]]
//! let row2 = filereader.next().unwrap(); // [[-1, 1], [1, -1]]
//! let mut reader = filereader.into_inner();
//! // Do more stuff with the reader, like seeking ahead.
//! # Ok::<(), ArrowError>(())
//! ```
//!
//! For further information and examples please consult the
//! [user guide](https://jorgecarleitao.github.io/arrow2/io/index.html).
//! For even more examples check the `examples` folder in the main repository
//! ([1](https://github.com/jorgecarleitao/arrow2/blob/main/examples/ipc_file_read.rs),
//! [2](https://github.com/jorgecarleitao/arrow2/blob/main/examples/ipc_file_write.rs),
//! [3](https://github.com/jorgecarleitao/arrow2/tree/main/examples/ipc_pyarrow)).
use crate::error::ArrowError;
mod compression;
mod endianess;
pub mod read;
pub mod write;
// Magic bytes identifying an Arrow IPC file: the ASCII string "ARROW1".
const ARROW_MAGIC: [u8; 6] = [b'A', b'R', b'R', b'O', b'W', b'1'];
// The IPC continuation marker: four 0xFF bytes.
const CONTINUATION_MARKER: [u8; 4] = [0xff; 4];
/// Struct containing `dictionary_id` and nested `IpcField`, allowing users
/// to specify the dictionary ids of the IPC fields when writing to IPC.
#[derive(Debug, Clone, PartialEq, Default)]
pub struct IpcField {
    /// Optional nested children of this field.
    pub fields: Vec<IpcField>,
    /// Optional dictionary id, used when the field is dictionary-encoded.
    pub dictionary_id: Option<i64>,
}
/// Schema-level IPC metadata: one [`IpcField`] per field plus the byte order
/// the data was written with.
#[derive(Debug, Clone, PartialEq)]
pub struct IpcSchema {
    /// One [`IpcField`] per field of the schema.
    pub fields: Vec<IpcField>,
    /// Whether the values are encoded in little-endian byte order.
    pub is_little_endian: bool,
}
impl From<arrow_format::ipc::planus::Error> for ArrowError {
    /// Converts an error raised by the `arrow_format` IPC (planus) bindings
    /// into an out-of-spec Arrow error, preserving its message.
    fn from(error: arrow_format::ipc::planus::Error) -> Self {
        let message = error.to_string();
        Self::OutOfSpec(message)
    }
}
| 44.027027 | 127 | 0.688971 |
16956f5309895968ade618a99a2c4cfe2858f8c9 | 2,277 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
//! Used to perform catching up between nodes for committed states.
//! Used for node restarts, network partitions, full node syncs
#![recursion_limit = "1024"]
use executor_types::ExecutedTrees;
use libra_types::{epoch_state::EpochState, ledger_info::LedgerInfoWithSignatures};
pub use synchronizer::{StateSyncClient, StateSynchronizer};
mod chunk_request;
mod chunk_response;
pub mod coordinator;
mod counters;
mod executor_proxy;
pub mod network;
mod request_manager;
mod synchronizer;
/// The state distinguishes between the following fields:
/// * highest_local_li is keeping the latest certified ledger info
/// * synced_trees is keeping the latest state in the transaction accumulator and state tree.
///
/// While `highest_local_li` can be used for helping the others (corresponding to the highest
/// version we have a proof for), `synced_trees` is used for retrieving missing chunks
/// for the local storage.
#[derive(Clone)]
pub struct SynchronizerState {
    // Latest certified ledger info: the highest version we hold a proof for.
    pub highest_local_li: LedgerInfoWithSignatures,
    // Latest state of the transaction accumulator and state tree.
    pub synced_trees: ExecutedTrees,
    // Corresponds to the current epoch if the highest local LI is in the middle of the epoch,
    // or the next epoch if the highest local LI is the final LI in the current epoch.
    pub trusted_epoch: EpochState,
}
impl SynchronizerState {
pub fn new(
highest_local_li: LedgerInfoWithSignatures,
synced_trees: ExecutedTrees,
current_epoch_state: EpochState,
) -> Self {
let trusted_epoch = highest_local_li
.ledger_info()
.next_epoch_state()
.cloned()
.unwrap_or(current_epoch_state);
SynchronizerState {
highest_local_li,
synced_trees,
trusted_epoch,
}
}
/// The highest available version in the local storage (even if it's not covered by the LI).
pub fn highest_version_in_local_storage(&self) -> u64 {
self.synced_trees.version().unwrap_or(0)
}
pub fn epoch(&self) -> u64 {
self.trusted_epoch.epoch
}
}
#[cfg(any(feature = "fuzzing", test))]
mod tests;
#[cfg(any(feature = "fuzzing", test))]
pub use tests::fuzzing;
| 32.070423 | 96 | 0.706632 |
148193317081d18ab5a22a6aa366163ef46469ab | 21,789 | use ::insta::assert_snapshot;
use crate::panes::PositionAndSize;
use crate::tests::fakes::FakeInputOutput;
use crate::tests::utils::{get_next_to_last_snapshot, get_output_frame_snapshots};
use crate::{start, CliArgs};
use crate::common::input::config::Config;
use crate::tests::utils::commands::{
MOVE_FOCUS_IN_PANE_MODE, PANE_MODE, QUIT, RESIZE_LEFT_IN_RESIZE_MODE, RESIZE_MODE,
RESIZE_UP_IN_RESIZE_MODE, SLEEP, SPLIT_DOWN_IN_PANE_MODE, SPLIT_RIGHT_IN_PANE_MODE,
};
/// Builds a fake OS I/O layer that pretends the terminal has the given size.
fn get_fake_os_input(fake_win_size: &PositionAndSize) -> FakeInputOutput {
    let win_size = *fake_win_size;
    FakeInputOutput::new(win_size)
}
#[test]
pub fn resize_up_with_pane_above() {
    // ┌───────────┐                  ┌───────────┐
    // │           │                  │           │
    // │           │                  ├───────────┤
    // ├───────────┤  ==resize=up==>  │███████████│
    // │███████████│                  │███████████│
    // │███████████│                  │███████████│
    // └───────────┘                  └───────────┘
    // █ == focused pane
    let fake_win_size = PositionAndSize {
        columns: 121,
        rows: 20,
        x: 0,
        y: 0,
        ..Default::default()
    };
    let mut fake_input_output = get_fake_os_input(&fake_win_size);
    // Split horizontally, then grow the focused (bottom) pane upwards.
    fake_input_output.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    let os_input = Box::new(fake_input_output.clone());
    let os_output = Box::new(fake_input_output.clone());
    start(os_input, CliArgs::default(), os_output, Config::default());
    let output_frames = fake_input_output
        .stdout_writer
        .output_frames
        .lock()
        .unwrap();
    let snapshots = get_output_frame_snapshots(&output_frames, &fake_win_size);
    // The frame just before quitting shows the final resized layout.
    assert_snapshot!(get_next_to_last_snapshot(snapshots).expect("could not find snapshot"));
}
#[test]
pub fn resize_up_with_pane_below() {
    // ┌───────────┐                  ┌───────────┐
    // │███████████│                  │███████████│
    // │███████████│                  ├───────────┤
    // ├───────────┤  ==resize=up==>  │           │
    // │           │                  │           │
    // │           │                  │           │
    // └───────────┘                  └───────────┘
    // █ == focused pane
    let fake_win_size = PositionAndSize {
        columns: 121,
        rows: 20,
        x: 0,
        y: 0,
        ..Default::default()
    };
    let mut fake_input_output = get_fake_os_input(&fake_win_size);
    // Split, move focus to the top pane, then shrink the pane below it.
    fake_input_output.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    let os_input = Box::new(fake_input_output.clone());
    let os_output = Box::new(fake_input_output.clone());
    start(os_input, CliArgs::default(), os_output, Config::default());
    let output_frames = fake_input_output
        .stdout_writer
        .output_frames
        .lock()
        .unwrap();
    let snapshots = get_output_frame_snapshots(&output_frames, &fake_win_size);
    // The frame just before quitting shows the final resized layout.
    assert_snapshot!(get_next_to_last_snapshot(snapshots).expect("could not find snapshot"));
}
#[test]
pub fn resize_up_with_panes_above_and_below() {
    // ┌───────────┐                  ┌───────────┐
    // │           │                  │           │
    // │           │                  ├───────────┤
    // ├───────────┤                  │███████████│
    // │███████████│  ==resize=up==>  │███████████│
    // │███████████│                  │███████████│
    // ├───────────┤                  ├───────────┤
    // │           │                  │           │
    // │           │                  │           │
    // └───────────┘                  └───────────┘
    // █ == focused pane
    let fake_win_size = PositionAndSize {
        columns: 121,
        rows: 20,
        x: 0,
        y: 0,
        ..Default::default()
    };
    let mut fake_input_output = get_fake_os_input(&fake_win_size);
    // Create three stacked panes, focus the middle one, then resize it upwards.
    fake_input_output.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    let os_input = Box::new(fake_input_output.clone());
    let os_output = Box::new(fake_input_output.clone());
    start(os_input, CliArgs::default(), os_output, Config::default());
    let output_frames = fake_input_output
        .stdout_writer
        .output_frames
        .lock()
        .unwrap();
    let snapshots = get_output_frame_snapshots(&output_frames, &fake_win_size);
    // The frame just before quitting shows the final resized layout.
    assert_snapshot!(get_next_to_last_snapshot(snapshots).expect("could not find snapshot"));
}
#[test]
pub fn resize_up_with_multiple_panes_above() {
    //
    // ┌─────┬─────┐                  ┌─────┬─────┐
    // │     │     │                  ├─────┴─────┤
    // ├─────┴─────┤  ==resize=up==>  │███████████│
    // │███████████│                  │███████████│
    // └───────────┘                  └───────────┘
    // █ == focused pane
    let fake_win_size = PositionAndSize {
        columns: 121,
        rows: 20,
        x: 0,
        y: 0,
        ..Default::default()
    };
    let mut fake_input_output = get_fake_os_input(&fake_win_size);
    // Build two panes on top, then grow the bottom pane into both of them.
    fake_input_output.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    let os_input = Box::new(fake_input_output.clone());
    let os_output = Box::new(fake_input_output.clone());
    start(os_input, CliArgs::default(), os_output, Config::default());
    let output_frames = fake_input_output
        .stdout_writer
        .output_frames
        .lock()
        .unwrap();
    let snapshots = get_output_frame_snapshots(&output_frames, &fake_win_size);
    // The frame just before quitting shows the final resized layout.
    assert_snapshot!(get_next_to_last_snapshot(snapshots).expect("could not find snapshot"));
}
#[test]
pub fn resize_up_with_panes_above_aligned_left_with_current_pane() {
    // ┌─────┬─────┐                  ┌─────┬─────┐
    // │     │     │                  │     ├─────┤
    // ├─────┼─────┤  ==resize=up==>  ├─────┤█████│
    // │     │█████│                  │     │█████│
    // └─────┴─────┘                  └─────┴─────┘
    // █ == focused pane
    let fake_win_size = PositionAndSize {
        columns: 121,
        rows: 20,
        x: 0,
        y: 0,
        ..Default::default()
    };
    let mut fake_input_output = get_fake_os_input(&fake_win_size);
    // Build a 2x2 grid, focus the bottom-right pane, then resize it upwards.
    fake_input_output.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    let os_input = Box::new(fake_input_output.clone());
    let os_output = Box::new(fake_input_output.clone());
    start(os_input, CliArgs::default(), os_output, Config::default());
    let output_frames = fake_input_output
        .stdout_writer
        .output_frames
        .lock()
        .unwrap();
    let snapshots = get_output_frame_snapshots(&output_frames, &fake_win_size);
    // The frame just before quitting shows the final resized layout.
    assert_snapshot!(get_next_to_last_snapshot(snapshots).expect("could not find snapshot"));
}
#[test]
pub fn resize_up_with_panes_below_aligned_left_with_current_pane() {
    // ┌─────┬─────┐                    ┌─────┬─────┐
    // │     │█████│                    │     │█████│
    // │     │█████│                    │     ├─────┤
    // ├─────┼─────┤ ==resize=up==>     ├─────┤     │
    // │     │     │                    │     │     │
    // │     │     │                    │     │     │
    // └─────┴─────┘                    └─────┴─────┘
    // █ == focused pane
    // Dimensions of the fake terminal backend.
    let win_size = PositionAndSize {
        x: 0,
        y: 0,
        columns: 121,
        rows: 20,
        ..Default::default()
    };
    let mut fake_io = get_fake_os_input(&win_size);
    // Scripted key presses: build the layout, then trigger the resize.
    fake_io.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    start(
        Box::new(fake_io.clone()),
        CliArgs::default(),
        Box::new(fake_io.clone()),
        Config::default(),
    );
    // Snapshot the rendered state just before the app quit.
    let frames = fake_io.stdout_writer.output_frames.lock().unwrap();
    let snapshots = get_output_frame_snapshots(&frames, &win_size);
    let snapshot_before_quit =
        get_next_to_last_snapshot(snapshots).expect("could not find snapshot");
    assert_snapshot!(snapshot_before_quit);
}
#[test]
pub fn resize_up_with_panes_above_aligned_right_with_current_pane() {
    // ┌─────┬─────┐                    ┌─────┬─────┐
    // │     │     │                    │     │     │
    // │     │     │                    ├─────┤     │
    // ├─────┼─────┤ ==resize=up==>     │█████├─────┤
    // │█████│     │                    │█████│     │
    // │█████│     │                    │█████│     │
    // └─────┴─────┘                    └─────┴─────┘
    // █ == focused pane
    // Dimensions of the fake terminal backend.
    let win_size = PositionAndSize {
        x: 0,
        y: 0,
        columns: 121,
        rows: 20,
        ..Default::default()
    };
    let mut fake_io = get_fake_os_input(&win_size);
    // Scripted key presses: build the layout, then trigger the resize.
    fake_io.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    start(
        Box::new(fake_io.clone()),
        CliArgs::default(),
        Box::new(fake_io.clone()),
        Config::default(),
    );
    // Snapshot the rendered state just before the app quit.
    let frames = fake_io.stdout_writer.output_frames.lock().unwrap();
    let snapshots = get_output_frame_snapshots(&frames, &win_size);
    let snapshot_before_quit =
        get_next_to_last_snapshot(snapshots).expect("could not find snapshot");
    assert_snapshot!(snapshot_before_quit);
}
#[test]
pub fn resize_up_with_panes_below_aligned_right_with_current_pane() {
    // ┌─────┬─────┐                    ┌─────┬─────┐
    // │█████│     │                    │█████│     │
    // │█████│     │                    ├─────┤     │
    // ├─────┼─────┤ ==resize=up==>     │     ├─────┤
    // │     │     │                    │     │     │
    // │     │     │                    │     │     │
    // └─────┴─────┘                    └─────┴─────┘
    // █ == focused pane
    // Dimensions of the fake terminal backend.
    let win_size = PositionAndSize {
        x: 0,
        y: 0,
        columns: 121,
        rows: 20,
        ..Default::default()
    };
    let mut fake_io = get_fake_os_input(&win_size);
    // Scripted key presses: build the layout, then trigger the resize.
    fake_io.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    start(
        Box::new(fake_io.clone()),
        CliArgs::default(),
        Box::new(fake_io.clone()),
        Config::default(),
    );
    // Snapshot the rendered state just before the app quit.
    let frames = fake_io.stdout_writer.output_frames.lock().unwrap();
    let snapshots = get_output_frame_snapshots(&frames, &win_size);
    let snapshot_before_quit =
        get_next_to_last_snapshot(snapshots).expect("could not find snapshot");
    assert_snapshot!(snapshot_before_quit);
}
#[test]
pub fn resize_up_with_panes_above_aligned_left_and_right_with_current_pane() {
    // ┌───┬───┬───┐                    ┌───┬───┬───┐
    // │   │   │   │                    │   │   │   │
    // │   │   │   │                    │   ├───┤   │
    // ├───┼───┼───┤ ==resize=up==>     ├───┤███├───┤
    // │   │███│   │                    │   │███│   │
    // │   │███│   │                    │   │███│   │
    // └───┴───┴───┘                    └───┴───┴───┘
    // █ == focused pane
    // Dimensions of the fake terminal backend.
    let win_size = PositionAndSize {
        x: 0,
        y: 0,
        columns: 121,
        rows: 20,
        ..Default::default()
    };
    let mut fake_io = get_fake_os_input(&win_size);
    // Scripted key presses: build the layout, then trigger the resize.
    fake_io.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    start(
        Box::new(fake_io.clone()),
        CliArgs::default(),
        Box::new(fake_io.clone()),
        Config::default(),
    );
    // Snapshot the rendered state just before the app quit.
    let frames = fake_io.stdout_writer.output_frames.lock().unwrap();
    let snapshots = get_output_frame_snapshots(&frames, &win_size);
    let snapshot_before_quit =
        get_next_to_last_snapshot(snapshots).expect("could not find snapshot");
    assert_snapshot!(snapshot_before_quit);
}
#[test]
pub fn resize_up_with_panes_below_aligned_left_and_right_with_current_pane() {
    // ┌───┬───┬───┐                    ┌───┬───┬───┐
    // │   │███│   │                    │   │███│   │
    // │   │███│   │                    │   ├───┤   │
    // ├───┼───┼───┤ ==resize=up==>     ├───┤   ├───┤
    // │   │   │   │                    │   │   │   │
    // │   │   │   │                    │   │   │   │
    // └───┴───┴───┘                    └───┴───┴───┘
    // █ == focused pane
    // Dimensions of the fake terminal backend.
    let win_size = PositionAndSize {
        x: 0,
        y: 0,
        columns: 121,
        rows: 20,
        ..Default::default()
    };
    let mut fake_io = get_fake_os_input(&win_size);
    // Scripted key presses: build the layout, then trigger the resize.
    fake_io.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    start(
        Box::new(fake_io.clone()),
        CliArgs::default(),
        Box::new(fake_io.clone()),
        Config::default(),
    );
    // Snapshot the rendered state just before the app quit.
    let frames = fake_io.stdout_writer.output_frames.lock().unwrap();
    let snapshots = get_output_frame_snapshots(&frames, &win_size);
    let snapshot_before_quit =
        get_next_to_last_snapshot(snapshots).expect("could not find snapshot");
    assert_snapshot!(snapshot_before_quit);
}
#[test]
pub fn resize_up_with_panes_above_aligned_left_and_right_with_panes_to_the_left_and_right() {
    // ┌─┬───────┬─┐                    ┌─┬───────┬─┐
    // │ │       │ │                    │ │       │ │
    // │ │       │ │                    │ ├─┬───┬─┤ │
    // ├─┼─┬───┬─┼─┤ ==resize=up==>     ├─┤ │███│ ├─┤
    // │ │ │███│ │ │                    │ │ │███│ │ │
    // │ │ │███│ │ │                    │ │ │███│ │ │
    // └─┴─┴───┴─┴─┘                    └─┴─┴───┴─┴─┘
    // █ == focused pane
    // Dimensions of the fake terminal backend (taller so the many splits fit).
    let win_size = PositionAndSize {
        x: 0,
        y: 0,
        columns: 121,
        rows: 40,
        ..Default::default()
    };
    let mut fake_io = get_fake_os_input(&win_size);
    // Scripted key presses: build the layout, then trigger the resize.
    fake_io.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    start(
        Box::new(fake_io.clone()),
        CliArgs::default(),
        Box::new(fake_io.clone()),
        Config::default(),
    );
    // Snapshot the rendered state just before the app quit.
    let frames = fake_io.stdout_writer.output_frames.lock().unwrap();
    let snapshots = get_output_frame_snapshots(&frames, &win_size);
    let snapshot_before_quit =
        get_next_to_last_snapshot(snapshots).expect("could not find snapshot");
    assert_snapshot!(snapshot_before_quit);
}
#[test]
pub fn resize_up_with_panes_below_aligned_left_and_right_with_to_the_left_and_right() {
    // ┌─┬─┬───┬─┬─┐                    ┌─┬─┬───┬─┬─┐
    // │ │ │███│ │ │                    │ │ │███│ │ │
    // │ │ │███│ │ │                    │ ├─┴───┴─┤ │
    // ├─┼─┴───┴─┼─┤ ==resize=up==>     ├─┤       ├─┤
    // │ │       │ │                    │ │       │ │
    // │ │       │ │                    │ │       │ │
    // └─┴───────┴─┘                    └─┴───────┴─┘
    // █ == focused pane
    // Dimensions of the fake terminal backend (taller so the many splits fit).
    let win_size = PositionAndSize {
        x: 0,
        y: 0,
        columns: 121,
        rows: 40,
        ..Default::default()
    };
    let mut fake_io = get_fake_os_input(&win_size);
    // Scripted key presses: build the layout, then trigger the resize.
    fake_io.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &SPLIT_RIGHT_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &MOVE_FOCUS_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &RESIZE_LEFT_IN_RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    start(
        Box::new(fake_io.clone()),
        CliArgs::default(),
        Box::new(fake_io.clone()),
        Config::default(),
    );
    // Snapshot the rendered state just before the app quit.
    let frames = fake_io.stdout_writer.output_frames.lock().unwrap();
    let snapshots = get_output_frame_snapshots(&frames, &win_size);
    let snapshot_before_quit =
        get_next_to_last_snapshot(snapshots).expect("could not find snapshot");
    assert_snapshot!(snapshot_before_quit);
}
#[test]
pub fn cannot_resize_up_when_pane_above_is_at_minimum_height() {
    // ┌───────────┐                    ┌───────────┐
    // │           │                    │           │
    // ├───────────┤ ==resize=up==>     ├───────────┤
    // │███████████│                    │███████████│
    // └───────────┘                    └───────────┘
    // █ == focused pane
    // Deliberately short terminal: the pane above is already at its
    // minimum height, so the resize should be a no-op.
    let win_size = PositionAndSize {
        x: 0,
        y: 0,
        columns: 121,
        rows: 7,
        ..Default::default()
    };
    let mut fake_io = get_fake_os_input(&win_size);
    // Scripted key presses: split once, then attempt the resize.
    fake_io.add_terminal_input(&[
        &PANE_MODE,
        &SPLIT_DOWN_IN_PANE_MODE,
        &RESIZE_MODE,
        &RESIZE_UP_IN_RESIZE_MODE,
        &SLEEP,
        &QUIT,
    ]);
    start(
        Box::new(fake_io.clone()),
        CliArgs::default(),
        Box::new(fake_io.clone()),
        Config::default(),
    );
    // Snapshot the rendered state just before the app quit.
    let frames = fake_io.stdout_writer.output_frames.lock().unwrap();
    let snapshots = get_output_frame_snapshots(&frames, &win_size);
    let snapshot_before_quit =
        get_next_to_last_snapshot(snapshots).expect("could not find snapshot");
    assert_snapshot!(snapshot_before_quit);
}
| 31.082739 | 93 | 0.510257 |
f746058e1a40793f6a4d775a3af911fbfe1ad5a0 | 1,827 | /*
* Location API
*
* Geolocation, Geocoding and Maps
*
* OpenAPI spec version: 2.0.0
*
* Generated by: https://openapi-generator.tech
*/
use std::rc::Rc;
use std::borrow::Borrow;
use hyper;
use serde_json;
use futures::Future;
use super::{Error, configuration};
use super::request as __internal_request;
pub struct SEARCHApiClient<C: hyper::client::Connect> {
configuration: Rc<configuration::Configuration<C>>,
}
impl<C: hyper::client::Connect> SEARCHApiClient<C> {
pub fn new(configuration: Rc<configuration::Configuration<C>>) -> SEARCHApiClient<C> {
SEARCHApiClient {
configuration: configuration,
}
}
}
pub trait SEARCHApi {
fn search(&self, q: &str, viewbox: &str, limit: i64, accept_language: &str, countrycodes: &str) -> Box<Future<Item = Value, Error = Error<serde_json::Value>>>;
}
impl<C: hyper::client::Connect>SEARCHApi for SEARCHApiClient<C> {
fn search(&self, q: &str, viewbox: &str, limit: i64, accept_language: &str, countrycodes: &str) -> Box<Future<Item = Value, Error = Error<serde_json::Value>>> {
__internal_request::Request::new(hyper::Method::Get, "/search.php".to_string())
.with_auth(__internal_request::Auth::ApiKey(__internal_request::ApiKey{
in_header: false,
in_query: true,
param_name: "token".to_owned(),
}))
.with_query_param("q".to_string(), q.to_string())
.with_query_param("viewbox".to_string(), viewbox.to_string())
.with_query_param("limit".to_string(), limit.to_string())
.with_query_param("accept-language".to_string(), accept_language.to_string())
.with_query_param("countrycodes".to_string(), countrycodes.to_string())
.execute(self.configuration.borrow())
}
}
| 33.218182 | 164 | 0.650794 |
5d4b7d1a816a9a2f7b7170f679efd19c797c8bc5 | 16,747 | //! Explicit numeric type conversion.
/// Blanket trait that imports all traits in this crate.
///
/// Improves syntax ergonomics by allowing the syntax `foo.bar::<T>()`.
pub trait Xias {
/// Convert between signed and unsigned types of the same integer,
/// assuming that the value is homogeneous over the conversion.
///
/// # Panics
/// Panics if the value is out of range after conversion.
fn homosign<T>(self) -> T
where
Self: Homosign<T>,
{
Homosign::homosign(self)
}
/// Downscale the precision of a floating point value.
///
/// # Panics
/// Panics if the value is infinite after conversion.
fn lossy_float<T>(self) -> T
where
Self: LossyFloat<T>,
{
LossyFloat::lossy_float(self)
}
/// Reduce the size of an integer,
/// assuming that the value is within the range of the new type.
///
/// # Panics
/// Panics if the value is out of range after conversion.
fn small_int<T>(self) -> T
where
Self: SmallInt<T>,
{
SmallInt::small_int(self)
}
/// Converts an integer to a floating point value,
/// assuming that the value can be losslessly represented in the new type.
///
/// # Panics
/// Panics if the value is infinite after conversion.
fn small_float<T>(self) -> T
where
Self: SmallFloat<T>,
{
SmallFloat::small_float(self)
}
/// Converts a floating point value to an integer
/// by calling the [`f32::trunc`]/[`f64::trunc`] method.
///
/// # Panics
/// Panics if the truncated integer is not in the range of the output type.
fn trunc_int<T>(self) -> T
where
Self: TruncInt<T>,
{
TruncInt::trunc_int(self)
}
}
// Implements the blanket `Xias` marker trait for each listed type; the
// actual conversion logic lives in the per-conversion traits below.
macro_rules! impl_xias {
    ($($t:ty)*) => {
        $(
            impl Xias for $t {}
        )*
    }
}

// Every primitive integer and float type gets the ergonomic `Xias` methods.
impl_xias! {
    u8 u16 u32 u64 u128 usize
    i8 i16 i32 i64 i128 isize
    f32 f64
}
/// See [`Xias::homosign`].
pub trait Homosign<T>: Sized {
    /// See [`Xias::homosign`].
    fn homosign(self) -> T;
}

// Implements the sign-reinterpreting conversion in both directions for one
// unsigned/signed pair of the same bit width.
macro_rules! impl_homosign {
    ($unsigned:ty, $signed:ty) => {
        impl Homosign<$signed> for $unsigned {
            fn homosign(self) -> $signed {
                // Values above `MAX / 2` would turn negative when cast.
                debug_assert!(
                    self <= <$unsigned>::MAX / 2,
                    "{:?} is not homogeneous over signs",
                    self
                );
                self as $signed
            }
        }

        impl Homosign<$unsigned> for $signed {
            fn homosign(self) -> $unsigned {
                // Negative values have no unsigned counterpart.
                debug_assert!(self >= 0, "{:?} is not homogeneous over signs", self);
                self as $unsigned
            }
        }
    };
}

impl_homosign!(u8, i8);
impl_homosign!(u16, i16);
impl_homosign!(u32, i32);
impl_homosign!(u64, i64);
impl_homosign!(u128, i128);
impl_homosign!(usize, isize);
/// See [`Xias::lossy_float`].
pub trait LossyFloat<T>: Sized {
    /// See [`Xias::lossy_float`].
    fn lossy_float(self) -> T;
}

impl LossyFloat<f32> for f64 {
    /// Narrows an `f64` to `f32`.
    ///
    /// In debug builds, panics if a finite value lies outside the finite
    /// range of `f32` (i.e. the cast would round it to infinity).
    /// NaN and infinities pass through unchanged: they are representable
    /// in the narrower type, so they are not range-checked.
    fn lossy_float(self) -> f32 {
        // NaN fails every ordered comparison, so without this guard a NaN
        // input would trip the range asserts below with a misleading
        // "will become infinite" message, even though `NaN as f32` is a
        // valid NaN result. The same applies to +/- infinity.
        if self.is_finite() {
            debug_assert!(
                self <= f32::MAX.into(),
                "{:?} will become infinite in f32",
                self
            );
            debug_assert!(
                self >= f32::MIN.into(),
                "{:?} will become infinite in f32",
                self
            );
        }
        self as f32
    }
}
/// See [`Xias::small_int`].
pub trait SmallInt<T>: Sized {
    /// See [`Xias::small_int`].
    fn small_int(self) -> T;
}

// Implements the range-checked narrowing conversion from `$from` to each
// listed `$to` type.
macro_rules! impl_small_int {
    ($from:ty; $($to:ty),*) => {
        $(
            impl SmallInt<$to> for $from {
                fn small_int(self) -> $to {
                    debug_assert!(self >= <$to>::MIN as $from, "{:?} is too small to fit into {}", self, stringify!($to));
                    debug_assert!(self <= <$to>::MAX as $from, "{:?} is too large to fit into {}", self, stringify!($to));
                    self as $to
                }
            }
        )*
    };
}

// `usize`/`isize` conversions are spelled out in both directions —
// presumably because their width is platform dependent; confirm before
// relying on any particular pair being lossless.
impl_small_int!(u8; u8, usize);
impl_small_int!(u16; u8, usize);
impl_small_int!(u32; u16, u8, usize);
impl_small_int!(u64; u32, u16, u8, usize);
impl_small_int!(u128; u64, u32, u16, u8, usize);
impl_small_int!(usize; u128, u64, u32, u16, u8);
impl_small_int!(i8; isize);
impl_small_int!(i16; i8, isize);
impl_small_int!(i32; i16, i8, isize);
impl_small_int!(i64; i32, i16, i8, isize);
impl_small_int!(i128; i64, i32, i16, i8, isize);
impl_small_int!(isize; i128, i64, i32, i16, i8);
/// See [`Xias::small_float`].
pub trait SmallFloat<T>: Sized {
    /// See [`Xias::small_float`].
    fn small_float(self) -> T;
}

// Unsigned integers: the value is representable iff its significant bit
// count fits in the float's mantissa.
macro_rules! impl_small_float_unsigned {
    ($($from:ty),*; $to:ty) => {
        $(
            impl SmallFloat<$to> for $from {
                fn small_float(self) -> $to {
                    debug_assert!({
                        let float_size = <$to>::MANTISSA_DIGITS;
                        let int_size = <$from>::BITS - self.leading_zeros();
                        float_size >= int_size
                    }, "{:?} cannot fit into {}", self, stringify!($to));
                    self as $to
                }
            }
        )*
    };
}

impl_small_float_unsigned!(u8, u16, u32, u64, u128, usize; f32);
impl_small_float_unsigned!(u8, u16, u32, u64, u128, usize; f64);

// Signed integers: measure the bit width of the value's *magnitude*.
// The previous version called `leading_zeros()` on the raw two's-complement
// value, which is 0 for any negative number, so e.g. `(-1i64).small_float::<f64>()`
// spuriously failed the assert even though -1 is exactly representable.
macro_rules! impl_small_float_signed {
    ($($from:ty),*; $to:ty) => {
        $(
            impl SmallFloat<$to> for $from {
                fn small_float(self) -> $to {
                    debug_assert!({
                        let float_size = <$to>::MANTISSA_DIGITS;
                        let int_size = if self == <$from>::min_value() {
                            // `MIN` has no positive counterpart; `-2^(BITS-1)`
                            // needs BITS-1 magnitude bits.
                            <$from>::BITS - 1
                        } else {
                            // `wrapping_abs` is safe here: `MIN` (the only
                            // value whose `abs` overflows) is handled above.
                            <$from>::BITS - self.wrapping_abs().leading_zeros()
                        };
                        float_size >= int_size
                    }, "{:?} cannot fit into {}", self, stringify!($to));
                    self as $to
                }
            }
        )*
    };
}

impl_small_float_signed!(i8, i16, i32, i64, i128, isize; f32);
impl_small_float_signed!(i8, i16, i32, i64, i128, isize; f64);
/// See [`Xias::trunc_int`].
pub trait TruncInt<T>: Sized {
    /// See [`Xias::trunc_int`].
    fn trunc_int(self) -> T;
}

// Implements truncating float-to-integer conversion with debug-build range
// and finiteness checks.
macro_rules! impl_trunc_int {
    ($float:ty; $($int:ty),*) => {
        $(
            impl TruncInt<$int> for $float {
                fn trunc_int(self) -> $int {
                    debug_assert!(self.is_finite(), "Cannot convert a non-finite float ({:?}) to {}", self, stringify!($int));
                    // Truncate toward zero first, then range-check the result.
                    let float = self.trunc();
                    debug_assert!(<$int>::MIN as $float <= float, "{:?} is too small to fit into {}", self, stringify!($int));
                    debug_assert!(<$int>::MAX as $float >= float, "{:?} is too large to fit into {}", self, stringify!($int));
                    float as $int
                }
            }
        )*
    }
}

impl_trunc_int! {
    f32;
    u8, u16, u32, u64, u128, usize,
    i8, i16, i32, i64, i128, isize
}
impl_trunc_int! {
    f64;
    u8, u16, u32, u64, u128, usize,
    i8, i16, i32, i64, i128, isize
}
// These tests exercise the debug_assert! guards, so the module only exists
// when debug assertions are enabled.
#[cfg(all(test, debug_assertions))]
mod tests {
    use paste::paste;

    use super::Xias;

    // homosign tests: round-trips, boundary values, and the two panic cases
    // (negative input, and unsigned input above the signed maximum).
    macro_rules! test_homosign {
        ($signed:ty, $unsigned:ty) => {
            paste! {
                #[test]
                fn [<test_homosign_zero_ $signed _ $unsigned>]() {
                    let zero: $signed = 0;
                    let actual = zero.homosign::<$unsigned>();
                    let expect: $unsigned = 0;
                    assert_eq!(expect, actual);
                }
                #[test]
                fn [<test_homosign_zero_ $unsigned _ $signed>]() {
                    let zero: $unsigned = 0;
                    let actual = zero.homosign::<$signed>();
                    let expect: $signed = 0;
                    assert_eq!(expect, actual);
                }
                #[test]
                fn [<test_homosign_max_ $signed _ $unsigned>]() {
                    let max: $signed = <$signed>::MAX;
                    let actual = max.homosign::<$unsigned>();
                    let expect: $unsigned = <$unsigned>::MAX / 2;
                    assert_eq!(expect, actual);
                }
                #[test]
                fn [<test_homosign_max_ $unsigned _ $signed>]() {
                    let max: $unsigned = <$unsigned>::MAX / 2;
                    let actual = max.homosign::<$signed>();
                    let expect = <$signed>::MAX;
                    assert_eq!(expect, actual);
                }
                #[test]
                #[should_panic(expected = "is not homogeneous over signs")]
                fn [<test_homosign_panic_negative_ $signed _ $unsigned>]() {
                    let value: $signed = -1;
                    value.homosign::<$unsigned>();
                }
                #[test]
                #[should_panic(expected = "is not homogeneous over signs")]
                fn [<test_homosign_panic_overflow_ $signed _ $unsigned>]() {
                    let value: $unsigned = <$unsigned>::MAX / 2 + 1;
                    value.homosign::<$signed>();
                }
            }
        };
    }
    test_homosign!(i8, u8);
    test_homosign!(i16, u16);
    test_homosign!(i32, u32);
    test_homosign!(i64, u64);
    test_homosign!(i128, u128);
    test_homosign!(isize, usize);

    // lossy_float tests
    #[test]
    fn test_lossy_float() {
        assert_eq!(0f64.lossy_float::<f32>(), 0f32);
        assert_eq!(f64::from(f32::MAX).lossy_float::<f32>(), f32::MAX);
    }
    #[test]
    #[should_panic(expected = "will become infinite in")]
    fn test_lossy_float_panic_overflow() {
        let float = f64::from(f32::MAX) * 2.;
        float.lossy_float::<f32>();
    }
    #[test]
    #[should_panic(expected = "will become infinite in")]
    fn test_lossy_float_panic_underflow() {
        let float = f64::from(f32::MIN) * 2.;
        float.lossy_float::<f32>();
    }

    // small_int tests: unsigned narrowing (zero, exact max, overflow panic).
    macro_rules! test_small_int_unsigned {
        ($from:ty; $($to:ty),*) => {
            paste! {
                $(
                    #[test]
                    fn [<test_small_int_zero_ $from _ $to>]() {
                        let zero: $from = 0;
                        let actual = zero.small_int::<$to>();
                        let expect: $to = 0;
                        assert_eq!(expect, actual);
                    }
                    #[test]
                    fn [<test_small_int_max_ $from _ $to>]() {
                        let zero: $from = <$from>::from($to::MAX);
                        let actual = zero.small_int::<$to>();
                        let expect: $to = $to::MAX;
                        assert_eq!(expect, actual);
                    }
                    #[test]
                    #[should_panic(expected = "is too large to fit into")]
                    fn [<test_small_int_panic_overflow_ $from _ $to>]() {
                        let int = <$from>::from(<$to>::MAX) + 1;
                        int.small_int::<$to>();
                    }
                )*
            }
        }
    }
    test_small_int_unsigned!(u16; u8);
    test_small_int_unsigned!(u32; u16, u8);
    test_small_int_unsigned!(u64; u32, u16, u8);
    test_small_int_unsigned!(u128; u64, u32, u16, u8);

    // Signed narrowing reuses the unsigned cases and adds the underflow panic.
    macro_rules! test_small_int_signed {
        ($from:ty; $($to:ty),*) => {
            test_small_int_unsigned!($from; $($to),*);
            $(
                paste! {
                    #[test]
                    #[should_panic(expected = "is too small to fit into")]
                    fn [<test_small_int_panic_underflow_ $from _ $to>]() {
                        let int = <$from>::from(<$to>::MIN) - 1;
                        int.small_int::<$to>();
                    }
                }
            )*
        }
    }
    test_small_int_signed!(i16; i8);
    test_small_int_signed!(i32; i16, i8);
    test_small_int_signed!(i64; i32, i16, i8);
    test_small_int_signed!(i128; i64, i32, i16, i8);

    // small_float tests: zero, and the largest value whose significant bits
    // still fit in the target float's mantissa.
    macro_rules! test_small_float {
        ($float:ty; $($int:ty),*) => {
            paste! {
                $(
                    #[test]
                    fn [<test_small_float_zero_ $int _ $float>]() {
                        let zero: $int = 0;
                        let expect = zero.small_float::<$float>();
                        let actual: $float = 0.;
                        assert_eq!(expect, actual);
                    }
                    #[test]
                    fn [<test_small_float_max_ $int _ $float>]() {
                        let mut num: $int = 0;
                        let bits = std::cmp::min(<$float>::MANTISSA_DIGITS, <$int>::BITS);
                        for i in 0..bits {
                            num |= 1 << i;
                        }
                        num.small_float::<$float>();
                    }
                )*
            }
        }
    }
    test_small_float! {
        f32;
        u8, u16, u32, u64, u128, usize,
        i8, i16, i32, i64, i128, isize
    }
    test_small_float! {
        f64;
        u8, u16, u32, u64, u128, usize,
        i8, i16, i32, i64, i128, isize
    }
    macro_rules! test_small_float_panic_overflow {
        ($int:ty, $float:ty) => {
            paste! {
                #[test]
                #[should_panic(expected = "cannot fit into")]
                fn [<test_small_float_panic_overflow_ $int _ $float>]() {
                    <$int>::MAX.small_float::<$float>();
                }
            }
        };
    }
    test_small_float_panic_overflow!(u32, f32);
    test_small_float_panic_overflow!(i32, f32);
    test_small_float_panic_overflow!(u64, f64);
    test_small_float_panic_overflow!(i64, f64);
    macro_rules! test_small_float_panic_underflow {
        ($int:ty, $float:ty) => {
            paste! {
                #[test]
                #[should_panic(expected = "cannot fit into")]
                fn [<test_small_float_panic_underflow_ $int _ $float>]() {
                    <$int>::MIN.small_float::<$float>();
                }
            }
        };
    }
    test_small_float_panic_underflow!(i32, f32);
    test_small_float_panic_underflow!(i64, f64);

    // trunc_int tests: truncation toward zero plus the three non-finite
    // panic cases (+inf, -inf, NaN).
    macro_rules! test_trunc_int {
        ($float:ty; $($int:ty),*) => {
            paste! {
                $(
                    #[test]
                    fn [<test_trunc_int_ $float _ $int>]() {
                        let float = 1.5;
                        let expect: $int = 1;
                        let actual = float.trunc_int::<$int>();
                        assert_eq!(expect, actual);
                    }
                    #[test]
                    #[should_panic(expected = "Cannot convert a non-finite float (inf) to")]
                    fn [<test_trunc_int_panic_positive_infinity_ $float _ $int>]() {
                        <$float>::INFINITY.trunc_int::<$int>();
                    }
                    #[test]
                    #[should_panic(expected = "Cannot convert a non-finite float (-inf) to")]
                    fn [<test_trunc_int_panic_negative_infinity_ $float _ $int>]() {
                        <$float>::NEG_INFINITY.trunc_int::<$int>();
                    }
                    #[test]
                    #[should_panic(expected = "Cannot convert a non-finite float (NaN) to")]
                    fn [<test_trunc_int_panic_nan_ $float _ $int>]() {
                        <$float>::NAN.trunc_int::<$int>();
                    }
                )*
            }
        }
    }
    test_trunc_int! {
        f32;
        u8, u16, u32, u64, u128, usize,
        i8, i16, i32, i64, i128
    }
    test_trunc_int! {
        f64;
        u8, u16, u32, u64, u128, usize,
        i8, i16, i32, i64, i128
    }
    macro_rules! test_trunc_int_panic_overflow {
        ($float:ty; $($int:ty),*) => {
            paste! {
                $(
                    #[test]
                    #[should_panic(expected = "is too large to fit into")]
                    fn [<test_trunc_int_panic_overflow_ $float _ $int>]() {
                        let float = <$int>::MAX as $float * 2.;
                        float.trunc_int::<$int>();
                    }
                )*
            }
        }
    }
    test_trunc_int_panic_overflow! {
        f32;
        u8, u16, u32, u64, usize,
        i8, i16, i32, i64, isize
    }
    test_trunc_int_panic_overflow! {
        f64;
        u8, u16, u32, u64, u128, usize,
        i8, i16, i32, i64, i128, isize
    }
}
| 30.012545 | 126 | 0.466531 |
e42a274b8e147ad9cfa07f956fa43cee2dd9168d | 1,635 | // Copyright (c) 2015-2017 Ivo Wetzel
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// STD Dependencies -----------------------------------------------------------
use std::fmt;
// Internal Dependencies ------------------------------------------------------
use super::super::Config;
/// Trait describing optional per-packet payload modification logic.
/// Trait describing optional per-packet payload modification logic.
///
/// Both hooks return `Option<Vec<u8>>`: `Some(bytes)` replaces the payload,
/// while `None` (the default) leaves it untouched.
pub trait PacketModifier {
    /// Method that constructs a new packet modifier using the provided configuration.
    // Note: the parameter is anonymous (pre-2018-edition syntax).
    fn new(Config) -> Self where Self: Sized;

    /// Method that is called for payload modification before a packet is send
    /// over a connection's underlying socket.
    ///
    /// The default implementation does not actually perform any kind of
    /// modification and leaves the payload to be send untouched.
    fn outgoing(&mut self, _: &[u8]) -> Option<Vec<u8>> {
        None
    }

    /// Method that is called for payload modification purposes after a packet
    /// is received over a connection's underlying socket.
    ///
    /// The default implementation does not actually perform any kind of
    /// modification and returns leaves received payload untouched.
    fn incoming(&mut self, _: &[u8]) -> Option<Vec<u8>> {
        None
    }
}
// Trait objects cannot derive Debug, so render them as the bare type name.
impl fmt::Debug for PacketModifier {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("PacketModifier")
    }
}
| 33.367347 | 86 | 0.63792 |
1aa323a9b7db39c4491e7dbcb6fb1dfe87070778 | 3,484 |
use rustc::mir;
use rustc::ty::{self, Ty, TypeFoldable, TyCtxt};
use inspirv::core::instruction::*;
use {BlockAndBuilder, MirContext};
use lvalue::{LvalueRef, ValueRef};
#[derive(Debug)]
pub enum OperandValue {
    // A single SPIR-V result value (id plus its SPIR-V type).
    Immediate(ValueRef),
    // No value; constructed nowhere in this chunk — presumably a placeholder
    // for zero-sized/ignored operands. Confirm against the rest of the crate.
    Null,
}

#[derive(Debug)]
pub struct OperandRef<'tcx> {
    // The value.
    pub val: OperandValue,
    // The type of value being returned.
    pub ty: Ty<'tcx>
}
impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
    // Translates a MIR operand into an `OperandRef`.
    // `Consume` reads through an lvalue; `Constant` goes through constant
    // translation. Returns `None` when the underlying lvalue is ignored or
    // not yet supported (see `trans_consume`).
    pub fn trans_operand(&mut self,
                         bcx: &BlockAndBuilder<'bcx, 'tcx>,
                         operand: &mir::Operand<'tcx>)
                         -> Option<OperandRef<'tcx>>
    {
        println!("trans_operand(operand={:#?})", operand);

        match *operand {
            mir::Operand::Consume(ref lvalue) => {
                self.trans_consume(bcx, lvalue)
            }

            mir::Operand::Constant(ref constant) => {
                let const_val = self.trans_constant(bcx, constant);
                let operand = const_val.to_operand(bcx.ccx());
                Some(operand)
            }
        }
    }

    // Emits an `OpLoad` from `spv_val` into a freshly allocated result id and
    // wraps it as an immediate operand of Rust type `ty`.
    pub fn trans_load(&mut self,
                      bcx: &BlockAndBuilder<'bcx, 'tcx>,
                      spv_val: ValueRef,
                      ty: Ty<'tcx>)
                      -> OperandRef<'tcx>
    {
        println!("trans_load: {:#?} @ {:#?}", spv_val, ty);
        let mut builder = self.fcx.spv().borrow_mut();
        let operand_id = builder.alloc_id();
        bcx.with_block(|bcx| {
            bcx.spv_block.borrow_mut().emit_instruction(OpLoad(builder.define_type(&spv_val.spvty), operand_id, spv_val.spvid, None))
        });

        OperandRef {
            val: OperandValue::Immediate(ValueRef {
                spvid: operand_id,
                spvty: spv_val.spvty,
            }),
            ty: ty,
        }
    }

    // Translates a `Consume` of an lvalue: plain values are loaded; the
    // remaining lvalue shapes (references, signature structs, ignored slots)
    // are currently not handled and yield `None`.
    pub fn trans_consume(&mut self,
                         bcx: &BlockAndBuilder<'bcx, 'tcx>,
                         lvalue: &mir::Lvalue<'tcx>)
                         -> Option<OperandRef<'tcx>>
    {
        println!("trans_consume(lvalue={:#?})", lvalue);

        let tr_lvalue = self.trans_lvalue(bcx, lvalue);
        match tr_lvalue {
            LvalueRef::Value(val, ty) => {
                let ty = ty.to_ty(bcx.tcx());
                Some(self.trans_load(bcx, val, ty))
            }
            LvalueRef::Ref { .. } => {
                // unimplemented!(),
                None
            }
            LvalueRef::SigStruct(_, _) => {
                // unimplemented!(),
                None
            }
            LvalueRef::Ignore => None,
        }
    }

    // Emits an `OpStore` of an immediate operand into a value lvalue.
    // Other destination shapes and `Null` operands are not supported.
    pub fn store_operand(&mut self,
                         bcx: &BlockAndBuilder<'bcx, 'tcx>,
                         dest: LvalueRef,
                         operand: OperandRef<'tcx>)
    {
        println!("store_operand: operand={:#?}", operand);
        bcx.with_block(|bcx| {
            match operand.val {
                OperandValue::Immediate(ref op) => {
                    match dest {
                        LvalueRef::Value(ref lval, _) => {
                            bcx.spv_block.borrow_mut().emit_instruction(
                                OpStore(lval.spvid, op.spvid, None))
                        }
                        _ => unimplemented!(),
                    }
                }
                OperandValue::Null => {
                    bug!()
                }
            }
        });
    }
}
| 30.034483 | 133 | 0.453502 |
76fa7a590423eb8382628908f71d889ad9fb356a | 1,980 | use crate::neon::stage1::neon_movemask;
use crate::value::generator::ESCAPED;
use simd_lite::aarch64::*;
use std::io;
#[inline(always)]
// Writes the JSON-escaped form of `*string` using 16-byte NEON blocks.
//
// `string` is advanced past whatever has been written; `len`/`idx` track the
// remaining slice length and the scan cursor within it. On return the
// scanned-but-unescaped prefix has been flushed and `*string` points at the
// unprocessed tail.
//
// Safety: caller must uphold the NEON intrinsic requirements; the unaligned
// `vld1q_u8` load is kept in bounds by the `*len - *idx > 16` guard.
pub(crate) unsafe fn write_str_simd<W>(
    writer: &mut W,
    string: &mut &[u8],
    len: &mut usize,
    idx: &mut usize,
) -> io::Result<()>
where
    W: std::io::Write,
{
    // The case where we have a 16+ byte block
    // we repeat the same logic as above but with
    // only 16 bytes
    let zero = vdupq_n_u8(0);
    let lower_quote_range = vdupq_n_u8(0x1F);
    let quote = vdupq_n_u8(b'"');
    let backslash = vdupq_n_u8(b'\\');
    while *len - *idx > 16 {
        // Load 16 bytes of data;
        let data: uint8x16_t = vld1q_u8(string.as_ptr().add(*idx));
        // Test the data against being backslash and quote.
        let bs_or_quote = vorrq_u8(vceqq_u8(data, backslash), vceqq_u8(data, quote));
        // Now mask the data with the quote range (0x1F).
        let in_quote_range = vandq_u8(data, lower_quote_range);
        // then test of the data is unchanged. aka: xor it with the
        // Any field that was inside the quote range it will be zero
        // now.
        let is_unchanged = veorq_u8(data, in_quote_range);
        let in_range = vceqq_u8(is_unchanged, zero);
        let quote_bits = neon_movemask(vorrq_u8(bs_or_quote, in_range));
        if quote_bits != 0 {
            // At least one byte needs escaping: flush the clean prefix,
            // write the escape for the first offender, and restart the
            // scan just past it.
            let quote_dist = quote_bits.trailing_zeros() as usize;
            stry!(writer.write_all(&string[0..*idx + quote_dist]));
            let ch = string[*idx + quote_dist];
            match ESCAPED[ch as usize] {
                b'u' => stry!(write!(writer, "\\u{:04x}", ch)),

                escape => stry!(writer.write_all(&[b'\\', escape])),
            };
            *string = &string[*idx + quote_dist + 1..];
            *idx = 0;
            *len = string.len();
        } else {
            // Whole block is clean; keep scanning.
            *idx += 16;
        }
    }
    // Flush the clean bytes scanned so far; the (<16 byte) tail is left
    // for the scalar fallback in the caller.
    stry!(writer.write_all(&string[0..*idx]));
    *string = &string[*idx..];
    Ok(())
}
| 35.357143 | 85 | 0.573232 |
9b045ed1d02cce60d8eeb7d203bd58557e14896e | 954 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Evaluation of constants in refutable patterns goes through
// different compiler control-flow paths.
#![allow(unused_imports)]
use std::fmt;
use std::{i8, i16, i32, i64, isize};
use std::{u8, u16, u32, u64, usize};
// Negating i8::MIN overflows; the `//~` annotations below are matched by the
// compiletest harness and must stay attached to the lines they follow.
const NEG_128: i8 = -128;
const NEG_NEG_128: i8 = -NEG_128;
//~^ ERROR constant evaluation error
//~| attempt to negate with overflow

// Using the overflowing constant in a refutable pattern forces its
// evaluation through the pattern-checking path.
fn main() {
    match -128i8 {
        NEG_NEG_128 => println!("A"), //~ NOTE for pattern here
        _ => println!("B"),
    }
}
| 30.774194 | 68 | 0.694969 |
6a4fe67317e5debd55d9550667218b4a07bfd9ce | 239 | use crate::structs::raw::attribute::AttributeEntry;
// One entry of a class file's methods table — presumably mirroring the JVM
// `method_info` structure; confirm against the parser that fills it in.
pub struct MethodEntry {
    // Bit mask of method access/property flags.
    pub access_flags: u16,
    // Constant-pool index of the method's name.
    pub name_index: u16,
    // Constant-pool index of the method's descriptor string.
    pub descriptor_index: u16,
    // Number of entries expected in `attribute_info`.
    pub attributes_count: u16,
    // Attributes attached to this method.
    pub attribute_info: Vec<AttributeEntry>,
}
f7c21b3ea6ba497ae6da9b389088786d1cb1f619 | 229 | fn main() {
use std::cmp::Ordering;
assert_eq!(Ordering::Less.reverse(), Ordering::Greater);
assert_eq!(Ordering::Equal.reverse(), Ordering::Equal);
assert_eq!(Ordering::Greater.reverse(), Ordering::Less);
}
| 28.625 | 60 | 0.659389 |
2ffe367921524ad32248b09e95d34b8764fb454f | 909 | fn are_almost_equal(s1: String, s2: String) -> bool {
use std::collections::HashSet;
let h1: HashSet<char> = s1.chars().collect();
let h2: HashSet<char> = s2.chars().collect();
let not_matched = s1
.chars()
.zip(s2.chars())
.filter(|(c1, c2)| *c1 != *c2)
.count();
h1 == h2 && (not_matched == 0 || not_matched == 2)
}
/// Small driver: checks one swap-equal pair and prints the result.
fn main() {
    let result = are_almost_equal(String::from("bank"), String::from("kanb"));
    println!("ret={}", result);
}
#[test]
fn test_are_almost_equal() {
    // (s1, s2, expected) — swap-fixable and unfixable pairs.
    let cases = [
        ("bank", "kanb", true),
        ("attack", "defend", false),
        ("kelb", "kelb", true),
        ("abcd", "dcba", false),
        ("aa", "bb", false),
    ];
    for (s1, s2, expected) in cases.iter() {
        assert_eq!(are_almost_equal(s1.to_string(), s2.to_string()), *expected);
    }
}
db0d6e2f9b9d4ac7246783c97a13555c0d3974be | 43,608 | //! A doubly-linked list with owned nodes.
//!
//! The `LinkedList` allows pushing and popping elements at either end
//! in constant time.
//!
//! Almost always it is better to use `Vec` or [`VecDeque`] instead of
//! [`LinkedList`]. In general, array-based containers are faster,
//! more memory efficient and make better use of CPU cache.
//!
//! [`LinkedList`]: ../linked_list/struct.LinkedList.html
//! [`VecDeque`]: ../vec_deque/struct.VecDeque.html
#![stable(feature = "rust1", since = "1.0.0")]
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hasher, Hash};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ptr::NonNull;
use crate::boxed::Box;
use super::SpecExtend;
/// A doubly-linked list with owned nodes.
///
/// The `LinkedList` allows pushing and popping elements at either end
/// in constant time.
///
/// Almost always it is better to use `Vec` or `VecDeque` instead of
/// `LinkedList`. In general, array-based containers are faster,
/// more memory efficient and make better use of CPU cache.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LinkedList<T> {
    /// First node, or `None` when the list is empty.
    head: Option<NonNull<Node<T>>>,
    /// Last node, or `None` when the list is empty.
    tail: Option<NonNull<Node<T>>>,
    /// Cached element count; kept in sync by every mutating operation.
    len: usize,
    /// The list logically owns its nodes as `Box<Node<T>>`, even though only
    /// raw pointers are stored (affects drop check and variance).
    marker: PhantomData<Box<Node<T>>>,
}
/// A single heap-allocated list node, linked in both directions.
struct Node<T> {
    next: Option<NonNull<Node<T>>>,
    prev: Option<NonNull<Node<T>>>,
    element: T,
}
/// An iterator over the elements of a `LinkedList`.
///
/// This `struct` is created by the [`iter`] method on [`LinkedList`]. See its
/// documentation for more.
///
/// [`iter`]: struct.LinkedList.html#method.iter
/// [`LinkedList`]: struct.LinkedList.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    // Unconsumed range: `head` advances forward, `tail` backward.
    head: Option<NonNull<Node<T>>>,
    tail: Option<NonNull<Node<T>>>,
    // Remaining element count; `next`/`next_back` stop when it reaches 0.
    len: usize,
    // Ties the iterator to a shared borrow of the list for 'a.
    marker: PhantomData<&'a Node<T>>,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    // Only the remaining length is printed, not the elements.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Iter")
            .field(&self.len)
            .finish()
    }
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
    // Manual impl: a derive would needlessly require `T: Clone`.
    fn clone(&self) -> Self {
        Iter { ..*self }
    }
}
/// A mutable iterator over the elements of a `LinkedList`.
///
/// This `struct` is created by the [`iter_mut`] method on [`LinkedList`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.LinkedList.html#method.iter_mut
/// [`LinkedList`]: struct.LinkedList.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
    // We do *not* exclusively own the entire list here, references to node's `element`
    // have been handed out by the iterator! So be careful when using this; the methods
    // called must be aware that there can be aliasing pointers to `element`.
    list: &'a mut LinkedList<T>,
    // Unconsumed range: `head` advances forward, `tail` backward.
    head: Option<NonNull<Node<T>>>,
    tail: Option<NonNull<Node<T>>>,
    // Remaining element count.
    len: usize,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
    // Prints the whole underlying list plus the remaining length.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IterMut")
            .field(&self.list)
            .field(&self.len)
            .finish()
    }
}
/// An owning iterator over the elements of a `LinkedList`.
///
/// This `struct` is created by the [`into_iter`] method on [`LinkedList`][`LinkedList`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.LinkedList.html#method.into_iter
/// [`LinkedList`]: struct.LinkedList.html
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T> {
    // The iterator simply owns the list and pops from either end.
    list: LinkedList<T>,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter")
            .field(&self.list)
            .finish()
    }
}
impl<T> Node<T> {
    /// Wraps `element` in a fresh, unlinked node.
    fn new(element: T) -> Self {
        Node { element, prev: None, next: None }
    }

    /// Consumes the boxed node, freeing it and returning its payload.
    fn into_element(self: Box<Self>) -> T {
        self.element
    }
}
// private methods
impl<T> LinkedList<T> {
    /// Adds the given node to the front of the list.
    #[inline]
    fn push_front_node(&mut self, mut node: Box<Node<T>>) {
        // This method takes care not to create mutable references to whole nodes,
        // to maintain validity of aliasing pointers into `element`.
        unsafe {
            node.next = self.head;
            node.prev = None;
            // Ownership of the box now lives in the list as a raw pointer.
            let node = Some(Box::into_raw_non_null(node));
            match self.head {
                None => self.tail = node,
                // Not creating new mutable (unique!) references overlapping `element`.
                Some(head) => (*head.as_ptr()).prev = node,
            }
            self.head = node;
            self.len += 1;
        }
    }
    /// Removes and returns the node at the front of the list.
    #[inline]
    fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
        // This method takes care not to create mutable references to whole nodes,
        // to maintain validity of aliasing pointers into `element`.
        self.head.map(|node| unsafe {
            // Re-box the node; ownership is handed back to the caller.
            let node = Box::from_raw(node.as_ptr());
            self.head = node.next;
            match self.head {
                None => self.tail = None,
                // Not creating new mutable (unique!) references overlapping `element`.
                Some(head) => (*head.as_ptr()).prev = None,
            }
            self.len -= 1;
            node
        })
    }
    /// Adds the given node to the back of the list.
    #[inline]
    fn push_back_node(&mut self, mut node: Box<Node<T>>) {
        // This method takes care not to create mutable references to whole nodes,
        // to maintain validity of aliasing pointers into `element`.
        unsafe {
            node.next = None;
            node.prev = self.tail;
            let node = Some(Box::into_raw_non_null(node));
            match self.tail {
                None => self.head = node,
                // Not creating new mutable (unique!) references overlapping `element`.
                Some(tail) => (*tail.as_ptr()).next = node,
            }
            self.tail = node;
            self.len += 1;
        }
    }
    /// Removes and returns the node at the back of the list.
    #[inline]
    fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
        // This method takes care not to create mutable references to whole nodes,
        // to maintain validity of aliasing pointers into `element`.
        self.tail.map(|node| unsafe {
            let node = Box::from_raw(node.as_ptr());
            self.tail = node.prev;
            match self.tail {
                None => self.head = None,
                // Not creating new mutable (unique!) references overlapping `element`.
                Some(tail) => (*tail.as_ptr()).next = None,
            }
            self.len -= 1;
            node
        })
    }
    /// Unlinks the specified node from the current list.
    ///
    /// Warning: this will not check that the provided node belongs to the current list.
    ///
    /// This method takes care not to create mutable references to `element`, to
    /// maintain validity of aliasing pointers.
    #[inline]
    unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
        let node = node.as_mut(); // this one is ours now, we can create an &mut.
        // Not creating new mutable (unique!) references overlapping `element`.
        match node.prev {
            Some(prev) => (*prev.as_ptr()).next = node.next.clone(),
            // this node is the head node
            None => self.head = node.next.clone(),
        };
        match node.next {
            Some(next) => (*next.as_ptr()).prev = node.prev.clone(),
            // this node is the tail node
            None => self.tail = node.prev.clone(),
        };
        self.len -= 1;
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for LinkedList<T> {
/// Creates an empty `LinkedList<T>`.
#[inline]
fn default() -> Self {
Self::new()
}
}
impl<T> LinkedList<T> {
/// Creates an empty `LinkedList`.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let list: LinkedList<u32> = LinkedList::new();
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> Self {
LinkedList {
head: None,
tail: None,
len: 0,
marker: PhantomData,
}
}
/// Moves all elements from `other` to the end of the list.
///
/// This reuses all the nodes from `other` and moves them into `self`. After
/// this operation, `other` becomes empty.
///
/// This operation should compute in O(1) time and O(1) memory.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut list1 = LinkedList::new();
/// list1.push_back('a');
///
/// let mut list2 = LinkedList::new();
/// list2.push_back('b');
/// list2.push_back('c');
///
/// list1.append(&mut list2);
///
/// let mut iter = list1.iter();
/// assert_eq!(iter.next(), Some(&'a'));
/// assert_eq!(iter.next(), Some(&'b'));
/// assert_eq!(iter.next(), Some(&'c'));
/// assert!(iter.next().is_none());
///
/// assert!(list2.is_empty());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn append(&mut self, other: &mut Self) {
match self.tail {
None => mem::swap(self, other),
Some(mut tail) => {
// `as_mut` is okay here because we have exclusive access to the entirety
// of both lists.
if let Some(mut other_head) = other.head.take() {
unsafe {
tail.as_mut().next = Some(other_head);
other_head.as_mut().prev = Some(tail);
}
self.tail = other.tail.take();
self.len += mem::replace(&mut other.len, 0);
}
}
}
}
/// Provides a forward iterator.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut list: LinkedList<u32> = LinkedList::new();
///
/// list.push_back(0);
/// list.push_back(1);
/// list.push_back(2);
///
/// let mut iter = list.iter();
/// assert_eq!(iter.next(), Some(&0));
/// assert_eq!(iter.next(), Some(&1));
/// assert_eq!(iter.next(), Some(&2));
/// assert_eq!(iter.next(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<'_, T> {
Iter {
head: self.head,
tail: self.tail,
len: self.len,
marker: PhantomData,
}
}
/// Provides a forward iterator with mutable references.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut list: LinkedList<u32> = LinkedList::new();
///
/// list.push_back(0);
/// list.push_back(1);
/// list.push_back(2);
///
/// for element in list.iter_mut() {
/// *element += 10;
/// }
///
/// let mut iter = list.iter();
/// assert_eq!(iter.next(), Some(&10));
/// assert_eq!(iter.next(), Some(&11));
/// assert_eq!(iter.next(), Some(&12));
/// assert_eq!(iter.next(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut(&mut self) -> IterMut<'_, T> {
IterMut {
head: self.head,
tail: self.tail,
len: self.len,
list: self,
}
}
/// Returns `true` if the `LinkedList` is empty.
///
/// This operation should compute in O(1) time.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut dl = LinkedList::new();
/// assert!(dl.is_empty());
///
/// dl.push_front("foo");
/// assert!(!dl.is_empty());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool {
self.head.is_none()
}
/// Returns the length of the `LinkedList`.
///
/// This operation should compute in O(1) time.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut dl = LinkedList::new();
///
/// dl.push_front(2);
/// assert_eq!(dl.len(), 1);
///
/// dl.push_front(1);
/// assert_eq!(dl.len(), 2);
///
/// dl.push_back(3);
/// assert_eq!(dl.len(), 3);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize {
self.len
}
/// Removes all elements from the `LinkedList`.
///
/// This operation should compute in O(n) time.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut dl = LinkedList::new();
///
/// dl.push_front(2);
/// dl.push_front(1);
/// assert_eq!(dl.len(), 2);
/// assert_eq!(dl.front(), Some(&1));
///
/// dl.clear();
/// assert_eq!(dl.len(), 0);
/// assert_eq!(dl.front(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
*self = Self::new();
}
/// Returns `true` if the `LinkedList` contains an element equal to the
/// given value.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut list: LinkedList<u32> = LinkedList::new();
///
/// list.push_back(0);
/// list.push_back(1);
/// list.push_back(2);
///
/// assert_eq!(list.contains(&0), true);
/// assert_eq!(list.contains(&10), false);
/// ```
#[stable(feature = "linked_list_contains", since = "1.12.0")]
pub fn contains(&self, x: &T) -> bool
where T: PartialEq<T>
{
self.iter().any(|e| e == x)
}
/// Provides a reference to the front element, or `None` if the list is
/// empty.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut dl = LinkedList::new();
/// assert_eq!(dl.front(), None);
///
/// dl.push_front(1);
/// assert_eq!(dl.front(), Some(&1));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front(&self) -> Option<&T> {
unsafe {
self.head.as_ref().map(|node| &node.as_ref().element)
}
}
/// Provides a mutable reference to the front element, or `None` if the list
/// is empty.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut dl = LinkedList::new();
/// assert_eq!(dl.front(), None);
///
/// dl.push_front(1);
/// assert_eq!(dl.front(), Some(&1));
///
/// match dl.front_mut() {
/// None => {},
/// Some(x) => *x = 5,
/// }
/// assert_eq!(dl.front(), Some(&5));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front_mut(&mut self) -> Option<&mut T> {
unsafe {
self.head.as_mut().map(|node| &mut node.as_mut().element)
}
}
/// Provides a reference to the back element, or `None` if the list is
/// empty.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut dl = LinkedList::new();
/// assert_eq!(dl.back(), None);
///
/// dl.push_back(1);
/// assert_eq!(dl.back(), Some(&1));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back(&self) -> Option<&T> {
unsafe {
self.tail.as_ref().map(|node| &node.as_ref().element)
}
}
/// Provides a mutable reference to the back element, or `None` if the list
/// is empty.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut dl = LinkedList::new();
/// assert_eq!(dl.back(), None);
///
/// dl.push_back(1);
/// assert_eq!(dl.back(), Some(&1));
///
/// match dl.back_mut() {
/// None => {},
/// Some(x) => *x = 5,
/// }
/// assert_eq!(dl.back(), Some(&5));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back_mut(&mut self) -> Option<&mut T> {
unsafe {
self.tail.as_mut().map(|node| &mut node.as_mut().element)
}
}
/// Adds an element first in the list.
///
/// This operation should compute in O(1) time.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut dl = LinkedList::new();
///
/// dl.push_front(2);
/// assert_eq!(dl.front().unwrap(), &2);
///
/// dl.push_front(1);
/// assert_eq!(dl.front().unwrap(), &1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_front(&mut self, elt: T) {
self.push_front_node(box Node::new(elt));
}
/// Removes the first element and returns it, or `None` if the list is
/// empty.
///
/// This operation should compute in O(1) time.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut d = LinkedList::new();
/// assert_eq!(d.pop_front(), None);
///
/// d.push_front(1);
/// d.push_front(3);
/// assert_eq!(d.pop_front(), Some(3));
/// assert_eq!(d.pop_front(), Some(1));
/// assert_eq!(d.pop_front(), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pop_front(&mut self) -> Option<T> {
self.pop_front_node().map(Node::into_element)
}
/// Appends an element to the back of a list.
///
/// This operation should compute in O(1) time.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut d = LinkedList::new();
/// d.push_back(1);
/// d.push_back(3);
/// assert_eq!(3, *d.back().unwrap());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_back(&mut self, elt: T) {
self.push_back_node(box Node::new(elt));
}
/// Removes the last element from a list and returns it, or `None` if
/// it is empty.
///
/// This operation should compute in O(1) time.
///
/// # Examples
///
/// ```
/// use std::collections::LinkedList;
///
/// let mut d = LinkedList::new();
/// assert_eq!(d.pop_back(), None);
/// d.push_back(1);
/// d.push_back(3);
/// assert_eq!(d.pop_back(), Some(3));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pop_back(&mut self) -> Option<T> {
self.pop_back_node().map(Node::into_element)
}
    /// Splits the list into two at the given index. Returns everything after the given index,
    /// including the index.
    ///
    /// This operation should compute in O(n) time.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut d = LinkedList::new();
    ///
    /// d.push_front(1);
    /// d.push_front(2);
    /// d.push_front(3);
    ///
    /// let mut splitted = d.split_off(2);
    ///
    /// assert_eq!(splitted.pop_front(), Some(1));
    /// assert_eq!(splitted.pop_front(), None);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn split_off(&mut self, at: usize) -> LinkedList<T> {
        let len = self.len();
        assert!(at <= len, "Cannot split off at a nonexistent index");
        // Trivial splits: give everything away / keep everything.
        if at == 0 {
            return mem::take(self);
        } else if at == len {
            return Self::new();
        }
        // Below, we iterate towards the `i-1`th node, either from the start or the end,
        // depending on which would be faster.
        let split_node = if at - 1 <= len - 1 - (at - 1) {
            let mut iter = self.iter_mut();
            // instead of skipping using .skip() (which creates a new struct),
            // we skip manually so we can access the head field without
            // depending on implementation details of Skip
            for _ in 0..at - 1 {
                iter.next();
            }
            iter.head
        } else {
            // better off starting from the end
            let mut iter = self.iter_mut();
            for _ in 0..len - 1 - (at - 1) {
                iter.next_back();
            }
            iter.tail
        };
        // The split node is the new tail node of the first part and owns
        // the head of the second part.
        let second_part_head;
        unsafe {
            // Detach: the second list's head must not point back into the first.
            second_part_head = split_node.unwrap().as_mut().next.take();
            if let Some(mut head) = second_part_head {
                head.as_mut().prev = None;
            }
        }
        let second_part = LinkedList {
            head: second_part_head,
            tail: self.tail,
            len: len - at,
            marker: PhantomData,
        };
        // Fix the tail ptr of the first part
        self.tail = split_node;
        self.len = at;
        second_part
    }
/// Creates an iterator which uses a closure to determine if an element should be removed.
///
/// If the closure returns true, then the element is removed and yielded.
/// If the closure returns false, the element will remain in the list and will not be yielded
/// by the iterator.
///
/// Note that `drain_filter` lets you mutate every element in the filter closure, regardless of
/// whether you choose to keep or remove it.
///
/// # Examples
///
/// Splitting a list into evens and odds, reusing the original list:
///
/// ```
/// #![feature(drain_filter)]
/// use std::collections::LinkedList;
///
/// let mut numbers: LinkedList<u32> = LinkedList::new();
/// numbers.extend(&[1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
///
/// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<LinkedList<_>>();
/// let odds = numbers;
///
/// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![2, 4, 6, 8, 14]);
/// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
/// ```
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
where F: FnMut(&mut T) -> bool
{
// avoid borrow issues.
let it = self.head;
let old_len = self.len;
DrainFilter {
list: self,
it: it,
pred: filter,
idx: 0,
old_len: old_len,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T> Drop for LinkedList<T> {
    fn drop(&mut self) {
        // Pop (and thereby free) nodes one at a time until none remain.
        while self.pop_front_node().is_some() {}
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        if self.len == 0 {
            return None;
        }
        self.head.map(|node| unsafe {
            // Reborrow through the raw pointer to get the unbound 'a lifetime.
            let node = &*node.as_ptr();
            self.len -= 1;
            self.head = node.next;
            &node.element
        })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: the remaining count is tracked in `len`.
        (self.len, Some(self.len))
    }

    #[inline]
    fn last(mut self) -> Option<&'a T> {
        self.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        if self.len == 0 {
            return None;
        }
        self.tail.map(|node| unsafe {
            let node = &*node.as_ptr();
            self.len -= 1;
            self.tail = node.prev;
            &node.element
        })
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<&'a mut T> {
        if self.len == 0 {
            return None;
        }
        self.head.map(|node| unsafe {
            // Reborrow through the raw pointer to get the unbound 'a lifetime.
            let node = &mut *node.as_ptr();
            self.len -= 1;
            self.head = node.next;
            &mut node.element
        })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }

    #[inline]
    fn last(mut self) -> Option<&'a mut T> {
        self.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut T> {
        if self.len == 0 {
            return None;
        }
        self.tail.map(|node| unsafe {
            let node = &mut *node.as_ptr();
            self.len -= 1;
            self.tail = node.prev;
            &mut node.element
        })
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IterMut<'_, T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IterMut<'_, T> {}
impl<T> IterMut<'_, T> {
    /// Inserts the given element just after the element most recently returned by `.next()`.
    /// The inserted element does not appear in the iteration.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(linked_list_extras)]
    ///
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<_> = vec![1, 3, 4].into_iter().collect();
    ///
    /// {
    ///     let mut it = list.iter_mut();
    ///     assert_eq!(it.next().unwrap(), &1);
    ///     // insert `2` after `1`
    ///     it.insert_next(2);
    /// }
    /// {
    ///     let vec: Vec<_> = list.into_iter().collect();
    ///     assert_eq!(vec, [1, 2, 3, 4]);
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "linked_list_extras",
               reason = "this is probably better handled by a cursor type -- we'll see",
               issue = "27794")]
    pub fn insert_next(&mut self, element: T) {
        match self.head {
            // `push_back` is okay with aliasing `element` references
            None => self.list.push_back(element),
            Some(head) => unsafe {
                let prev = match head.as_ref().prev {
                    // `push_front` is okay with aliasing nodes
                    None => return self.list.push_front(element),
                    Some(prev) => prev,
                };
                // Splice a fresh node between `prev` and `head`.
                let node = Some(Box::into_raw_non_null(box Node {
                    next: Some(head),
                    prev: Some(prev),
                    element,
                }));
                // Not creating references to entire nodes to not invalidate the
                // reference to `element` we handed to the user.
                (*prev.as_ptr()).next = node;
                (*head.as_ptr()).prev = node;
                // The list gained a node that this iterator will skip.
                self.list.len += 1;
            },
        }
    }
    /// Provides a reference to the next element, without changing the iterator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(linked_list_extras)]
    ///
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
    ///
    /// let mut it = list.iter_mut();
    /// assert_eq!(it.next().unwrap(), &1);
    /// assert_eq!(it.peek_next().unwrap(), &2);
    /// // We just peeked at 2, so it was not consumed from the iterator.
    /// assert_eq!(it.next().unwrap(), &2);
    /// ```
    #[inline]
    #[unstable(feature = "linked_list_extras",
               reason = "this is probably better handled by a cursor type -- we'll see",
               issue = "27794")]
    pub fn peek_next(&mut self) -> Option<&mut T> {
        if self.len == 0 {
            None
        } else {
            unsafe {
                self.head.as_mut().map(|node| &mut node.as_mut().element)
            }
        }
    }
}
/// An iterator produced by calling `drain_filter` on LinkedList.
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
pub struct DrainFilter<'a, T: 'a, F: 'a>
    where F: FnMut(&mut T) -> bool,
{
    /// The list being drained; matching nodes are unlinked from it.
    list: &'a mut LinkedList<T>,
    /// Cursor: the next node to test, or `None` when exhausted.
    it: Option<NonNull<Node<T>>>,
    /// Caller-supplied predicate deciding which elements to remove.
    pred: F,
    /// Number of nodes visited so far (drives `size_hint`).
    idx: usize,
    /// Length of the list when iteration began.
    old_len: usize,
}
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F> Iterator for DrainFilter<'_, T, F>
    where F: FnMut(&mut T) -> bool,
{
    type Item = T;
    fn next(&mut self) -> Option<T> {
        while let Some(mut node) = self.it {
            unsafe {
                // Advance the cursor first so unlinking `node` cannot
                // invalidate it.
                self.it = node.as_ref().next;
                self.idx += 1;
                if (self.pred)(&mut node.as_mut().element) {
                    // `unlink_node` is okay with aliasing `element` references.
                    self.list.unlink_node(node);
                    // Reclaim the box and move the element out of it.
                    return Some(Box::from_raw(node.as_ptr()).element);
                }
            }
        }
        None
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound 0: we cannot know how many elements will match.
        (0, Some(self.old_len - self.idx))
    }
}
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F> Drop for DrainFilter<'_, T, F>
    where F: FnMut(&mut T) -> bool,
{
    fn drop(&mut self) {
        // Exhaust the iterator so every matching element is still removed
        // (and dropped) even if the caller abandoned iteration early.
        while let Some(element) = self.next() {
            drop(element);
        }
    }
}
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T: fmt::Debug, F> fmt::Debug for DrainFilter<'_, T, F>
    where F: FnMut(&mut T) -> bool
{
    // Shows only the remaining (undrained) list; the predicate is opaque.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("DrainFilter")
            .field(&self.list)
            .finish()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        // Take ownership of elements from the front of the owned list.
        self.list.pop_front()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.list.len;
        (remaining, Some(remaining))
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.list.pop_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> FromIterator<T> for LinkedList<T> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        // Delegate to `extend`, which dispatches to the `SpecExtend`
        // implementations below.
        let mut list = Self::new();
        list.extend(iter);
        list
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for LinkedList<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;
    /// Consumes the list into an iterator yielding elements by value.
    #[inline]
    fn into_iter(self) -> IntoIter<T> {
        IntoIter { list: self }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a LinkedList<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;
    // `for x in &list` borrows the list and yields `&T`.
    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a mut LinkedList<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;
    // `for x in &mut list` borrows the list and yields `&mut T`.
    fn into_iter(self) -> IterMut<'a, T> {
        self.iter_mut()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Extend<T> for LinkedList<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        // Route through `SpecExtend` so extending from another `LinkedList`
        // can use the O(1) `append` specialization.
        <Self as SpecExtend<I>>::spec_extend(self, iter);
    }
}
impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
    // Generic fallback: push each element individually.
    default fn spec_extend(&mut self, iter: I) {
        iter.into_iter().for_each(move |elt| self.push_back(elt));
    }
}
impl<T> SpecExtend<LinkedList<T>> for LinkedList<T> {
    // Specialization: splice a whole list in O(1) via `append`.
    // (`ref mut other` binds a `&mut` to the by-value argument because
    // `append` wants a mutable reference.)
    fn spec_extend(&mut self, ref mut other: LinkedList<T>) {
        self.append(other);
    }
}
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
    // Copies out of the borrowed elements (requires `T: Copy`).
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialEq> PartialEq for LinkedList<T> {
    // Compare lengths first (cheap), then the elements pairwise.
    fn eq(&self, other: &Self) -> bool {
        self.len() == other.len() && self.iter().eq(other.iter())
    }

    fn ne(&self, other: &Self) -> bool {
        self.len() != other.len() || self.iter().ne(other.iter())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for LinkedList<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for LinkedList<T> {
    // Lexicographic comparison, element by element.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.iter().partial_cmp(other.iter())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for LinkedList<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.iter().cmp(other.iter())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for LinkedList<T> {
    // Deep copy: clone every element into a fresh list.
    fn clone(&self) -> Self {
        let mut copy = Self::new();
        for element in self.iter() {
            copy.push_back(element.clone());
        }
        copy
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for LinkedList<T> {
    // Renders like a slice: `[a, b, c]`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Hash> Hash for LinkedList<T> {
    // Hash the length first so lists that are prefixes of one another
    // still hash differently, then fold in every element.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.len().hash(state);
        self.iter().for_each(|element| element.hash(state));
    }
}
// Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
#[allow(dead_code)]
fn assert_covariance() {
    // Each function typechecks only if the 'static lifetime can be
    // shortened, i.e. if the type is covariant in its parameter.
    fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
        x
    }
    fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
        x
    }
    fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
        x
    }
}
// SAFETY(review): the list owns its nodes outright, so cross-thread transfer
// and sharing are sound exactly when `T` itself allows it.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for LinkedList<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for LinkedList<T> {}
// Shared iterators only hand out `&T`, so `T: Sync` suffices for both.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Send for Iter<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for Iter<'_, T> {}
// The mutable iterator hands out `&mut T`: it needs `T: Send` to move
// across threads and `T: Sync` to be shared.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for IterMut<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
#[cfg(test)]
mod tests {
    use std::thread;
    use std::vec::Vec;
    use rand::{thread_rng, RngCore};
    use super::{LinkedList, Node};
    // Builds a `LinkedList` by cloning every element of the slice.
    #[cfg(test)]
    fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
        v.iter().cloned().collect()
    }
    // Walks the whole list and asserts every structural invariant:
    // `prev`/`next` links mirror each other, `tail` points at the last
    // reachable node, and `len` matches the number of nodes.
    pub fn check_links<T>(list: &LinkedList<T>) {
        unsafe {
            let mut len = 0;
            let mut last_ptr: Option<&Node<T>> = None;
            let mut node_ptr: &Node<T>;
            match list.head {
                None => {
                    // tail node should also be None.
                    assert!(list.tail.is_none());
                    assert_eq!(0, list.len);
                    return;
                }
                Some(node) => node_ptr = &*node.as_ptr(),
            }
            loop {
                // Each node's `prev` must point exactly at the node we just
                // visited (or be None for the head).
                match (last_ptr, node_ptr.prev) {
                    (None, None) => {}
                    (None, _) => panic!("prev link for head"),
                    (Some(p), Some(pptr)) => {
                        assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
                    }
                    _ => panic!("prev link is none, not good"),
                }
                match node_ptr.next {
                    Some(next) => {
                        last_ptr = Some(node_ptr);
                        node_ptr = &*next.as_ptr();
                        len += 1;
                    }
                    None => {
                        len += 1;
                        break;
                    }
                }
            }
            // verify that the tail node points to the last node.
            let tail = list.tail.as_ref().expect("some tail node").as_ref();
            assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
            // check that len matches interior links.
            assert_eq!(len, list.len);
        }
    }
    #[test]
    fn test_append() {
        // Empty to empty
        {
            let mut m = LinkedList::<i32>::new();
            let mut n = LinkedList::new();
            m.append(&mut n);
            check_links(&m);
            assert_eq!(m.len(), 0);
            assert_eq!(n.len(), 0);
        }
        // Non-empty to empty
        {
            let mut m = LinkedList::new();
            let mut n = LinkedList::new();
            n.push_back(2);
            m.append(&mut n);
            check_links(&m);
            assert_eq!(m.len(), 1);
            assert_eq!(m.pop_back(), Some(2));
            assert_eq!(n.len(), 0);
            check_links(&m);
        }
        // Empty to non-empty
        {
            let mut m = LinkedList::new();
            let mut n = LinkedList::new();
            m.push_back(2);
            m.append(&mut n);
            check_links(&m);
            assert_eq!(m.len(), 1);
            assert_eq!(m.pop_back(), Some(2));
            check_links(&m);
        }
        // Non-empty to non-empty
        let v = vec![1, 2, 3, 4, 5];
        let u = vec![9, 8, 1, 2, 3, 4, 5];
        let mut m = list_from(&v);
        let mut n = list_from(&u);
        m.append(&mut n);
        check_links(&m);
        let mut sum = v;
        sum.extend_from_slice(&u);
        assert_eq!(sum.len(), m.len());
        for elt in sum {
            assert_eq!(m.pop_front(), Some(elt))
        }
        assert_eq!(n.len(), 0);
        // let's make sure it's working properly, since we
        // did some direct changes to private members
        n.push_back(3);
        assert_eq!(n.len(), 1);
        assert_eq!(n.pop_front(), Some(3));
        check_links(&n);
    }
    #[test]
    fn test_insert_prev() {
        let mut m = list_from(&[0, 2, 4, 6, 8]);
        let len = m.len();
        {
            let mut it = m.iter_mut();
            it.insert_next(-2);
            loop {
                match it.next() {
                    None => break,
                    Some(elt) => {
                        // Interleave a new element after each original one and
                        // verify via peek_next that it landed where expected.
                        it.insert_next(*elt + 1);
                        match it.peek_next() {
                            Some(x) => assert_eq!(*x, *elt + 2),
                            None => assert_eq!(8, *elt),
                        }
                    }
                }
            }
            it.insert_next(0);
            it.insert_next(1);
        }
        check_links(&m);
        assert_eq!(m.len(), 3 + len * 2);
        assert_eq!(m.into_iter().collect::<Vec<_>>(),
                   [-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]);
    }
    #[test]
    #[cfg_attr(target_os = "emscripten", ignore)]
    #[cfg(not(miri))] // Miri does not support threads
    fn test_send() {
        // The list must be usable after being moved to another thread.
        let n = list_from(&[1, 2, 3]);
        thread::spawn(move || {
            check_links(&n);
            let a: &[_] = &[&1, &2, &3];
            assert_eq!(a, &*n.iter().collect::<Vec<_>>());
        })
        .join()
        .ok()
        .unwrap();
    }
    #[test]
    fn test_fuzz() {
        for _ in 0..25 {
            fuzz_test(3);
            fuzz_test(16);
            #[cfg(not(miri))] // Miri is too slow
            fuzz_test(189);
        }
    }
    #[test]
    fn test_26021() {
        // There was a bug in split_off that failed to null out the RHS's head's prev ptr.
        // This caused the RHS's dtor to walk up into the LHS at drop and delete all of
        // its nodes.
        //
        // https://github.com/rust-lang/rust/issues/26021
        let mut v1 = LinkedList::new();
        v1.push_front(1);
        v1.push_front(1);
        v1.push_front(1);
        v1.push_front(1);
        let _ = v1.split_off(3); // Dropping this now should not cause laundry consumption
        assert_eq!(v1.len(), 3);
        assert_eq!(v1.iter().len(), 3);
        assert_eq!(v1.iter().collect::<Vec<_>>().len(), 3);
    }
    #[test]
    fn test_split_off() {
        let mut v1 = LinkedList::new();
        v1.push_front(1);
        v1.push_front(1);
        v1.push_front(1);
        v1.push_front(1);
        // test all splits
        for ix in 0..1 + v1.len() {
            let mut a = v1.clone();
            let b = a.split_off(ix);
            check_links(&a);
            check_links(&b);
            // Splitting then re-joining must reproduce the original list.
            a.extend(b);
            assert_eq!(v1, a);
        }
    }
    // Randomly mutates a list and a reference `Vec` in lockstep for `sz`
    // steps, checking the list's invariants after every step and the final
    // contents at the end.
    #[cfg(test)]
    fn fuzz_test(sz: i32) {
        let mut m: LinkedList<_> = LinkedList::new();
        let mut v = vec![];
        for i in 0..sz {
            check_links(&m);
            let r: u8 = thread_rng().next_u32() as u8;
            match r % 6 {
                0 => {
                    // pop_back on an empty list is a no-op, matching v.pop().
                    m.pop_back();
                    v.pop();
                }
                1 => {
                    if !v.is_empty() {
                        m.pop_front();
                        v.remove(0);
                    }
                }
                2 | 4 => {
                    m.push_front(-i);
                    v.insert(0, -i);
                }
                3 | 5 | _ => {
                    m.push_back(i);
                    v.push(i);
                }
            }
        }
        check_links(&m);
        let mut i = 0;
        for (a, &b) in m.into_iter().zip(&v) {
            i += 1;
            assert_eq!(a, b);
        }
        assert_eq!(i, v.len());
    }
    #[test]
    fn drain_filter_test() {
        let mut m: LinkedList<u32> = LinkedList::new();
        m.extend(&[1, 2, 3, 4, 5, 6]);
        let deleted = m.drain_filter(|v| *v < 4).collect::<Vec<_>>();
        check_links(&m);
        assert_eq!(deleted, &[1, 2, 3]);
        assert_eq!(m.into_iter().collect::<Vec<_>>(), &[4, 5, 6]);
    }
    #[test]
    fn drain_to_empty_test() {
        let mut m: LinkedList<u32> = LinkedList::new();
        m.extend(&[1, 2, 3, 4, 5, 6]);
        let deleted = m.drain_filter(|_| true).collect::<Vec<_>>();
        check_links(&m);
        assert_eq!(deleted, &[1, 2, 3, 4, 5, 6]);
        assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
    }
}
| 28.74621 | 99 | 0.500161 |
69b2bbe63e4f6fd299fd9739ce50788ae5f0b92d | 1,369 | //! Defines the GDT and TSS structures used by alloy.
use x86_64::VirtAddr;
use x86_64::structures::gdt::{GlobalDescriptorTable, Descriptor, SegmentSelector};
use x86_64::structures::tss::TaskStateSegment;
pub const DOUBLE_FAULT_IST_INDEX: u16 = 0;
lazy_static! {
    // Task State Segment. Its only job here is to provide a known-good stack
    // for the double fault handler via the Interrupt Stack Table, so a fault
    // caused by a bad stack can still be handled.
    static ref TSS: TaskStateSegment = {
        let mut tss = TaskStateSegment::new();
        tss.interrupt_stack_table[DOUBLE_FAULT_IST_INDEX as usize] = {
            const STACK_SIZE: usize = 4096;
            // Backing storage for the IST stack. NOTE(review): a `static mut`
            // array has no guard page, so an overflow of this stack would
            // silently corrupt adjacent memory.
            static mut STACK: [u8; STACK_SIZE] = [0; STACK_SIZE];
            let stack_start = VirtAddr::from_ptr(unsafe { &STACK });
            let stack_end = stack_start + STACK_SIZE;
            // The IST entry holds the *top* of the stack, since x86 stacks
            // grow downwards.
            stack_end
        };
        tss
    };
}
lazy_static! {
    // The GDT itself, paired with the selectors needed to activate it:
    // one kernel code segment and the TSS segment defined above.
    static ref GDT: (GlobalDescriptorTable, Selectors) = {
        let mut gdt = GlobalDescriptorTable::new();
        let code = gdt.add_entry(Descriptor::kernel_code_segment());
        let tss = gdt.add_entry(Descriptor::tss_segment(&TSS));
        (gdt, Selectors { code, tss })
    };
}
// Segment selectors produced when the GDT entries were added, kept so that
// `init` can load them after the GDT is activated.
struct Selectors {
    code: SegmentSelector,
    tss: SegmentSelector,
}
/// Loads the GDT, then activates it by reloading the code segment register
/// (`cs`) and loading the task state segment.
pub fn init() {
    use x86_64::instructions::segmentation::set_cs;
    use x86_64::instructions::tables::load_tss;
    GDT.0.load();
    // Reload registers to activate new GDT and TSS.
    // SAFETY: the selectors reference valid entries of the GDT that was just
    // loaded above.
    unsafe {
        set_cs(GDT.1.code);
        load_tss(GDT.1.tss);
    }
}
| 27.38 | 82 | 0.638422 |
7add6b87aec790a207696b4268edf034385bb180 | 1,028 | use std::error::Error;
use std::io::prelude::*;
use std::fs::File;
use std::path::Path;
use std::vec::Vec;
/// Reads the entire file at `in_file_location` into a `String`.
///
/// Panics with a descriptive message if the file cannot be opened or read
/// (missing file, permission denied, non-UTF-8 contents, ...).
pub fn read_file(in_file_location: &str) -> String
{
    let path = Path::new(in_file_location);
    let mut open_file = match File::open(path)
    {
        // `Error::description` is deprecated; the error's `Display` impl
        // carries the same (or better) detail.
        Err(information) => panic!("Couldn't open file {}", information),
        Ok(open_file) => open_file
    };
    let mut file_body = String::new();
    match open_file.read_to_string(&mut file_body)
    {
        // This arm previously claimed the file couldn't be *opened*, which
        // was misleading for a failure that happened while reading.
        Err(information) => panic!("Couldn't read file {}", information),
        Ok(_) => {}
    };
    file_body
}
/// Reads the file at `in_file_location` and returns its lines as owned
/// `String`s (line terminators are stripped by `str::lines`).
///
/// Panics under the same conditions as `read_file`.
pub fn read_file_by_line(in_file_location: &str) -> Vec<String>
{
    // Collecting the line iterator replaces the manual push loop.
    read_file(in_file_location)
        .lines()
        .map(str::to_owned)
        .collect()
}
/// Parses `in_string` (ignoring surrounding whitespace) as an `i32`.
///
/// Panics with a message naming the offending input if parsing fails,
/// instead of the context-free panic a bare `unwrap` produces.
pub fn convert_to_numerical_value(in_string: &str) -> i32
{
    let trimmed = in_string.trim();
    trimmed
        .parse::<i32>()
        .unwrap_or_else(|e| panic!("Couldn't parse '{}' as i32: {}", trimmed, e))
}
bb3681a6a4cc70b267978684eed5b098dc01fca7 | 54,421 | use std::collections::BTreeSet;
use abstutil::Timer;
use geom::{ArrowCap, Distance, Duration, PolyLine, Pt2D, Time};
use map_gui::tools::{grey_out_map, Minimap, PopupMsg};
use map_gui::ID;
use map_model::raw::OriginalRoad;
use map_model::{osm, BuildingID, Map, Position};
use sim::{
AgentID, BorderSpawnOverTime, CarID, IndividTrip, PersonSpec, Scenario, ScenarioGenerator,
SpawnOverTime, TripEndpoint, TripMode, TripPurpose, VehicleType,
};
use widgetry::{
hotkeys, lctrl, Btn, Color, EventCtx, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Panel,
RewriteColor, ScreenPt, State, Text, TextExt, VerticalAlignment, Widget,
};
use crate::app::{App, Transition};
use crate::challenges::cutscene::CutsceneBuilder;
use crate::common::{tool_panel, MinimapController, Warping};
use crate::edit::EditMode;
use crate::sandbox::gameplay::{GameplayMode, GameplayState};
use crate::sandbox::{
maybe_exit_sandbox, spawn_agents_around, Actions, AgentMeter, SandboxControls, SandboxMode,
SpeedControls, TimePanel,
};
// The specific car the player must follow during the Escort task
// (presumably the first car spawned by that stage's custom_spawn — verify).
const ESCORT: CarID = CarID(0, VehicleType::Car);
// How much faster all trips must finish after the player's map edits for the
// FixBikes task to count as complete.
const CAR_BIKE_CONTENTION_GOAL: Duration = Duration::const_seconds(15.0);
/// The tutorial gameplay mode: a guided sequence of messages and interactive
/// tasks layered on top of the sandbox.
pub struct Tutorial {
    top_center: Panel,
    // Task completed by the previous stage; gates which sandbox controls and
    // interactions are available.
    last_finished_task: Task,
    // When `Some`, a modal message panel is shown and other interaction is
    // blocked.
    msg_panel: Option<Panel>,
    // Whether this stage's one-time camera warp has happened yet.
    warped: bool,
}
/// Identifies a position within the tutorial: a stage, and a part within
/// that stage.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct TutorialPointer {
    pub stage: usize,
    // Index into messages. messages.len() means the actual task.
    pub part: usize,
}
impl TutorialPointer {
    /// Points at the given part of the given stage.
    pub fn new(stage: usize, part: usize) -> TutorialPointer {
        TutorialPointer { stage, part }
    }
}
impl Tutorial {
    /// Launches the tutorial gameplay along with its cutscene
    pub fn start(ctx: &mut EventCtx, app: &mut App) -> Transition {
        Tutorial::initialize(ctx, app);
        Transition::Multi(vec![
            // Constructing the intro_story cutscene doesn't require the map/scenario to be loaded.
            Transition::Push(SandboxMode::simple_new(
                ctx,
                app,
                GameplayMode::Tutorial(
                    app.session
                        .tutorial
                        .as_ref()
                        .map(|tut| tut.current)
                        .unwrap_or(TutorialPointer::new(0, 0)),
                ),
            )),
            Transition::Push(intro_story(ctx, app)),
        ])
    }
    /// Idempotent. This must be called before `make_gameplay` or `scenario`.
    pub fn initialize(ctx: &mut EventCtx, app: &mut App) {
        if app.session.tutorial.is_none() {
            app.session.tutorial = Some(TutorialState::new(ctx, app));
        }
    }
    /// Builds the gameplay state for the stage/part that `current` points at.
    /// The tutorial state is moved out of the session while constructing,
    /// then put back.
    pub fn make_gameplay(
        ctx: &mut EventCtx,
        app: &mut App,
        current: TutorialPointer,
    ) -> Box<dyn GameplayState> {
        let mut tut = app.session.tutorial.take().unwrap();
        tut.current = current;
        let state = tut.make_state(ctx, app);
        app.session.tutorial = Some(tut);
        state
    }
    /// The scenario to instantiate for the pointed-at stage, if that stage
    /// defines one.
    pub fn scenario(app: &App, current: TutorialPointer) -> Option<ScenarioGenerator> {
        app.session.tutorial.as_ref().unwrap().stages[current.stage]
            .make_scenario
            .clone()
    }
    /// Handles all tutorial-specific input and task-progress checks.
    /// Returns `Some` to request a transition (usually rebuilding the
    /// sandbox for the next stage or part).
    fn inner_event(
        &mut self,
        ctx: &mut EventCtx,
        app: &mut App,
        controls: &mut SandboxControls,
        tut: &mut TutorialState,
    ) -> Option<Transition> {
        // First of all, might need to initiate warping
        if !self.warped {
            if let Some((ref id, zoom)) = tut.stage().warp_to {
                self.warped = true;
                return Some(Transition::Push(Warping::new(
                    ctx,
                    app.primary.canonical_point(id.clone()).unwrap(),
                    Some(zoom),
                    None,
                    &mut app.primary,
                )));
            }
        }
        // Top-center panel navigation between stages/parts.
        match self.top_center.event(ctx) {
            Outcome::Clicked(x) => match x.as_ref() {
                "Quit" => {
                    return Some(maybe_exit_sandbox(ctx));
                }
                "previous tutorial" => {
                    tut.current = TutorialPointer::new(tut.current.stage - 1, 0);
                    return Some(transition(ctx, app, tut));
                }
                "next tutorial" => {
                    tut.current = TutorialPointer::new(tut.current.stage + 1, 0);
                    return Some(transition(ctx, app, tut));
                }
                "instructions" => {
                    tut.current = TutorialPointer::new(tut.current.stage, 0);
                    return Some(transition(ctx, app, tut));
                }
                "edit map" => {
                    // TODO Ideally this would be an inactive button in message states
                    if self.msg_panel.is_none() {
                        let mode = GameplayMode::Tutorial(tut.current);
                        return Some(Transition::Push(EditMode::new(ctx, app, mode)));
                    }
                }
                _ => unreachable!(),
            },
            _ => {}
        }
        // While a message is up, it's modal: only prev/next are allowed.
        if let Some(ref mut msg) = self.msg_panel {
            match msg.event(ctx) {
                Outcome::Clicked(x) => match x.as_ref() {
                    "previous message" => {
                        tut.prev();
                        return Some(transition(ctx, app, tut));
                    }
                    "next message" | "Try it" => {
                        tut.next();
                        return Some(transition(ctx, app, tut));
                    }
                    _ => unreachable!(),
                },
                _ => {
                    // Don't allow other interactions
                    return Some(Transition::Keep);
                }
            }
        }
        // Interaction things
        if tut.interaction() == Task::Camera {
            if app.primary.current_selection == Some(ID::Building(tut.fire_station))
                && app.per_obj.left_click(ctx, "put out the... fire?")
            {
                tut.next();
                return Some(transition(ctx, app, tut));
            }
        } else if tut.interaction() == Task::InspectObjects {
            // TODO Have to wiggle the mouse or something after opening the panel, because of the
            // order in SandboxMode.
            match controls.common.as_ref().unwrap().info_panel_open(app) {
                Some(ID::Lane(l)) => {
                    if app.primary.map.get_l(l).is_biking() && !tut.inspected_bike_lane {
                        tut.inspected_bike_lane = true;
                        self.top_center = tut.make_top_center(ctx, false);
                    }
                }
                Some(ID::Building(_)) => {
                    if !tut.inspected_building {
                        tut.inspected_building = true;
                        self.top_center = tut.make_top_center(ctx, false);
                    }
                }
                Some(ID::Intersection(i)) => {
                    let i = app.primary.map.get_i(i);
                    if i.is_stop_sign() && !tut.inspected_stop_sign {
                        tut.inspected_stop_sign = true;
                        self.top_center = tut.make_top_center(ctx, false);
                    }
                    if i.is_border() && !tut.inspected_border {
                        tut.inspected_border = true;
                        self.top_center = tut.make_top_center(ctx, false);
                    }
                }
                _ => {}
            }
            // Advance only once all four object kinds have been inspected.
            if tut.inspected_bike_lane
                && tut.inspected_building
                && tut.inspected_stop_sign
                && tut.inspected_border
            {
                tut.next();
                return Some(transition(ctx, app, tut));
            }
        } else if tut.interaction() == Task::TimeControls {
            if app.primary.sim.time() >= Time::START_OF_DAY + Duration::hours(17) {
                tut.next();
                return Some(transition(ctx, app, tut));
            }
        } else if tut.interaction() == Task::PauseResume {
            // Count pause->resume->pause cycles; three pauses completes it.
            let is_paused = controls.speed.as_ref().unwrap().is_paused();
            if tut.was_paused && !is_paused {
                tut.was_paused = false;
            }
            if !tut.was_paused && is_paused {
                tut.num_pauses += 1;
                tut.was_paused = true;
                self.top_center = tut.make_top_center(ctx, false);
            }
            if tut.num_pauses == 3 {
                tut.next();
                return Some(transition(ctx, app, tut));
            }
        } else if tut.interaction() == Task::Escort {
            let following_car =
                controls.common.as_ref().unwrap().info_panel_open(app) == Some(ID::Car(ESCORT));
            let is_parked = app
                .primary
                .sim
                .agent_to_trip(AgentID::Car(ESCORT))
                .is_none();
            if !tut.car_parked && is_parked && tut.following_car {
                tut.car_parked = true;
                self.top_center = tut.make_top_center(ctx, false);
            }
            if following_car && !tut.following_car {
                // TODO There's a delay of one event before the checklist updates, because the
                // info panel opening happens at the end of the event. Not a big deal.
                tut.following_car = true;
                self.top_center = tut.make_top_center(ctx, false);
            }
            if tut.prank_done {
                tut.next();
                return Some(transition(ctx, app, tut));
            }
        } else if tut.interaction() == Task::LowParking {
            if tut.parking_found {
                tut.next();
                return Some(transition(ctx, app, tut));
            }
        } else if tut.interaction() == Task::WatchBikes {
            if app.primary.sim.time() >= Time::START_OF_DAY + Duration::minutes(3) {
                tut.next();
                return Some(transition(ctx, app, tut));
            }
        } else if tut.interaction() == Task::FixBikes {
            // Grade the player's edits once the simulation finishes: compare
            // the slowest trip before and after against the prebaked results.
            if app.primary.sim.is_done() {
                let mut before = Duration::ZERO;
                let mut after = Duration::ZERO;
                for (_, b, a, _) in app
                    .primary
                    .sim
                    .get_analytics()
                    .both_finished_trips(app.primary.sim.get_end_of_day(), app.prebaked())
                {
                    before = before.max(b);
                    after = after.max(a);
                }
                if !tut.score_delivered {
                    tut.score_delivered = true;
                    if before == after {
                        return Some(Transition::Push(PopupMsg::new(
                            ctx,
                            "All trips completed",
                            vec![
                                "Your changes didn't affect anything!",
                                "Try editing the map to create some bike lanes.",
                            ],
                        )));
                    }
                    if after > before {
                        return Some(Transition::Push(PopupMsg::new(
                            ctx,
                            "All trips completed",
                            vec![
                                "Your changes made things worse!".to_string(),
                                format!(
                                    "All trips originally finished in {}, but now they took {}",
                                    before, after
                                ),
                                "".to_string(),
                                "Try again!".to_string(),
                            ],
                        )));
                    }
                    if before - after < CAR_BIKE_CONTENTION_GOAL {
                        return Some(Transition::Push(PopupMsg::new(
                            ctx,
                            "All trips completed",
                            vec![
                                "Nice, you helped things a bit!".to_string(),
                                format!(
                                    "All trips originally took {}, but now they took {}",
                                    before, after
                                ),
                                "".to_string(),
                                "See if you can do a little better though.".to_string(),
                            ],
                        )));
                    }
                    return Some(Transition::Push(PopupMsg::new(
                        ctx,
                        "All trips completed",
                        vec![format!(
                            "Awesome! All trips originally took {}, but now they only took {}",
                            before, after
                        )],
                    )));
                }
                if before - after >= CAR_BIKE_CONTENTION_GOAL {
                    tut.next();
                }
                return Some(transition(ctx, app, tut));
            }
        } else if tut.interaction() == Task::Done {
            // If the player chooses to stay here, at least go back to the message panel.
            tut.prev();
            return Some(maybe_exit_sandbox(ctx));
        }
        None
    }
}
impl GameplayState for Tutorial {
    fn event(
        &mut self,
        ctx: &mut EventCtx,
        app: &mut App,
        controls: &mut SandboxControls,
        _: &mut Actions,
    ) -> Option<Transition> {
        // Dance around borrow-checker issues
        let mut tut = app.session.tutorial.take().unwrap();
        // The arrows get screwy when window size changes.
        if window_dims != tut.window_dims {
            tut.stages = TutorialState::new(ctx, app).stages;
            tut.window_dims = window_dims;
        }
        let result = self.inner_event(ctx, app, controls, &mut tut);
        app.session.tutorial = Some(tut);
        result
    }
    fn draw(&self, g: &mut GfxCtx, app: &App) {
        let tut = app.session.tutorial.as_ref().unwrap();
        // Dim the map while a modal message is up.
        if self.msg_panel.is_some() {
            grey_out_map(g, app);
        }
        self.top_center.draw(g);
        if let Some(ref msg) = self.msg_panel {
            // Arrows underneath the message panel, but on top of other panels
            if let Some((_, _, Some(fxn))) = tut.lines() {
                let pt = (fxn)(g, app);
                g.fork_screenspace();
                if let Ok(pl) = PolyLine::new(vec![
                    self.msg_panel
                        .as_ref()
                        .unwrap()
                        .center_of("next message")
                        .to_pt(),
                    pt,
                ]) {
                    g.draw_polygon(
                        Color::RED,
                        pl.make_arrow(Distance::meters(20.0), ArrowCap::Triangle),
                    );
                }
                g.unfork();
            }
            msg.draw(g);
        }
        // Special things
        if tut.interaction() == Task::Camera {
            // Highlight the "burning" fire station the player must click.
            g.draw_polygon(
                Color::hex("#e25822"),
                app.primary.map.get_b(tut.fire_station).polygon.clone(),
            );
        }
    }
    fn recreate_panels(&mut self, ctx: &mut EventCtx, app: &App) {
        let tut = app.session.tutorial.as_ref().unwrap();
        self.top_center = tut.make_top_center(ctx, self.last_finished_task >= Task::WatchBikes);
        // Time can't pass while self.msg_panel is active
    }
    // The remaining methods progressively unlock sandbox controls based on
    // how far the player has gotten (`last_finished_task` comparisons rely
    // on Task's derived ordering).
    fn can_move_canvas(&self) -> bool {
        self.msg_panel.is_none()
    }
    fn can_examine_objects(&self) -> bool {
        self.last_finished_task >= Task::WatchBikes
    }
    fn has_common(&self) -> bool {
        self.last_finished_task >= Task::Camera
    }
    fn has_tool_panel(&self) -> bool {
        true
    }
    fn has_time_panel(&self) -> bool {
        self.last_finished_task >= Task::InspectObjects
    }
    fn has_speed(&self) -> bool {
        self.last_finished_task >= Task::InspectObjects
    }
    fn has_agent_meter(&self) -> bool {
        self.last_finished_task >= Task::PauseResume
    }
    fn has_minimap(&self) -> bool {
        self.last_finished_task >= Task::Escort
    }
}
// The interactive tasks, in tutorial order. The derived `PartialOrd` is
// load-bearing: code compares tasks (e.g. `last_finished_task >=
// Task::WatchBikes`) to decide which controls to enable, so the variant
// order must match tutorial progression.
#[derive(PartialEq, PartialOrd, Clone, Copy)]
enum Task {
    // "No task": used while a message (not an interaction) is showing.
    Nil,
    Camera,
    InspectObjects,
    TimeControls,
    PauseResume,
    Escort,
    LowParking,
    WatchBikes,
    FixBikes,
    Done,
}
impl Task {
    /// The status/checklist text shown in the top-center panel while this
    /// task is active. Completed checklist items are drawn in green.
    fn top_txt(self, state: &TutorialState) -> Text {
        let simple = match self {
            Task::Nil => unreachable!(),
            Task::Camera => "Put out the fire at the fire station",
            Task::InspectObjects => {
                let mut txt = Text::from(Line("Find one of each:"));
                for (name, done) in vec![
                    ("bike lane", state.inspected_bike_lane),
                    ("building", state.inspected_building),
                    ("intersection with stop sign", state.inspected_stop_sign),
                    ("intersection on the map border", state.inspected_border),
                ] {
                    if done {
                        txt.add(Line(format!("[X] {}", name)).fg(Color::GREEN));
                    } else {
                        txt.add(Line(format!("[ ] {}", name)));
                    }
                }
                return txt;
            }
            Task::TimeControls => "Wait until after 5pm",
            Task::PauseResume => {
                let mut txt = Text::from(Line("[ ] Pause/resume "));
                txt.append(Line(format!("{} times", 3 - state.num_pauses)).fg(Color::GREEN));
                return txt;
            }
            Task::Escort => {
                // Inspect the target car, wait for them to park, draw WASH ME on the window
                let mut txt = Text::new();
                if state.following_car {
                    txt.add(Line("[X] follow the target car").fg(Color::GREEN));
                } else {
                    txt.add(Line("[ ] follow the target car"));
                }
                if state.car_parked {
                    txt.add(Line("[X] wait for them to park").fg(Color::GREEN));
                } else {
                    txt.add(Line("[ ] wait for them to park"));
                }
                if state.prank_done {
                    txt.add(Line("[X] click car and press c to draw WASH ME").fg(Color::GREEN));
                } else {
                    txt.add(Line("[ ] click car and press "));
                    // TODO ctx.style().hotkey_color
                    txt.append(Line(Key::C.describe()).fg(Color::GREEN));
                    txt.append(Line(" to draw WASH ME"));
                }
                return txt;
            }
            Task::LowParking => {
                let mut txt = Text::from(Line(
                    "1) Find a road with almost no parking spots available",
                ));
                txt.add(Line("2) Click it and press "));
                // TODO ctx.style().hotkey_color
                txt.append(Line(Key::C.describe()).fg(Color::GREEN));
                txt.append(Line(" to check the occupancy"));
                return txt;
            }
            Task::WatchBikes => "Watch for 3 minutes",
            Task::FixBikes => {
                return Text::from(Line(format!(
                    "[ ] Complete all trips {} faster",
                    CAR_BIKE_CONTENTION_GOAL
                )));
            }
            Task::Done => "Tutorial complete!",
        };
        Text::from(Line(simple))
    }
    /// Short human-readable name for the task, used in headings.
    fn label(self) -> &'static str {
        match self {
            Task::Nil => unreachable!(),
            Task::Camera => "Moving the drone",
            Task::InspectObjects => "Interacting with objects",
            Task::TimeControls => "Passing the time",
            Task::PauseResume => "Pausing/resuming",
            Task::Escort => "Following people",
            Task::LowParking => "Exploring map layers",
            Task::WatchBikes => "Observing a problem",
            Task::FixBikes => "Editing lanes",
            Task::Done => "Tutorial complete!",
        }
    }
}
/// One stage of the tutorial: a sequence of explanatory messages followed by
/// an interactive task.
struct Stage {
    // Each message: its lines of text, the panel's horizontal alignment, and
    // an optional callback producing the point a red arrow should point at.
    messages: Vec<(
        Vec<String>,
        HorizontalAlignment,
        Option<Box<dyn Fn(&GfxCtx, &App) -> Pt2D>>,
    )>,
    task: Task,
    // Warp the camera here (with the given zoom) when the stage starts.
    warp_to: Option<(ID, f64)>,
    // Directly seeds agents when the stage starts, for hand-crafted setups.
    custom_spawn: Option<Box<dyn Fn(&mut App)>>,
    // Alternatively, a scenario instantiated when SandboxMode is created.
    make_scenario: Option<ScenarioGenerator>,
}
// Convenience for messages whose arrow points at a fixed screen-space spot.
fn arrow(pt: ScreenPt) -> Option<Box<dyn Fn(&GfxCtx, &App) -> Pt2D>> {
    let callback = move |_: &GfxCtx, _: &App| pt.to_pt();
    Some(Box::new(callback))
}
impl Stage {
    /// A stage for the given task, with no messages or setup yet.
    fn new(task: Task) -> Stage {
        Stage {
            messages: Vec::new(),
            task,
            warp_to: None,
            custom_spawn: None,
            make_scenario: None,
        }
    }
    /// Appends a center-aligned message, optionally with an arrow callback.
    fn msg<I: Into<String>>(
        mut self,
        lines: Vec<I>,
        point_to: Option<Box<dyn Fn(&GfxCtx, &App) -> Pt2D>>,
    ) -> Stage {
        self.messages.push((
            lines.into_iter().map(|l| l.into()).collect(),
            HorizontalAlignment::Center,
            point_to,
        ));
        self
    }
    /// Like `msg`, but the message panel is left-aligned.
    fn left_aligned_msg<I: Into<String>>(
        mut self,
        lines: Vec<I>,
        point_to: Option<Box<dyn Fn(&GfxCtx, &App) -> Pt2D>>,
    ) -> Stage {
        self.messages.push((
            lines.into_iter().map(|l| l.into()).collect(),
            HorizontalAlignment::Left,
            point_to,
        ));
        self
    }
    /// Warp the camera to `id` when the stage starts. Zoom defaults to 4.0.
    /// At most one warp per stage.
    fn warp_to(mut self, id: ID, zoom: Option<f64>) -> Stage {
        assert!(self.warp_to.is_none());
        self.warp_to = Some((id, zoom.unwrap_or(4.0)));
        self
    }
    /// Seed agents with a custom callback. At most one per stage.
    fn custom_spawn(mut self, cb: Box<dyn Fn(&mut App)>) -> Stage {
        assert!(self.custom_spawn.is_none());
        self.custom_spawn = Some(cb);
        self
    }
    /// Use a scenario generator for this stage. At most one per stage.
    fn scenario(mut self, generator: ScenarioGenerator) -> Stage {
        assert!(self.make_scenario.is_none());
        self.make_scenario = Some(generator);
        self
    }
}
/// All tutorial stages plus cross-stage progress, stored in the session
/// (`app.session.tutorial`).
pub struct TutorialState {
    stages: Vec<Stage>,
    pub current: TutorialPointer,
    // Stages are rebuilt when this changes, since arrow positions depend on
    // window size.
    window_dims: (f64, f64),
    // Goofy state for just some stages.
    inspected_bike_lane: bool,
    inspected_building: bool,
    inspected_stop_sign: bool,
    inspected_border: bool,
    was_paused: bool,
    num_pauses: usize,
    following_car: bool,
    car_parked: bool,
    prank_done: bool,
    parking_found: bool,
    score_delivered: bool,
    // The building highlighted (and clicked) during the Camera task.
    fire_station: BuildingID,
}
/// Seeds 10 cars and 10 bikes entering from the same border within the same
/// 10 seconds, all headed to one building — deliberately creating
/// car/bike contention.
fn make_bike_lane_scenario(map: &Map) -> ScenarioGenerator {
    let mut s = ScenarioGenerator::empty("car vs bike contention");
    s.border_spawn_over_time.push(BorderSpawnOverTime {
        num_peds: 0,
        num_cars: 10,
        num_bikes: 10,
        percent_use_transit: 0.0,
        start_time: Time::START_OF_DAY,
        stop_time: Time::START_OF_DAY + Duration::seconds(10.0),
        start_from_border: map.find_i_by_osm_id(osm::NodeID(3005680098)).unwrap(),
        goal: Some(TripEndpoint::Bldg(
            map.find_b_by_osm_id(bldg(217699501)).unwrap(),
        )),
    });
    s
}
/// Resets per-stage interaction flags and rebuilds the sandbox for whatever
/// `tut.current` now points at.
fn transition(ctx: &mut EventCtx, app: &mut App, tut: &mut TutorialState) -> Transition {
    tut.reset_state();
    let mode = GameplayMode::Tutorial(tut.current);
    Transition::Replace(SandboxMode::simple_new(ctx, app, mode))
}
impl TutorialState {
// These're mutex to each state, but still important to reset. Otherwise if you go back to a
// previous interaction stage, it'll just be automatically marked done.
fn reset_state(&mut self) {
self.inspected_bike_lane = false;
self.inspected_building = false;
self.inspected_stop_sign = false;
self.inspected_border = false;
self.was_paused = true;
self.num_pauses = 0;
self.score_delivered = false;
self.following_car = false;
self.car_parked = false;
self.prank_done = false;
self.parking_found = false;
}
fn stage(&self) -> &Stage {
&self.stages[self.current.stage]
}
fn interaction(&self) -> Task {
let stage = self.stage();
if self.current.part == stage.messages.len() {
stage.task
} else {
Task::Nil
}
}
fn lines(
&self,
) -> Option<&(
Vec<String>,
HorizontalAlignment,
Option<Box<dyn Fn(&GfxCtx, &App) -> Pt2D>>,
)> {
let stage = self.stage();
if self.current.part == stage.messages.len() {
None
} else {
Some(&stage.messages[self.current.part])
}
}
fn next(&mut self) {
self.current.part += 1;
if self.current.part == self.stage().messages.len() + 1 {
self.current = TutorialPointer::new(self.current.stage + 1, 0);
}
}
fn prev(&mut self) {
if self.current.part == 0 {
self.current = TutorialPointer::new(
self.current.stage - 1,
self.stages[self.current.stage - 1].messages.len(),
);
} else {
self.current.part -= 1;
}
}
fn make_top_center(&self, ctx: &mut EventCtx, edit_map: bool) -> Panel {
let mut col = vec![Widget::row(vec![
Line("Tutorial").small_heading().draw(ctx),
Widget::vert_separator(ctx, 50.0),
if self.current.stage == 0 {
Btn::text_fg("<").inactive(ctx)
} else {
Btn::text_fg("<").build(ctx, "previous tutorial", None)
},
{
let mut txt = Text::from(Line(format!("Task {}", self.current.stage + 1)));
// TODO Smaller font and use alpha for the "/9" part
txt.append(Line(format!("/{}", self.stages.len())).fg(Color::grey(0.7)));
txt.draw(ctx)
},
if self.current.stage == self.stages.len() - 1 {
Btn::text_fg(">").inactive(ctx)
} else {
Btn::text_fg(">").build(ctx, "next tutorial", None)
},
Btn::text_fg("Quit").build_def(ctx, None),
])
.centered()];
{
let task = self.interaction();
if task != Task::Nil {
col.push(Widget::row(vec![
Text::from(
Line(format!(
"Task {}: {}",
self.current.stage + 1,
self.stage().task.label()
))
.small_heading(),
)
.draw(ctx),
// TODO also text saying "instructions"... can we layout two things easily to
// make a button?
Btn::svg_def("system/assets/tools/info.svg")
.build(ctx, "instructions", None)
.centered_vert()
.align_right(),
]));
col.push(task.top_txt(self).draw(ctx));
}
}
if edit_map {
col.push(Btn::svg_def("system/assets/tools/edit_map.svg").build(
ctx,
"edit map",
lctrl(Key::E),
));
}
Panel::new(Widget::col(col))
.aligned(HorizontalAlignment::Center, VerticalAlignment::Top)
.build(ctx)
}
fn make_state(&self, ctx: &mut EventCtx, app: &mut App) -> Box<dyn GameplayState> {
if self.interaction() == Task::Nil {
app.primary.current_selection = None;
}
if let Some(ref cb) = self.stage().custom_spawn {
(cb)(app);
app.primary
.sim
.tiny_step(&app.primary.map, &mut app.primary.sim_cb);
}
// If this stage has a scenario, it's instantiated when SandboxMode gets created.
let last_finished_task = if self.current.stage == 0 {
Task::Nil
} else {
self.stages[self.current.stage - 1].task
};
Box::new(Tutorial {
top_center: self.make_top_center(ctx, last_finished_task >= Task::WatchBikes),
last_finished_task,
msg_panel: if let Some((ref lines, horiz_align, _)) = self.lines() {
let mut col = vec![{
let mut txt = Text::new();
txt.add(Line(self.stage().task.label()).small_heading());
txt.add(Line(""));
for l in lines {
txt.add(Line(l));
}
txt.wrap_to_pct(ctx, 30).draw(ctx)
}];
let mut controls = vec![Widget::row(vec![
if self.current.part > 0 {
Btn::svg(
"system/assets/tools/prev.svg",
RewriteColor::Change(Color::WHITE, app.cs.hovering),
)
.build(ctx, "previous message", Key::LeftArrow)
} else {
Widget::draw_svg_transform(
ctx,
"system/assets/tools/prev.svg",
RewriteColor::ChangeAll(Color::WHITE.alpha(0.5)),
)
},
format!("{}/{}", self.current.part + 1, self.stage().messages.len())
.draw_text(ctx)
.centered_vert(),
if self.current.part == self.stage().messages.len() - 1 {
Widget::draw_svg_transform(
ctx,
"system/assets/tools/next.svg",
RewriteColor::ChangeAll(Color::WHITE.alpha(0.5)),
)
.named("next message")
} else {
Btn::svg(
"system/assets/tools/next.svg",
RewriteColor::Change(Color::WHITE, app.cs.hovering),
)
.build(
ctx,
"next message",
hotkeys(vec![Key::RightArrow, Key::Space, Key::Enter]),
)
},
])];
if self.current.part == self.stage().messages.len() - 1 {
controls.push(
Btn::text_bg2("Try it")
.build_def(ctx, hotkeys(vec![Key::RightArrow, Key::Space, Key::Enter])),
);
}
col.push(Widget::col(controls).align_bottom());
Some(
Panel::new(Widget::col(col).outline(5.0, Color::WHITE))
.exact_size_percent(40, 40)
.aligned(*horiz_align, VerticalAlignment::Center)
.build(ctx),
)
} else {
None
},
warped: false,
})
}
fn new(ctx: &mut EventCtx, app: &App) -> TutorialState {
let mut state = TutorialState {
stages: Vec::new(),
current: TutorialPointer::new(0, 0),
window_dims: (ctx.canvas.window_width, ctx.canvas.window_height),
inspected_bike_lane: false,
inspected_building: false,
inspected_stop_sign: false,
inspected_border: false,
was_paused: true,
num_pauses: 0,
following_car: false,
car_parked: false,
prank_done: false,
parking_found: false,
score_delivered: false,
fire_station: app.primary.map.find_b_by_osm_id(bldg(731238736)).unwrap(),
};
let tool_panel = tool_panel(ctx);
let time = TimePanel::new(ctx, app);
let speed = SpeedControls::new(ctx, app);
let agent_meter = AgentMeter::new(ctx, app);
// The minimap is hidden at low zoom levels
let orig_zoom = ctx.canvas.cam_zoom;
ctx.canvas.cam_zoom = 100.0;
let minimap = Minimap::new(ctx, app, MinimapController);
ctx.canvas.cam_zoom = orig_zoom;
let map = &app.primary.map;
state.stages.push(
Stage::new(Task::Camera)
.warp_to(
ID::Intersection(map.find_i_by_osm_id(osm::NodeID(53096945)).unwrap()),
None,
)
.msg(
vec![
"Let's start by piloting your fancy new drone.",
"",
"- Click and drag to pan around the map",
"- Use your scroll wheel or touchpad to zoom in and out.",
],
None,
)
.msg(
vec!["If the controls feel wrong, try adjusting the settings."],
arrow(tool_panel.center_of("settings")),
)
.msg(
vec![
"Let's try the drone ou--",
"",
"WHOA, THERE'S A FIRE STATION ON FIRE!",
"GO CLICK ON IT, QUICK!",
],
None,
)
.msg(
vec![
"Hint:",
"- Look around for an unusually red building",
"- You have to zoom in to interact with anything on the map.",
],
None,
),
);
state.stages.push(
Stage::new(Task::InspectObjects)
.msg(
vec![
"What, no fire? Er, sorry about that. Just a little joke we like to play \
on the new recruits.",
],
None,
)
.msg(
vec![
"Now, let's learn how to inspect and interact with objects in the map.",
"",
"- Click on something.",
"- Hint: You have to zoom in before you can select anything.",
],
None,
),
);
state.stages.push(
Stage::new(Task::TimeControls)
.warp_to(
ID::Intersection(map.find_i_by_osm_id(osm::NodeID(53096945)).unwrap()),
Some(6.5),
)
.msg(
vec![
"Inspection complete!",
"",
"You'll work day and night, watching traffic patterns unfold.",
],
arrow(time.panel.center_of_panel()),
)
.msg(
vec!["You can pause or resume time"],
arrow(speed.panel.center_of("pause")),
)
.msg(
vec![
"Speed things up",
"",
"(The keyboard shortcuts are very helpful here!)",
],
arrow(speed.panel.center_of("30x speed")),
)
.msg(
vec!["Advance time by certain amounts"],
arrow(speed.panel.center_of("step forwards")),
)
.msg(
vec!["And jump to the beginning of the day"],
arrow(speed.panel.center_of("reset to midnight")),
)
.msg(
vec!["Let's try these controls out. Wait until 5pm or later."],
None,
),
);
state.stages.push(
Stage::new(Task::PauseResume)
.msg(
vec!["Whew, that took a while! (Hopefully not though...)"],
None,
)
.msg(
vec![
"You might've figured it out already,",
"But you'll be pausing/resuming time VERY frequently",
],
arrow(speed.panel.center_of("pause")),
)
.msg(
vec!["Just reassure me and pause/resume time a few times, alright?"],
None,
),
);
state.stages.push(
Stage::new(Task::Escort)
// Don't center on where the agents are, be a little offset
.warp_to(
ID::Building(map.find_b_by_osm_id(bldg(217699780)).unwrap()),
Some(10.0),
)
.custom_spawn(Box::new(move |app| {
// Seed a specific target car, and fill up the target building's private
// parking to force the target to park on-street.
let map = &app.primary.map;
let goal_bldg = map.find_b_by_osm_id(bldg(217701875)).unwrap();
let start_lane = {
let r = map.get_r(
map.find_r_by_osm_id(OriginalRoad::new(
158782224,
(1709145066, 53128052),
))
.unwrap(),
);
assert_eq!(r.lanes_ltr().len(), 6);
r.lanes_ltr()[2].0
};
let lane_near_bldg = {
let r = map.get_r(
map.find_r_by_osm_id(OriginalRoad::new(6484869, (53163501, 53069236)))
.unwrap(),
);
assert_eq!(r.lanes_ltr().len(), 6);
r.lanes_ltr()[3].0
};
let mut scenario = Scenario::empty(map, "prank");
scenario.people.push(PersonSpec {
orig_id: None,
origin: TripEndpoint::SuddenlyAppear(Position::new(
start_lane,
map.get_l(start_lane).length() * 0.8,
)),
trips: vec![IndividTrip::new(
Time::START_OF_DAY,
TripPurpose::Shopping,
TripEndpoint::Bldg(goal_bldg),
TripMode::Drive,
)],
});
// Will definitely get there first
for _ in 0..map.get_b(goal_bldg).num_parking_spots() {
scenario.people.push(PersonSpec {
orig_id: None,
origin: TripEndpoint::SuddenlyAppear(Position::new(
lane_near_bldg,
map.get_l(lane_near_bldg).length() / 2.0,
)),
trips: vec![IndividTrip::new(
Time::START_OF_DAY,
TripPurpose::Shopping,
TripEndpoint::Bldg(goal_bldg),
TripMode::Drive,
)],
});
}
let mut rng = app.primary.current_flags.sim_flags.make_rng();
scenario.instantiate(
&mut app.primary.sim,
map,
&mut rng,
&mut Timer::new("spawn trip"),
);
app.primary.sim.tiny_step(map, &mut app.primary.sim_cb);
// And add some noise
spawn_agents_around(
app.primary
.map
.find_i_by_osm_id(osm::NodeID(1709145066))
.unwrap(),
app,
);
}))
.msg(
vec!["Alright alright, no need to wear out your spacebar."],
None,
)
.msg(
vec![
"Oh look, some people appeared!",
"We've got pedestrians, bikes, and cars moving around now.",
],
None,
)
.msg(
vec!["You can see the number of them here."],
arrow(agent_meter.panel.center_of_panel()),
)
.left_aligned_msg(
vec![
"Why don't you follow this car to their destination,",
"see where they park, and then play a little... prank?",
],
Some(Box::new(|g, app| {
g.canvas
.map_to_screen(
app.primary
.sim
.canonical_pt_for_agent(AgentID::Car(ESCORT), &app.primary.map)
.unwrap(),
)
.to_pt()
})),
)
.msg(
vec![
"You don't have to manually chase them; just click to follow.",
"",
"(If you do lose track of them, just reset)",
],
arrow(speed.panel.center_of("reset to midnight")),
),
);
state.stages.push(
Stage::new(Task::LowParking)
// TODO Actually, we ideally just want a bunch of parked cars, not all these trips
.scenario(ScenarioGenerator {
scenario_name: "low parking".to_string(),
only_seed_buses: Some(BTreeSet::new()),
spawn_over_time: vec![SpawnOverTime {
num_agents: 1000,
start_time: Time::START_OF_DAY,
stop_time: Time::START_OF_DAY + Duration::hours(3),
goal: None,
percent_driving: 1.0,
percent_biking: 0.0,
percent_use_transit: 0.0,
}],
border_spawn_over_time: Vec::new(),
})
.msg(
vec![
"What an immature prank. You should re-evaluate your life decisions.",
"",
"The map is quite large, so to help you orient, the minimap shows you an \
overview of all activity. You can click and drag it just like the normal \
map.",
],
arrow(minimap.get_panel().center_of("minimap")),
)
.msg(
vec![
"You can apply different layers to the map, to find things like:",
"",
"- roads with high traffic",
"- bus stops",
"- how much parking is filled up",
],
arrow(minimap.get_panel().center_of("change layers")),
)
.msg(
vec![
"Let's try these out.",
"There are lots of cars parked everywhere. Can you find a road that's \
almost out of parking spots?",
],
None,
),
);
let bike_lane_scenario = make_bike_lane_scenario(map);
let bike_lane_focus_pt = map.find_b_by_osm_id(bldg(217699496)).unwrap();
state.stages.push(
Stage::new(Task::WatchBikes)
.warp_to(ID::Building(bike_lane_focus_pt), None)
.scenario(bike_lane_scenario.clone())
.msg(
vec![
"Well done!",
"",
"Something's about to happen over here. Follow along and figure out what \
the problem is, at whatever speed you'd like.",
],
None,
),
);
let top_center = state.make_top_center(ctx, true);
state.stages.push(
Stage::new(Task::FixBikes)
.scenario(bike_lane_scenario)
.warp_to(ID::Building(bike_lane_focus_pt), None)
.msg(
vec![
"Looks like lots of cars and bikes trying to go to a house by the \
playfield.",
"",
"When lots of cars and bikes share the same lane, cars are delayed \
(assuming there's no room to pass) and the cyclist probably feels unsafe \
too.",
],
None,
)
.msg(
vec![
"Luckily, you have the power to modify lanes! What if you could transform \
the parking lanes that aren't being used much into bike lanes?",
],
None,
)
.msg(
vec!["To edit lanes, click 'edit map' and then select a lane."],
arrow(top_center.center_of("edit map")),
)
.msg(
vec![
"When you finish making edits, time will jump to the beginning of the \
next day. You can't make most changes in the middle of the day.",
"",
"Seattleites are really boring; they follow the exact same schedule \
everyday. They're also stubborn, so even if you try to influence their \
decision whether to drive, walk, bike, or take a bus, they'll do the \
same thing. For now, you're just trying to make things better, assuming \
people stick to their routine.",
],
None,
)
.msg(
// TODO Deliberately vague with the measurement.
vec![
format!(
"So adjust lanes and speed up the slowest trip by at least {}.",
CAR_BIKE_CONTENTION_GOAL
),
"".to_string(),
"You can explore results as trips finish. When everyone's finished, \
you'll get your final score."
.to_string(),
],
arrow(agent_meter.panel.center_of("more data")),
),
);
state.stages.push(Stage::new(Task::Done).msg(
vec![
"You're ready for the hard stuff now.",
"",
"- Try out some challenges",
"- Explore larger parts of Seattle in the sandbox, and try out any ideas you've \
got.",
"- Check out community proposals, and submit your own",
"",
"Go have the appropriate amount of fun!",
],
None,
));
state
// TODO Multi-modal trips -- including parking. (Cars per bldg, ownership)
// TODO Explain the finished trip data
// The city is in total crisis. You've only got 10 days to do something before all hell
// breaks loose and people start kayaking / ziplining / crab-walking / cartwheeling / to
// work.
}
/// Returns the scenarios whose results should be prebaked for this map —
/// currently just the bike-lane scenario used by the tutorial's final stages.
pub fn scenarios_to_prebake(map: &Map) -> Vec<ScenarioGenerator> {
    let mut scenarios = Vec::with_capacity(1);
    scenarios.push(make_bike_lane_scenario(map));
    scenarios
}
}
/// Context-sensitive actions offered during the tutorial. Which action (if any)
/// is available depends on the current task and the kind of object selected.
pub fn actions(app: &App, id: ID) -> Vec<(Key, String)> {
    let current_task = app.session.tutorial.as_ref().unwrap().interaction();
    match (current_task, id) {
        (Task::LowParking, ID::Lane(_)) => {
            vec![(Key::C, "check the parking occupancy".to_string())]
        }
        (Task::Escort, ID::Car(_)) => {
            vec![(Key::C, "draw WASH ME".to_string())]
        }
        // No tutorial action applies to anything else.
        _ => Vec::new(),
    }
}
/// Executes one of the tutorial-specific actions offered by `actions`, updating
/// tutorial progress (`prank_done`, `parking_found`) and returning a popup that
/// describes the outcome.
///
/// Panics (via `unreachable!`) if called with an `(id, action)` combination that
/// `actions` never offers.
pub fn execute(ctx: &mut EventCtx, app: &mut App, id: ID, action: &str) -> Transition {
    // `as_mut` already yields a mutable reference; the binding itself doesn't
    // need to be `mut`.
    let tut = app.session.tutorial.as_mut().unwrap();
    // `action` is already a `&str`, so no `as_ref()` conversion is needed.
    let response = match (id, action) {
        (ID::Car(c), "draw WASH ME") => {
            // The escort counts as parked once the sim no longer has an active
            // trip for that agent.
            let is_parked = app
                .primary
                .sim
                .agent_to_trip(AgentID::Car(ESCORT))
                .is_none();
            if c == ESCORT {
                if is_parked {
                    tut.prank_done = true;
                    PopupMsg::new(
                        ctx,
                        "Prank in progress",
                        vec!["You quickly scribble on the window..."],
                    )
                } else {
                    PopupMsg::new(
                        ctx,
                        "Not yet!",
                        vec![
                            "You're going to run up to an occupied car and draw on their windows?",
                            "Sounds like we should be friends.",
                            "But, er, wait for the car to park. (You can speed up time!)",
                        ],
                    )
                }
            } else if c.1 == VehicleType::Bike {
                PopupMsg::new(
                    ctx,
                    "That's a bike",
                    vec![
                        "Achievement unlocked: You attempted to draw WASH ME on a cyclist.",
                        "This game is PG-13 or something, so I can't really describe what happens \
                         next.",
                        "But uh, don't try this at home.",
                    ],
                )
            } else {
                PopupMsg::new(
                    ctx,
                    "Wrong car",
                    vec![
                        "You're looking at the wrong car.",
                        "Use the 'reset to midnight' (key binding 'X') to start over, if you lost \
                         the car to follow.",
                    ],
                )
            }
        }
        (ID::Lane(l), "check the parking occupancy") => {
            let lane = app.primary.map.get_l(l);
            if lane.is_parking() {
                // Fraction of this lane's on-street spots that are still free.
                let percent = (app.primary.sim.get_free_onstreet_spots(l).len() as f64)
                    / (lane.number_parking_spots() as f64);
                if percent > 0.1 {
                    PopupMsg::new(
                        ctx,
                        "Not quite",
                        vec![
                            format!("This lane has {:.0}% spots free", percent * 100.0),
                            "Try using the 'parking occupancy' layer from the minimap controls"
                                .to_string(),
                        ],
                    )
                } else {
                    tut.parking_found = true;
                    PopupMsg::new(
                        ctx,
                        "Noice",
                        vec!["Yup, parallel parking would be tough here!"],
                    )
                }
            } else {
                PopupMsg::new(ctx, "Uhh..", vec!["That's not even a parking lane"])
            }
        }
        // `actions` only ever offers the two combinations handled above.
        _ => unreachable!(),
    };
    Transition::Push(response)
}
/// The opening cutscene: the boss "welcomes" the player before the tutorial
/// starts. Purely narrative — no game state is touched.
fn intro_story(ctx: &mut EventCtx, app: &App) -> Box<dyn State<App>> {
    CutsceneBuilder::new("Introduction")
        .boss(
            "Argh, the mayor's on my case again about the West Seattle bridge. This day couldn't \
             get any worse.",
        )
        .player("Er, hello? Boss? I'm --")
        .boss("Yet somehow it did.. You're the new recruit. Yeah, yeah. Come in.")
        .boss(
            "Due to budget cuts, we couldn't hire a real traffic engineer, so we just called some \
             know-it-all from Reddit who seems to think they can fix Seattle traffic.",
        )
        .player("Yes, hi, my name is --")
        .boss("We can't afford name-tags, didn't you hear, budget cuts? Your name doesn't matter.")
        .player("What about my Insta handle?")
        .boss("-glare-")
        .boss(
            "Look, you think fixing traffic is easy? Hah! You can't fix one intersection without \
             breaking ten more.",
        )
        .boss(
            "And everybody wants something different! Bike lanes here! More parking! Faster \
             buses! Cheaper housing! Less rain! Free this, subsidized that!",
        )
        .boss("Light rail and robot cars aren't here to save the day! Know what you'll be using?")
        .extra("drone.svg", 1.0, "The traffic drone")
        .player("Is that... duct tape?")
        .boss(
            "Can't spit anymore cause of COVID and don't get me started on prayers. Well, off to \
             training for you!",
        )
        .build(
            ctx,
            app,
            // Closing caption drawn by the cutscene once the dialogue ends.
            Box::new(|ctx| {
                Text::from(Line("Use the tutorial to learn the basic controls.").fg(Color::BLACK))
                    .draw(ctx)
            }),
        )
}
/// Wraps a raw OSM id as a way. All building references in this tutorial happen
/// to be OSM ways (not nodes or relations), so the conversion is unconditional.
fn bldg(id: i64) -> osm::OsmID {
    osm::OsmID::Way(osm::WayID(id))
}
| 37.924042 | 100 | 0.446868 |
62ce714092d9585464fccee96a49d68c2d569b0d | 126,969 | use assert_matches::assert_matches;
use candid::Encode;
use ic_base_types::NumSeconds;
use ic_config::{execution_environment, subnet_config::CyclesAccountManagerConfig};
use ic_error_types::{ErrorCode, RejectCode, UserError};
use ic_execution_environment::{
ExecutionEnvironment, ExecutionEnvironmentImpl, Hypervisor, IngressHistoryWriterImpl,
};
use ic_ic00_types::{
self as ic00, CanisterHttpRequestArgs, CanisterIdRecord, CanisterStatusResultV2, EmptyBlob,
InstallCodeArgs, Method, Payload as Ic00Payload, IC_00,
};
use ic_ic00_types::{CanisterInstallMode, CanisterStatusType, HttpMethodType};
use ic_interfaces::execution_environment::SubnetAvailableMemory;
use ic_interfaces::{
execution_environment::{
AvailableMemory, CanisterHeartbeatError, ExecuteMessageResult, ExecutionMode,
},
messages::CanisterInputMessage,
};
use ic_logger::ReplicaLogger;
use ic_metrics::MetricsRegistry;
use ic_registry_provisional_whitelist::ProvisionalWhitelist;
use ic_registry_routing_table::{CanisterIdRange, RoutingTable};
use ic_registry_subnet_type::SubnetType;
use ic_replicated_state::{
canister_state::WASM_PAGE_SIZE_IN_BYTES, NetworkTopology, SubnetTopology,
};
use ic_replicated_state::{
canister_state::{ENFORCE_MESSAGE_MEMORY_USAGE, QUEUE_INDEX_NONE},
testing::{CanisterQueuesTesting, ReplicatedStateTesting, SystemStateTesting},
CallContextManager, CallOrigin, CanisterState, CanisterStatus, InputQueueType, ReplicatedState,
SchedulerState, SystemState,
};
use ic_test_utilities::execution_environment::ExecutionEnvironmentBuilder;
use ic_test_utilities::state::get_stopping_canister_on_nns;
use ic_test_utilities::{
crypto::mock_random_number_generator,
cycles_account_manager::CyclesAccountManagerBuilder,
history::MockIngressHistory,
metrics::{fetch_histogram_vec_count, metric_vec},
mock_time,
state::{
get_running_canister, get_running_canister_with_args, get_running_canister_with_balance,
get_stopped_canister, get_stopped_canister_on_system_subnet,
get_stopped_canister_with_controller, get_stopping_canister, running_canister_into_stopped,
CanisterStateBuilder, ReplicatedStateBuilder, SystemStateBuilder,
},
types::{
ids::{canister_test_id, message_test_id, node_test_id, subnet_test_id, user_test_id},
messages::{IngressBuilder, RequestBuilder, ResponseBuilder, SignedIngressBuilder},
},
with_test_replica_logger,
};
use ic_types::{
ingress::{IngressStatus, WasmResult},
messages::{
CallbackId, MessageId, Payload, RejectContext, RequestOrResponse, Response,
StopCanisterContext, MAX_RESPONSE_COUNT_BYTES,
},
methods::{Callback, WasmClosure},
CanisterId, ComputeAllocation, Cycles, MemoryAllocation, NumBytes, NumInstructions,
PrincipalId, QueueIndex, RegistryVersion, SubnetId, Time,
};
use lazy_static::lazy_static;
use maplit::btreemap;
use std::{collections::BTreeSet, convert::TryFrom, sync::Arc};
use tempfile::TempDir;
// Cycles charged when creating a canister — presumably mirrors the production
// fee in CyclesAccountManagerConfig; TODO confirm.
const CANISTER_CREATION_FEE: Cycles = Cycles::new(1_000_000_000_000);
// Per-message instruction limit used for executions throughout these tests.
const MAX_NUM_INSTRUCTIONS: NumInstructions = NumInstructions::new(1_000_000_000);
// Starting cycles balance given to canisters created in these tests.
const INITIAL_CYCLES: Cycles = Cycles::new(5_000_000_000_000);
lazy_static! {
    // Effectively unlimited subnet memory (i64::MAX / 2 for both total and
    // message memory), so tests only hit limits they set explicitly.
    static ref MAX_SUBNET_AVAILABLE_MEMORY: SubnetAvailableMemory =
        AvailableMemory::new(i64::MAX / 2, i64::MAX / 2).into();
}
// 0 — presumably interpreted as "no limit" on canister count; verify against callers.
const MAX_NUMBER_OF_CANISTERS: u64 = 0;
fn initial_state(
subnet_type: SubnetType,
) -> (TempDir, SubnetId, Arc<NetworkTopology>, ReplicatedState) {
let tmpdir = tempfile::Builder::new().prefix("test").tempdir().unwrap();
let subnet_id = subnet_test_id(1);
let routing_table = Arc::new(
RoutingTable::try_from(btreemap! {
CanisterIdRange{ start: CanisterId::from(0), end: CanisterId::from(0xff) } => subnet_id,
})
.unwrap(),
);
let mut replicated_state = ReplicatedState::new_rooted_at(
subnet_id,
SubnetType::Application,
tmpdir.path().to_path_buf(),
);
replicated_state.metadata.network_topology.routing_table = Arc::clone(&routing_table);
replicated_state.metadata.network_topology.subnets.insert(
subnet_id,
SubnetTopology {
subnet_type,
..SubnetTopology::default()
},
);
(
tmpdir,
subnet_id,
Arc::new(replicated_state.metadata.network_topology.clone()),
replicated_state,
)
}
/// Runs `f` with a freshly constructed `ExecutionEnvironmentImpl` for a subnet of
/// the given type, plus the matching initial replicated state, subnet id, and
/// network topology.
pub fn with_setup<F>(subnet_type: SubnetType, f: F)
where
    F: FnOnce(ExecutionEnvironmentImpl, ReplicatedState, SubnetId, Arc<NetworkTopology>),
{
    with_test_replica_logger(|log| {
        let (_, subnet_id, network_topology, state) = initial_state(subnet_type);
        let metrics_registry = MetricsRegistry::new();
        let cycles_account_manager = Arc::new(
            CyclesAccountManagerBuilder::new()
                .with_subnet_id(subnet_id)
                .build(),
        );
        // Hypervisor and ingress-history writer are the two components the
        // execution environment is wired together from.
        let hypervisor = Hypervisor::new(
            execution_environment::Config::default(),
            &metrics_registry,
            subnet_id,
            subnet_type,
            log.clone(),
            Arc::clone(&cycles_account_manager),
        );
        let hypervisor = Arc::new(hypervisor);
        let ingress_history_writer = IngressHistoryWriterImpl::new(
            execution_environment::Config::default(),
            log.clone(),
            &metrics_registry,
        );
        let ingress_history_writer = Arc::new(ingress_history_writer);
        let exec_env = ExecutionEnvironmentImpl::new(
            log,
            hypervisor,
            ingress_history_writer,
            &metrics_registry,
            subnet_id,
            subnet_type,
            1,
            execution_environment::Config::default(),
            cycles_account_manager,
        );
        f(exec_env, state, subnet_id, network_topology)
    });
}
/// Compiles `wat` into an execution state, builds a canister around the supplied
/// `system_state` (which the caller must have pre-loaded with exactly one input
/// message), executes that message on an Application subnet, and hands the
/// `ExecuteMessageResult` to `test` for assertions.
fn test_outgoing_messages(
    system_state: SystemState,
    wat: &str,
    test: impl FnOnce(ExecuteMessageResult<CanisterState>),
) {
    let subnet_type = SubnetType::Application;
    with_test_replica_logger(|log| {
        let (_, subnet_id, network_topology, _) = initial_state(subnet_type);
        let metrics_registry = MetricsRegistry::new();
        let cycles_account_manager = Arc::new(
            CyclesAccountManagerBuilder::new()
                .with_subnet_type(subnet_type)
                .build(),
        );
        let hypervisor = Hypervisor::new(
            execution_environment::Config::default(),
            &metrics_registry,
            subnet_id,
            subnet_type,
            log.clone(),
            Arc::clone(&cycles_account_manager),
        );
        let hypervisor = Arc::new(hypervisor);
        // Ingress history is irrelevant here, so a mock suffices.
        let ingress_history_writer = MockIngressHistory::new();
        let ingress_history_writer = Arc::new(ingress_history_writer);
        let exec_env = ExecutionEnvironmentImpl::new(
            log,
            Arc::clone(&hypervisor) as Arc<_>,
            ingress_history_writer,
            &metrics_registry,
            subnet_id,
            subnet_type,
            1,
            execution_environment::Config::default(),
            cycles_account_manager,
        );
        let wasm_binary = wabt::wat2wasm(wat).unwrap();
        let tmpdir = tempfile::Builder::new().prefix("test").tempdir().unwrap();
        let execution_state = hypervisor
            .create_execution_state(
                wasm_binary,
                tmpdir.path().to_path_buf(),
                system_state.canister_id,
            )
            .unwrap();
        let mut canister = CanisterState {
            system_state,
            execution_state: Some(execution_state),
            scheduler_state: SchedulerState::default(),
        };
        // Pop the message the caller injected; panics if none was queued.
        let input_message = canister.system_state.queues_mut().pop_input().unwrap();
        let res = exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            input_message,
            mock_time(),
            network_topology,
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
        );
        test(res);
    });
}
// A Wasm module whose `test` update method makes a single outgoing call via the
// ic0 call API (call_new / call_data_append / call_cycles_add / call_perform).
// The callee canister id (777) is encoded in the data segment at offset 100, the
// method name "some_remote_method" at offset 0, and the payload "XYZ" at 19.
const CALL_SIMPLE_WAT: &str = r#"(module
                  (import "ic0" "call_new"
                    (func $ic0_call_new
                      (param i32 i32)
                      (param $method_name_src i32)    (param $method_name_len i32)
                      (param $reply_fun i32)          (param $reply_env i32)
                      (param $reject_fun i32)         (param $reject_env i32)
                  ))
                  (import "ic0" "call_data_append" (func $ic0_call_data_append (param $src i32) (param $size i32)))
                  (import "ic0" "call_cycles_add" (func $ic0_call_cycles_add (param $amount i64)))
                  (import "ic0" "call_perform" (func $ic0_call_perform (result i32)))
                  (func $test
                    (call $ic0_call_new
                        (i32.const 100) (i32.const 10)  ;; callee canister id = 777
                        (i32.const 0) (i32.const 18)    ;; refers to "some_remote_method" on the heap
                        (i32.const 11) (i32.const 22)   ;; fictive on_reply closure
                        (i32.const 33) (i32.const 44)   ;; fictive on_reject closure
                    )
                    (call $ic0_call_data_append
                        (i32.const 19) (i32.const 3)    ;; refers to "XYZ" on the heap
                    )
                    (call $ic0_call_cycles_add
                        (i64.const 100)
                    )
                    (call $ic0_call_perform)
                    drop)
                  (export "canister_update test" (func $test))
                  (memory $memory 1)
                  (export "memory" (memory $memory))
                  (data (i32.const 0) "some_remote_method XYZ")
                  (data (i32.const 100) "\00\00\00\00\00\00\03\09\01\01")
            )"#;
// A Wasm module whose `test` update method makes one outgoing call via the
// (older, one-shot) ic0.call_simple API and then replies to the caller with
// "MONOLORD". Same callee (canister 777) and payload layout as CALL_SIMPLE_WAT.
const CALL_SIMPLE_AND_REPLY_WAT: &str = r#"(module
                  (import "ic0" "call_simple"
                    (func $ic0_call_simple
                        (param i32 i32)
                        (param $method_name_src i32)    (param $method_name_len i32)
                        (param $reply_fun i32)          (param $reply_env i32)
                        (param $reject_fun i32)         (param $reject_env i32)
                        (param $data_src i32)           (param $data_len i32)
                        (result i32)))
                  (import "ic0" "msg_reply" (func $msg_reply))
                  (import "ic0" "msg_reply_data_append"
                    (func $msg_reply_data_append (param i32) (param i32)))
                  (func $test
                    (call $ic0_call_simple
                        (i32.const 100) (i32.const 10)  ;; callee canister id = 777
                        (i32.const 0) (i32.const 18)    ;; refers to "some_remote_method" on the heap
                        (i32.const 11) (i32.const 22)   ;; fictive on_reply closure
                        (i32.const 33) (i32.const 44)   ;; fictive on_reject closure
                        (i32.const 19) (i32.const 3)    ;; refers to "XYZ" on the heap
                        )
                    drop
                    (call $msg_reply_data_append
                        (i32.const 23) (i32.const 8))    ;; refers to "MONOLORD"
                    (call $msg_reply))
                  (export "canister_update test" (func $test))
                  (memory $memory 1)
                  (export "memory" (memory $memory))
                  (data (i32.const 0) "some_remote_method XYZ MONOLORD")
                  (data (i32.const 100) "\00\00\00\00\00\00\03\09\01\01")
            )"#;
// A Wasm module whose `test` update method rejects the incoming call with the
// message "MONOLORD" (bytes 23..31 of the data segment).
const REJECT_WAT: &str = r#"(module
                  (import "ic0" "msg_reject"
                    (func $reject (param i32) (param i32)))
                  (func $test
                    (call $reject
                        (i32.const 23) (i32.const 8)    ;; refers to "MONOLORD"
                    ))
                  (export "canister_update test" (func $test))
                  (memory $memory 1)
                  (export "memory" (memory $memory))
                  (data (i32.const 0) "some_remote_method XYZ MONOLORD")
            )"#;
// A Wasm module whose table entry 0 — used as a response callback — rejects the
// open call context with the message "error".
const REJECT_IN_CALLBACK_WAT: &str = r#"(module
                  (import "ic0" "msg_reject"
                    (func $reject (param i32 i32)))
                  (func $test (param i32)
                    (call $reject
                        (i32.const 0) (i32.const 5)))
                  (table funcref (elem $test))
                  (memory $memory 1)
                  (export "memory" (memory $memory))
                  (data (i32.const 0) "error"))"#;
/// The canister id (777) that the WAT modules above hardcode as the callee in
/// their data segment at offset 100.
fn wat_canister_id() -> CanisterId {
    canister_test_id(777)
}
/// Enqueues an ingress message from user 2 invoking "test" on canister 42.
fn inject_ingress(system_state: &mut SystemState) {
    let ingress = IngressBuilder::default()
        .source(user_test_id(2))
        .receiver(canister_test_id(42))
        .method_name("test".to_string())
        .message_id(message_test_id(555))
        .build();
    system_state.queues_mut().push_ingress(ingress);
}
/// Enqueues a canister-to-canister request from canister 55 invoking "test" on
/// canister 42, as if it had arrived from a remote subnet.
fn inject_request(system_state: &mut SystemState) {
    let request = RequestBuilder::default()
        .receiver(canister_test_id(42))
        .sender(canister_test_id(55))
        .method_name("test".to_string())
        .sender_reply_callback(CallbackId::from(999))
        .build();
    system_state
        .queues_mut()
        .push_input(
            QueueIndex::from(0),
            request.into(),
            InputQueueType::RemoteSubnet,
        )
        .unwrap();
}
/// Enqueues an incoming (empty-data) response for callback `cb_id`, pairing it
/// with an outgoing request from this canister to canister 55.
fn inject_response(system_state: &mut SystemState, cb_id: CallbackId) {
    let this_canister = system_state.canister_id();
    let other_canister = canister_test_id(55);
    // Pushing the outbound request first — presumably this reserves the input
    // slot the response below will occupy; confirm against SystemState semantics.
    let request = RequestBuilder::default()
        .sender(this_canister)
        .receiver(other_canister)
        .build();
    system_state.push_output_request(request).unwrap();
    let response = ResponseBuilder::default()
        .originator(this_canister)
        .respondent(other_canister)
        .originator_reply_callback(cb_id)
        .response_payload(Payload::Data(vec![]))
        .build();
    system_state
        .queues_mut()
        .push_input(
            QueueIndex::from(0),
            response.into(),
            InputQueueType::RemoteSubnet,
        )
        .unwrap();
}
/// Pops the message queued for the WAT callee canister (777) and checks that it
/// is exactly the request CALL_SIMPLE_WAT's `test` method produces.
fn assert_correct_request(system_state: &mut SystemState) {
    let callee = wat_canister_id();
    let (_, message) = system_state
        .queues_mut()
        .pop_canister_output(&callee)
        .unwrap();
    match message {
        RequestOrResponse::Request(request) => {
            assert_eq!(request.receiver, callee);
            assert_eq!(request.sender, canister_test_id(42));
            assert_eq!(request.method_name, "some_remote_method");
            assert_eq!(request.method_payload, b"XYZ");
        }
        other => panic!("unexpected message popped: {:?}", other),
    }
}
#[test]
// Canister gets an ingress message, produces one outgoing request
fn test_ingress_message_side_effects_1() {
    let mut system_state = SystemStateBuilder::default().build();
    // Freeze threshold of 0 — presumably so a low balance can't block the
    // outgoing call; confirm against cycles-accounting rules.
    system_state.freeze_threshold = NumSeconds::from(0);
    inject_ingress(&mut system_state);
    test_outgoing_messages(
        system_state,
        CALL_SIMPLE_WAT,
        |mut execute_message_result| {
            // Exactly one output queue (to the WAT callee) holding one message.
            assert_eq!(
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_queues_len(),
                1
            );
            assert_eq!(
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_message_count(),
                1
            );
            assert_correct_request(&mut execute_message_result.canister.system_state);
        },
    );
}
#[test]
// Canister gets an ingress message, produces one outgoing request and replies
fn test_ingress_message_side_effects_2() {
    let mut system_state = SystemStateBuilder::default().build();
    system_state.freeze_threshold = NumSeconds::from(0);
    inject_ingress(&mut system_state);
    test_outgoing_messages(
        system_state,
        CALL_SIMPLE_AND_REPLY_WAT,
        |execute_message_result| {
            // Only the outgoing request lands in the output queues; the reply to
            // the ingress message is reported via the ingress status instead.
            assert_eq!(
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_queues_len(),
                1
            );
            assert_eq!(
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_message_count(),
                1
            );
            let (_, status) = execute_message_result.ingress_status.unwrap();
            assert_eq!(
                status,
                IngressStatus::Completed {
                    receiver: canister_test_id(42).get(),
                    user_id: user_test_id(2),
                    result: WasmResult::Reply(b"MONOLORD".to_vec()),
                    time: mock_time(),
                }
            );
        },
    );
}
#[test]
// Canister gets an ingress message and rejects it. (The original comment said
// "request message", but the test injects an ingress message.)
fn test_ingress_message_side_effects_3() {
    let mut system_state = SystemStateBuilder::default().build();
    system_state.freeze_threshold = NumSeconds::from(0);
    inject_ingress(&mut system_state);
    test_outgoing_messages(system_state, REJECT_WAT, |execute_message_result| {
        // Nothing is enqueued; the reject surfaces only via the ingress status.
        assert_eq!(
            execute_message_result
                .canister
                .system_state
                .queues()
                .output_queues_len(),
            0
        );
        assert_eq!(
            execute_message_result
                .canister
                .system_state
                .queues()
                .output_message_count(),
            0
        );
        let (_, status) = execute_message_result.ingress_status.unwrap();
        assert_eq!(
            status,
            IngressStatus::Completed {
                receiver: canister_test_id(42).get(),
                user_id: user_test_id(2),
                result: WasmResult::Reject("MONOLORD".to_string()),
                time: mock_time()
            }
        );
    });
}
#[test]
/// Output requests can be enqueued on system subnets, irrespective of memory limits.
fn test_allocate_memory_for_output_request_system_subnet() {
    with_setup(SubnetType::System, |exec_env, _, _, network_topology| {
        // Canister enqueues an outgoing request when its `test()` method is called.
        let wasm_binary = wabt::wat2wasm(CALL_SIMPLE_WAT).unwrap();
        let tmpdir = tempfile::Builder::new().prefix("test").tempdir().unwrap();
        let system_state = SystemStateBuilder::default()
            .freeze_threshold(NumSeconds::from(0))
            .build();
        let execution_state = exec_env
            .hypervisor_for_testing()
            .create_execution_state(
                wasm_binary,
                tmpdir.path().to_path_buf(),
                system_state.canister_id(),
            )
            .unwrap();
        let mut canister = CanisterState {
            system_state,
            execution_state: Some(execution_state),
            scheduler_state: SchedulerState::default(),
        };
        let input_message = CanisterInputMessage::Ingress(
            IngressBuilder::default()
                .method_name("test".to_string())
                .build(),
        );
        // Both the subnet memory and the canister's own memory allocation are
        // deliberately tiny (13 bytes); on a system subnet they must not matter.
        let subnet_available_memory: SubnetAvailableMemory = AvailableMemory::new(13, 13).into();
        canister.system_state.memory_allocation =
            MemoryAllocation::try_from(NumBytes::new(13)).unwrap();
        let execute_message_result = exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            input_message,
            mock_time(),
            network_topology,
            subnet_available_memory.clone(),
        );
        canister = execute_message_result.canister;
        // There should be one reserved slot in the queues.
        assert_eq!(1, canister.system_state.queues().reserved_slots());
        // Subnet available memory should have remained the same.
        assert_eq!(13, subnet_available_memory.get_total_memory());
        assert_eq!(13, subnet_available_memory.get_message_memory());
        // And the expected request should be enqueued.
        assert_correct_request(&mut canister.system_state);
    });
}
#[test]
/// Output requests use up canister and subnet memory and can't be enqueued if
/// any of them is above the limit.
///
/// Each of the four executions below re-runs the same `test()` call with a
/// different combination of canister memory allocation and subnet memory;
/// only the last combination (everything large) lets the request through.
fn test_allocate_memory_for_output_requests() {
    with_setup(
        SubnetType::Application,
        |exec_env, _, _, network_topology| {
            // Canister enqueues an outgoing request when its `test()` method is called.
            let wasm_binary = wabt::wat2wasm(CALL_SIMPLE_WAT).unwrap();
            let tmpdir = tempfile::Builder::new().prefix("test").tempdir().unwrap();
            let system_state = SystemStateBuilder::default()
                .freeze_threshold(NumSeconds::from(0))
                .build();
            let execution_state = exec_env
                .hypervisor_for_testing()
                .create_execution_state(
                    wasm_binary,
                    tmpdir.path().to_path_buf(),
                    system_state.canister_id(),
                )
                .unwrap();
            let mut canister = CanisterState {
                system_state,
                execution_state: Some(execution_state),
                scheduler_state: SchedulerState::default(),
            };
            let input_message = CanisterInputMessage::Ingress(
                IngressBuilder::default()
                    .method_name("test".to_string())
                    .build(),
            );
            // Tiny canister memory allocation prevents enqueuing an output request.
            let subnet_available_memory: SubnetAvailableMemory =
                AvailableMemory::new(1 << 30, 1 << 30).into();
            canister.system_state.memory_allocation =
                MemoryAllocation::try_from(NumBytes::new(13)).unwrap();
            let execute_message_result = exec_env.execute_canister_message(
                canister,
                MAX_NUM_INSTRUCTIONS,
                input_message.clone(),
                mock_time(),
                network_topology.clone(),
                subnet_available_memory.clone(),
            );
            canister = execute_message_result.canister;
            // Subnet memory untouched: nothing was enqueued (when enforcement is on).
            assert_eq!(1 << 30, subnet_available_memory.get_total_memory());
            assert_eq!(1 << 30, subnet_available_memory.get_message_memory());
            if ENFORCE_MESSAGE_MEMORY_USAGE {
                assert!(!canister.system_state.queues().has_output());
            } else {
                // Without enforcement the request goes through anyway.
                assert_eq!(1, canister.system_state.queues().reserved_slots());
                assert_correct_request(&mut canister.system_state);
            }
            // Tiny `SubnetAvailableMemory` also prevents enqueuing an output request.
            let subnet_available_memory: SubnetAvailableMemory =
                AvailableMemory::new(13, 1 << 30).into();
            canister.system_state.memory_allocation =
                MemoryAllocation::try_from(NumBytes::new(1 << 30)).unwrap();
            let execute_message_result = exec_env.execute_canister_message(
                canister,
                MAX_NUM_INSTRUCTIONS,
                input_message.clone(),
                mock_time(),
                network_topology.clone(),
                subnet_available_memory.clone(),
            );
            canister = execute_message_result.canister;
            assert_eq!(13, subnet_available_memory.get_total_memory());
            assert_eq!(1 << 30, subnet_available_memory.get_message_memory());
            if ENFORCE_MESSAGE_MEMORY_USAGE {
                assert!(!canister.system_state.queues().has_output());
            } else {
                assert_eq!(2, canister.system_state.queues().reserved_slots());
                assert_correct_request(&mut canister.system_state);
            }
            // Tiny `SubnetAvailableMessageMemory` also prevents enqueuing an output request.
            let subnet_available_memory: SubnetAvailableMemory =
                AvailableMemory::new(1 << 30, 13).into();
            canister.system_state.memory_allocation =
                MemoryAllocation::try_from(NumBytes::new(1 << 30)).unwrap();
            let execute_message_result = exec_env.execute_canister_message(
                canister,
                MAX_NUM_INSTRUCTIONS,
                input_message.clone(),
                mock_time(),
                network_topology.clone(),
                subnet_available_memory.clone(),
            );
            canister = execute_message_result.canister;
            assert_eq!(1 << 30, subnet_available_memory.get_total_memory());
            assert_eq!(13, subnet_available_memory.get_message_memory());
            if ENFORCE_MESSAGE_MEMORY_USAGE {
                assert!(!canister.system_state.queues().has_output());
            } else {
                assert_eq!(3, canister.system_state.queues().reserved_slots());
                assert_correct_request(&mut canister.system_state);
            }
            // But large enough canister memory allocation and `SubnetAvailableMemory` allow
            // enqueuing an outgoing request.
            let subnet_available_memory: SubnetAvailableMemory =
                AvailableMemory::new(1 << 30, 1 << 30).into();
            canister.system_state.memory_allocation =
                MemoryAllocation::try_from(NumBytes::new(1 << 30)).unwrap();
            let execute_message_result = exec_env.execute_canister_message(
                canister,
                MAX_NUM_INSTRUCTIONS,
                input_message,
                mock_time(),
                network_topology,
                subnet_available_memory.clone(),
            );
            canister = execute_message_result.canister;
            if ENFORCE_MESSAGE_MEMORY_USAGE {
                // There should be one reserved slot in the queues.
                assert_eq!(1, canister.system_state.queues().reserved_slots());
                // Subnet available memory should have decreased by `MAX_RESPONSE_COUNT_BYTES`.
                assert_eq!(
                    (1 << 30) - MAX_RESPONSE_COUNT_BYTES as i64,
                    subnet_available_memory.get_total_memory()
                );
                assert_eq!(
                    (1 << 30) - MAX_RESPONSE_COUNT_BYTES as i64,
                    subnet_available_memory.get_message_memory()
                )
            } else {
                assert_eq!(4, canister.system_state.queues().reserved_slots());
                assert_eq!(1 << 30, subnet_available_memory.get_total_memory());
                assert_eq!(1 << 30, subnet_available_memory.get_message_memory());
            }
            // And the expected request should be enqueued.
            assert_correct_request(&mut canister.system_state);
        },
    );
}
#[test]
// Canister gets a request message and produces one outgoing request
fn test_request_message_side_effects_1() {
    let mut system_state = SystemStateBuilder::default().build();
    system_state.freeze_threshold = NumSeconds::from(0);
    inject_request(&mut system_state);
    test_outgoing_messages(
        system_state,
        CALL_SIMPLE_WAT,
        |mut execute_message_result| {
            // The extra queue is the empty queue created due to the inter-canister request
            // generated by the Canister.
            assert_eq!(
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_queues_len(),
                2
            );
            // Only the WAT-generated request is an actual output message.
            assert_eq!(
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_message_count(),
                1
            );
            assert_correct_request(&mut execute_message_result.canister.system_state);
        },
    );
}
#[test]
// Canister gets a request message, produces one outgoing request and replies
fn test_request_message_side_effects_2() {
    let canister_id = canister_test_id(42);
    let mut system_state = SystemStateBuilder::default()
        .canister_id(canister_id)
        .build();
    system_state.freeze_threshold = NumSeconds::from(0);
    inject_request(&mut system_state);
    test_outgoing_messages(
        system_state,
        CALL_SIMPLE_AND_REPLY_WAT,
        |mut execute_message_result| {
            // Two output messages: the WAT-generated request (to canister 777)
            // and the reply back to the requester (canister 55).
            assert_eq!(
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_queues_len(),
                2
            );
            assert_eq!(
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_message_count(),
                2
            );
            assert_correct_request(&mut execute_message_result.canister.system_state);
            let dst = canister_test_id(55);
            let (_, message) = execute_message_result
                .canister
                .system_state
                .queues_mut()
                .pop_canister_output(&dst)
                .unwrap();
            if let RequestOrResponse::Response(msg) = message {
                assert_eq!(msg.originator, dst);
                assert_eq!(msg.respondent, canister_id);
                assert_eq!(msg.response_payload, Payload::Data(b"MONOLORD".to_vec()));
            } else {
                panic!("unexpected message popped: {:?}", message);
            }
        },
    );
}
#[test]
// Canister gets a request message and rejects it
fn test_request_message_side_effects_3() {
    let canister_id = canister_test_id(42);
    let mut system_state = SystemStateBuilder::default()
        .canister_id(canister_id)
        .build();
    system_state.freeze_threshold = NumSeconds::from(0);
    inject_request(&mut system_state);
    test_outgoing_messages(system_state, REJECT_WAT, |mut execute_message_result| {
        // For a canister-to-canister request, the reject travels back as a
        // Response in the output queue (unlike the ingress case above).
        assert_eq!(
            execute_message_result
                .canister
                .system_state
                .queues()
                .output_queues_len(),
            1
        );
        assert_eq!(
            execute_message_result
                .canister
                .system_state
                .queues()
                .output_message_count(),
            1
        );
        let dst = canister_test_id(55);
        let (_, message) = execute_message_result
            .canister
            .system_state
            .queues_mut()
            .pop_canister_output(&dst)
            .unwrap();
        if let RequestOrResponse::Response(msg) = message {
            assert_eq!(msg.originator, dst);
            assert_eq!(msg.respondent, canister_id);
            assert_eq!(
                msg.response_payload,
                Payload::Reject(RejectContext {
                    code: RejectCode::CanisterReject,
                    message: "MONOLORD".to_string()
                })
            );
        } else {
            panic!("unexpected message popped: {:?}", message);
        }
    });
}
#[test]
// Canister gets a response message and calls a callback, which rejects the call
// context
fn test_response_message_side_effects_1() {
    let canister_id = canister_test_id(42);
    let mut system_state = SystemStateBuilder::default()
        .canister_id(canister_id)
        .build();
    // Open a call context originating from canister 33 and register the
    // callback that the injected response will be delivered to.
    let origin_id = canister_test_id(33);
    let origin_cb_id = CallbackId::from(5);
    let call_context_id = system_state
        .call_context_manager_mut()
        .unwrap()
        .new_call_context(
            CallOrigin::CanisterUpdate(origin_id, origin_cb_id),
            Cycles::from(50),
            Time::from_nanos_since_unix_epoch(0),
        );
    let callback_id = system_state
        .call_context_manager_mut()
        .unwrap()
        .register_callback(Callback::new(
            call_context_id,
            Some(origin_id),
            Some(canister_id),
            Cycles::from(0),
            WasmClosure::new(0, 2),
            WasmClosure::new(0, 2),
            None,
        ));
    // Sanity check: the context we just created is attributed to the origin.
    assert_eq!(
        system_state
            .call_context_manager_mut()
            .unwrap()
            .call_origin(call_context_id)
            .unwrap(),
        CallOrigin::CanisterUpdate(origin_id, origin_cb_id)
    );
    // Make a reservation for the response that the canister will produce
    // for canister 33 when it executes the message above.
    let req = RequestBuilder::default()
        .receiver(canister_id)
        .sender(origin_id)
        .method_name("test".to_string())
        .sender_reply_callback(CallbackId::from(999))
        .build()
        .into();
    system_state
        .queues_mut()
        .push_input(QueueIndex::from(0), req, InputQueueType::RemoteSubnet)
        .unwrap();
    system_state.queues_mut().pop_input().unwrap();
    inject_response(&mut system_state, callback_id);
    test_outgoing_messages(
        system_state,
        REJECT_IN_CALLBACK_WAT,
        |mut execute_message_result| {
            // There should be two messages in the output queues.
            // (A verbatim duplicate of this assertion was removed here — the
            // original asserted the same message count twice in a row.)
            assert_eq!(
                2,
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_message_count()
            );
            // The reject produced by the callback must be routed back to the
            // original caller.
            let dst = origin_id;
            let (_, message) = execute_message_result
                .canister
                .system_state
                .queues_mut()
                .pop_canister_output(&dst)
                .unwrap();
            if let RequestOrResponse::Response(msg) = message {
                assert_eq!(msg.originator, dst);
                assert_eq!(msg.respondent, canister_id);
                assert_eq!(
                    msg.response_payload,
                    Payload::Reject(RejectContext {
                        code: RejectCode::CanisterReject,
                        message: "error".to_string()
                    })
                );
            } else {
                panic!("unexpected message popped: {:?}", message);
            }
        },
    );
}
#[test]
// tests that a canister traps on a reject of an already responded context and
// no outgoing message as a reply is generated anymore
fn test_repeated_response() {
    let canister_id = canister_test_id(42);
    let mut system_state = SystemStateBuilder::default()
        .canister_id(canister_id)
        .build();
    // Open a call context originating from canister 33 and register a
    // callback belonging to it.
    let call_context_id = system_state
        .call_context_manager_mut()
        .unwrap()
        .new_call_context(
            CallOrigin::CanisterUpdate(canister_test_id(33), CallbackId::from(888)),
            Cycles::from(42),
            Time::from_nanos_since_unix_epoch(0),
        );
    let callback_id = system_state
        .call_context_manager_mut()
        .unwrap()
        .register_callback(Callback::new(
            call_context_id,
            Some(canister_test_id(33)),
            Some(canister_id),
            Cycles::from(0),
            WasmClosure::new(0, 2),
            WasmClosure::new(0, 2),
            None,
        ));
    // mark this call context as responded
    system_state
        .call_context_manager_mut()
        .unwrap()
        .on_canister_result(call_context_id, Ok(Some(WasmResult::Reply(vec![]))));
    inject_response(&mut system_state, callback_id);
    test_outgoing_messages(
        system_state,
        REJECT_IN_CALLBACK_WAT,
        |execute_message_result| {
            // There should be just one message in the output queue.
            // The reject attempted by the callback must not yield a second
            // reply for the already-responded context.
            assert_eq!(
                1,
                execute_message_result
                    .canister
                    .system_state
                    .queues()
                    .output_message_count()
            );
        },
    );
}
#[test]
fn stopping_canister_rejects_requests() {
    with_setup(
        SubnetType::Application,
        |exec_env, mut state, _, routing_table| {
            // Since we can't enqueue a request into a stopping canister, create a canister
            // that is running and enqueue the request in it.
            let mut canister = get_running_canister(canister_test_id(0));
            let cycles = Cycles::from(40);
            let req = RequestBuilder::new()
                .sender(canister_test_id(13))
                .payment(cycles)
                .build();
            let reply_callback = req.sender_reply_callback;
            canister
                .system_state
                .queues_mut()
                .push_input(
                    QueueIndex::from(0),
                    RequestOrResponse::Request(req),
                    InputQueueType::RemoteSubnet,
                )
                .unwrap();
            state.put_canister_state(canister);
            // Transition the canister into the stopping state.
            let payload = Encode!(&CanisterIdRecord::from(canister_test_id(0))).unwrap();
            let mut state = exec_env
                .execute_subnet_message(
                    CanisterInputMessage::Ingress(
                        IngressBuilder::new()
                            .source(user_test_id(1))
                            .method_payload(payload)
                            .method_name(Method::StopCanister)
                            .build(),
                    ),
                    state,
                    MAX_NUM_INSTRUCTIONS,
                    &mut mock_random_number_generator(),
                    &None,
                    &ProvisionalWhitelist::Set(BTreeSet::new()),
                    MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                    MAX_NUMBER_OF_CANISTERS,
                )
                .0;
            let mut canister = state.take_canister_state(&canister_test_id(0)).unwrap();
            // The stop request is recorded as a pending stop context; the
            // canister is `Stopping`, not yet `Stopped`.
            assert_eq!(
                canister.system_state.status,
                CanisterStatus::Stopping {
                    stop_contexts: vec![StopCanisterContext::Ingress {
                        sender: user_test_id(1),
                        message_id: message_test_id(0),
                    }],
                    call_context_manager: CallContextManager::default(),
                }
            );
            // Execute the request that was queued while the canister was
            // still running.
            let msg = canister.pop_input().unwrap();
            let canister_id = canister.canister_id();
            let mut result = exec_env.execute_canister_message(
                canister,
                MAX_NUM_INSTRUCTIONS,
                msg,
                mock_time(),
                routing_table,
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            );
            // The request must be rejected with `SysFatal` and the attached
            // cycles refunded in full.
            assert_eq!(
                result
                    .canister
                    .system_state
                    .queues_mut()
                    .pop_canister_output(&canister_test_id(13))
                    .unwrap()
                    .1,
                RequestOrResponse::Response(Response {
                    originator: canister_test_id(13),
                    respondent: canister_test_id(0),
                    originator_reply_callback: reply_callback,
                    refund: cycles,
                    response_payload: Payload::Reject(RejectContext {
                        code: RejectCode::SysFatal,
                        message: format!("Canister {} is not running", canister_id),
                    }),
                })
            );
        },
    );
}
#[test]
fn stopping_canister_rejects_ingress() {
    with_setup(SubnetType::Application, |exec_env, _, _, routing_table| {
        let canister = get_stopping_canister(canister_test_id(0));
        let ingress = IngressBuilder::new().build();
        // An ingress delivered to a stopping canister fails with
        // `CanisterStopped` in ingress history.
        // NOTE(review): user 2 appears to be the `IngressBuilder` default
        // source — confirm against the builder.
        assert_eq!(
            exec_env
                .execute_canister_message(
                    canister,
                    MAX_NUM_INSTRUCTIONS,
                    CanisterInputMessage::Ingress(ingress),
                    mock_time(),
                    routing_table,
                    MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                )
                .ingress_status
                .unwrap()
                .1,
            IngressStatus::Failed {
                receiver: canister_test_id(0).get(),
                user_id: user_test_id(2),
                error: UserError::new(
                    ErrorCode::CanisterStopped,
                    format!(
                        "Canister {} is not running and cannot accept ingress messages.",
                        canister_test_id(0)
                    ),
                ),
                time: mock_time(),
            }
        );
    });
}
#[test]
fn stopped_canister_rejects_requests() {
    with_setup(SubnetType::Application, |exec_env, _, _, routing_table| {
        // Since we can't enqueue a request into a stopped canister, create a canister
        // that is running and enqueue the request in it.
        let mut canister = get_running_canister(canister_test_id(0));
        let cycles = 30;
        let req = RequestBuilder::new()
            .sender(canister_test_id(13))
            .payment(Cycles::from(cycles))
            .build();
        let reply_callback = req.sender_reply_callback;
        canister
            .system_state
            .queues_mut()
            .push_input(
                QueueIndex::from(0),
                RequestOrResponse::Request(req),
                InputQueueType::RemoteSubnet,
            )
            .unwrap();
        // Stop the canister. Here we manually stop the canister as opposed
        // to the proper way sending a stop_canister request to exec_env. That way, we
        // get the canister into a state where it is stopped and has requests in its
        // input queue.
        let mut canister = running_canister_into_stopped(canister);
        let msg = canister.pop_input().unwrap();
        let canister_id = canister.canister_id();
        let mut result = exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            msg,
            mock_time(),
            routing_table,
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
        );
        // The request must be rejected with `SysFatal` and the attached
        // cycles refunded in full.
        assert_eq!(
            result
                .canister
                .system_state
                .queues_mut()
                .pop_canister_output(&canister_test_id(13))
                .unwrap()
                .1,
            RequestOrResponse::Response(Response {
                originator: canister_test_id(13),
                respondent: canister_test_id(0),
                originator_reply_callback: reply_callback,
                refund: Cycles::from(cycles),
                response_payload: Payload::Reject(RejectContext {
                    code: RejectCode::SysFatal,
                    message: format!("Canister {} is not running", canister_id),
                }),
            })
        );
    });
}
#[test]
fn stopped_canister_rejects_ingress() {
    with_setup(SubnetType::Application, |exec_env, _, _, routing_table| {
        let canister = get_stopped_canister(canister_test_id(0));
        let ingress = IngressBuilder::new().build();
        let result = exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            CanisterInputMessage::Ingress(ingress),
            mock_time(),
            routing_table,
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
        );
        // Same failure shape as for a stopping canister: `CanisterStopped`
        // recorded against the default ingress source.
        assert_eq!(
            result.ingress_status.unwrap().1,
            IngressStatus::Failed {
                receiver: canister_test_id(0).get(),
                user_id: user_test_id(2),
                error: UserError::new(
                    ErrorCode::CanisterStopped,
                    format!(
                        "Canister {} is not running and cannot accept ingress messages.",
                        canister_test_id(0)
                    ),
                ),
                time: mock_time(),
            }
        );
    });
}
#[test]
fn execute_stop_canister_updates_ingress_history_when_called_on_already_stopped_canister() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let canister = get_stopped_canister(canister_test_id(0));
        state.put_canister_state(canister);
        // Send `stop_canister` for a canister that is already stopped.
        let payload = Encode!(&CanisterIdRecord::from(canister_test_id(0))).unwrap();
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .source(user_test_id(1))
                        .method_payload(payload)
                        .method_name(Method::StopCanister)
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // Verify that a response to the message has been written to ingress history.
        // Stopping an already-stopped canister completes immediately with an
        // empty reply.
        assert_eq!(
            state.get_ingress_status(&message_test_id(0)),
            IngressStatus::Completed {
                receiver: canister_test_id(0).get(),
                user_id: user_test_id(1),
                result: WasmResult::Reply(EmptyBlob::encode()),
                time: mock_time(),
            }
        );
    });
}
#[test]
fn execute_stop_canister_does_not_update_ingress_history_when_called_on_running_canister() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        // BUG FIX: the test name targets a *running* canister, but the
        // original body used `get_stopping_canister` — an apparent copy-paste
        // of the stopping-canister variant of this test. Use a running
        // canister so the test exercises what its name claims.
        let canister = get_running_canister(canister_test_id(0));
        state.put_canister_state(canister);
        let payload = Encode!(&CanisterIdRecord::from(canister_test_id(0))).unwrap();
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .source(user_test_id(1))
                        .method_payload(payload)
                        .method_name(Method::StopCanister)
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // Stopping a running canister only transitions it to `Stopping`; the
        // ingress message is answered once the canister actually stops, so
        // nothing is written to ingress history yet.
        assert_eq!(
            state.get_ingress_status(&message_test_id(0)),
            IngressStatus::Unknown
        );
    });
}
#[test]
fn execute_stop_canister_does_not_update_ingress_history_when_called_on_stopping_canister() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let canister = get_stopping_canister(canister_test_id(0));
        state.put_canister_state(canister);
        // Send another `stop_canister` while the canister is still stopping.
        let payload = Encode!(&CanisterIdRecord::from(canister_test_id(0))).unwrap();
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .source(user_test_id(1))
                        .method_payload(payload)
                        .method_name(Method::StopCanister)
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // Verify that no response has been written to ingress history.
        // The reply is deferred until the canister finishes stopping.
        assert_eq!(
            state.get_ingress_status(&message_test_id(0)),
            IngressStatus::Unknown
        );
    });
}
#[test]
fn execute_stop_canister_writes_failure_to_ingress_history_when_called_with_incorrect_controller() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let canister = get_running_canister(canister_test_id(0));
        state.put_canister_state(canister);
        // Issue `stop_canister` from user 13, who is NOT a controller of the
        // canister (the controller is user 1, per the expected error below).
        let payload = Encode!(&CanisterIdRecord::from(canister_test_id(0))).unwrap();
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .source(user_test_id(13))
                        .receiver(CanisterId::ic_00())
                        .method_payload(payload)
                        .method_name(Method::StopCanister)
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // Verify that the response has been written to ingress history.
        assert_eq!(
            state.get_ingress_status(&message_test_id(0)),
            IngressStatus::Failed {
                receiver: CanisterId::ic_00().get(),
                user_id: user_test_id(13),
                error: UserError::new(
                    ErrorCode::CanisterInvalidController,
                    format!(
                        "Only the controllers of the canister {} can control it.\n\
                        Canister's controllers: {}\n\
                        Sender's ID: {}",
                        canister_test_id(0),
                        user_test_id(1).get(),
                        user_test_id(13).get()
                    )
                ),
                time: mock_time(),
            }
        );
    });
}
// Shared driver: installs `canister`, sends a `canister_status` request from
// its (first) controller through the subnet queue, executes it, and asserts
// the decoded `CanisterStatusResultV2` equals `expected_status_result`.
fn test_canister_status_helper(
    canister: CanisterState,
    expected_status_result: CanisterStatusResultV2,
) {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let controller_id = canister.system_state.controllers.iter().next().unwrap();
        let controller = CanisterId::new(*controller_id).unwrap();
        let canister_id = canister.canister_id();
        let subnet_id = subnet_test_id(1);
        let payload = Encode!(&CanisterIdRecord::from(canister_id)).unwrap();
        let cycles = 100;
        state.put_canister_state(canister);
        // Enqueue the canister_status request addressed to the subnet
        // (management) canister.
        state
            .subnet_queues_mut()
            .push_input(
                QUEUE_INDEX_NONE,
                RequestOrResponse::Request(
                    RequestBuilder::new()
                        .sender(controller)
                        .receiver(CanisterId::from(subnet_id))
                        .method_name(Method::CanisterStatus)
                        .method_payload(payload)
                        .payment(Cycles::from(cycles))
                        .build(),
                ),
                InputQueueType::RemoteSubnet,
            )
            .unwrap();
        let mut state = exec_env
            .execute_subnet_message(
                state.subnet_queues_mut().pop_input().unwrap(),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // The status response must decode to the expected result.
        if let RequestOrResponse::Response(resp) = state
            .subnet_queues_mut()
            .pop_canister_output(&controller)
            .unwrap()
            .1
        {
            if let Payload::Data(payload) = resp.response_payload {
                assert_eq!(
                    CanisterStatusResultV2::decode(&payload).unwrap(),
                    expected_status_result
                );
            } else {
                panic!("invalid payload");
            }
        } else {
            panic!("No response found");
        }
    });
}
// Shared driver: sends `method` to the management canister for a canister id
// that does not exist, and asserts a `DestinationInvalid` reject with a full
// refund of the attached cycles.
fn test_request_nonexistent_canister(method: Method) {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let controller = canister_test_id(1);
        let canister_id = canister_test_id(0);
        let cycles = 42;
        let subnet_id = subnet_test_id(1);
        let payload = Encode!(&CanisterIdRecord::from(canister_id)).unwrap();
        // Note: `canister_id` is never installed into `state`.
        state
            .subnet_queues_mut()
            .push_input(
                QUEUE_INDEX_NONE,
                RequestOrResponse::Request(
                    RequestBuilder::new()
                        .sender(controller)
                        .receiver(CanisterId::from(subnet_id))
                        .method_name(method)
                        .method_payload(payload)
                        .payment(Cycles::from(cycles))
                        .build(),
                ),
                InputQueueType::RemoteSubnet,
            )
            .unwrap();
        let mut state = exec_env
            .execute_subnet_message(
                state.subnet_queues_mut().pop_input().unwrap(),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        assert_eq!(
            state
                .subnet_queues_mut()
                .pop_canister_output(&controller)
                .unwrap()
                .1,
            RequestOrResponse::Response(
                ResponseBuilder::new()
                    .originator(controller)
                    .respondent(CanisterId::new(subnet_id.get()).unwrap())
                    .response_payload(Payload::Reject(RejectContext {
                        code: RejectCode::DestinationInvalid,
                        message: format!("Canister {} not found.", &canister_id)
                    }))
                    .refund(Cycles::from(cycles))
                    .build()
            )
        );
    });
}
#[test]
fn get_running_canister_status_from_another_canister() {
    let controller = canister_test_id(1);
    // A running canister with an explicit freezing threshold of 123 seconds.
    let canister = CanisterStateBuilder::new()
        .with_status(CanisterStatusType::Running)
        .with_controller(controller)
        .with_cycles(INITIAL_CYCLES)
        .with_freezing_threshold(123)
        .build();
    // Expected status mirrors the builder inputs above.
    test_canister_status_helper(
        canister,
        CanisterStatusResultV2::new(
            CanisterStatusType::Running,
            None,
            controller.get(),
            vec![controller.get()],
            NumBytes::from(0),
            INITIAL_CYCLES.get(),
            ComputeAllocation::default().as_percent(),
            None,
            123,
        ),
    )
}
#[test]
fn get_stopped_canister_status_from_another_canister() {
    let controller = canister_test_id(1);
    // Same as the running-canister variant, but the canister is `Stopped`.
    let canister = CanisterStateBuilder::new()
        .with_status(CanisterStatusType::Stopped)
        .with_controller(controller)
        .with_freezing_threshold(123)
        .build();
    test_canister_status_helper(
        canister,
        CanisterStatusResultV2::new(
            CanisterStatusType::Stopped,
            None,
            controller.get(),
            vec![controller.get()],
            NumBytes::from(0),
            INITIAL_CYCLES.get(),
            ComputeAllocation::default().as_percent(),
            None,
            123,
        ),
    );
}
#[test]
fn get_stopping_canister_status_from_another_canister() {
    let controller = canister_test_id(1);
    // Same as the running-canister variant, but the canister is `Stopping`.
    let canister = CanisterStateBuilder::new()
        .with_status(CanisterStatusType::Stopping)
        .with_controller(controller)
        .with_freezing_threshold(123)
        .build();
    test_canister_status_helper(
        canister,
        CanisterStatusResultV2::new(
            CanisterStatusType::Stopping,
            None,
            controller.get(),
            vec![controller.get()],
            NumBytes::from(0),
            INITIAL_CYCLES.get(),
            ComputeAllocation::default().as_percent(),
            None,
            123,
        ),
    );
}
#[test]
fn start_a_non_existing_canister() {
    // `start_canister` on an unknown canister id must be rejected with
    // `DestinationInvalid` (see `test_request_nonexistent_canister`).
    test_request_nonexistent_canister(Method::StartCanister);
}
#[test]
fn get_canister_status_of_nonexisting_canister() {
    // `canister_status` on an unknown canister id must be rejected with
    // `DestinationInvalid` (see `test_request_nonexistent_canister`).
    test_request_nonexistent_canister(Method::CanisterStatus);
}
#[test]
fn deposit_cycles_to_non_existing_canister_fails() {
    // `deposit_cycles` on an unknown canister id must be rejected with
    // `DestinationInvalid` and the cycles refunded.
    test_request_nonexistent_canister(Method::DepositCycles);
}
#[test]
fn start_canister_from_another_canister() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let controller = canister_test_id(1);
        let canister_id = canister_test_id(0);
        let canister = get_stopped_canister_with_controller(canister_id, *controller.get_ref());
        let cycles = 42;
        // Sanity check that the canister is stopped.
        assert_eq!(canister.status(), CanisterStatusType::Stopped);
        let subnet_id = subnet_test_id(1);
        state.put_canister_state(canister);
        // Controller sends `start_canister` through the subnet queue.
        let payload = Encode!(&CanisterIdRecord::from(canister_id)).unwrap();
        state
            .subnet_queues_mut()
            .push_input(
                QUEUE_INDEX_NONE,
                RequestOrResponse::Request(
                    RequestBuilder::new()
                        .sender(controller)
                        .receiver(CanisterId::from(subnet_id))
                        .method_name(Method::StartCanister)
                        .method_payload(payload)
                        .payment(Cycles::from(cycles))
                        .build(),
                ),
                InputQueueType::RemoteSubnet,
            )
            .unwrap();
        let mut state = exec_env
            .execute_subnet_message(
                state.subnet_queues_mut().pop_input().unwrap(),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // The controller receives an empty reply with the full payment
        // refunded.
        assert_eq!(
            state
                .subnet_queues_mut()
                .pop_canister_output(&controller)
                .unwrap()
                .1,
            RequestOrResponse::Response(
                ResponseBuilder::new()
                    .originator(controller)
                    .respondent(CanisterId::new(subnet_id.get()).unwrap())
                    .response_payload(Payload::Data(EmptyBlob::encode()))
                    .refund(Cycles::from(cycles))
                    .build()
            )
        );
        // The canister must now be running.
        assert_eq!(
            state.take_canister_state(&canister_id).unwrap().status(),
            CanisterStatusType::Running
        );
    });
}
#[test]
fn stop_canister_from_another_canister() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let controller = canister_test_id(1);
        let canister_id = canister_test_id(0);
        let canister =
            get_running_canister_with_args(canister_id, *controller.get_ref(), INITIAL_CYCLES);
        let cycles = 87;
        // Sanity check that the canister is running.
        assert_eq!(canister.status(), CanisterStatusType::Running);
        let subnet_id = subnet_test_id(1);
        state.put_canister_state(canister);
        // Enqueue a request to stop the canister.
        let payload = Encode!(&CanisterIdRecord::from(canister_id)).unwrap();
        state
            .subnet_queues_mut()
            .push_input(
                QUEUE_INDEX_NONE,
                RequestOrResponse::Request(
                    RequestBuilder::new()
                        .sender(controller)
                        .receiver(CanisterId::from(subnet_id))
                        .method_name(Method::StopCanister)
                        .method_payload(payload)
                        .payment(Cycles::from(cycles))
                        .build(),
                ),
                InputQueueType::RemoteSubnet,
            )
            .unwrap();
        let mut state = exec_env
            .execute_subnet_message(
                state.subnet_queues_mut().pop_input().unwrap(),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        let canister = state.take_canister_state(&canister_id).unwrap();
        // Canister should now be in the stopping state.
        // The stop context records the caller and its payment so the reply
        // (and refund) can be produced once the canister actually stops.
        assert_eq!(
            canister.system_state.status,
            CanisterStatus::Stopping {
                stop_contexts: vec![StopCanisterContext::Canister {
                    sender: controller,
                    reply_callback: CallbackId::from(0),
                    cycles: Cycles::from(cycles),
                }],
                call_context_manager: CallContextManager::default()
            }
        );
        assert!(canister.system_state.ready_to_stop());
        // Since the canister isn't fully stopped yet, there should be no
        // response in the output queue.
        assert!(state
            .subnet_queues_mut()
            .pop_canister_output(&controller)
            .is_none());
    });
}
#[test]
fn starting_a_stopping_canister_succeeds() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let mut canister = get_stopping_canister(canister_test_id(0));
        // Two pending ingress stop requests; both must be cancelled when the
        // canister is restarted below.
        let stop_msg_ids = [message_test_id(0), message_test_id(1)];
        for msg_id in &stop_msg_ids {
            canister
                .system_state
                .add_stop_context(StopCanisterContext::Ingress {
                    sender: user_test_id(1),
                    message_id: msg_id.clone(),
                });
        }
        // Create a call context. Because there's a call context that isn't cleared the
        // canister should stay in the `Stopping` status indefinitely.
        canister
            .system_state
            .call_context_manager_mut()
            .unwrap()
            .new_call_context(
                CallOrigin::Ingress(user_test_id(13), message_test_id(14)),
                Cycles::from(0),
                Time::from_nanos_since_unix_epoch(0),
            );
        // Ensure that the canister is `Stopping`.
        assert_matches!(canister.status(), CanisterStatusType::Stopping);
        state.put_canister_state(canister);
        // Start the stopping canister.
        let canister_id_record = CanisterIdRecord::from(canister_test_id(0)).encode();
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .message_id(message_test_id(2))
                        .source(user_test_id(1))
                        .receiver(ic00::IC_00)
                        .method_payload(canister_id_record)
                        .method_name(ic00::Method::StartCanister)
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // Canister should now be running.
        assert_eq!(
            state.canister_state(&canister_test_id(0)).unwrap().status(),
            CanisterStatusType::Running
        );
        // Assert that stop messages have been cancelled.
        for msg_id in &stop_msg_ids {
            assert_matches!(
                state.get_ingress_status(msg_id),
                IngressStatus::Failed {
                    user_id: u,
                    error: e,
                    ..
                } if u == user_test_id(1) && e.code() == ErrorCode::CanisterStoppingCancelled
            );
        }
    });
}
#[test]
fn subnet_ingress_message_unknown_method() {
    with_setup(SubnetType::Application, |exec_env, state, _, _| {
        let sender = user_test_id(1);
        // Ingress to the management canister with a method it does not have.
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .message_id(MessageId::from([0; 32]))
                        .source(sender)
                        .receiver(ic00::IC_00)
                        .method_payload(EmptyBlob::encode())
                        .method_name("non_existing_method")
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // Failure is recorded in ingress history as `CanisterMethodNotFound`.
        assert_eq!(
            state.get_ingress_status(&MessageId::from([0; 32])),
            IngressStatus::Failed {
                receiver: ic00::IC_00.get(),
                user_id: sender,
                error: UserError::new(
                    ErrorCode::CanisterMethodNotFound,
                    "Management canister has no method \'non_existing_method\'"
                ),
                time: mock_time(),
            }
        );
    });
}
#[test]
fn subnet_canister_request_unknown_method() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let sender = canister_test_id(1);
        let receiver = CanisterId::new(subnet_test_id(1).get()).unwrap();
        let cycles = 100;
        // Canister-to-subnet request naming a non-existent management method.
        state
            .subnet_queues_mut()
            .push_input(
                QUEUE_INDEX_NONE,
                RequestOrResponse::Request(
                    RequestBuilder::new()
                        .sender(sender)
                        .receiver(receiver)
                        .method_name("non_existing_method".to_string())
                        .method_payload(EmptyBlob::encode())
                        .payment(Cycles::from(cycles))
                        .build(),
                ),
                InputQueueType::RemoteSubnet,
            )
            .unwrap();
        let mut state = exec_env
            .execute_subnet_message(
                state.subnet_queues_mut().pop_input().unwrap(),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // Unlike the ingress case, canister callers get a reject response
        // (`DestinationInvalid`) with the payment refunded in full.
        assert_eq!(
            state
                .subnet_queues_mut()
                .pop_canister_output(&sender)
                .unwrap()
                .1,
            RequestOrResponse::Response(Response {
                originator: sender,
                respondent: receiver,
                originator_reply_callback: CallbackId::new(0),
                refund: Cycles::from(cycles),
                response_payload: Payload::Reject(RejectContext {
                    code: RejectCode::DestinationInvalid,
                    message: "Management canister has no method \'non_existing_method\'"
                        .to_string(),
                })
            })
        );
    });
}
#[test]
fn subnet_ingress_message_on_create_canister_fails() {
    with_setup(SubnetType::Application, |exec_env, state, _, _| {
        let sender = user_test_id(1);
        let receiver = CanisterId::from(1);
        // Payload content is irrelevant here: create_canister via ingress is
        // rejected before the payload is interpreted.
        let install_args = InstallCodeArgs::new(
            CanisterInstallMode::Install,
            CanisterId::new(PrincipalId::try_from([1, 2, 3].as_ref()).unwrap()).unwrap(),
            vec![],
            vec![],
            None,
            None,
            None,
        );
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .message_id(MessageId::from([0; 32]))
                        .source(sender)
                        .receiver(receiver)
                        .method_payload(install_args.encode())
                        .method_name(Method::CreateCanister)
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // create_canister is canister-to-canister only; ingress callers fail.
        assert_eq!(
            state.get_ingress_status(&MessageId::from([0; 32])),
            IngressStatus::Failed {
                receiver: receiver.get(),
                user_id: sender,
                error: UserError::new(
                    ErrorCode::CanisterMethodNotFound,
                    "create_canister can only be called by other canisters, not via ingress messages."
                ),
                time: mock_time(),
            }
        );
    });
}
#[test]
fn subnet_canister_request_bad_candid_payload() {
    with_setup(SubnetType::Application, |exec_env, mut state, _, _| {
        let sender = canister_test_id(1);
        let receiver = CanisterId::new(subnet_test_id(1).get()).unwrap();
        let cycles = 1;
        state
            .subnet_queues_mut()
            .push_input(
                QUEUE_INDEX_NONE,
                RequestOrResponse::Request(
                    RequestBuilder::new()
                        .sender(sender)
                        .receiver(receiver)
                        .method_name(Method::InstallCode)
                        .method_payload(vec![1, 2, 3]) // Invalid candid
                        .payment(Cycles::from(cycles))
                        .build(),
                ),
                InputQueueType::RemoteSubnet,
            )
            .unwrap();
        let mut state = exec_env
            .execute_subnet_message(
                state.subnet_queues_mut().pop_input().unwrap(),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        // Undecodable candid yields a `CanisterError` reject and a refund.
        assert_eq!(
            state
                .subnet_queues_mut()
                .pop_canister_output(&sender)
                .unwrap()
                .1,
            RequestOrResponse::Response(Response {
                originator: sender,
                respondent: receiver,
                originator_reply_callback: CallbackId::new(0),
                refund: Cycles::from(cycles),
                response_payload: Payload::Reject(RejectContext {
                    code: RejectCode::CanisterError,
                    message: "Error decoding candid: Cannot parse header 010203".to_string()
                })
            })
        );
    });
}
// Shared driver: builds an execution environment with the given subnet
// topology, enqueues a `create_canister` request from `sender` paying the
// creation fee plus 1 cycle, executes it, and returns the resulting state.
fn execute_create_canister_request(
    sender: CanisterId,
    nns_subnet_id: SubnetId,
    own_subnet_id: SubnetId,
    sender_subnet_id: SubnetId,
    own_subnet_type: SubnetType,
    log: ReplicaLogger,
) -> ReplicatedState {
    let receiver = canister_test_id(1);
    // One extra cycle on top of the fee lets callers distinguish a full
    // refund (failure) from fee consumption (success).
    let cycles = CANISTER_CREATION_FEE + Cycles::from(1);
    let (mut state, exec_env) = ExecutionEnvironmentBuilder::new()
        .with_log(log)
        .with_nns_subnet_id(nns_subnet_id)
        .with_own_subnet_id(own_subnet_id)
        .with_sender_subnet_id(sender_subnet_id)
        .with_subnet_type(own_subnet_type)
        .with_sender_canister(sender)
        .build();
    state
        .subnet_queues_mut()
        .push_input(
            QUEUE_INDEX_NONE,
            RequestOrResponse::Request(
                RequestBuilder::new()
                    .sender(sender)
                    .receiver(receiver)
                    .method_name(Method::CreateCanister)
                    .method_payload(EmptyBlob::encode())
                    .payment(Cycles::from(cycles.get()))
                    .build(),
            ),
            InputQueueType::RemoteSubnet,
        )
        .unwrap();
    exec_env
        .execute_subnet_message(
            state.subnet_queues_mut().pop_input().unwrap(),
            state,
            MAX_NUM_INSTRUCTIONS,
            &mut mock_random_number_generator(),
            &None,
            &ProvisionalWhitelist::Set(BTreeSet::new()),
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            MAX_NUMBER_OF_CANISTERS,
        )
        .0
}
// Shared assertion: runs a `create_canister` request with the given subnet
// topology and asserts it is rejected with `CanisterError` and the payment
// refunded in full.
fn check_create_canister_fails(
    sender: CanisterId,
    nns_subnet_id: SubnetId,
    own_subnet_id: SubnetId,
    sender_subnet_id: SubnetId,
    own_subnet_type: SubnetType,
    log: ReplicaLogger,
) {
    let mut state = execute_create_canister_request(
        sender,
        nns_subnet_id,
        own_subnet_id,
        sender_subnet_id,
        own_subnet_type,
        log,
    );
    // Pop the single response produced for the sender.
    let (_, actual) = state
        .subnet_queues_mut()
        .pop_canister_output(&sender)
        .unwrap();
    // The reject must refund the entire payment (fee + 1 extra cycle).
    let expected = RequestOrResponse::Response(Response {
        originator: sender,
        respondent: CanisterId::from(own_subnet_id),
        originator_reply_callback: CallbackId::new(0),
        refund: CANISTER_CREATION_FEE + Cycles::from(1),
        response_payload: Payload::Reject(RejectContext {
            code: RejectCode::CanisterError,
            message:
                "Cannot create canister. Sender should be on the same subnet or on the NNS subnet."
                    .to_string(),
        }),
    });
    assert_eq!(actual, expected);
}
#[test]
fn create_canister_different_subnets_on_nns_and_sender_not_on_nns() {
    with_test_replica_logger(|log| {
        // Our subnet doubles as the NNS subnet; the sender lives on a
        // different, non-NNS subnet, so creation must be rejected.
        check_create_canister_fails(
            canister_test_id(1), // sender
            subnet_test_id(1),   // nns_subnet_id
            subnet_test_id(1),   // own_subnet_id
            subnet_test_id(2),   // sender_subnet_id (not on the NNS)
            SubnetType::System,
            log,
        );
    });
}
#[test]
fn create_canister_different_subnets_not_on_nns_and_sender_not_on_nns() {
    with_test_replica_logger(|log| {
        // Neither this subnet nor the sender's subnet is the NNS subnet, so
        // cross-subnet canister creation must be rejected.
        check_create_canister_fails(
            canister_test_id(1), // sender
            subnet_test_id(0),   // nns_subnet_id
            subnet_test_id(1),   // own_subnet_id
            subnet_test_id(2),   // sender_subnet_id
            SubnetType::Application,
            log,
        );
    });
}
// Shared assertion: runs a `create_canister` request with the given subnet
// topology and asserts it succeeds — a `Payload::Data` reply for the sender
// and no refund.
fn check_create_canister_succeeds(
    sender: CanisterId,
    nns_subnet_id: SubnetId,
    own_subnet_id: SubnetId,
    sender_subnet_id: SubnetId,
    own_subnet_type: SubnetType,
    log: ReplicaLogger,
) {
    let mut state = execute_create_canister_request(
        sender,
        nns_subnet_id,
        own_subnet_id,
        sender_subnet_id,
        own_subnet_type,
        log,
    );
    match state
        .subnet_queues_mut()
        .pop_canister_output(&sender)
        .unwrap()
        .1
    {
        RequestOrResponse::Response(response) => {
            assert_eq!(response.originator, sender);
            assert_eq!(response.respondent, CanisterId::from(own_subnet_id));
            // Success keeps the payment: nothing is refunded.
            assert_eq!(response.refund, Cycles::from(0));
            assert!(
                matches!(response.response_payload, Payload::Data(_)),
                "Failed creating the canister."
            );
        }
        _ => panic!("Type should be RequestOrResponse::Response"),
    }
}
#[test]
fn create_canister_different_subnets_not_on_nns_sender_on_nns() {
    with_test_replica_logger(|log| {
        // The sender sits on the NNS subnet, which is allowed to create
        // canisters on other subnets.
        check_create_canister_succeeds(
            canister_test_id(1), // sender
            subnet_test_id(2),   // nns_subnet_id
            subnet_test_id(1),   // own_subnet_id
            subnet_test_id(2),   // sender_subnet_id: sender is on nns
            SubnetType::Application,
            log,
        );
    });
}
#[test]
fn create_canister_same_subnets_not_nns() {
    with_test_replica_logger(|log| {
        // Sender and target share application subnet 1; same-subnet creation
        // is allowed even off the NNS.
        check_create_canister_succeeds(
            canister_test_id(7), // sender
            subnet_test_id(0),   // nns_subnet_id
            subnet_test_id(1),   // own_subnet_id
            subnet_test_id(1),   // sender_subnet_id
            SubnetType::Application,
            log,
        );
    });
}
#[test]
fn create_canister_same_subnets_on_nns() {
    with_test_replica_logger(|log| {
        // Sender and target both live on the NNS (system) subnet.
        check_create_canister_succeeds(
            canister_test_id(7), // sender
            subnet_test_id(1),   // nns_subnet_id
            subnet_test_id(1),   // own_subnet_id
            subnet_test_id(1),   // sender_subnet_id
            SubnetType::System,
            log,
        );
    });
}
// Shared driver: enqueues a `setup_initial_dkg` request from `sender` (paying
// the canister-creation fee) on the given subnet topology, executes it, and
// returns the resulting state for the caller to inspect.
fn execute_setup_initial_dkg_request(
    sender: CanisterId,
    nns_subnet_id: SubnetId,
    own_subnet_id: SubnetId,
    sender_subnet_id: SubnetId,
    subnet_type: SubnetType,
    log: ReplicaLogger,
) -> ReplicatedState {
    let receiver = canister_test_id(1);
    let cycles = CANISTER_CREATION_FEE;
    let (mut state, exec_env) = ExecutionEnvironmentBuilder::new()
        .with_nns_subnet_id(nns_subnet_id)
        .with_own_subnet_id(own_subnet_id)
        .with_sender_subnet_id(sender_subnet_id)
        .with_subnet_type(subnet_type)
        .with_log(log)
        .build();
    // Minimal DKG argument: a single node at registry version 1.
    let node_ids = vec![node_test_id(1)];
    let request_payload = ic00::SetupInitialDKGArgs::new(node_ids, RegistryVersion::new(1));
    state
        .subnet_queues_mut()
        .push_input(
            QUEUE_INDEX_NONE,
            RequestOrResponse::Request(
                RequestBuilder::new()
                    .sender(sender)
                    .receiver(receiver)
                    .method_name(Method::SetupInitialDKG)
                    .method_payload(Encode!(&request_payload).unwrap())
                    .payment(Cycles::from(cycles.get()))
                    .build(),
            ),
            InputQueueType::RemoteSubnet,
        )
        .unwrap();
    exec_env
        .execute_subnet_message(
            state.subnet_queues_mut().pop_input().unwrap(),
            state,
            MAX_NUM_INSTRUCTIONS,
            &mut mock_random_number_generator(),
            &None,
            &ProvisionalWhitelist::Set(BTreeSet::new()),
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            MAX_NUMBER_OF_CANISTERS,
        )
        .0
}
#[test]
fn setup_initial_dkg_sender_on_nns() {
    with_test_replica_logger(|log| {
        // A caller on the NNS subnet is allowed to invoke SetupInitialDKG, so no
        // synchronous (reject) response is produced for it.
        let sender = canister_test_id(1);
        let nns_subnet_id = subnet_test_id(2);
        let mut state = execute_setup_initial_dkg_request(
            sender,
            nns_subnet_id,
            subnet_test_id(1), // own subnet
            nns_subnet_id,     // sender sits on the NNS subnet
            SubnetType::Application,
            log,
        );
        assert_eq!(state.subnet_queues_mut().pop_canister_output(&sender), None);
    });
}
#[test]
fn setup_initial_dkg_sender_not_on_nns() {
    with_test_replica_logger(|log| {
        // A caller that is not on the NNS subnet may not invoke SetupInitialDKG:
        // the request is rejected and the full payment (the helper attaches
        // CANISTER_CREATION_FEE) is refunded.
        let subnet_type = SubnetType::Application;
        let sender = canister_test_id(10);
        let nns_subnet_id = subnet_test_id(2);
        let own_subnet_id = subnet_test_id(1);
        let sender_subnet_id = own_subnet_id;
        let mut state = execute_setup_initial_dkg_request(
            sender,
            nns_subnet_id,
            own_subnet_id,
            sender_subnet_id,
            subnet_type,
            log,
        );
        let response = state
            .subnet_queues_mut()
            .pop_canister_output(&sender)
            .unwrap()
            .1;
        assert_eq!(
            response,
            RequestOrResponse::Response(Response {
                originator: sender,
                respondent: CanisterId::from(own_subnet_id),
                originator_reply_callback: CallbackId::new(0),
                refund: CANISTER_CREATION_FEE,
                response_payload: Payload::Reject(RejectContext {
                    code: RejectCode::CanisterError,
                    message: format!(
                        "{} is called by {}. It can only be called by NNS.",
                        ic00::Method::SetupInitialDKG,
                        sender,
                    )
                })
            })
        );
    });
}
#[test]
fn install_code_fails_on_invalid_compute_allocation() {
    with_setup(SubnetType::Application, |exec_env, state, _, _| {
        // Compute allocation is a percentage and must lie in [0..100]; 1000 is
        // out of range, so the ingress must end up in the Failed status with a
        // contract-violation error.
        let install_args = InstallCodeArgs::new(
            CanisterInstallMode::Install,
            CanisterId::new(PrincipalId::try_from([1, 2, 3].as_ref()).unwrap()).unwrap(),
            vec![],
            vec![],
            Some(1000), // <-- Invalid. Should fail.
            None,
            None,
        );
        let sender = user_test_id(1);
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .message_id(MessageId::from([0; 32]))
                        .source(sender)
                        .receiver(ic00::IC_00)
                        .method_payload(install_args.encode())
                        .method_name(Method::InstallCode)
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        assert_eq!(
            state.get_ingress_status(&MessageId::from([0; 32])),
            IngressStatus::Failed {
                receiver: ic00::IC_00.get(),
                user_id: sender,
                error: UserError::new(
                    ErrorCode::CanisterContractViolation,
                    "ComputeAllocation expected to be in the range [0..100], got 1_000"
                ),
                time: mock_time(),
            }
        );
    });
}
#[test]
fn install_code_fails_on_invalid_memory_allocation() {
    with_setup(SubnetType::Application, |exec_env, state, _, _| {
        // u64::MAX exceeds the maximum memory allocation accepted by the
        // subnet, so the ingress must fail with a contract-violation error.
        let install_args = InstallCodeArgs::new(
            CanisterInstallMode::Install,
            CanisterId::new(PrincipalId::try_from([1, 2, 3].as_ref()).unwrap()).unwrap(),
            vec![],
            vec![],
            None,
            Some(u64::MAX), // <-- Invalid. Should fail.
            None,
        );
        let sender = user_test_id(1);
        let state = exec_env
            .execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .message_id(MessageId::from([0; 32]))
                        .source(sender)
                        .receiver(ic00::IC_00)
                        .method_payload(install_args.encode())
                        .method_name(Method::InstallCode)
                        .build(),
                ),
                state,
                MAX_NUM_INSTRUCTIONS,
                &mut mock_random_number_generator(),
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            )
            .0;
        assert_eq!(
            state.get_ingress_status(&MessageId::from([0; 32])),
            IngressStatus::Failed {
                receiver: ic00::IC_00.get(),
                user_id: sender,
                error: UserError::new(
                    ErrorCode::CanisterContractViolation,
                    "MemoryAllocation expected to be in the range [0..12_884_901_888], got 18_446_744_073_709_551_615"
                ),
                time: mock_time(),
            });
    });
}
#[test]
fn metrics_are_observed_for_subnet_messages() {
    let mut csprng = mock_random_number_generator();
    with_test_replica_logger(|log| {
        // Build the execution environment by hand (instead of via a test
        // helper) so that its metrics registry can be inspected afterwards.
        let subnet_id = subnet_test_id(1);
        let metrics_registry = MetricsRegistry::new();
        let subnet_type = SubnetType::Application;
        let cycles_account_manager = Arc::new(
            CyclesAccountManagerBuilder::new()
                .with_subnet_type(subnet_type)
                .build(),
        );
        let hypervisor = Hypervisor::new(
            execution_environment::Config::default(),
            &metrics_registry,
            subnet_id,
            subnet_type,
            log.clone(),
            Arc::clone(&cycles_account_manager),
        );
        let hypervisor = Arc::new(hypervisor);
        let ingress_history_writer = IngressHistoryWriterImpl::new(
            execution_environment::Config::default(),
            log.clone(),
            &metrics_registry,
        );
        let ingress_history_writer = Arc::new(ingress_history_writer);
        let exec_env = ExecutionEnvironmentImpl::new(
            log,
            hypervisor,
            ingress_history_writer,
            &metrics_registry,
            subnet_id,
            subnet_type,
            1,
            execution_environment::Config::default(),
            cycles_account_manager,
        );
        // Send a subnet message to some of the ic:00 methods, but with malformed
        // candid. The request should fail and an error should be observed in metrics.
        let (_, _, _, state) = initial_state(subnet_type);
        let methods: [ic00::Method; 6] = [
            ic00::Method::CreateCanister,
            ic00::Method::InstallCode,
            ic00::Method::SetController,
            ic00::Method::StartCanister,
            ic00::Method::StopCanister,
            ic00::Method::DeleteCanister,
        ];
        for method in methods.iter() {
            exec_env.execute_subnet_message(
                CanisterInputMessage::Ingress(
                    IngressBuilder::new()
                        .receiver(ic00::IC_00)
                        .method_payload(vec![]) // Empty payload (invalid Candid)
                        .method_name(*method)
                        .build(),
                ),
                state.clone(),
                MAX_NUM_INSTRUCTIONS,
                &mut csprng,
                &None,
                &ProvisionalWhitelist::Set(BTreeSet::new()),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
                MAX_NUMBER_OF_CANISTERS,
            );
        }
        // Send subnet message with unknown method name.
        exec_env.execute_subnet_message(
            CanisterInputMessage::Ingress(
                IngressBuilder::new()
                    .receiver(ic00::IC_00)
                    .method_payload(vec![]) // Empty payload (invalid Candid)
                    .method_name("method_that_doesnt_exist".to_string())
                    .build(),
            ),
            state,
            MAX_NUM_INSTRUCTIONS,
            &mut csprng,
            &None,
            &ProvisionalWhitelist::Set(BTreeSet::new()),
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            MAX_NUMBER_OF_CANISTERS,
        );
        // Each call above should have produced exactly one observation in the
        // duration histogram, labelled with the method name and error status.
        assert_eq!(
            metric_vec(&[
                (
                    &[
                        ("method_name", "ic00_create_canister"),
                        ("outcome", "error"),
                        ("status", "CanisterMethodNotFound"),
                    ],
                    1
                ),
                (
                    &[
                        ("method_name", "ic00_install_code"),
                        ("outcome", "error"),
                        ("status", "CanisterContractViolation"),
                    ],
                    1
                ),
                (
                    &[
                        ("method_name", "ic00_set_controller"),
                        ("outcome", "error"),
                        ("status", "CanisterContractViolation"),
                    ],
                    1
                ),
                (
                    &[
                        ("method_name", "ic00_start_canister"),
                        ("outcome", "error"),
                        ("status", "CanisterContractViolation"),
                    ],
                    1
                ),
                (
                    &[
                        ("method_name", "ic00_stop_canister"),
                        ("outcome", "error"),
                        ("status", "CanisterContractViolation"),
                    ],
                    1
                ),
                (
                    &[
                        ("method_name", "ic00_delete_canister"),
                        ("outcome", "error"),
                        ("status", "CanisterContractViolation"),
                    ],
                    1
                ),
                (
                    &[
                        ("method_name", "unknown_method"),
                        ("outcome", "error"),
                        ("status", "CanisterMethodNotFound"),
                    ],
                    1
                ),
            ]),
            fetch_histogram_vec_count(
                &metrics_registry,
                "execution_subnet_message_duration_seconds"
            )
        );
    });
}
#[test]
fn can_update_canisters_cycles_account_when_an_ingress_is_executed() {
    with_setup(SubnetType::Application, |exec_env, _, _, routing_table| {
        // Executing an ingress message must charge the canister exactly the
        // execution cost of the instructions it consumed.
        let canister = get_running_canister(canister_test_id(0));
        let balance_before = canister.system_state.balance();
        let cycles_account_manager = Arc::new(CyclesAccountManagerBuilder::new().build());
        let result = exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            CanisterInputMessage::Ingress(IngressBuilder::new().build()),
            mock_time(),
            routing_table,
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
        );
        let instructions_used = MAX_NUM_INSTRUCTIONS - result.num_instructions_left;
        assert_eq!(
            result.canister.system_state.balance(),
            balance_before - cycles_account_manager.execution_cost(instructions_used),
        );
    });
}
#[test]
fn can_reject_a_request_when_canister_is_out_of_cycles() {
    with_setup(SubnetType::Application, |exec_env, _, _, routing_table| {
        // Set the canister's cycles balance to a low value to force the request to be
        // rejected.
        let available_cycles = Cycles::from(1000);
        let mut canister = get_running_canister_with_balance(canister_test_id(0), available_cycles);
        canister.system_state.freeze_threshold = NumSeconds::from(0);
        let cycles_account_manager = Arc::new(CyclesAccountManagerBuilder::new().build());
        let cycles = 50;
        let req = RequestBuilder::new()
            .sender(canister_test_id(13))
            .payment(Cycles::from(cycles))
            .build();
        let reply_callback = req.sender_reply_callback;
        canister
            .system_state
            .queues_mut()
            .push_input(
                QueueIndex::from(0),
                RequestOrResponse::Request(req),
                InputQueueType::RemoteSubnet,
            )
            .unwrap();
        let msg = canister.pop_input().unwrap();
        let canister_id = canister.canister_id();
        let mut result = exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            msg,
            mock_time(),
            routing_table,
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
        );
        // The request must be answered with a SysTransient reject naming the
        // required and available cycle amounts, and the attached payment must
        // be refunded in full.
        assert_eq!(
            result
                .canister
                .system_state
                .queues_mut()
                .pop_canister_output(&canister_test_id(13))
                .unwrap()
                .1,
            RequestOrResponse::Response(Response {
                originator: canister_test_id(13),
                respondent: canister_test_id(0),
                originator_reply_callback: reply_callback,
                refund: Cycles::from(cycles),
                response_payload: Payload::Reject(RejectContext {
                    code: RejectCode::SysTransient,
                    message: format!(
                        "Canister {} is out of cycles: requested {} cycles but the available balance is {} cycles and the freezing threshold {} cycles",
                        canister_id,
                        cycles_account_manager.execution_cost(MAX_NUM_INSTRUCTIONS),
                        available_cycles,
                        Cycles::from(0),
                    ),
                }),
            })
        );
        // Verify the canister's cycles balance is still the same.
        assert_eq!(result.canister.system_state.balance(), Cycles::from(1000));
    });
}
#[test]
fn can_reject_an_ingress_when_canister_is_out_of_cycles() {
    with_setup(SubnetType::Application, |exec_env, _, _, routing_table| {
        // Set the canister's cycles balance to a low value to force the request to be
        // rejected.
        let available_cycles = Cycles::from(1000);
        let canister = get_running_canister_with_balance(canister_test_id(0), available_cycles);
        let cycles_account_manager = Arc::new(CyclesAccountManagerBuilder::new().build());
        let ingress = IngressBuilder::new().build();
        let source = ingress.source;
        let canister_id = canister.canister_id();
        let result = exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            CanisterInputMessage::Ingress(ingress),
            mock_time(),
            routing_table,
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
        );
        // Unlike a canister request, an ingress message is answered via the
        // ingress history: the status must be Failed with CanisterOutOfCycles.
        assert_eq!(
            result.ingress_status,
            Some((MessageId::from([0; 32]), IngressStatus::Failed {
                receiver: canister_id.get(),
                user_id: source,
                error: UserError::new(
                    ErrorCode::CanisterOutOfCycles,
                    format!(
                        "Canister {} is out of cycles: requested {} cycles but the available balance is {} cycles and the freezing threshold {} cycles",
                        canister_id,
                        cycles_account_manager.execution_cost(MAX_NUM_INSTRUCTIONS),
                        available_cycles,
                        Cycles::from(0),
                    ),
                ),
                time: mock_time(),
            }))
        );
        // Verify the canister's cycles balance is still the same.
        assert_eq!(result.canister.system_state.balance(), Cycles::from(1000));
    });
}
#[test]
fn canister_heartbeat_doesnt_run_when_canister_is_stopped() {
    with_setup(SubnetType::System, |exec_env, _, _, routing_table| {
        // A heartbeat directed at a stopped canister must be refused; the
        // heartbeat outcome is the third element of the returned tuple.
        let stopped = get_stopped_canister_on_system_subnet(canister_test_id(0));
        let outcome = exec_env
            .execute_canister_heartbeat(
                stopped,
                MAX_NUM_INSTRUCTIONS,
                routing_table,
                mock_time(),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            )
            .2;
        assert_eq!(
            outcome,
            Err(CanisterHeartbeatError::CanisterNotRunning {
                status: CanisterStatusType::Stopped,
            })
        );
    });
}
#[test]
fn canister_heartbeat_doesnt_run_when_canister_is_stopping() {
    with_setup(SubnetType::System, |exec_env, _, _, routing_table| {
        // A heartbeat directed at a canister that is still stopping must be
        // refused; the heartbeat outcome is the third tuple element.
        let stopping = get_stopping_canister_on_nns(canister_test_id(0));
        let outcome = exec_env
            .execute_canister_heartbeat(
                stopping,
                MAX_NUM_INSTRUCTIONS,
                routing_table,
                mock_time(),
                MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            )
            .2;
        assert_eq!(
            outcome,
            Err(CanisterHeartbeatError::CanisterNotRunning {
                status: CanisterStatusType::Stopping,
            })
        );
    });
}
#[test]
fn message_to_canister_with_not_enough_balance_is_rejected() {
    with_setup(SubnetType::Application, |exec_env, _, _, _| {
        // A canister whose balance is one cycle short of the ingress induction
        // cost must not accept the message.
        let canister_id = canister_test_id(0);
        let ingress = SignedIngressBuilder::new()
            .canister_id(canister_id)
            .build()
            .content()
            .clone();
        let cycles_account_manager = Arc::new(CyclesAccountManagerBuilder::new().build());
        let ingress_induction_cost = cycles_account_manager
            .ingress_induction_cost(&ingress)
            .unwrap()
            .cost();
        let available = ingress_induction_cost - Cycles::from(1);
        assert_eq!(
            exec_env
                .should_accept_ingress_message(
                    Arc::new(
                        ReplicatedStateBuilder::default()
                            .with_canister(
                                CanisterStateBuilder::default()
                                    .with_canister_id(canister_id)
                                    // Just under the cycles required to accept the message.
                                    .with_cycles(available)
                                    .build()
                            )
                            .build()
                    ),
                    &ProvisionalWhitelist::new_empty(),
                    &ingress,
                    ExecutionMode::NonReplicated,
                )
                .unwrap_err()
                .code(),
            ErrorCode::CanisterOutOfCycles,
        );
    });
}
#[test]
fn message_to_canister_with_enough_balance_is_accepted() {
    with_setup(SubnetType::Application, |exec_env, _, _, _| {
        // A canister holding the full ingress induction cost (plus a small
        // margin covering storage charges) must accept the message.
        let ingress = SignedIngressBuilder::new()
            .canister_id(canister_test_id(0))
            .build()
            .content()
            .clone();
        let config = CyclesAccountManagerConfig::application_subnet();
        let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
        assert_eq!(
            exec_env.should_accept_ingress_message(
                Arc::new(
                    ReplicatedStateBuilder::default()
                        .with_canister(
                            CanisterStateBuilder::default()
                                .with_canister_id(canister_test_id(0))
                                // Exactly the amount of cycles needed to accept
                                // the message plus a bit extra for the canister's storage
                                .with_cycles(
                                    cycles_account_manager
                                        .ingress_induction_cost(&ingress,)
                                        .unwrap()
                                        .cost()
                                        + config.gib_storage_per_second_fee * Cycles::from(10)
                                )
                                .with_wasm(vec![1, 2, 3])
                                .build()
                        )
                        .build()
                ),
                &ProvisionalWhitelist::new_empty(),
                &ingress,
                ExecutionMode::NonReplicated,
            ),
            Ok(())
        );
    });
}
#[test]
fn management_message_to_canister_with_enough_balance_is_accepted() {
    with_setup(SubnetType::Application, |exec_env, _, _, _| {
        // Management calls can be addressed either to the IC_00 alias or to the
        // subnet id directly; both forms must be accepted when the targeted
        // canister is controlled by the sender and amply funded.
        for receiver in [IC_00, CanisterId::from(subnet_test_id(1))].iter() {
            let ingress = SignedIngressBuilder::new()
                .sender(user_test_id(0))
                .canister_id(*receiver)
                .method_name("start_canister")
                .method_payload(CanisterIdRecord::from(canister_test_id(0)).encode())
                .build();
            assert_eq!(
                exec_env.should_accept_ingress_message(
                    Arc::new(
                        ReplicatedStateBuilder::default()
                            .with_canister(
                                CanisterStateBuilder::default()
                                    .with_canister_id(canister_test_id(0))
                                    .with_controller(user_test_id(0).get())
                                    .with_cycles(u128::MAX)
                                    .with_wasm(vec![1, 2, 3])
                                    .build()
                            )
                            .build()
                    ),
                    &ProvisionalWhitelist::new_empty(),
                    ingress.content(),
                    ExecutionMode::NonReplicated,
                ),
                Ok(())
            );
        }
    });
}
#[test]
fn management_message_to_canister_with_not_enough_balance_is_not_accepted() {
    with_setup(SubnetType::Application, |exec_env, _, _, _| {
        // With a zero balance on the targeted canister, both addressing forms
        // of the management canister must reject with CanisterOutOfCycles.
        for receiver in [IC_00, CanisterId::from(subnet_test_id(1))].iter() {
            let canister_id = canister_test_id(0);
            let ingress = SignedIngressBuilder::new()
                .sender(user_test_id(0))
                .canister_id(*receiver)
                .method_name("start_canister")
                .method_payload(CanisterIdRecord::from(canister_id).encode())
                .build();
            assert_eq!(
                exec_env
                    .should_accept_ingress_message(
                        Arc::new(
                            ReplicatedStateBuilder::default()
                                .with_canister(
                                    CanisterStateBuilder::default()
                                        .with_canister_id(canister_id)
                                        .with_controller(user_test_id(0).get())
                                        .with_cycles(0)
                                        .with_wasm(vec![1, 2, 3])
                                        .build()
                                )
                                .build()
                        ),
                        &ProvisionalWhitelist::new_empty(),
                        ingress.content(),
                        ExecutionMode::NonReplicated,
                    )
                    .unwrap_err()
                    .code(),
                ErrorCode::CanisterOutOfCycles,
            );
        }
    });
}
#[test]
fn management_message_to_canister_that_doesnt_exist_is_not_accepted() {
    with_setup(SubnetType::Application, |exec_env, _, _, _| {
        // Both addressing forms of the management canister must reject a
        // request that targets a canister absent from the (empty) state.
        for receiver in [IC_00, CanisterId::from(subnet_test_id(1))].iter() {
            let ingress = SignedIngressBuilder::new()
                .sender(user_test_id(0))
                .canister_id(*receiver)
                .method_name("start_canister")
                .method_payload(CanisterIdRecord::from(canister_test_id(0)).encode())
                .build();
            let err = exec_env
                .should_accept_ingress_message(
                    Arc::new(ReplicatedStateBuilder::default().build()),
                    &ProvisionalWhitelist::new_empty(),
                    ingress.content(),
                    ExecutionMode::NonReplicated,
                )
                .unwrap_err();
            assert_eq!(err.code(), ErrorCode::CanisterNotFound);
        }
    });
}
#[test]
fn management_message_with_invalid_payload_is_not_accepted() {
    with_setup(SubnetType::Application, |exec_env, _, _, _| {
        // An empty method payload is not valid candid, so both addressing
        // forms of the management canister must reject the message.
        for receiver in [IC_00, CanisterId::from(subnet_test_id(1))].iter() {
            let ingress = SignedIngressBuilder::new()
                .sender(user_test_id(0))
                .canister_id(*receiver)
                .method_name("start_canister")
                .method_payload(vec![]) // an invalid payload
                .build();
            let err = exec_env
                .should_accept_ingress_message(
                    Arc::new(ReplicatedStateBuilder::default().build()),
                    &ProvisionalWhitelist::new_empty(),
                    ingress.content(),
                    ExecutionMode::NonReplicated,
                )
                .unwrap_err();
            assert_eq!(err.code(), ErrorCode::InvalidManagementPayload);
        }
    });
}
#[test]
fn management_message_with_invalid_method_is_not_accepted() {
    with_setup(SubnetType::Application, |exec_env, _, _, _| {
        // A method name the management canister does not implement must be
        // rejected with CanisterMethodNotFound under both addressing forms.
        for receiver in [IC_00, CanisterId::from(subnet_test_id(1))].iter() {
            let ingress = SignedIngressBuilder::new()
                .sender(user_test_id(0))
                .canister_id(*receiver)
                .method_name("invalid_method")
                .build();
            let err = exec_env
                .should_accept_ingress_message(
                    Arc::new(ReplicatedStateBuilder::default().build()),
                    &ProvisionalWhitelist::new_empty(),
                    ingress.content(),
                    ExecutionMode::NonReplicated,
                )
                .unwrap_err();
            assert_eq!(err.code(), ErrorCode::CanisterMethodNotFound);
        }
    });
}
// A Wasm module that allocates 10 wasm pages of heap memory and 10 wasm
// pages of stable memory and then (optionally) traps.
//
// It exports two update methods: `test_without_trap` grows both memories and
// returns normally, while `test_with_trap` grows them and then traps so the
// message execution fails.
const MEMORY_ALLOCATION_WAT: &str = r#"(module
    (import "ic0" "stable64_grow" (func $stable64_grow (param i64) (result i64)))
    (import "ic0" "trap" (func $ic_trap (param i32) (param i32)))
    (func $test_without_trap
        ;; Grow heap by 10 pages.
        (if (i32.ne (memory.grow (i32.const 10)) (i32.const 1))
            (then (unreachable))
        )
        ;; Grow stable memory by 10 pages.
        (if (i64.ne (call $stable64_grow (i64.const 10)) (i64.const 0))
            (then (unreachable))
        )
    )
    (func $test_with_trap
        ;; Grow memory.
        (call $test_without_trap)
        ;; Trap to trigger a failed execution
        (call $ic_trap (i32.const 0) (i32.const 15))
    )
    (export "canister_update test_without_trap" (func $test_without_trap))
    (export "canister_update test_with_trap" (func $test_with_trap))
    (memory $memory 1)
    (export "memory" (memory $memory))
    (data (i32.const 0) "This is a trap!")
)"#;
/// This test verifies that if the canister allocates memory during message
/// execution and the message fails, the allocated memory is returned to the
/// subnet's available memory.
#[test]
fn subnet_available_memory_reclaimed_when_execution_fails() {
    with_setup(SubnetType::Application, |exec_env, _, _, routing_table| {
        let wasm_binary = wabt::wat2wasm(MEMORY_ALLOCATION_WAT).unwrap();
        let tmpdir = tempfile::Builder::new().prefix("test").tempdir().unwrap();
        let system_state = SystemStateBuilder::default()
            .freeze_threshold(NumSeconds::from(0))
            .build();
        let execution_state = exec_env
            .hypervisor_for_testing()
            .create_execution_state(
                wasm_binary,
                tmpdir.path().to_path_buf(),
                system_state.canister_id(),
            )
            .unwrap();
        let mut canister = CanisterState {
            system_state,
            execution_state: Some(execution_state),
            scheduler_state: SchedulerState::default(),
        };
        // "test_with_trap" grows heap and stable memory, then traps.
        let input_message = CanisterInputMessage::Ingress(
            IngressBuilder::default()
                .method_name("test_with_trap".to_string())
                .build(),
        );
        let subnet_available_memory_bytes_num = 1 << 30;
        let subnet_available_memory: SubnetAvailableMemory = AvailableMemory::new(
            subnet_available_memory_bytes_num,
            subnet_available_memory_bytes_num,
        )
        .into();
        canister.system_state.memory_allocation =
            MemoryAllocation::try_from(NumBytes::new(1 << 30)).unwrap();
        exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            input_message,
            mock_time(),
            routing_table,
            subnet_available_memory.clone(),
        );
        // The failed execution must leave the subnet's available memory (both
        // total and message memory) untouched.
        assert_eq!(
            subnet_available_memory_bytes_num,
            subnet_available_memory.get_total_memory()
        );
        assert_eq!(
            subnet_available_memory_bytes_num,
            subnet_available_memory.get_message_memory()
        );
    });
}
// Counterpart of the test above: a *successful* execution that grows memory
// must reduce the subnet's available total memory by the grown amount.
#[test]
fn test_allocating_memory_reduces_subnet_available_memory() {
    with_setup(SubnetType::Application, |exec_env, _, _, routing_table| {
        let wasm_binary = wabt::wat2wasm(MEMORY_ALLOCATION_WAT).unwrap();
        let tmpdir = tempfile::Builder::new().prefix("test").tempdir().unwrap();
        let system_state = SystemStateBuilder::default()
            .freeze_threshold(NumSeconds::from(0))
            .build();
        let execution_state = exec_env
            .hypervisor_for_testing()
            .create_execution_state(
                wasm_binary,
                tmpdir.path().to_path_buf(),
                system_state.canister_id(),
            )
            .unwrap();
        let mut canister = CanisterState {
            system_state,
            execution_state: Some(execution_state),
            scheduler_state: SchedulerState::default(),
        };
        // "test_without_trap" grows heap and stable memory and succeeds.
        let input_message = CanisterInputMessage::Ingress(
            IngressBuilder::default()
                .method_name("test_without_trap".to_string())
                .build(),
        );
        let subnet_available_memory_bytes_num = 1 << 30;
        let subnet_available_memory: SubnetAvailableMemory = AvailableMemory::new(
            subnet_available_memory_bytes_num,
            subnet_available_memory_bytes_num,
        )
        .into();
        canister.system_state.memory_allocation =
            MemoryAllocation::try_from(NumBytes::new(1 << 30)).unwrap();
        exec_env.execute_canister_message(
            canister,
            MAX_NUM_INSTRUCTIONS,
            input_message,
            mock_time(),
            routing_table,
            subnet_available_memory.clone(),
        );
        // The canister allocates 10 wasm pages in the heap and 10 wasm pages of stable
        // memory.
        let new_memory_allocated = 20 * WASM_PAGE_SIZE_IN_BYTES as i64;
        assert_eq!(
            subnet_available_memory_bytes_num - new_memory_allocated,
            subnet_available_memory.get_total_memory()
        );
        assert_eq!(
            subnet_available_memory_bytes_num,
            subnet_available_memory.get_message_memory()
        );
    });
}
// Exercises IC00::HttpRequest with the feature enabled: the request must be
// registered in the SubnetCallContextManager with its parameters intact.
#[test]
fn execute_canister_http_request() {
    with_test_replica_logger(|log| {
        let (mut state, exec_env) = ExecutionEnvironmentBuilder::new().with_log(log).build();
        // Enable http requests feature.
        state.metadata.own_subnet_features.http_requests = true;
        // Create payload of the request.
        let url = "https::/".to_string();
        let transform_method_name = Some("transform".to_string());
        let request_payload = CanisterHttpRequestArgs {
            url: url.clone(),
            headers: Vec::new(),
            body: None,
            http_method: HttpMethodType::GET,
            transform_method_name: transform_method_name.clone(),
        };
        // Create request to HTTP_REQUEST method.
        let sender = canister_test_id(257);
        let request = RequestBuilder::new()
            .sender(sender)
            .receiver(IC_00)
            .method_name(Method::HttpRequest)
            .method_payload(Encode!(&request_payload).unwrap())
            .build();
        // Push the request in the subnet queue.
        state
            .subnet_queues_mut()
            .push_input(
                QUEUE_INDEX_NONE,
                RequestOrResponse::Request(request.clone()),
                InputQueueType::LocalSubnet,
            )
            .unwrap();
        // Execute IC00::HTTP_REQUEST.
        let (new_state, _) = exec_env.execute_subnet_message(
            state.subnet_queues_mut().pop_input().unwrap(),
            state,
            MAX_NUM_INSTRUCTIONS,
            &mut mock_random_number_generator(),
            &None,
            &ProvisionalWhitelist::Set(BTreeSet::new()),
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            MAX_NUMBER_OF_CANISTERS,
        );
        // Check that the SubnetCallContextManager contains the request.
        let canister_http_request_contexts = new_state
            .metadata
            .subnet_call_context_manager
            .canister_http_request_contexts;
        assert_eq!(canister_http_request_contexts.len(), 1);
        // The context is registered under the first callback id.
        let http_request_context = canister_http_request_contexts
            .get(&CallbackId::from(0))
            .unwrap();
        assert_eq!(http_request_context.url, url);
        assert_eq!(
            http_request_context.transform_method_name,
            transform_method_name
        );
        assert_eq!(http_request_context.http_method, HttpMethodType::GET);
        assert_eq!(http_request_context.request, request);
    });
}
// Exercises IC00::HttpRequest with the feature disabled: no request may be
// registered in the SubnetCallContextManager.
#[test]
fn execute_canister_http_request_disabled() {
    with_test_replica_logger(|log| {
        let (mut state, exec_env) = ExecutionEnvironmentBuilder::new().with_log(log).build();
        // Disable the http requests feature.
        state.metadata.own_subnet_features.http_requests = false;
        // Create payload of the request.
        let request_payload = CanisterHttpRequestArgs {
            url: "https::/".to_string(),
            headers: Vec::new(),
            body: None,
            http_method: HttpMethodType::GET,
            transform_method_name: Some("transform".to_string()),
        };
        // Create request to HTTP_REQUEST method.
        let sender = canister_test_id(257);
        let request = RequestBuilder::new()
            .sender(sender)
            .receiver(IC_00)
            .method_name(Method::HttpRequest)
            .method_payload(Encode!(&request_payload).unwrap())
            .build();
        // Push the request in the subnet queue.
        state
            .subnet_queues_mut()
            .push_input(
                QUEUE_INDEX_NONE,
                RequestOrResponse::Request(request),
                InputQueueType::LocalSubnet,
            )
            .unwrap();
        // Execute IC00::HTTP_REQUEST.
        let (new_state, _) = exec_env.execute_subnet_message(
            state.subnet_queues_mut().pop_input().unwrap(),
            state,
            MAX_NUM_INSTRUCTIONS,
            &mut mock_random_number_generator(),
            &None,
            &ProvisionalWhitelist::Set(BTreeSet::new()),
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            MAX_NUMBER_OF_CANISTERS,
        );
        // Check that the SubnetCallContextManager does not contain any requests.
        let canister_http_request_contexts = new_state
            .metadata
            .subnet_call_context_manager
            .canister_http_request_contexts;
        assert_eq!(canister_http_request_contexts.len(), 0);
    });
}
/// Builds an execution environment with the given topology and ECDSA feature
/// flag, enqueues a `ComputeInitialEcdsaDealings` subnet message from `sender`
/// (with zero payment) and executes it, returning the resulting state.
fn execute_compute_initial_ecdsa_dealings(
    sender: CanisterId,
    nns_subnet_id: SubnetId,
    own_subnet_id: SubnetId,
    sender_subnet_id: SubnetId,
    own_subnet_is_ecdsa_enabled: bool,
    key_id: String,
    log: ReplicaLogger,
) -> ReplicatedState {
    let receiver = canister_test_id(1);
    let (mut state, exec_env) = ExecutionEnvironmentBuilder::new()
        .with_log(log)
        .with_nns_subnet_id(nns_subnet_id)
        .with_own_subnet_id(own_subnet_id)
        .with_sender_subnet_id(sender_subnet_id)
        .with_sender_canister(sender)
        .build();
    state.metadata.own_subnet_features.ecdsa_signatures = own_subnet_is_ecdsa_enabled;
    let node_ids = vec![node_test_id(1), node_test_id(2)].into_iter().collect();
    let request_payload =
        ic00::ComputeInitialEcdsaDealingsArgs::new(key_id, node_ids, RegistryVersion::from(100));
    // Enqueue the request on the subnet's input queue.
    state
        .subnet_queues_mut()
        .push_input(
            QUEUE_INDEX_NONE,
            RequestOrResponse::Request(
                RequestBuilder::new()
                    .sender(sender)
                    .receiver(receiver)
                    .method_name(Method::ComputeInitialEcdsaDealings)
                    .method_payload(Encode!(&request_payload).unwrap())
                    .payment(Cycles::from(0u64))
                    .build(),
            ),
            InputQueueType::RemoteSubnet,
        )
        .unwrap();
    // Pop the message back off the queue and execute it; only the resulting
    // state is returned to the caller.
    exec_env
        .execute_subnet_message(
            state.subnet_queues_mut().pop_input().unwrap(),
            state,
            MAX_NUM_INSTRUCTIONS,
            &mut mock_random_number_generator(),
            &None,
            &ProvisionalWhitelist::Set(BTreeSet::new()),
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            MAX_NUMBER_OF_CANISTERS,
        )
        .0
}
/// Extracts the reject message from a `RequestOrResponse`, panicking if the
/// value is not a `Response` carrying a `Payload::Reject`.
fn get_reject_message(response: RequestOrResponse) -> String {
    let RequestOrResponse::Response(resp) = response else {
        panic!("Expected Response")
    };
    if let Payload::Reject(reject) = resp.response_payload {
        reject.message
    } else {
        panic!("Expected Reject")
    }
}
#[test]
fn compute_initial_ecdsa_dealings_sender_on_nns() {
    with_test_replica_logger(|log| {
        // An NNS-based caller may request initial ECDSA dealings, so no
        // synchronous (reject) response is enqueued for it.
        let sender = canister_test_id(0x10);
        let nns_subnet_id = subnet_test_id(2);
        let mut state = execute_compute_initial_ecdsa_dealings(
            sender,
            nns_subnet_id,
            subnet_test_id(1), // own subnet
            nns_subnet_id,     // sender is on the NNS subnet
            true,              // ECDSA enabled
            "secp256k1".to_string(),
            log,
        );
        assert_eq!(state.subnet_queues_mut().pop_canister_output(&sender), None);
    });
}
#[test]
fn compute_initial_ecdsa_dealings_sender_not_on_nns() {
    with_test_replica_logger(|log| {
        // Callers outside the NNS subnet may not request initial ECDSA
        // dealings; the call is rejected with an explanatory message.
        let sender = canister_test_id(0x10);
        let mut state = execute_compute_initial_ecdsa_dealings(
            sender,
            subnet_test_id(2), // NNS subnet
            subnet_test_id(1), // own subnet
            subnet_test_id(3), // sender is on neither of the subnets above
            true,              // ECDSA enabled
            "secp256k1".to_string(),
            log,
        );
        let (_refund, response) = state
            .subnet_queues_mut()
            .pop_canister_output(&sender)
            .unwrap();
        assert_eq!(
            get_reject_message(response),
            format!(
                "{} is called by {sender}. It can only be called by NNS.",
                Method::ComputeInitialEcdsaDealings
            )
        )
    });
}
#[test]
fn compute_initial_ecdsa_dealings_without_ecdsa_enabled() {
    with_test_replica_logger(|log| {
        // Even an NNS caller is rejected when the subnet has the ECDSA
        // feature switched off.
        let sender = canister_test_id(0x10);
        let nns_subnet_id = subnet_test_id(2);
        let mut state = execute_compute_initial_ecdsa_dealings(
            sender,
            nns_subnet_id,
            subnet_test_id(1), // own subnet
            nns_subnet_id,     // sender is on the NNS subnet
            false,             // ECDSA disabled
            "secp256k1".to_string(),
            log,
        );
        let (_refund, response) = state
            .subnet_queues_mut()
            .pop_canister_output(&sender)
            .unwrap();
        assert_eq!(
            get_reject_message(response),
            format!(
                "The {} API is not enabled on this subnet.",
                Method::ComputeInitialEcdsaDealings
            )
        )
    });
}
// TODO EXC-1060: After supporting multiple keys, execution will know which key_ids are
// supported and can send the correct rejection message.
#[test]
#[ignore]
fn compute_initial_ecdsa_dealings_with_unknown_key() {
    with_test_replica_logger(|log| {
        let sender = canister_test_id(0x10);
        let nns_subnet_id = subnet_test_id(2);
        let own_subnet_id = subnet_test_id(1);
        let sender_subnet_id = nns_subnet_id;
        let mut state = execute_compute_initial_ecdsa_dealings(
            sender,
            nns_subnet_id,
            own_subnet_id,
            sender_subnet_id,
            true,
            "foo".to_string(),
            log,
        );
        let (_refund, response) = state
            .subnet_queues_mut()
            .pop_canister_output(&sender)
            .unwrap();
        // An unsupported key id must be rejected with the canonical message.
        assert_eq!(
            get_reject_message(response),
            "key_id must be \"secp256k1\"".to_string()
        )
    });
}
/// Builds an execution environment (NNS subnet id 2; the sender either on the
/// NNS subnet or on application subnet 1), enqueues a `SignWithECDSA` subnet
/// message carrying `payment` cycles and executes it, returning the resulting
/// state.
fn execute_ecdsa_signing(
    sender: CanisterId,
    ecdsa_signature_fee: Cycles,
    payment: Cycles,
    sender_is_nns: bool,
    log: ReplicaLogger,
) -> ReplicatedState {
    let nns_subnet = subnet_test_id(2);
    let sender_subnet = if sender_is_nns {
        subnet_test_id(2)
    } else {
        subnet_test_id(1)
    };
    let (mut state, exec_env) = ExecutionEnvironmentBuilder::new()
        .with_log(log)
        .with_sender_subnet_id(sender_subnet)
        .with_nns_subnet_id(nns_subnet)
        .with_sender_canister(sender)
        .with_ecdsa_signature_fee(ecdsa_signature_fee)
        .build();
    state.metadata.own_subnet_features.ecdsa_signatures = true;
    let request_payload = ic00::SignWithECDSAArgs {
        message_hash: [1; 32].to_vec(),
        derivation_path: vec![],
        key_id: "secp256k1".to_string(),
    };
    // Enqueue the request on the subnet's input queue.
    state
        .subnet_queues_mut()
        .push_input(
            QUEUE_INDEX_NONE,
            RequestOrResponse::Request(
                RequestBuilder::new()
                    .sender(sender)
                    .method_name(Method::SignWithECDSA)
                    .method_payload(Encode!(&request_payload).unwrap())
                    .payment(payment)
                    .build(),
            ),
            InputQueueType::RemoteSubnet,
        )
        .unwrap();
    // Pop the message back off the queue and execute it; only the resulting
    // state is returned to the caller.
    exec_env
        .execute_subnet_message(
            state.subnet_queues_mut().pop_input().unwrap(),
            state,
            MAX_NUM_INSTRUCTIONS,
            &mut mock_random_number_generator(),
            &None,
            &ProvisionalWhitelist::Set(BTreeSet::new()),
            MAX_SUBNET_AVAILABLE_MEMORY.clone(),
            MAX_NUMBER_OF_CANISTERS,
        )
        .0
}
#[test]
fn ecdsa_signature_fee_charged() {
    with_test_replica_logger(|log| {
        // A non-NNS caller over-pays the signing fee: the request is accepted
        // (no reject is enqueued) and the queued signing context keeps the
        // payment minus the fee.
        let signature_fee = Cycles::from(1_000_000u64);
        let attached = Cycles::from(2_000_000u64);
        let sender = canister_test_id(1);
        let mut state = execute_ecdsa_signing(sender, signature_fee, attached, false, log);
        assert_eq!(state.subnet_queues_mut().pop_canister_output(&sender), None);
        let (_, context) = state
            .metadata
            .subnet_call_context_manager
            .sign_with_ecdsa_contexts
            .iter()
            .next()
            .unwrap();
        assert_eq!(context.request.payment, attached - signature_fee)
    });
}
#[test]
fn ecdsa_signature_rejected_without_fee() {
    with_test_replica_logger(|log| {
        // Paying even one cycle less than the signing fee gets the request
        // rejected with a message naming both amounts.
        let signature_fee = Cycles::from(2_000_000u64);
        let attached = signature_fee - Cycles::from(1);
        let sender = canister_test_id(1);
        let mut state = execute_ecdsa_signing(sender, signature_fee, attached, false, log);
        let (_refund, response) = state
            .subnet_queues_mut()
            .pop_canister_output(&sender)
            .unwrap();
        assert_eq!(
            get_reject_message(response),
            "sign_with_ecdsa request sent with 1999999 cycles, but 2000000 cycles are required."
                .to_string()
        )
    });
}
/// Requests originating from the NNS subnet are exempt from the signature fee.
#[test]
fn ecdsa_signature_fee_ignored_for_nns() {
    with_test_replica_logger(|log| {
        let signature_fee = Cycles::from(1_000_000u64);
        let attached_cycles = Cycles::zero();
        let caller = canister_test_id(1);
        let mut state =
            execute_ecdsa_signing(caller, signature_fee, attached_cycles, true, log);
        // Accepted despite the zero payment: no reject response is queued.
        assert!(state
            .subnet_queues_mut()
            .pop_canister_output(&caller)
            .is_none());
        let (_, context) = state
            .metadata
            .subnet_call_context_manager
            .sign_with_ecdsa_contexts
            .iter()
            .next()
            .unwrap();
        // The full (zero) payment is preserved; nothing was charged.
        assert_eq!(context.request.payment, attached_cycles)
    });
}
| 35.655434 | 152 | 0.545913 |
1e90ad967b37eac07f7530c97c723d62fda42596 | 15,819 | //! AWS Regions and helper functions.
//!
//! Mostly used for translating the Region enum to a string AWS accepts.
//!
//! For example: `UsEast1` to "us-east-1"
use crate::credential::ProfileProvider;
use serde::ser::SerializeTuple;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use std;
use std::error::Error;
use std::fmt::{self, Display, Error as FmtError, Formatter};
use std::str::FromStr;
/// An AWS region.
///
/// # Default
///
/// `Region` implements the `Default` trait. Calling `Region::default()` will attempt to read the
/// `AWS_DEFAULT_REGION` or `AWS_REGION` environment variable. If it is malformed, it will fall back to `Region::UsEast1`.
/// If it is not present it will fallback on the value associated with the current profile in `~/.aws/config` or the file
/// specified by the `AWS_CONFIG_FILE` environment variable. If that is malformed or absent it will fall back on `Region::UsEast1`
///
/// # AWS-compatible services
///
/// `Region::Custom` can be used to connect to AWS-compatible services such as DynamoDB Local or Ceph.
///
/// ```
/// # use rusoto_signature::Region;
/// Region::Custom {
///     name: "eu-east-3".to_owned(),
///     endpoint: "http://localhost:8000".to_owned(),
/// };
/// ```
///
/// # Caveats
///
/// `CnNorth1` is currently untested due to Rusoto maintainers not having access to AWS China.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Region {
    /// Region that covers the Eastern part of Asia Pacific (`ap-east-1`)
    ApEast1,
    /// Region that covers the North-Eastern part of Asia Pacific (`ap-northeast-1`)
    ApNortheast1,
    /// Region that covers the North-Eastern part of Asia Pacific (`ap-northeast-2`)
    ApNortheast2,
    /// Region that covers the North-Eastern part of Asia Pacific (`ap-northeast-3`)
    ApNortheast3,
    /// Region that covers the Southern part of Asia Pacific (`ap-south-1`)
    ApSouth1,
    /// Region that covers the South-Eastern part of Asia Pacific (`ap-southeast-1`)
    ApSoutheast1,
    /// Region that covers the South-Eastern part of Asia Pacific (`ap-southeast-2`)
    ApSoutheast2,
    /// Region that covers Canada (`ca-central-1`)
    CaCentral1,
    /// Region that covers Central Europe (`eu-central-1`)
    EuCentral1,
    /// Region that covers Western Europe (`eu-west-1`)
    EuWest1,
    /// Region that covers Western Europe (`eu-west-2`)
    EuWest2,
    /// Region that covers Western Europe (`eu-west-3`)
    EuWest3,
    /// Region that covers Northern Europe (`eu-north-1`)
    EuNorth1,
    /// Region that covers Southern Europe (`eu-south-1`)
    EuSouth1,
    /// Bahrain, Middle East South (`me-south-1`)
    MeSouth1,
    /// Region that covers South America (`sa-east-1`)
    SaEast1,
    /// Region that covers the Eastern part of the United States (`us-east-1`)
    UsEast1,
    /// Region that covers the Eastern part of the United States (`us-east-2`)
    UsEast2,
    /// Region that covers the Western part of the United States (`us-west-1`)
    UsWest1,
    /// Region that covers the Western part of the United States (`us-west-2`)
    UsWest2,
    /// Region that covers the Eastern part of the United States for the US Government (`us-gov-east-1`)
    UsGovEast1,
    /// Region that covers the Western part of the United States for the US Government (`us-gov-west-1`)
    UsGovWest1,
    /// Region that covers China (`cn-north-1`)
    CnNorth1,
    /// Region that covers North-Western part of China (`cn-northwest-1`)
    CnNorthwest1,
    /// Region that covers southern part Africa (`af-south-1`)
    AfSouth1,
    /// Specifies a custom region, such as a local Ceph target
    Custom {
        /// Name of the endpoint (e.g. `"eu-east-2"`).
        name: String,
        /// Endpoint to be used. For instance, `"https://s3.my-provider.net"` or just
        /// `"s3.my-provider.net"` (default scheme is https).
        endpoint: String,
    },
}
impl Region {
    /// Name of the region
    ///
    /// ```
    /// # use rusoto_signature::Region;
    /// assert_eq!(Region::CaCentral1.name(), "ca-central-1");
    /// assert_eq!(
    ///     Region::Custom { name: "eu-east-3".to_owned(), endpoint: "s3.net".to_owned() }.name(),
    ///     "eu-east-3"
    /// );
    /// ```
    pub fn name(&self) -> &str {
        match *self {
            Region::ApEast1 => "ap-east-1",
            Region::ApNortheast1 => "ap-northeast-1",
            Region::ApNortheast2 => "ap-northeast-2",
            Region::ApNortheast3 => "ap-northeast-3",
            Region::ApSouth1 => "ap-south-1",
            Region::ApSoutheast1 => "ap-southeast-1",
            Region::ApSoutheast2 => "ap-southeast-2",
            Region::CaCentral1 => "ca-central-1",
            Region::EuCentral1 => "eu-central-1",
            Region::EuWest1 => "eu-west-1",
            Region::EuWest2 => "eu-west-2",
            Region::EuWest3 => "eu-west-3",
            Region::EuNorth1 => "eu-north-1",
            Region::EuSouth1 => "eu-south-1",
            Region::MeSouth1 => "me-south-1",
            Region::SaEast1 => "sa-east-1",
            Region::UsEast1 => "us-east-1",
            Region::UsEast2 => "us-east-2",
            Region::UsWest1 => "us-west-1",
            Region::UsWest2 => "us-west-2",
            Region::UsGovEast1 => "us-gov-east-1",
            Region::UsGovWest1 => "us-gov-west-1",
            Region::CnNorth1 => "cn-north-1",
            Region::CnNorthwest1 => "cn-northwest-1",
            Region::AfSouth1 => "af-south-1",
            // A custom region reports whatever name it was constructed with.
            Region::Custom { ref name, .. } => name,
        }
    }
}
/// An error produced when attempting to convert a `str` into a `Region` fails.
#[derive(Debug, PartialEq)]
pub struct ParseRegionError {
    // Human-readable description, built by `ParseRegionError::new`.
    message: String,
}
// Manually created for lack of a way to flatten the `Region::Custom` variant
// Related: https://github.com/serde-rs/serde/issues/119
//
// The wire format is a two-element tuple `(name, Option<endpoint>)`; the
// endpoint is `Some` only for `Region::Custom`.
impl Serialize for Region {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut seq = serializer.serialize_tuple(2)?;
        if let Region::Custom {
            ref endpoint,
            ref name,
        } = *self
        {
            seq.serialize_element(&name)?;
            seq.serialize_element(&Some(&endpoint))?;
        } else {
            seq.serialize_element(self.name())?;
            seq.serialize_element(&None as &Option<&str>)?;
        }
        seq.end()
    }
}
// Visitor that decodes the `(name, Option<endpoint>)` tuple produced above.
struct RegionVisitor;
impl<'de> de::Visitor<'de> for RegionVisitor {
    type Value = Region;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("sequence of (name, Some(endpoint))")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: de::SeqAccess<'de>,
    {
        let name: String = seq
            .next_element::<String>()?
            .ok_or_else(|| de::Error::custom("region is missing name"))?;
        // A missing second tuple element is treated like an explicit `None`.
        let endpoint: Option<String> = seq.next_element::<Option<String>>()?.unwrap_or_default();
        match (name, endpoint) {
            // An endpoint always means a custom region.
            (name, Some(endpoint)) => Ok(Region::Custom { name, endpoint }),
            // Otherwise the name must parse as one of the known regions.
            (name, None) => name.parse().map_err(de::Error::custom),
        }
    }
}
// Manually created for lack of a way to flatten the `Region::Custom` variant
// Related: https://github.com/serde-rs/serde/issues/119
impl<'de> Deserialize<'de> for Region {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_tuple(2, RegionVisitor)
    }
}
impl FromStr for Region {
    type Err = ParseRegionError;
    // Parsing is case-insensitive and accepts both the hyphenated AWS name
    // ("us-east-1") and the compact enum-style spelling ("useast1").
    fn from_str(s: &str) -> Result<Region, ParseRegionError> {
        let v: &str = &s.to_lowercase();
        match v {
            "ap-east-1" | "apeast1" => Ok(Region::ApEast1),
            "ap-northeast-1" | "apnortheast1" => Ok(Region::ApNortheast1),
            "ap-northeast-2" | "apnortheast2" => Ok(Region::ApNortheast2),
            "ap-northeast-3" | "apnortheast3" => Ok(Region::ApNortheast3),
            "ap-south-1" | "apsouth1" => Ok(Region::ApSouth1),
            "ap-southeast-1" | "apsoutheast1" => Ok(Region::ApSoutheast1),
            "ap-southeast-2" | "apsoutheast2" => Ok(Region::ApSoutheast2),
            "ca-central-1" | "cacentral1" => Ok(Region::CaCentral1),
            "eu-central-1" | "eucentral1" => Ok(Region::EuCentral1),
            "eu-west-1" | "euwest1" => Ok(Region::EuWest1),
            "eu-west-2" | "euwest2" => Ok(Region::EuWest2),
            "eu-west-3" | "euwest3" => Ok(Region::EuWest3),
            "eu-north-1" | "eunorth1" => Ok(Region::EuNorth1),
            "eu-south-1" | "eusouth1" => Ok(Region::EuSouth1),
            "me-south-1" | "mesouth1" => Ok(Region::MeSouth1),
            "sa-east-1" | "saeast1" => Ok(Region::SaEast1),
            "us-east-1" | "useast1" => Ok(Region::UsEast1),
            "us-east-2" | "useast2" => Ok(Region::UsEast2),
            "us-west-1" | "uswest1" => Ok(Region::UsWest1),
            "us-west-2" | "uswest2" => Ok(Region::UsWest2),
            "us-gov-east-1" | "usgoveast1" => Ok(Region::UsGovEast1),
            "us-gov-west-1" | "usgovwest1" => Ok(Region::UsGovWest1),
            "cn-north-1" | "cnnorth1" => Ok(Region::CnNorth1),
            "cn-northwest-1" | "cnnorthwest1" => Ok(Region::CnNorthwest1),
            "af-south-1"| "afsouth1" => Ok(Region::AfSouth1),
            s => Err(ParseRegionError::new(s)),
        }
    }
}
impl ParseRegionError {
    /// Builds a `ParseRegionError` describing the unrecognized region string
    /// `input`.
    ///
    /// (The previous doc comment claimed this "parses a region … into a
    /// `Region`"; parsing actually lives in `Region::from_str` — this is
    /// just the error constructor it uses.)
    pub fn new(input: &str) -> Self {
        ParseRegionError {
            message: format!("Not a valid AWS region: {}", input),
        }
    }
}
impl Error for ParseRegionError {}
impl Display for ParseRegionError {
    // Prints the message built in `ParseRegionError::new`.
    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
        write!(f, "{}", self.message)
    }
}
impl Default for Region {
    /// Resolves the default region in this order: `AWS_DEFAULT_REGION`, then
    /// `AWS_REGION`, then the region of the current AWS profile, and finally
    /// `Region::UsEast1`. A present-but-malformed region name also falls
    /// back to `UsEast1`.
    fn default() -> Region {
        match std::env::var("AWS_DEFAULT_REGION").or_else(|_| std::env::var("AWS_REGION")) {
            Ok(ref v) => Region::from_str(v).unwrap_or(Region::UsEast1),
            Err(_) => match ProfileProvider::region() {
                // Fix: the borrow below had been mangled to `®ion` (mojibake
                // of `&region`), which does not compile.
                Ok(Some(region)) => Region::from_str(&region).unwrap_or(Region::UsEast1),
                _ => Region::UsEast1,
            },
        }
    }
}
// Unit tests for parsing and (de)serialization of `Region`.
//
// Fix applied: two `&region_deserialized` borrows had been mangled to
// `®ion_deserialized` (mojibake of `&reg…`), which does not compile.
#[cfg(test)]
mod tests {
    extern crate serde_test;
    use self::serde_test::{assert_tokens, Token};
    use super::*;
    #[test]
    fn from_str() {
        assert_eq!(
            "foo"
                .parse::<Region>()
                .err()
                .expect("Parsing foo as a Region was not an error")
                .to_string(),
            "Not a valid AWS region: foo".to_owned()
        );
        assert_eq!("ap-east-1".parse(), Ok(Region::ApEast1));
        assert_eq!("ap-northeast-1".parse(), Ok(Region::ApNortheast1));
        assert_eq!("ap-northeast-2".parse(), Ok(Region::ApNortheast2));
        assert_eq!("ap-northeast-3".parse(), Ok(Region::ApNortheast3));
        assert_eq!("ap-south-1".parse(), Ok(Region::ApSouth1));
        assert_eq!("ap-southeast-1".parse(), Ok(Region::ApSoutheast1));
        assert_eq!("ap-southeast-2".parse(), Ok(Region::ApSoutheast2));
        assert_eq!("ca-central-1".parse(), Ok(Region::CaCentral1));
        assert_eq!("eu-central-1".parse(), Ok(Region::EuCentral1));
        assert_eq!("eu-west-1".parse(), Ok(Region::EuWest1));
        assert_eq!("eu-west-2".parse(), Ok(Region::EuWest2));
        assert_eq!("eu-west-3".parse(), Ok(Region::EuWest3));
        assert_eq!("eu-north-1".parse(), Ok(Region::EuNorth1));
        assert_eq!("eu-south-1".parse(), Ok(Region::EuSouth1));
        assert_eq!("me-south-1".parse(), Ok(Region::MeSouth1));
        assert_eq!("sa-east-1".parse(), Ok(Region::SaEast1));
        assert_eq!("us-east-1".parse(), Ok(Region::UsEast1));
        assert_eq!("us-east-2".parse(), Ok(Region::UsEast2));
        assert_eq!("us-west-1".parse(), Ok(Region::UsWest1));
        assert_eq!("us-west-2".parse(), Ok(Region::UsWest2));
        assert_eq!("us-gov-east-1".parse(), Ok(Region::UsGovEast1));
        assert_eq!("us-gov-west-1".parse(), Ok(Region::UsGovWest1));
        assert_eq!("cn-north-1".parse(), Ok(Region::CnNorth1));
        assert_eq!("cn-northwest-1".parse(), Ok(Region::CnNorthwest1));
        assert_eq!("af-south-1".parse(), Ok(Region::AfSouth1));
    }
    #[test]
    fn region_serialize_deserialize() {
        assert_tokens(&Region::ApEast1, &tokens_for_region("ap-east-1"));
        assert_tokens(&Region::ApNortheast1, &tokens_for_region("ap-northeast-1"));
        assert_tokens(&Region::ApNortheast2, &tokens_for_region("ap-northeast-2"));
        assert_tokens(&Region::ApNortheast3, &tokens_for_region("ap-northeast-3"));
        assert_tokens(&Region::ApSouth1, &tokens_for_region("ap-south-1"));
        assert_tokens(&Region::ApSoutheast1, &tokens_for_region("ap-southeast-1"));
        assert_tokens(&Region::ApSoutheast2, &tokens_for_region("ap-southeast-2"));
        assert_tokens(&Region::CaCentral1, &tokens_for_region("ca-central-1"));
        assert_tokens(&Region::EuCentral1, &tokens_for_region("eu-central-1"));
        assert_tokens(&Region::EuWest1, &tokens_for_region("eu-west-1"));
        assert_tokens(&Region::EuWest2, &tokens_for_region("eu-west-2"));
        assert_tokens(&Region::EuWest3, &tokens_for_region("eu-west-3"));
        assert_tokens(&Region::EuNorth1, &tokens_for_region("eu-north-1"));
        assert_tokens(&Region::EuSouth1, &tokens_for_region("eu-south-1"));
        assert_tokens(&Region::MeSouth1, &tokens_for_region("me-south-1"));
        assert_tokens(&Region::SaEast1, &tokens_for_region("sa-east-1"));
        assert_tokens(&Region::UsEast1, &tokens_for_region("us-east-1"));
        assert_tokens(&Region::UsEast2, &tokens_for_region("us-east-2"));
        assert_tokens(&Region::UsWest1, &tokens_for_region("us-west-1"));
        assert_tokens(&Region::UsWest2, &tokens_for_region("us-west-2"));
        assert_tokens(&Region::UsGovEast1, &tokens_for_region("us-gov-east-1"));
        assert_tokens(&Region::UsGovWest1, &tokens_for_region("us-gov-west-1"));
        assert_tokens(&Region::CnNorth1, &tokens_for_region("cn-north-1"));
        assert_tokens(&Region::CnNorthwest1, &tokens_for_region("cn-northwest-1"));
        assert_tokens(&Region::AfSouth1, &tokens_for_region("af-south-1"));
    }
    /// Expected serde token stream for a standard (non-custom) region.
    fn tokens_for_region(name: &'static str) -> [Token; 4] {
        [
            Token::Tuple { len: 2 },
            Token::String(name),
            Token::None,
            Token::TupleEnd,
        ]
    }
    #[test]
    fn region_serialize_deserialize_custom() {
        let custom_region = Region::Custom {
            endpoint: "http://localhost:8000".to_owned(),
            name: "eu-east-1".to_owned(),
        };
        assert_tokens(
            &custom_region,
            &[
                Token::Tuple { len: 2 },
                Token::String("eu-east-1"),
                Token::Some,
                Token::String("http://localhost:8000"),
                Token::TupleEnd,
            ],
        );
        let expected = "[\"eu-east-1\",\"http://localhost:8000\"]";
        let region_deserialized = serde_json::to_string(&custom_region).unwrap();
        assert_eq!(region_deserialized, expected);
        let from_json = serde_json::de::from_str(&region_deserialized).unwrap();
        assert_eq!(custom_region, from_json);
    }
    #[test]
    fn region_serialize_deserialize_standard() {
        let r = Region::UsWest2;
        let region_deserialized = serde_json::to_string(&r).unwrap();
        let expected = "[\"us-west-2\",null]";
        assert_eq!(region_deserialized, expected);
        let from_json = serde_json::de::from_str(&region_deserialized).unwrap();
        assert_eq!(r, from_json);
    }
    #[test]
    fn region_serialize_deserialize_standard_only_region_name() {
        let r = Region::UsWest2;
        let only_region_name = "[\"us-west-2\"]";
        let from_json = serde_json::de::from_str(&only_region_name).unwrap();
        assert_eq!(r, from_json);
    }
}
| 37.308962 | 130 | 0.594601 |
f950f75229803cc621fccc73c9e0c911f0335251 | 394 | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use ocamlrep_derive::OcamlRep;
use ocamlvalue_macro::Ocamlvalue;
/// Modes for inferring missing type information.
///
/// NOTE(review): semantics below are inferred from the variant names only —
/// confirm against the OCaml definition this type mirrors.
#[derive(Clone, Copy, Debug, Eq, OcamlRep, Ocamlvalue, PartialEq)]
pub enum InferMissing {
    // Inference of missing annotations is turned off.
    Deactivated,
    // Presumably: infer missing return types.
    InferReturn,
    // Presumably: infer missing parameter types.
    InferParams,
}
| 26.266667 | 66 | 0.736041 |
1e29ec179bcaa0ac293e44d2422f15d1a196ca1b | 3,219 | use std::{
cell::RefCell,
os::raw::{c_int, c_void},
ptr,
};
use failure::Error;
use crate::allocator::Allocation;
const WS_ALIGN: usize = 64; // taken from `kTempAllocaAlignment` in `device_api.h`
/// A growable pool of temporary workspace allocations, tracking which are
/// free and which are currently handed out.
struct WorkspacePool {
    /// Every allocation made by this pool, indexed by the ids stored in
    /// `free` / `in_use`.
    workspaces: Vec<Allocation>,
    /// Indices of workspaces currently available for reuse.
    free: Vec<usize>,
    /// Indices of workspaces currently handed out to callers.
    in_use: Vec<usize>,
}
impl WorkspacePool {
    fn new() -> Self {
        WorkspacePool {
            workspaces: Vec::new(),
            free: Vec::new(),
            in_use: Vec::new(),
        }
    }
    /// Allocates a brand-new workspace of `size` bytes, marks it in-use and
    /// returns a pointer to its storage.
    fn alloc_new(&mut self, size: usize) -> Result<*mut u8, Error> {
        self.workspaces.push(Allocation::new(size, Some(WS_ALIGN))?);
        let idx = self.workspaces.len() - 1;
        self.in_use.push(idx);
        Ok(self.workspaces[idx].as_mut_ptr())
    }
    /// Hands out a workspace of at least `size` bytes, preferring the
    /// smallest free workspace that is large enough; allocates a new one if
    /// no free workspace fits.
    fn alloc(&mut self, size: usize) -> Result<*mut u8, Error> {
        if self.free.is_empty() {
            return self.alloc_new(size);
        }
        // Pick the smallest sufficiently large workspace from the free list.
        //
        // Bug fix: the previous code tested `!ws_size >= size`. Unary `!`
        // binds tighter than `>=` in Rust, so that compared the *bitwise
        // complement* of `ws_size` against `size` instead of testing
        // `!(ws_size >= size)` — the free list was effectively never reused.
        // The intended test is `ws_size < size`.
        let mut best: Option<(usize, usize)> = None; // (pos in `free`, workspace idx)
        for (pos, &idx) in self.free.iter().enumerate() {
            let ws_size = self.workspaces[idx].size();
            if ws_size < size {
                continue; // too small for this request
            }
            best = match best {
                // Keep the current candidate only when it is strictly smaller.
                Some((best_pos, best_idx)) if ws_size > self.workspaces[best_idx].size() => {
                    Some((best_pos, best_idx))
                }
                _ => Some((pos, idx)),
            };
        }
        match best {
            Some((pos, idx)) => {
                // Move the workspace from the free list to the in-use list.
                // (Also replaces the unstable `Vec::remove_item` used before.)
                self.free.remove(pos);
                self.in_use.push(idx);
                Ok(self.workspaces[idx].as_mut_ptr())
            }
            None => self.alloc_new(size),
        }
    }
    /// Returns the in-use workspace that starts at `ptr` to the free list.
    fn free(&mut self, ptr: *mut u8) -> Result<(), Error> {
        let mut ws_idx = None;
        for i in 0..self.in_use.len() {
            let idx = self.in_use[i];
            if self.workspaces[idx].as_mut_ptr() == ptr {
                self.in_use.remove(i);
                ws_idx = Some(idx);
                break;
            }
        }
        let idx = ws_idx.ok_or_else(|| format_err!("Tried to free nonexistent workspace."))?;
        self.free.push(idx);
        Ok(())
    }
}
// One workspace pool per thread, used by the `extern "C"` entry points below.
thread_local!(static WORKSPACE_POOL: RefCell<WorkspacePool> = RefCell::new(WorkspacePool::new()));
// Size used when a zero-byte workspace is requested (see
// `TVMBackendAllocWorkspace`).
const WORKSPACE_PAGE_SIZE: usize = 4 << 10;
/// C ABI entry point for allocating a temporary workspace from the
/// thread-local pool.
///
/// A `size` of 0 is rounded up to `WORKSPACE_PAGE_SIZE`. Returns a null
/// pointer if allocation fails. The `_device_*` / `_dtype_*` parameters are
/// accepted for ABI compatibility but unused here.
#[no_mangle]
pub extern "C" fn TVMBackendAllocWorkspace(
    _device_type: c_int,
    _device_id: c_int,
    size: u64,
    _dtype_code_hint: c_int,
    _dtype_bits_hint: c_int,
) -> *mut c_void {
    let nbytes = if size == 0 {
        WORKSPACE_PAGE_SIZE
    } else {
        size as usize
    };
    WORKSPACE_POOL.with(|pool_cell| {
        pool_cell
            .borrow_mut()
            .alloc(nbytes as usize)
            .unwrap_or(ptr::null_mut()) as *mut c_void
    })
}
/// C ABI entry point for releasing a workspace previously obtained from
/// `TVMBackendAllocWorkspace`.
///
/// Returns 0 on success and -1 when `ptr` does not belong to this thread's
/// pool. (The previous version computed this status code inside the
/// `.with(..)` closure but discarded it and unconditionally returned 0,
/// silently swallowing free errors.)
#[no_mangle]
pub extern "C" fn TVMBackendFreeWorkspace(
    _device_type: c_int,
    _device_id: c_int,
    ptr: *mut c_void,
) -> c_int {
    WORKSPACE_POOL.with(|pool_cell| {
        match pool_cell.borrow_mut().free(ptr as *mut u8) {
            Ok(()) => 0,
            Err(_) => -1,
        }
    })
}
| 27.05042 | 98 | 0.51196 |
727f629de16ba2ee98af56705a7dd8c570441584 | 2,246 | #[doc = "Register `DTUPD0` writer"]
// Writer handle for the DTUPD0 register: a thin newtype over the generic
// `crate::W` that derefs to it. (svd2rust-generated code — see the API link
// in the register doc below.)
pub struct W(crate::W<DTUPD0_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<DTUPD0_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl core::convert::From<crate::W<DTUPD0_SPEC>> for W {
    fn from(writer: crate::W<DTUPD0_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "Field `DTHUPD` writer - Dead-Time Value Update for PWMHx Output"]
pub struct DTHUPD_W<'a> {
    w: &'a mut W,
}
impl<'a> DTHUPD_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Field occupies bits 0:15 — clear them, then insert the new value.
        self.w.bits = (self.w.bits & !0xffff) | (value as u32 & 0xffff);
        self.w
    }
}
#[doc = "Field `DTLUPD` writer - Dead-Time Value Update for PWMLx Output"]
pub struct DTLUPD_W<'a> {
    w: &'a mut W,
}
impl<'a> DTLUPD_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Field occupies bits 16:31 — clear them, then insert the new value.
        self.w.bits = (self.w.bits & !(0xffff << 16)) | ((value as u32 & 0xffff) << 16);
        self.w
    }
}
// Field accessors returning proxy writers for the two 16-bit sub-fields.
impl W {
    #[doc = "Bits 0:15 - Dead-Time Value Update for PWMHx Output"]
    #[inline(always)]
    pub fn dthupd(&mut self) -> DTHUPD_W {
        DTHUPD_W { w: self }
    }
    #[doc = "Bits 16:31 - Dead-Time Value Update for PWMLx Output"]
    #[inline(always)]
    pub fn dtlupd(&mut self) -> DTLUPD_W {
        DTLUPD_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "PWM Channel Dead Time Update Register (ch_num = 0)\n\nThis register you can [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dtupd0](index.html) module"]
pub struct DTUPD0_SPEC;
// Marker impls describing the register: 32 bits wide and write-capable.
impl crate::RegisterSpec for DTUPD0_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [dtupd0::W](W) writer structure"]
impl crate::Writable for DTUPD0_SPEC {
    type Writer = W;
}
| 31.633803 | 281 | 0.597507 |
7665b730caddf940a6a73dbc7a6ae1b7640e97f1 | 120,103 | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Translate the completed AST to the LLVM IR.
//!
//! Some functions here, such as trans_block and trans_expr, return a value --
//! the result of the translation to LLVM -- while others, such as trans_fn
//! and trans_item, are called only for the side effect of adding a
//! particular definition to the LLVM IR output we're producing.
//!
//! Hopefully useful general knowledge about trans:
//!
//! * There's no way to find out the Ty type of a ValueRef. Doing so
//! would be "trying to get the eggs out of an omelette" (credit:
//! pcwalton). You can, instead, find out its TypeRef by calling val_ty,
//! but one TypeRef corresponds to many `Ty`s; for instance, tup(int, int,
//! int) and rec(x=int, y=int, z=int) will have the same TypeRef.
#![allow(non_camel_case_types)]
pub use self::ValueOrigin::*;
use super::CrateTranslation;
use super::ModuleTranslation;
use back::{link, symbol_names};
use lint;
use llvm::{BasicBlockRef, Linkage, ValueRef, Vector, get_param};
use llvm;
use rustc::cfg;
use rustc::hir::def_id::DefId;
use middle::lang_items::{LangItem, ExchangeMallocFnLangItem, StartFnLangItem};
use middle::weak_lang_items;
use rustc::hir::pat_util::simple_name;
use rustc::ty::subst::{self, Substs};
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::adjustment::CustomCoerceUnsized;
use rustc::dep_graph::DepNode;
use rustc::hir::map as hir_map;
use rustc::util::common::time;
use rustc::mir::mir_map::MirMap;
use session::config::{self, NoDebugInfo, FullDebugInfo};
use session::Session;
use _match;
use abi::{self, Abi, FnType};
use adt;
use attributes;
use build::*;
use builder::{Builder, noname};
use callee::{Callee, CallArgs, ArgExprs, ArgVals};
use cleanup::{self, CleanupMethods, DropHint};
use closure;
use common::{Block, C_bool, C_bytes_in_context, C_i32, C_int, C_uint, C_integral};
use collector::{self, TransItem, TransItemState, TransItemCollectionMode};
use common::{C_null, C_struct_in_context, C_u64, C_u8, C_undef};
use common::{CrateContext, DropFlagHintsMap, Field, FunctionContext};
use common::{Result, NodeIdAndSpan, VariantInfo};
use common::{node_id_type, fulfill_obligation};
use common::{type_is_immediate, type_is_zero_size, val_ty};
use common;
use consts;
use context::{SharedCrateContext, CrateContextList};
use controlflow;
use datum;
use debuginfo::{self, DebugLoc, ToDebugLoc};
use declare;
use expr;
use glue;
use inline;
use machine;
use machine::{llalign_of_min, llsize_of, llsize_of_real};
use meth;
use mir;
use monomorphize::{self, Instance};
use partitioning::{self, PartitioningStrategy, InstantiationMode, CodegenUnit};
use symbol_names_test;
use tvec;
use type_::Type;
use type_of;
use type_of::*;
use value::Value;
use Disr;
use util::common::indenter;
use util::sha2::Sha256;
use util::nodemap::{NodeMap, NodeSet};
use arena::TypedArena;
use libc::c_uint;
use std::ffi::{CStr, CString};
use std::cell::{Cell, RefCell};
use std::collections::{HashMap, HashSet};
use std::str;
use std::{i8, i16, i32, i64};
use syntax::codemap::{Span, DUMMY_SP};
use syntax::parse::token::InternedString;
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use rustc::hir::intravisit::{self, Visitor};
use rustc::hir;
use syntax::ast;
// Per-thread stack of instruction-context labels used by the push/with
// helpers below; `None` until `init_insn_ctxt` runs on the thread.
thread_local! {
    static TASK_LOCAL_INSN_KEY: RefCell<Option<Vec<&'static str>>> = {
        RefCell::new(None)
    }
}
/// Runs `blk` with the current thread's instruction-context stack, if one
/// has been installed via `init_insn_ctxt`; otherwise does nothing.
pub fn with_insn_ctxt<F>(blk: F)
    where F: FnOnce(&[&'static str])
{
    TASK_LOCAL_INSN_KEY.with(move |slot| {
        let ctx = slot.borrow();
        if let Some(labels) = ctx.as_ref() {
            blk(labels);
        }
    });
}
/// Installs a fresh, empty instruction-context stack for the current thread,
/// replacing any previously installed one.
pub fn init_insn_ctxt() {
    TASK_LOCAL_INSN_KEY.with(|slot| {
        let mut stack = slot.borrow_mut();
        *stack = Some(Vec::new());
    });
}
/// Opaque guard returned by `push_ctxt`; when dropped it pops one label from
/// the thread's instruction-context stack (if the stack is installed).
pub struct _InsnCtxt {
    _cannot_construct_outside_of_this_module: (),
}
impl Drop for _InsnCtxt {
    fn drop(&mut self) {
        TASK_LOCAL_INSN_KEY.with(|slot| {
            if let Some(stack) = slot.borrow_mut().as_mut() {
                stack.pop();
            }
        })
    }
}
/// Logs and pushes the label `s` onto the thread's instruction-context stack
/// (when one is installed) and returns a guard that pops it on drop.
pub fn push_ctxt(s: &'static str) -> _InsnCtxt {
    debug!("new InsnCtxt: {}", s);
    TASK_LOCAL_INSN_KEY.with(|slot| {
        let mut stack = slot.borrow_mut();
        if let Some(labels) = stack.as_mut() {
            labels.push(s)
        }
    });
    _InsnCtxt {
        _cannot_construct_outside_of_this_module: (),
    }
}
/// Records per-function LLVM-instruction-count statistics: captures the
/// counter at construction and, on drop, pushes the delta (under `name`)
/// into the crate context's stats.
pub struct StatRecorder<'a, 'tcx: 'a> {
    ccx: &'a CrateContext<'a, 'tcx>,
    // Taken (set to `None`) when the stats are flushed in `drop`.
    name: Option<String>,
    // Instruction count at construction; the delta is what gets recorded.
    istart: usize,
}
impl<'a, 'tcx> StatRecorder<'a, 'tcx> {
    pub fn new(ccx: &'a CrateContext<'a, 'tcx>, name: String) -> StatRecorder<'a, 'tcx> {
        let istart = ccx.stats().n_llvm_insns.get();
        StatRecorder {
            ccx: ccx,
            name: Some(name),
            istart: istart,
        }
    }
}
impl<'a, 'tcx> Drop for StatRecorder<'a, 'tcx> {
    fn drop(&mut self) {
        // Only record anything when translation statistics are enabled for
        // the session.
        if self.ccx.sess().trans_stats() {
            let iend = self.ccx.stats().n_llvm_insns.get();
            self.ccx
                .stats()
                .fn_stats
                .borrow_mut()
                .push((self.name.take().unwrap(), iend - self.istart))/;
            self.ccx.stats().n_fns.set(self.ccx.stats().n_fns.get() + 1);
            // Reset LLVM insn count to avoid compound costs.
            self.ccx.stats().n_llvm_insns.set(self.istart);
        }
    }
}
/// Looks up the `ClosureKind` recorded for `closure_id` in the typeck
/// tables; panics if the closure has no entry.
pub fn kind_for_closure(ccx: &CrateContext, closure_id: DefId) -> ty::ClosureKind {
    *ccx.tcx().tables.borrow().closure_kinds.get(&closure_id).unwrap()
}
/// Resolves the `DefId` of the given allocation lang item, aborting the
/// session with a fatal error (mentioning `info_ty`) if it is missing.
fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, info_ty: Ty<'tcx>, it: LangItem) -> DefId {
    match bcx.tcx().lang_items.require(it) {
        Ok(id) => id,
        Err(s) => {
            bcx.sess().fatal(&format!("allocation of `{}` {}", info_ty, s));
        }
    }
}
// The following malloc_raw_dyn* functions allocate a box to contain
// a given type, but with a potentially dynamic size.
//
// Emits a call to the exchange-malloc lang item with the given (dynamic)
// `size` and `align`, and casts the returned pointer to `llty_ptr`.
pub fn malloc_raw_dyn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                  llty_ptr: Type,
                                  info_ty: Ty<'tcx>,
                                  size: ValueRef,
                                  align: ValueRef,
                                  debug_loc: DebugLoc)
                                  -> Result<'blk, 'tcx> {
    let _icx = push_ctxt("malloc_raw_exchange");
    // Allocate space:
    let def_id = require_alloc_fn(bcx, info_ty, ExchangeMallocFnLangItem);
    let r = Callee::def(bcx.ccx(), def_id, bcx.tcx().mk_substs(Substs::empty()))
        .call(bcx, debug_loc, ArgVals(&[size, align]), None);
    Result::new(r.bcx, PointerCast(r.bcx, r.val, llty_ptr))
}
/// Maps a HIR comparison operator to the LLVM integer predicate implementing
/// it; `signed` selects signed vs. unsigned ordering for `<`, `<=`, `>`,
/// `>=`. Calls `bug!` on any non-comparison operator.
pub fn bin_op_to_icmp_predicate(op: hir::BinOp_,
                                signed: bool)
                                -> llvm::IntPredicate {
    match op {
        hir::BiEq => llvm::IntEQ,
        hir::BiNe => llvm::IntNE,
        hir::BiLt => if signed { llvm::IntSLT } else { llvm::IntULT },
        hir::BiLe => if signed { llvm::IntSLE } else { llvm::IntULE },
        hir::BiGt => if signed { llvm::IntSGT } else { llvm::IntUGT },
        hir::BiGe => if signed { llvm::IntSGE } else { llvm::IntUGE },
        op => {
            bug!("comparison_op_to_icmp_predicate: expected comparison operator, \
                  found {:?}",
                 op)
        }
    }
}
/// Maps a HIR comparison operator to the LLVM floating-point predicate
/// implementing it (ordered comparisons, except `!=` which maps to the
/// unordered `RealUNE`). Calls `bug!` on any non-comparison operator.
pub fn bin_op_to_fcmp_predicate(op: hir::BinOp_) -> llvm::RealPredicate {
    match op {
        hir::BiEq => llvm::RealOEQ,
        hir::BiNe => llvm::RealUNE,
        hir::BiLt => llvm::RealOLT,
        hir::BiLe => llvm::RealOLE,
        hir::BiGt => llvm::RealOGT,
        hir::BiGe => llvm::RealOGE,
        op => {
            bug!("comparison_op_to_fcmp_predicate: expected comparison operator, \
                  found {:?}",
                 op);
        }
    }
}
/// Emits a comparison of two fat (address, extra) pointer pairs. Equality /
/// inequality compare both components; the ordering operators compare
/// lexicographically (address first, then the extra word on ties).
/// Calls `bug!` on any non-comparison operator.
pub fn compare_fat_ptrs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    lhs_addr: ValueRef,
                                    lhs_extra: ValueRef,
                                    rhs_addr: ValueRef,
                                    rhs_extra: ValueRef,
                                    _t: Ty<'tcx>,
                                    op: hir::BinOp_,
                                    debug_loc: DebugLoc)
                                    -> ValueRef {
    match op {
        hir::BiEq => {
            let addr_eq = ICmp(bcx, llvm::IntEQ, lhs_addr, rhs_addr, debug_loc);
            let extra_eq = ICmp(bcx, llvm::IntEQ, lhs_extra, rhs_extra, debug_loc);
            And(bcx, addr_eq, extra_eq, debug_loc)
        }
        hir::BiNe => {
            let addr_eq = ICmp(bcx, llvm::IntNE, lhs_addr, rhs_addr, debug_loc);
            let extra_eq = ICmp(bcx, llvm::IntNE, lhs_extra, rhs_extra, debug_loc);
            Or(bcx, addr_eq, extra_eq, debug_loc)
        }
        hir::BiLe | hir::BiLt | hir::BiGe | hir::BiGt => {
            // a OP b ~ a.0 STRICT(OP) b.0 | (a.0 == b.0 && a.1 OP b.1)
            let (op, strict_op) = match op {
                hir::BiLt => (llvm::IntULT, llvm::IntULT),
                hir::BiLe => (llvm::IntULE, llvm::IntULT),
                hir::BiGt => (llvm::IntUGT, llvm::IntUGT),
                hir::BiGe => (llvm::IntUGE, llvm::IntUGT),
                _ => bug!(),
            };
            let addr_eq = ICmp(bcx, llvm::IntEQ, lhs_addr, rhs_addr, debug_loc);
            let extra_op = ICmp(bcx, op, lhs_extra, rhs_extra, debug_loc);
            let addr_eq_extra_op = And(bcx, addr_eq, extra_op, debug_loc);
            let addr_strict = ICmp(bcx, strict_op, lhs_addr, rhs_addr, debug_loc);
            Or(bcx, addr_strict, addr_eq_extra_op, debug_loc)
        }
        _ => {
            bug!("unexpected fat ptr binop");
        }
    }
}
/// Emits a comparison of two scalar values of type `t`, dispatching on the
/// type to the appropriate integer, float, or fat-pointer comparison.
/// Calls `bug!` if `t` is not a scalar type or `op` is not a comparison.
pub fn compare_scalar_types<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                        lhs: ValueRef,
                                        rhs: ValueRef,
                                        t: Ty<'tcx>,
                                        op: hir::BinOp_,
                                        debug_loc: DebugLoc)
                                        -> ValueRef {
    match t.sty {
        ty::TyTuple(ref tys) if tys.is_empty() => {
            // We don't need to do actual comparisons for nil.
            // () == () holds but () < () does not.
            match op {
                hir::BiEq | hir::BiLe | hir::BiGe => return C_bool(bcx.ccx(), true),
                hir::BiNe | hir::BiLt | hir::BiGt => return C_bool(bcx.ccx(), false),
                // refinements would be nice
                _ => bug!("compare_scalar_types: must be a comparison operator"),
            }
        }
        // Unsigned integer comparison for function pointers, bools, uints
        // and chars.
        ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyBool | ty::TyUint(_) | ty::TyChar => {
            ICmp(bcx,
                 bin_op_to_icmp_predicate(op, false),
                 lhs,
                 rhs,
                 debug_loc)
        }
        // Thin raw pointers compare like unsigned integers.
        ty::TyRawPtr(mt) if common::type_is_sized(bcx.tcx(), mt.ty) => {
            ICmp(bcx,
                 bin_op_to_icmp_predicate(op, false),
                 lhs,
                 rhs,
                 debug_loc)
        }
        // Fat raw pointers: load both halves and compare componentwise.
        ty::TyRawPtr(_) => {
            let lhs_addr = Load(bcx, GEPi(bcx, lhs, &[0, abi::FAT_PTR_ADDR]));
            let lhs_extra = Load(bcx, GEPi(bcx, lhs, &[0, abi::FAT_PTR_EXTRA]));
            let rhs_addr = Load(bcx, GEPi(bcx, rhs, &[0, abi::FAT_PTR_ADDR]));
            let rhs_extra = Load(bcx, GEPi(bcx, rhs, &[0, abi::FAT_PTR_EXTRA]));
            compare_fat_ptrs(bcx,
                             lhs_addr,
                             lhs_extra,
                             rhs_addr,
                             rhs_extra,
                             t,
                             op,
                             debug_loc)
        }
        ty::TyInt(_) => {
            ICmp(bcx,
                 bin_op_to_icmp_predicate(op, true),
                 lhs,
                 rhs,
                 debug_loc)
        }
        ty::TyFloat(_) => {
            FCmp(bcx,
                 bin_op_to_fcmp_predicate(op),
                 lhs,
                 rhs,
                 debug_loc)
        }
        // Should never get here, because t is scalar.
        _ => bug!("non-scalar type passed to compare_scalar_types"),
    }
}
/// Emits an element-wise SIMD comparison of `lhs` and `rhs` (element type
/// taken from `t`), sign-extending the `< size x i1 >` result to `ret_ty`.
/// Calls `bug!` if `t` is not a float, uint or int vector element type.
pub fn compare_simd_types<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                      lhs: ValueRef,
                                      rhs: ValueRef,
                                      t: Ty<'tcx>,
                                      ret_ty: Type,
                                      op: hir::BinOp_,
                                      debug_loc: DebugLoc)
                                      -> ValueRef {
    let signed = match t.sty {
        ty::TyFloat(_) => {
            let cmp = bin_op_to_fcmp_predicate(op);
            return SExt(bcx, FCmp(bcx, cmp, lhs, rhs, debug_loc), ret_ty);
        },
        ty::TyUint(_) => false,
        ty::TyInt(_) => true,
        _ => bug!("compare_simd_types: invalid SIMD type"),
    };
    let cmp = bin_op_to_icmp_predicate(op, signed);
    // LLVM outputs an `< size x i1 >`, so we need to perform a sign extension
    // to get the correctly sized type. This will compile to a single instruction
    // once the IR is converted to assembly if the SIMD instruction is supported
    // by the target architecture.
    SExt(bcx, ICmp(bcx, cmp, lhs, rhs, debug_loc), ret_ty)
}
// Iterates through the elements of a structural type (struct, closure,
// array, slice, str, tuple, or enum), invoking `f` on a pointer to each
// element in turn together with the element's type. Returns the block in
// which translation should continue (each call to `f` may move to a new
// block).
pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>,
                                         av: ValueRef,
                                         t: Ty<'tcx>,
                                         mut f: F)
                                         -> Block<'blk, 'tcx>
    where F: FnMut(Block<'blk, 'tcx>, ValueRef, Ty<'tcx>) -> Block<'blk, 'tcx>
{
    let _icx = push_ctxt("iter_structural_ty");

    // Walks the fields of one enum variant, calling `f` on each field pointer.
    fn iter_variant<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>,
                                   repr: &adt::Repr<'tcx>,
                                   av: adt::MaybeSizedValue,
                                   variant: ty::VariantDef<'tcx>,
                                   substs: &Substs<'tcx>,
                                   f: &mut F)
                                   -> Block<'blk, 'tcx>
        where F: FnMut(Block<'blk, 'tcx>, ValueRef, Ty<'tcx>) -> Block<'blk, 'tcx>
    {
        let _icx = push_ctxt("iter_variant");
        let tcx = cx.tcx();
        let mut cx = cx;

        for (i, field) in variant.fields.iter().enumerate() {
            // Monomorphize the field type before handing it to `f`.
            let arg = monomorphize::field_ty(tcx, substs, field);
            cx = f(cx,
                   adt::trans_field_ptr(cx, repr, av, Disr::from(variant.disr_val), i),
                   arg);
        }
        return cx;
    }

    // For unsized values, split `av` into its data pointer and metadata so
    // field/element projections can be computed below.
    let value = if common::type_is_sized(cx.tcx(), t) {
        adt::MaybeSizedValue::sized(av)
    } else {
        let data = Load(cx, expr::get_dataptr(cx, av));
        let info = Load(cx, expr::get_meta(cx, av));
        adt::MaybeSizedValue::unsized_(data, info)
    };

    let mut cx = cx;
    match t.sty {
        ty::TyStruct(..) => {
            let repr = adt::represent_type(cx.ccx(), t);
            let VariantInfo { fields, discr } = VariantInfo::from_ty(cx.tcx(), t, None);
            for (i, &Field(_, field_ty)) in fields.iter().enumerate() {
                let llfld_a = adt::trans_field_ptr(cx, &repr, value, Disr::from(discr), i);

                let val = if common::type_is_sized(cx.tcx(), field_ty) {
                    llfld_a
                } else {
                    // Unsized tail field: rebuild a fat pointer in a scratch
                    // slot so `f` sees a pointer of the usual shape.
                    let scratch = datum::rvalue_scratch_datum(cx, field_ty, "__fat_ptr_iter");
                    Store(cx, llfld_a, expr::get_dataptr(cx, scratch.val));
                    Store(cx, value.meta, expr::get_meta(cx, scratch.val));
                    scratch.val
                };
                cx = f(cx, val, field_ty);
            }
        }
        ty::TyClosure(_, ref substs) => {
            // A closure environment is iterated as its captured upvars.
            let repr = adt::represent_type(cx.ccx(), t);
            for (i, upvar_ty) in substs.upvar_tys.iter().enumerate() {
                let llupvar = adt::trans_field_ptr(cx, &repr, value, Disr(0), i);
                cx = f(cx, llupvar, upvar_ty);
            }
        }
        ty::TyArray(_, n) => {
            let (base, len) = tvec::get_fixed_base_and_len(cx, value.value, n);
            let unit_ty = t.sequence_element_type(cx.tcx());
            cx = tvec::iter_vec_raw(cx, base, unit_ty, len, f);
        }
        ty::TySlice(_) | ty::TyStr => {
            // For slices/str the length lives in the fat-pointer metadata.
            let unit_ty = t.sequence_element_type(cx.tcx());
            cx = tvec::iter_vec_raw(cx, value.value, unit_ty, value.meta, f);
        }
        ty::TyTuple(ref args) => {
            let repr = adt::represent_type(cx.ccx(), t);
            for (i, arg) in args.iter().enumerate() {
                let llfld_a = adt::trans_field_ptr(cx, &repr, value, Disr(0), i);
                cx = f(cx, llfld_a, *arg);
            }
        }
        ty::TyEnum(en, substs) => {
            let fcx = cx.fcx;
            let ccx = fcx.ccx;

            let repr = adt::represent_type(ccx, t);
            let n_variants = en.variants.len();

            // NB: we must hit the discriminant first so that structural
            // comparison know not to proceed when the discriminants differ.

            match adt::trans_switch(cx, &repr, av, false) {
                (_match::Single, None) => {
                    // Univariant enum: no discriminant to dispatch on.
                    if n_variants != 0 {
                        assert!(n_variants == 1);
                        cx = iter_variant(cx, &repr, adt::MaybeSizedValue::sized(av),
                                          &en.variants[0], substs, &mut f);
                    }
                }
                (_match::Switch, Some(lldiscrim_a)) => {
                    cx = f(cx, lldiscrim_a, cx.tcx().types.isize);

                    // Create a fall-through basic block for the "else" case of
                    // the switch instruction we're about to generate. Note that
                    // we do **not** use an Unreachable instruction here, even
                    // though most of the time this basic block will never be hit.
                    //
                    // When an enum is dropped its contents are currently
                    // overwritten to DTOR_DONE, which means the discriminant
                    // could have changed value to something not within the actual
                    // range of the discriminant. Currently this function is only
                    // used for drop glue so in this case we just return quickly
                    // from the outer function, and any other use case will only
                    // call this for an already-valid enum in which case the `ret
                    // void` will never be hit.
                    let ret_void_cx = fcx.new_temp_block("enum-iter-ret-void");
                    RetVoid(ret_void_cx, DebugLoc::None);
                    let llswitch = Switch(cx, lldiscrim_a, ret_void_cx.llbb, n_variants);
                    let next_cx = fcx.new_temp_block("enum-iter-next");

                    // One block per variant, dispatched to from the switch and
                    // each falling through to `next_cx`.
                    for variant in &en.variants {
                        let variant_cx = fcx.new_temp_block(&format!("enum-iter-variant-{}",
                                                                     &variant.disr_val
                                                                             .to_string()));
                        let case_val = adt::trans_case(cx, &repr, Disr::from(variant.disr_val));
                        AddCase(llswitch, case_val, variant_cx.llbb);
                        let variant_cx = iter_variant(variant_cx,
                                                      &repr,
                                                      value,
                                                      variant,
                                                      substs,
                                                      &mut f);
                        Br(variant_cx, next_cx.llbb, DebugLoc::None);
                    }
                    cx = next_cx;
                }
                _ => ccx.sess().unimpl("value from adt::trans_switch in iter_structural_ty"),
            }
        }
        _ => {
            cx.sess().unimpl(&format!("type in iter_structural_ty: {}", t))
        }
    }
    return cx;
}
/// Retrieve the information we are losing (making dynamic) in an unsizing
/// adjustment.
///
/// The `old_info` argument is a bit funny. It is intended for use
/// in an upcast, where the new vtable for an object will be derived
/// from the old one.
pub fn unsized_info<'ccx, 'tcx>(ccx: &CrateContext<'ccx, 'tcx>,
                                source: Ty<'tcx>,
                                target: Ty<'tcx>,
                                old_info: Option<ValueRef>)
                                -> ValueRef {
    // Peel matching layers of structure off both types until they diverge.
    let (source, target) = ccx.tcx().struct_lockstep_tails(source, target);
    match (&source.sty, &target.sty) {
        // [T; n] -> [T]: the lost information is the array length.
        (&ty::TyArray(_, len), &ty::TySlice(_)) => C_uint(ccx, len),
        (&ty::TyTrait(_), &ty::TyTrait(_)) => {
            // For now, upcasts are limited to changes in marker
            // traits, and hence never actually require an actual
            // change to the vtable.
            old_info.expect("unsized_info: missing old info for trait upcast")
        }
        (_, &ty::TyTrait(box ty::TraitTy { ref principal, .. })) => {
            // T -> Trait: the lost information is the vtable for T's impl.
            // Note that we preserve binding levels here:
            let substs = principal.0.substs.with_self_ty(source).erase_regions();
            let substs = ccx.tcx().mk_substs(substs);
            let trait_ref = ty::Binder(ty::TraitRef {
                def_id: principal.def_id(),
                substs: substs,
            });
            consts::ptrcast(meth::get_vtable(ccx, trait_ref),
                            Type::vtable_ptr(ccx))
        }
        _ => bug!("unsized_info: invalid unsizing {:?} -> {:?}",
                  source,
                  target),
    }
}
/// Coerce `src` to `dst_ty`. `src_ty` must be a thin pointer.
/// Returns the (data pointer, extra info) pair of the resulting fat pointer.
pub fn unsize_thin_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                   src: ValueRef,
                                   src_ty: Ty<'tcx>,
                                   dst_ty: Ty<'tcx>)
                                   -> (ValueRef, ValueRef) {
    debug!("unsize_thin_ptr: {:?} => {:?}", src_ty, dst_ty);
    match (&src_ty.sty, &dst_ty.sty) {
        // All four pointer shapes (Box -> Box, & -> &, & -> *, * -> *) are
        // handled identically: cast the data pointer to the in-memory type of
        // the unsized pointee and compute the new extra info.
        (&ty::TyBox(a), &ty::TyBox(b)) |
        (&ty::TyRef(_, ty::TypeAndMut { ty: a, .. }),
         &ty::TyRef(_, ty::TypeAndMut { ty: b, .. })) |
        (&ty::TyRef(_, ty::TypeAndMut { ty: a, .. }),
         &ty::TyRawPtr(ty::TypeAndMut { ty: b, .. })) |
        (&ty::TyRawPtr(ty::TypeAndMut { ty: a, .. }),
         &ty::TyRawPtr(ty::TypeAndMut { ty: b, .. })) => {
            // A thin pointer can only point at a sized pointee.
            assert!(common::type_is_sized(bcx.tcx(), a));
            let ptr_ty = type_of::in_memory_type_of(bcx.ccx(), b).ptr_to();
            (PointerCast(bcx, src, ptr_ty),
             unsized_info(bcx.ccx(), a, b, None))
        }
        _ => bug!("unsize_thin_ptr: called on bad types"),
    }
}
/// Coerce `src`, which is a reference to a value of type `src_ty`,
/// to a value of type `dst_ty` and store the result in `dst`
pub fn coerce_unsized_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                       src: ValueRef,
                                       src_ty: Ty<'tcx>,
                                       dst: ValueRef,
                                       dst_ty: Ty<'tcx>) {
    match (&src_ty.sty, &dst_ty.sty) {
        (&ty::TyBox(..), &ty::TyBox(..)) |
        (&ty::TyRef(..), &ty::TyRef(..)) |
        (&ty::TyRef(..), &ty::TyRawPtr(..)) |
        (&ty::TyRawPtr(..), &ty::TyRawPtr(..)) => {
            let (base, info) = if common::type_is_fat_ptr(bcx.tcx(), src_ty) {
                // fat-ptr to fat-ptr unsize preserves the vtable
                load_fat_ptr(bcx, src, src_ty)
            } else {
                // Thin to fat: compute the extra info for the new pointer.
                let base = load_ty(bcx, src, src_ty);
                unsize_thin_ptr(bcx, base, src_ty, dst_ty)
            };
            store_fat_ptr(bcx, base, info, dst, dst_ty);
        }

        // This can be extended to enums and tuples in the future.
        // (&ty::TyEnum(def_id_a, _), &ty::TyEnum(def_id_b, _)) |
        (&ty::TyStruct(def_a, _), &ty::TyStruct(def_b, _)) => {
            // Struct-to-struct coercion: copy each field, recursing into any
            // field whose type differs between source and destination.
            assert_eq!(def_a, def_b);

            let src_repr = adt::represent_type(bcx.ccx(), src_ty);
            let src_fields = match &*src_repr {
                &adt::Repr::Univariant(ref s, _) => &s.fields,
                _ => bug!("struct has non-univariant repr"),
            };
            let dst_repr = adt::represent_type(bcx.ccx(), dst_ty);
            let dst_fields = match &*dst_repr {
                &adt::Repr::Univariant(ref s, _) => &s.fields,
                _ => bug!("struct has non-univariant repr"),
            };

            let src = adt::MaybeSizedValue::sized(src);
            let dst = adt::MaybeSizedValue::sized(dst);

            let iter = src_fields.iter().zip(dst_fields).enumerate();
            for (i, (src_fty, dst_fty)) in iter {
                // Zero-sized fields need no copying.
                if type_is_zero_size(bcx.ccx(), dst_fty) {
                    continue;
                }

                let src_f = adt::trans_field_ptr(bcx, &src_repr, src, Disr(0), i);
                let dst_f = adt::trans_field_ptr(bcx, &dst_repr, dst, Disr(0), i);
                if src_fty == dst_fty {
                    // Unchanged field: plain bitwise copy.
                    memcpy_ty(bcx, dst_f, src_f, src_fty);
                } else {
                    coerce_unsized_into(bcx, src_f, src_fty, dst_f, dst_fty);
                }
            }
        }
        _ => bug!("coerce_unsized_into: invalid coercion {:?} -> {:?}",
                  src_ty,
                  dst_ty),
    }
}
/// Looks up the `CoerceUnsized` impl that applies to `source_ty -> target_ty`
/// and returns the kind of custom coercion it requires.
pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx>,
                                             source_ty: Ty<'tcx>,
                                             target_ty: Ty<'tcx>)
                                             -> CustomCoerceUnsized {
    // Build substitutions for the obligation `Source: CoerceUnsized<Target>`
    // (presumably types-space = [target], self-space = [source] — matches
    // VecPerParamSpace::new's argument order; confirm against its definition).
    let trait_substs = Substs::new(subst::VecPerParamSpace::new(vec![target_ty],
                                                                vec![source_ty],
                                                                Vec::new()),
                                   subst::VecPerParamSpace::empty());
    let trait_ref = ty::Binder(ty::TraitRef {
        def_id: scx.tcx().lang_items.coerce_unsized_trait().unwrap(),
        substs: scx.tcx().mk_substs(trait_substs)
    });

    match fulfill_obligation(scx, DUMMY_SP, trait_ref) {
        traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
            scx.tcx().custom_coerce_unsized_kind(impl_def_id)
        }
        vtable => {
            // Only a concrete impl can carry the coercion kind.
            bug!("invalid CoerceUnsized vtable: {:?}", vtable);
        }
    }
}
/// Adjusts the RHS of a shift expression so its integer width matches the
/// LHS, emitting any needed trunc/zext instructions into `cx`.
pub fn cast_shift_expr_rhs(cx: Block, op: hir::BinOp_, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    let emit_trunc = |val, ty| Trunc(cx, val, ty);
    let emit_zext = |val, ty| ZExt(cx, val, ty);
    cast_shift_rhs(op, lhs, rhs, emit_trunc, emit_zext)
}
pub fn cast_shift_const_rhs(op: hir::BinOp_, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
cast_shift_rhs(op,
lhs,
rhs,
|a, b| unsafe { llvm::LLVMConstTrunc(a, b.to_ref()) },
|a, b| unsafe { llvm::LLVMConstZExt(a, b.to_ref()) })
}
/// Widens or narrows `rhs` to the integer width of `lhs` when `op` is a
/// shift; any other operator returns `rhs` untouched. LLVM requires both
/// shift operands to have the same width, while the language allows any
/// integer type on the right-hand side.
fn cast_shift_rhs<F, G>(op: hir::BinOp_,
                        lhs: ValueRef,
                        rhs: ValueRef,
                        trunc: F,
                        zext: G)
                        -> ValueRef
    where F: FnOnce(ValueRef, Type) -> ValueRef,
          G: FnOnce(ValueRef, Type) -> ValueRef
{
    // Shifts may have any size int on the rhs; everything else is untouched.
    if !op.is_shift() {
        return rhs;
    }

    // For SIMD shifts, compare the widths of the scalar elements.
    let scalar_llty = |llty: Type| {
        if llty.kind() == Vector {
            llty.element_type()
        } else {
            llty
        }
    };
    let lhs_llty = scalar_llty(val_ty(lhs));
    let rhs_llty = scalar_llty(val_ty(rhs));
    let lhs_sz = lhs_llty.int_width();
    let rhs_sz = rhs_llty.int_width();

    if lhs_sz < rhs_sz {
        trunc(rhs, lhs_llty)
    } else if lhs_sz > rhs_sz {
        // FIXME (#1877): If shifting by negative
        // values becomes not undefined then this is wrong.
        zext(rhs, lhs_llty)
    } else {
        rhs
    }
}
/// Returns the LLVM type of the signed integer type `val_t` together with
/// the bit pattern of its minimum value (as a `u64`). Bugs out on any
/// non-`TyInt` type.
pub fn llty_and_min_for_signed_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                              val_t: Ty<'tcx>)
                                              -> (Type, u64) {
    let int_ty = match val_t.sty {
        ty::TyInt(t) => t,
        _ => bug!(),
    };
    let llty = Type::int_from_ty(cx.ccx(), int_ty);
    // `isize` shares the minimum of whichever fixed-width type it maps to on
    // the current target.
    let min = match int_ty {
        ast::IntTy::Is if llty == Type::i32(cx.ccx()) => i32::MIN as u64,
        ast::IntTy::Is => i64::MIN as u64,
        ast::IntTy::I8 => i8::MIN as u64,
        ast::IntTy::I16 => i16::MIN as u64,
        ast::IntTy::I32 => i32::MIN as u64,
        ast::IntTy::I64 => i64::MIN as u64,
    };
    (llty, min)
}
/// Emits the runtime checks required before a division or remainder: panics
/// on a zero divisor and, for signed operands, on `MIN / -1` (which would
/// overflow). Returns the block in which the now-safe operation should be
/// emitted.
pub fn fail_if_zero_or_overflows<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                             call_info: NodeIdAndSpan,
                                             divrem: hir::BinOp,
                                             lhs: ValueRef,
                                             rhs: ValueRef,
                                             rhs_t: Ty<'tcx>)
                                             -> Block<'blk, 'tcx> {
    // Panic messages differ between `/` and `%`.
    let (zero_text, overflow_text) = if divrem.node == hir::BiDiv {
        ("attempted to divide by zero",
         "attempted to divide with overflow")
    } else {
        ("attempted remainder with a divisor of zero",
         "attempted remainder with overflow")
    };
    let debug_loc = call_info.debug_loc();

    // `is_zero` is the divisor-is-zero condition; `is_signed` decides whether
    // the overflow check below is needed at all.
    let (is_zero, is_signed) = match rhs_t.sty {
        ty::TyInt(t) => {
            let zero = C_integral(Type::int_from_ty(cx.ccx(), t), 0, false);
            (ICmp(cx, llvm::IntEQ, rhs, zero, debug_loc), true)
        }
        ty::TyUint(t) => {
            let zero = C_integral(Type::uint_from_ty(cx.ccx(), t), 0, false);
            (ICmp(cx, llvm::IntEQ, rhs, zero, debug_loc), false)
        }
        ty::TyStruct(def, _) if def.is_simd() => {
            // SIMD divisor: fail if *any* lane is zero.
            let mut res = C_bool(cx.ccx(), false);
            for i in 0..rhs_t.simd_size(cx.tcx()) {
                res = Or(cx,
                         res,
                         IsNull(cx, ExtractElement(cx, rhs, C_int(cx.ccx(), i as i64))),
                         debug_loc);
            }
            (res, false)
        }
        _ => {
            bug!("fail-if-zero on unexpected type: {}", rhs_t);
        }
    };
    let bcx = with_cond(cx, is_zero, |bcx| {
        controlflow::trans_fail(bcx, call_info, InternedString::new(zero_text))
    });

    // To quote LLVM's documentation for the sdiv instruction:
    //
    //      Division by zero leads to undefined behavior. Overflow also leads
    //      to undefined behavior; this is a rare case, but can occur, for
    //      example, by doing a 32-bit division of -2147483648 by -1.
    //
    // In order to avoid undefined behavior, we perform runtime checks for
    // signed division/remainder which would trigger overflow. For unsigned
    // integers, no action beyond checking for zero need be taken.
    if is_signed {
        let (llty, min) = llty_and_min_for_signed_ty(cx, rhs_t);
        // Only inspect the LHS when the divisor is -1 (all-ones pattern).
        let minus_one = ICmp(bcx,
                             llvm::IntEQ,
                             rhs,
                             C_integral(llty, !0, false),
                             debug_loc);
        with_cond(bcx, minus_one, |bcx| {
            let is_min = ICmp(bcx,
                              llvm::IntEQ,
                              lhs,
                              C_integral(llty, min, true),
                              debug_loc);
            with_cond(bcx, is_min, |bcx| {
                controlflow::trans_fail(bcx, call_info, InternedString::new(overflow_text))
            })
        })
    } else {
        bcx
    }
}
/// Emits a call to `llfn`: an `invoke` instruction (with an unwind edge to
/// this function's landing pad) when unwinding must be intercepted, or a
/// plain `call` otherwise. Returns the call result and the block in which
/// translation should continue.
pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                          llfn: ValueRef,
                          llargs: &[ValueRef],
                          debug_loc: DebugLoc)
                          -> (ValueRef, Block<'blk, 'tcx>) {
    let _icx = push_ctxt("invoke_");
    if bcx.unreachable.get() {
        // Dead code: hand back a dummy value without emitting anything.
        return (C_null(Type::i8(bcx.ccx())), bcx);
    }

    if let Some(id) = bcx.opt_node_id {
        debug!("invoke at {}", bcx.tcx().map.node_to_string(id));
    } else {
        debug!("invoke at ???");
    }

    if !need_invoke(bcx) {
        debug!("calling {:?} at {:?}", Value(llfn), bcx.llbb);
        for &llarg in llargs {
            debug!("arg: {:?}", Value(llarg));
        }
        let llresult = Call(bcx, llfn, &llargs[..], debug_loc);
        (llresult, bcx)
    } else {
        debug!("invoking {:?} at {:?}", Value(llfn), bcx.llbb);
        for &llarg in llargs {
            debug!("arg: {:?}", Value(llarg));
        }
        // Normal control flow resumes in a fresh block; unwinding goes to the
        // function's landing pad.
        let normal_bcx = bcx.fcx.new_temp_block("normal-return");
        let landing_pad = bcx.fcx.get_landing_pad();
        let llresult = Invoke(bcx,
                              llfn,
                              &llargs[..],
                              normal_bcx.llbb,
                              landing_pad,
                              debug_loc);
        (llresult, normal_bcx)
    }
}
/// Returns whether this session's target will use SEH-based unwinding.
///
/// This is only true for MSVC targets, and even then the 64-bit MSVC target
/// currently uses SEH-ish unwinding with DWARF info tables to the side (same as
/// 64-bit MinGW) instead of "full SEH".
pub fn wants_msvc_seh(sess: &Session) -> bool {
    let target_options = &sess.target.target.options;
    target_options.is_like_msvc
}
/// True when a plain `call` can be emitted instead of an `invoke`: either
/// landing pads are disabled for this session, or we are already inside one.
pub fn avoid_invoke(bcx: Block) -> bool {
    if bcx.sess().no_landing_pads() {
        true
    } else {
        bcx.lpad().is_some()
    }
}
/// True when the call being emitted must be an `invoke` so that unwinding is
/// routed through this function's cleanup code.
pub fn need_invoke(bcx: Block) -> bool {
    !avoid_invoke(bcx) && bcx.fcx.needs_invoke()
}
/// Loads the value behind `v` when `t` is represented as an immediate SSA
/// value; otherwise returns the pointer unchanged.
pub fn load_if_immediate<'blk, 'tcx>(cx: Block<'blk, 'tcx>, v: ValueRef, t: Ty<'tcx>) -> ValueRef {
    let _icx = push_ctxt("load_if_immediate");
    if type_is_immediate(cx.ccx(), t) {
        load_ty(cx, v, t)
    } else {
        v
    }
}
/// Helper for loading values from memory. Does the necessary conversion if the in-memory type
/// differs from the type used for SSA values. Also handles various special cases where the type
/// gives us better information about what we are loading.
pub fn load_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, ptr: ValueRef, t: Ty<'tcx>) -> ValueRef {
    if !cx.unreachable.get() {
        load_ty_builder(&B(cx), ptr, t)
    } else {
        // Dead code: any well-typed undef will do.
        C_undef(type_of::type_of(cx.ccx(), t))
    }
}
/// Builder-level implementation of `load_ty`: loads a value of type `t` from
/// `ptr`, attaching range/nonnull assertions where the type guarantees them,
/// and short-circuiting loads from constant globals to their initializers.
pub fn load_ty_builder<'a, 'tcx>(b: &Builder<'a, 'tcx>, ptr: ValueRef, t: Ty<'tcx>) -> ValueRef {
    let ccx = b.ccx;
    if type_is_zero_size(ccx, t) {
        // Nothing to load; produce an undef of the right type.
        return C_undef(type_of::type_of(ccx, t));
    }

    unsafe {
        // If `ptr` is a constant global, skip the load entirely and use its
        // initializer value.
        let global = llvm::LLVMIsAGlobalVariable(ptr);
        if !global.is_null() && llvm::LLVMIsGlobalConstant(global) == llvm::True {
            let val = llvm::LLVMGetInitializer(global);
            if !val.is_null() {
                if t.is_bool() {
                    // In-memory bools are i8; the immediate form is i1.
                    return llvm::LLVMConstTrunc(val, Type::i1(ccx).to_ref());
                }
                return val;
            }
        }
    }

    if t.is_bool() {
        // A stored bool is always 0 or 1; assert the range and narrow to i1.
        b.trunc(b.load_range_assert(ptr, 0, 2, llvm::False), Type::i1(ccx))
    } else if t.is_char() {
        // a char is a Unicode codepoint, and so takes values from 0
        // to 0x10FFFF inclusive only.
        b.load_range_assert(ptr, 0, 0x10FFFF + 1, llvm::False)
    } else if (t.is_region_ptr() || t.is_unique()) &&
              !common::type_is_fat_ptr(ccx.tcx(), t) {
        // Thin references and Box pointers are never null.
        b.load_nonnull(ptr)
    } else {
        b.load(ptr)
    }
}
/// Helper for storing values in memory. Does the necessary conversion if the in-memory type
/// differs from the type used for SSA values.
pub fn store_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, v: ValueRef, dst: ValueRef, t: Ty<'tcx>) {
    if cx.unreachable.get() {
        return;
    }

    debug!("store_ty: {:?} : {:?} <- {:?}", Value(dst), t, Value(v));

    if common::type_is_fat_ptr(cx.tcx(), t) {
        // A fat pointer is stored as its two components.
        let data = ExtractValue(cx, v, abi::FAT_PTR_ADDR);
        let meta = ExtractValue(cx, v, abi::FAT_PTR_EXTRA);
        Store(cx, data, expr::get_dataptr(cx, dst));
        Store(cx, meta, expr::get_meta(cx, dst));
    } else {
        // `from_immediate` widens i1 bools to their in-memory i8 form.
        Store(cx, from_immediate(cx, v), dst);
    }
}
/// Stores the two components of a fat pointer into the fat-pointer slot `dst`.
pub fn store_fat_ptr<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                 data: ValueRef,
                                 extra: ValueRef,
                                 dst: ValueRef,
                                 _ty: Ty<'tcx>) {
    // FIXME: emit metadata
    let data_slot = expr::get_dataptr(cx, dst);
    Store(cx, data, data_slot);
    let meta_slot = expr::get_meta(cx, dst);
    Store(cx, extra, meta_slot);
}
/// Loads the data pointer and the extra info of a fat pointer from `src`.
pub fn load_fat_ptr<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                src: ValueRef,
                                _ty: Ty<'tcx>)
                                -> (ValueRef, ValueRef) {
    // FIXME: emit metadata
    let data = Load(cx, expr::get_dataptr(cx, src));
    let meta = Load(cx, expr::get_meta(cx, src));
    (data, meta)
}
/// Converts an immediate SSA value into its in-memory form; in practice this
/// widens `i1` booleans to `i8`, and leaves everything else untouched.
pub fn from_immediate(bcx: Block, val: ValueRef) -> ValueRef {
    if val_ty(val) != Type::i1(bcx.ccx()) {
        val
    } else {
        ZExt(bcx, val, Type::i8(bcx.ccx()))
    }
}
/// Converts an in-memory value to its immediate SSA form; in practice this
/// narrows stored `bool`s back down to `i1`, and leaves everything else
/// untouched.
pub fn to_immediate(bcx: Block, val: ValueRef, ty: Ty) -> ValueRef {
    match ty.is_bool() {
        true => Trunc(bcx, val, Type::i1(bcx.ccx())),
        false => val,
    }
}
/// Translates a `let` binding by delegating to `_match::store_local`, which
/// matches the initializer (if any) against the binding pattern.
pub fn init_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, local: &hir::Local) -> Block<'blk, 'tcx> {
    debug!("init_local(bcx={}, local.id={})", bcx.to_str(), local.id);
    let _indenter = indenter();
    let _icx = push_ctxt("init_local");
    _match::store_local(bcx, local)
}
/// Wraps a raw LLVM basic block in a `Block` (the second argument `None` is
/// presumably the optional associated node id — see `BlockS::new`).
pub fn raw_block<'blk, 'tcx>(fcx: &'blk FunctionContext<'blk, 'tcx>,
                             llbb: BasicBlockRef)
                             -> Block<'blk, 'tcx> {
    common::BlockS::new(llbb, None, fcx)
}
/// Emits `if val { f(...) }`: branches on `val`, runs `f` in a fresh block,
/// and rejoins afterwards. Conditions that are constant-false (or blocks that
/// are unreachable) are folded away without emitting anything.
pub fn with_cond<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, val: ValueRef, f: F) -> Block<'blk, 'tcx>
    where F: FnOnce(Block<'blk, 'tcx>) -> Block<'blk, 'tcx>
{
    let _icx = push_ctxt("with_cond");

    let fold_away = bcx.unreachable.get() || common::const_to_opt_uint(val) == Some(0);
    if fold_away {
        return bcx;
    }

    let fcx = bcx.fcx;
    let next_cx = fcx.new_temp_block("next");
    let cond_cx = fcx.new_temp_block("cond");
    CondBr(bcx, val, cond_cx.llbb, next_cx.llbb, DebugLoc::None);
    let after_cx = f(cond_cx);
    if !after_cx.terminated.get() {
        // `f` fell off the end of its block: branch back to the join point.
        Br(after_cx, next_cx.llbb, DebugLoc::None);
    }
    next_cx
}
/// Which end of an alloca's live range a lifetime intrinsic marks.
enum Lifetime { Start, End }
// If LLVM lifetime intrinsic support is enabled (i.e. optimizations
// on), and `ptr` is nonzero-sized, then extracts the size of `ptr`
// and the intrinsic for `lt` and passes them to `emit`, which is in
// charge of generating code to call the passed intrinsic on whatever
// block of generated code is targeted for the intrinsic.
//
// If LLVM lifetime intrinsic support is disabled (i.e. optimizations
// off) or `ptr` is zero-sized, then no-op (does not call `emit`).
fn core_lifetime_emit<'blk, 'tcx, F>(ccx: &'blk CrateContext<'blk, 'tcx>,
                                     ptr: ValueRef,
                                     lt: Lifetime,
                                     emit: F)
    where F: FnOnce(&'blk CrateContext<'blk, 'tcx>, machine::llsize, ValueRef)
{
    // Lifetime markers are only useful to the optimizer; skip them at -O0.
    if ccx.sess().opts.optimize == config::OptLevel::No {
        return;
    }

    let _icx = push_ctxt(match lt {
        Lifetime::Start => "lifetime_start",
        Lifetime::End => "lifetime_end"
    });

    // Zero-sized allocas have no live range worth marking.
    let size = machine::llsize_of_alloc(ccx, val_ty(ptr).element_type());
    if size == 0 {
        return;
    }

    let lifetime_intrinsic = ccx.get_intrinsic(match lt {
        Lifetime::Start => "llvm.lifetime.start",
        Lifetime::End => "llvm.lifetime.end"
    });
    emit(ccx, size, lifetime_intrinsic)
}
/// Emits an `llvm.lifetime.start` for the alloca `ptr` (no-op when lifetime
/// intrinsics are disabled or `ptr` is zero-sized).
pub fn call_lifetime_start(cx: Block, ptr: ValueRef) {
    core_lifetime_emit(cx.ccx(), ptr, Lifetime::Start, |ccx, size, lifetime_start| {
        // The intrinsic takes (size-in-bytes, i8*).
        let i8_ptr = PointerCast(cx, ptr, Type::i8p(ccx));
        let args = [C_u64(ccx, size), i8_ptr];
        Call(cx, lifetime_start, &args, DebugLoc::None);
    })
}
/// Emits an `llvm.lifetime.end` for the alloca `ptr` (no-op when lifetime
/// intrinsics are disabled or `ptr` is zero-sized).
pub fn call_lifetime_end(cx: Block, ptr: ValueRef) {
    core_lifetime_emit(cx.ccx(), ptr, Lifetime::End, |ccx, size, lifetime_end| {
        // The intrinsic takes (size-in-bytes, i8*).
        let i8_ptr = PointerCast(cx, ptr, Type::i8p(ccx));
        let args = [C_u64(ccx, size), i8_ptr];
        Call(cx, lifetime_end, &args, DebugLoc::None);
    })
}
// Generates code for resumption of unwind at the end of a landing pad.
pub fn trans_unwind_resume(bcx: Block, lpval: ValueRef) {
    if bcx.sess().target.target.options.custom_unwind_resume {
        // The target routes resumption through its own `eh_unwind_resume`
        // function instead of the LLVM `resume` instruction; pass it the
        // exception pointer extracted from the landing-pad value.
        let exc_ptr = ExtractValue(bcx, lpval, 0);
        bcx.fcx.eh_unwind_resume()
           .call(bcx, DebugLoc::None, ArgVals(&[exc_ptr]), None);
    } else {
        Resume(bcx, lpval);
    }
}
/// Emits a call to the target-appropriate `llvm.memcpy` intrinsic, copying
/// `n_bytes` bytes from `src` to `dst` with the given alignment.
pub fn call_memcpy<'bcx, 'tcx>(b: &Builder<'bcx, 'tcx>,
                               dst: ValueRef,
                               src: ValueRef,
                               n_bytes: ValueRef,
                               align: u32) {
    let _icx = push_ctxt("call_memcpy");
    let ccx = b.ccx;

    // The intrinsic name is parameterized by the target's pointer width.
    let ptr_width = &ccx.sess().target.target.target_pointer_width[..];
    let key = format!("llvm.memcpy.p0i8.p0i8.i{}", ptr_width);
    let memcpy = ccx.get_intrinsic(&key);

    let args = [b.pointercast(dst, Type::i8p(ccx)),
                b.pointercast(src, Type::i8p(ccx)),
                b.intcast(n_bytes, ccx.int_type()),
                C_i32(ccx, align as i32),
                C_bool(ccx, false)]; // non-volatile copy
    b.call(memcpy, &args, None);
}
/// Copies a value of type `t` from `src` to `dst`, picking an appropriate
/// strategy: memcpy for structural types, component-wise copy for fat
/// pointers, and a plain load/store for everything else.
pub fn memcpy_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, dst: ValueRef, src: ValueRef, t: Ty<'tcx>) {
    let _icx = push_ctxt("memcpy_ty");
    let ccx = bcx.ccx();

    if bcx.unreachable.get() || type_is_zero_size(ccx, t) {
        return;
    }

    if t.is_structural() {
        let llty = type_of::type_of(ccx, t);
        call_memcpy(&B(bcx),
                    dst,
                    src,
                    llsize_of(ccx, llty),
                    type_of::align_of(ccx, t) as u32);
    } else if common::type_is_fat_ptr(bcx.tcx(), t) {
        let (data, extra) = load_fat_ptr(bcx, src, t);
        store_fat_ptr(bcx, data, extra, dst, t);
    } else {
        let loaded = load_ty(bcx, src, t);
        store_ty(bcx, loaded, dst, t);
    }
}
/// Overwrites the memory at `llptr` (holding a value of type `t`) with the
/// `DTOR_DONE` fill pattern, marking it as already dropped.
pub fn drop_done_fill_mem<'blk, 'tcx>(cx: Block<'blk, 'tcx>, llptr: ValueRef, t: Ty<'tcx>) {
    if cx.unreachable.get() {
        return;
    }
    let _icx = push_ctxt("drop_done_fill_mem");
    memfill(&B(cx), llptr, t, adt::DTOR_DONE);
}
/// Zero-fills the memory at `llptr`, which holds a value of type `t`.
pub fn init_zero_mem<'blk, 'tcx>(cx: Block<'blk, 'tcx>, llptr: ValueRef, t: Ty<'tcx>) {
    if cx.unreachable.get() {
        return;
    }
    let _icx = push_ctxt("init_zero_mem");
    memfill(&B(cx), llptr, t, 0);
}
// Always use this function instead of storing a constant byte to the memory
// in question. e.g. if you store a zero constant, LLVM will drown in vreg
// allocation for large data structures, and the generated code will be
// awful. (A telltale sign of this is large quantities of
// `mov [byte ptr foo],0` in the generated code.)
fn memfill<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>, byte: u8) {
    let _icx = push_ctxt("memfill");
    let ccx = b.ccx;
    let llty = type_of::type_of(ccx, ty);

    // memset operates on i8*; size and alignment come from the LLVM type.
    let fill_ptr = b.pointercast(llptr, Type::i8(ccx).ptr_to());
    let fill_byte = C_u8(ccx, byte);
    let fill_size = machine::llsize_of(ccx, llty);
    let fill_align = C_i32(ccx, type_of::align_of(ccx, ty) as i32);
    call_memset(b, fill_ptr, fill_byte, fill_size, fill_align, false);
}
/// Emits a call to the target-appropriate `llvm.memset` intrinsic.
pub fn call_memset<'bcx, 'tcx>(b: &Builder<'bcx, 'tcx>,
                               ptr: ValueRef,
                               fill_byte: ValueRef,
                               size: ValueRef,
                               align: ValueRef,
                               volatile: bool) {
    let ccx = b.ccx;
    // The intrinsic name is parameterized by the target's pointer width.
    let ptr_width = &ccx.sess().target.target.target_pointer_width[..];
    let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
    let llintrinsicfn = ccx.get_intrinsic(&intrinsic_key);
    let args = [ptr, fill_byte, size, align, C_bool(ccx, volatile)];
    b.call(llintrinsicfn, &args, None);
}
/// In general, when we create a scratch value in an alloca, the
/// creator may not know if the block (that initializes the scratch
/// with the desired value) actually dominates the cleanup associated
/// with the scratch value.
///
/// To deal with this, when we do an alloca (at the *start* of whole
/// function body), we optionally can also set the associated
/// dropped-flag state of the alloca to "dropped."
#[derive(Copy, Clone, Debug)]
pub enum InitAlloca {
    /// Indicates that the state should have its associated drop flag
    /// set to "dropped" at the point of allocation.
    Dropped,
    /// Indicates the value of the associated drop flag is irrelevant.
    /// The embedded string literal is a programmer provided argument
    /// for why. This is a safeguard forcing compiler devs to
    /// document; it might be a good idea to also emit this as a
    /// comment with the alloca itself when emitting LLVM output.ll.
    Uninit(&'static str),
}
/// Allocates a stack slot for a value of type `t`, leaving the associated
/// drop-flag state unspecified (see `InitAlloca::Uninit`).
pub fn alloc_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                            t: Ty<'tcx>,
                            name: &str) -> ValueRef {
    // pnkfelix: I do not know why alloc_ty meets the assumptions for
    // passing Uninit, but it was never needed (even back when we had
    // the original boolean `zero` flag on `lvalue_scratch_datum`).
    alloc_ty_init(bcx, t, InitAlloca::Uninit("all alloc_ty are uninit"), name)
}
/// This variant of `fn alloc_ty` does not necessarily assume that the
/// alloca should be created with no initial value. Instead the caller
/// controls that assumption via the `init` flag.
///
/// Note that if the alloca *is* initialized via `init`, then we will
/// also inject an `llvm.lifetime.start` before that initialization
/// occurs, and thus callers should not call_lifetime_start
/// themselves. But if `init` says "uninitialized", then callers are
/// in charge of choosing where to call_lifetime_start and
/// subsequently populate the alloca.
///
/// (See related discussion on PR #30823.)
pub fn alloc_ty_init<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 t: Ty<'tcx>,
                                 init: InitAlloca,
                                 name: &str) -> ValueRef {
    let _icx = push_ctxt("alloc_ty");
    let ccx = bcx.ccx();
    // Monomorphization must already have replaced all type parameters.
    assert!(!t.has_param_types());
    match init {
        InitAlloca::Dropped => alloca_dropped(bcx, t, name),
        InitAlloca::Uninit(_) => alloca(bcx, type_of::type_of(ccx, t), name),
    }
}
/// Allocates a stack slot for `ty` and immediately fills it with the
/// `DTOR_DONE` pattern so it starts life in the "already dropped" state.
/// Also emits the matching `llvm.lifetime.start` at the alloca insertion
/// point.
pub fn alloca_dropped<'blk, 'tcx>(cx: Block<'blk, 'tcx>, ty: Ty<'tcx>, name: &str) -> ValueRef {
    let _icx = push_ctxt("alloca_dropped");
    let llty = type_of::type_of(cx.ccx(), ty);
    if cx.unreachable.get() {
        // Dead code: return an undef pointer without emitting anything.
        unsafe { return llvm::LLVMGetUndef(llty.ptr_to().to_ref()); }
    }
    let p = alloca(cx, llty, name);
    let b = cx.fcx.ccx.builder();
    // Position at the alloca insertion point so the lifetime marker and the
    // fill are emitted alongside the allocas themselves.
    b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());

    // This is just like `call_lifetime_start` (but latter expects a
    // Block, which we do not have for `alloca_insert_pt`).
    core_lifetime_emit(cx.ccx(), p, Lifetime::Start, |ccx, size, lifetime_start| {
        let ptr = b.pointercast(p, Type::i8p(ccx));
        b.call(lifetime_start, &[C_u64(ccx, size), ptr], None);
    });

    memfill(&b, p, ty, adt::DTOR_DONE);
    p
}
/// Emits an `alloca` of LLVM type `ty` named `name` in the current function.
/// In unreachable code, returns an undef pointer without emitting anything.
pub fn alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
    let _icx = push_ctxt("alloca");
    if cx.unreachable.get() {
        return unsafe { llvm::LLVMGetUndef(ty.ptr_to().to_ref()) };
    }
    // Clear the debug location before emitting the alloca.
    DebugLoc::None.apply(cx.fcx);
    Alloca(cx, ty, name)
}
/// Sets the LLVM value name of `val`. Panics if `name` contains an interior
/// NUL byte.
pub fn set_value_name(val: ValueRef, name: &str) {
    let name = CString::new(name).unwrap();
    // SAFETY-relevant: `name` outlives the call and is NUL-terminated.
    unsafe {
        llvm::LLVMSetValueName(val, name.as_ptr());
    }
}
/// HIR visitor that records whether a `return` expression occurs anywhere in
/// the visited expression tree.
struct FindNestedReturn {
    // Set to true once an `ExprRet` is encountered.
    found: bool,
}

impl FindNestedReturn {
    fn new() -> FindNestedReturn {
        FindNestedReturn {
            found: false,
        }
    }
}

impl<'v> Visitor<'v> for FindNestedReturn {
    fn visit_expr(&mut self, e: &hir::Expr) {
        match e.node {
            hir::ExprRet(..) => {
                // Found one; no need to recurse into its operand.
                self.found = true;
            }
            _ => intravisit::walk_expr(self, e),
        }
    }
}
/// Finds the body block of the fn/method/closure with HIR id `id` and builds
/// a control-flow graph for it. Items with no body to analyze (enum
/// variants, tuple-struct constructors, generated glue/shims) yield
/// `(DUMMY_NODE_ID, None)`.
fn build_cfg<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                       id: ast::NodeId)
                       -> (ast::NodeId, Option<cfg::CFG>) {
    let blk = match tcx.map.find(id) {
        Some(hir_map::NodeItem(i)) => {
            match i.node {
                hir::ItemFn(_, _, _, _, _, ref blk) => {
                    blk
                }
                _ => bug!("unexpected item variant in has_nested_returns"),
            }
        }
        Some(hir_map::NodeTraitItem(trait_item)) => {
            match trait_item.node {
                // Only provided methods have a body here.
                hir::MethodTraitItem(_, Some(ref body)) => body,
                _ => {
                    bug!("unexpected variant: trait item other than a provided method in \
                          has_nested_returns")
                }
            }
        }
        Some(hir_map::NodeImplItem(impl_item)) => {
            match impl_item.node {
                hir::ImplItemKind::Method(_, ref body) => body,
                _ => {
                    bug!("unexpected variant: non-method impl item in has_nested_returns")
                }
            }
        }
        Some(hir_map::NodeExpr(e)) => {
            match e.node {
                hir::ExprClosure(_, _, ref blk, _) => blk,
                _ => bug!("unexpected expr variant in has_nested_returns"),
            }
        }
        Some(hir_map::NodeVariant(..)) |
        Some(hir_map::NodeStructCtor(..)) => return (ast::DUMMY_NODE_ID, None),

        // glue, shims, etc
        None if id == ast::DUMMY_NODE_ID => return (ast::DUMMY_NODE_ID, None),

        _ => bug!("unexpected variant in has_nested_returns: {}",
                  tcx.node_path_str(id)),
    };

    (blk.id, Some(cfg::CFG::new(tcx, blk)))
}
// Checks for the presence of "nested returns" in a function.
// Nested returns are when the inner expression of a return expression
// (the 'expr' in 'return expr') contains a return expression. Only cases
// where the outer return is actually reachable are considered. Implicit
// returns from the end of blocks are considered as well.
//
// This check is needed to handle the case where the inner expression is
// part of a larger expression that may have already partially-filled the
// return slot alloca. This can cause errors related to clean-up due to
// the clobbering of the existing value in the return slot.
fn has_nested_returns(tcx: TyCtxt, cfg: &cfg::CFG, blk_id: ast::NodeId) -> bool {
    // Traverse only CFG-reachable nodes so unreachable `return`s are ignored.
    for index in cfg.graph.depth_traverse(cfg.entry) {
        let n = cfg.graph.node_data(index);
        match tcx.map.find(n.id()) {
            Some(hir_map::NodeExpr(ex)) => {
                // Explicit `return expr`: search `expr` for further returns.
                if let hir::ExprRet(Some(ref ret_expr)) = ex.node {
                    let mut visitor = FindNestedReturn::new();
                    intravisit::walk_expr(&mut visitor, &ret_expr);
                    if visitor.found {
                        return true;
                    }
                }
            }
            Some(hir_map::NodeBlock(blk)) if blk.id == blk_id => {
                // The function body's tail expression is an implicit return.
                let mut visitor = FindNestedReturn::new();
                walk_list!(&mut visitor, visit_expr, &blk.expr);
                if visitor.found {
                    return true;
                }
            }
            _ => {}
        }
    }
    return false;
}
impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
/// Create a function context for the given function.
/// Beware that you must call `fcx.init` or `fcx.bind_args`
/// before doing anything with the returned function context.
pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>,
           llfndecl: ValueRef,
           fn_ty: FnType,
           definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi)>,
           block_arena: &'blk TypedArena<common::BlockS<'blk, 'tcx>>)
           -> FunctionContext<'blk, 'tcx> {
    let (param_substs, def_id) = match definition {
        Some((instance, _, _)) => {
            common::validate_substs(instance.substs);
            (instance.substs, Some(instance.def))
        }
        None => (ccx.tcx().mk_substs(Substs::empty()), None)
    };

    // Map the definition back to local HIR ids where possible. `inlined_did`
    // resolves through cross-crate inlined copies; `local_id` is the direct
    // local mapping.
    let inlined_did = def_id.and_then(|def_id| inline::get_local_instance(ccx, def_id));
    let inlined_id = inlined_did.and_then(|id| ccx.tcx().map.as_local_node_id(id));
    let local_id = def_id.and_then(|id| ccx.tcx().map.as_local_node_id(id));

    debug!("FunctionContext::new({})",
           definition.map_or(String::new(), |d| d.0.to_string()));

    // The nested-returns analysis runs on the CFG of the HIR body, when one
    // is available.
    let cfg = inlined_id.map(|id| build_cfg(ccx.tcx(), id));
    let nested_returns = if let Some((blk_id, Some(ref cfg))) = cfg {
        has_nested_returns(ccx.tcx(), cfg, blk_id)
    } else {
        false
    };

    // Decide (a) whether to translate through MIR and (b) whether to emit
    // debug info, based on `-Z orbit` plus per-item attributes. The xor
    // flips the session default when the opt-in/opt-out attribute is present.
    let check_attrs = |attrs: &[ast::Attribute]| {
        let default_to_mir = ccx.sess().opts.debugging_opts.orbit;
        let invert = if default_to_mir { "rustc_no_mir" } else { "rustc_mir" };
        (default_to_mir ^ attrs.iter().any(|item| item.check_name(invert)),
         attrs.iter().any(|item| item.check_name("no_debug")))
    };

    let (use_mir, no_debug) = if let Some(id) = local_id {
        check_attrs(ccx.tcx().map.attrs(id))
    } else if let Some(def_id) = def_id {
        check_attrs(&ccx.sess().cstore.item_attrs(def_id))
    } else {
        check_attrs(&[])
    };

    let mir = if use_mir {
        def_id.and_then(|id| ccx.get_mir(id))
    } else {
        None
    };

    let debug_context = if let (false, Some(definition)) = (no_debug, definition) {
        let (instance, sig, abi) = definition;
        debuginfo::create_function_debug_context(ccx, instance, sig, abi, llfndecl)
    } else {
        debuginfo::empty_function_debug_context(ccx)
    };

    FunctionContext {
        // The retslot indirection is only needed by the old (non-MIR) trans
        // when the function contains nested returns.
        needs_ret_allocas: nested_returns && mir.is_none(),
        mir: mir,
        llfn: llfndecl,
        llretslotptr: Cell::new(None),
        param_env: ccx.tcx().empty_parameter_environment(),
        alloca_insert_pt: Cell::new(None),
        llreturn: Cell::new(None),
        landingpad_alloca: Cell::new(None),
        lllocals: RefCell::new(NodeMap()),
        llupvars: RefCell::new(NodeMap()),
        lldropflag_hints: RefCell::new(DropFlagHintsMap::new()),
        fn_ty: fn_ty,
        param_substs: param_substs,
        span: inlined_id.and_then(|id| ccx.tcx().map.opt_span(id)),
        block_arena: block_arena,
        lpad_arena: TypedArena::new(),
        ccx: ccx,
        debug_context: debug_context,
        scopes: RefCell::new(Vec::new()),
        cfg: cfg.and_then(|(_, cfg)| cfg)
    }
}
/// Performs setup on a newly created function, creating the entry
/// scope block and allocating space for the return pointer.
pub fn init(&'blk self, skip_retptr: bool, fn_did: Option<DefId>)
            -> Block<'blk, 'tcx> {
    let entry_bcx = self.new_temp_block("entry-block");

    // Use a dummy instruction as the insertion point for all allocas.
    // This is later removed in FunctionContext::cleanup.
    self.alloca_insert_pt.set(Some(unsafe {
        Load(entry_bcx, C_null(Type::i8p(self.ccx)));
        llvm::LLVMGetFirstInstruction(entry_bcx.llbb)
    }));

    if !self.fn_ty.ret.is_ignore() && !skip_retptr {
        // We normally allocate the llretslotptr, unless we
        // have been instructed to skip it for immediate return
        // values, or there is nothing to return at all.

        // We create an alloca to hold a pointer of type `ret.original_ty`
        // which will hold the pointer to the right alloca which has the
        // final ret value
        let llty = self.fn_ty.ret.memory_ty(self.ccx);
        let slot = if self.needs_ret_allocas {
            // Let's create the stack slot
            let slot = AllocaFcx(self, llty.ptr_to(), "llretslotptr");

            // and if we're using an out pointer, then store that in our newly made slot
            if self.fn_ty.ret.is_indirect() {
                let outptr = get_param(self.llfn, 0);

                let b = self.ccx.builder();
                b.position_before(self.alloca_insert_pt.get().unwrap());
                b.store(outptr, slot);
            }

            slot
        } else {
            // But if there are no nested returns, we skip the indirection
            // and have a single retslot
            if self.fn_ty.ret.is_indirect() {
                get_param(self.llfn, 0)
            } else {
                AllocaFcx(self, llty, "sret_slot")
            }
        };

        self.llretslotptr.set(Some(slot));
    }

    // Create the drop-flag hints for every unfragmented path in the function.
    let tcx = self.ccx.tcx();
    let tables = tcx.tables.borrow();
    let mut hints = self.lldropflag_hints.borrow_mut();
    let fragment_infos = tcx.fragment_infos.borrow();

    // Intern table for drop-flag hint datums.
    let mut seen = HashMap::new();

    let fragment_infos = fn_did.and_then(|did| fragment_infos.get(&did));
    if let Some(fragment_infos) = fragment_infos {
        for &info in fragment_infos {

            // Allocates and initializes (to DTOR_NEEDED_HINT) a hint byte in
            // the entry block for the variable `id`.
            let make_datum = |id| {
                let init_val = C_u8(self.ccx, adt::DTOR_NEEDED_HINT);
                let llname = &format!("dropflag_hint_{}", id);
                debug!("adding hint {}", llname);
                let ty = tcx.types.u8;
                let ptr = alloc_ty(entry_bcx, ty, llname);
                Store(entry_bcx, init_val, ptr);
                let flag = datum::Lvalue::new_dropflag_hint("FunctionContext::init");
                datum::Datum::new(ptr, ty, flag)
            };

            let (var, datum) = match info {
                ty::FragmentInfo::Moved { var, .. } |
                ty::FragmentInfo::Assigned { var, .. } => {
                    // Create at most one hint per variable (via `seen`), and
                    // only for variables whose type actually needs dropping.
                    let opt_datum = seen.get(&var).cloned().unwrap_or_else(|| {
                        let ty = tables.node_types[&var];
                        if self.type_needs_drop(ty) {
                            let datum = make_datum(var);
                            seen.insert(var, Some(datum.clone()));
                            Some(datum)
                        } else {
                            // No drop call needed, so we don't need a dropflag hint
                            None
                        }
                    });
                    if let Some(datum) = opt_datum {
                        (var, datum)
                    } else {
                        continue
                    }
                }
            };

            // Index the hint by the expression that moves from / assigns to
            // the variable.
            match info {
                ty::FragmentInfo::Moved { move_expr: expr_id, .. } => {
                    debug!("FragmentInfo::Moved insert drop hint for {}", expr_id);
                    hints.insert(expr_id, DropHint::new(var, datum));
                }
                ty::FragmentInfo::Assigned { assignee_id: expr_id, .. } => {
                    debug!("FragmentInfo::Assigned insert drop hint for {}", expr_id);
                    hints.insert(expr_id, DropHint::new(var, datum));
                }
            }
        }
    }

    entry_bcx
}
/// Creates lvalue datums for each of the incoming function arguments,
/// matches all argument patterns against them to produce bindings,
/// and returns the entry block (see FunctionContext::init).
fn bind_args(&'blk self,
args: &[hir::Arg],
abi: Abi,
id: ast::NodeId,
closure_env: closure::ClosureEnv,
arg_scope: cleanup::CustomScopeIndex)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("FunctionContext::bind_args");
let fn_did = self.ccx.tcx().map.local_def_id(id);
let mut bcx = self.init(false, Some(fn_did));
let arg_scope_id = cleanup::CustomScope(arg_scope);
// `idx` walks the ABI-level argument descriptions in self.fn_ty.args;
// `llarg_idx` walks actual LLVM parameters, starting after the return
// pointer when the return is indirect.
let mut idx = 0;
let mut llarg_idx = self.fn_ty.ret.is_indirect() as usize;
let has_tupled_arg = match closure_env {
closure::ClosureEnv::NotClosure => abi == Abi::RustCall,
closure::ClosureEnv::Closure(..) => {
// Closures receive their environment as the first ABI argument;
// account for it in both index counters.
closure_env.load(bcx, arg_scope_id);
let env_arg = &self.fn_ty.args[idx];
idx += 1;
if env_arg.pad.is_some() {
llarg_idx += 1;
}
if !env_arg.is_ignore() {
llarg_idx += 1;
}
false
}
};
let tupled_arg_id = if has_tupled_arg {
args[args.len() - 1].id
} else {
ast::DUMMY_NODE_ID
};
// Return an array wrapping the ValueRefs that we get from `get_param` for
// each argument into datums.
//
// For certain mode/type combinations, the raw llarg values are passed
// by value. However, within the fn body itself, we want to always
// have all locals and arguments be by-ref so that we can cancel the
// cleanup and for better interaction with LLVM's debug info. So, if
// the argument would be passed by value, we store it into an alloca.
// This alloca should be optimized away by LLVM's mem-to-reg pass in
// the event it's not truly needed.
let uninit_reason = InitAlloca::Uninit("fn_arg populate dominates dtor");
for hir_arg in args {
let arg_ty = node_id_type(bcx, hir_arg.id);
let arg_datum = if hir_arg.id != tupled_arg_id {
let arg = &self.fn_ty.args[idx];
idx += 1;
if arg.is_indirect() && bcx.sess().opts.debuginfo != FullDebugInfo {
// Don't copy an indirect argument to an alloca, the caller
// already put it in a temporary alloca and gave it up, unless
// we emit extra-debug-info, which requires local allocas :(.
let llarg = get_param(self.llfn, llarg_idx as c_uint);
llarg_idx += 1;
self.schedule_lifetime_end(arg_scope_id, llarg);
self.schedule_drop_mem(arg_scope_id, llarg, arg_ty, None);
datum::Datum::new(llarg,
arg_ty,
datum::Lvalue::new("FunctionContext::bind_args"))
} else {
unpack_datum!(bcx, datum::lvalue_scratch_datum(bcx, arg_ty, "",
uninit_reason,
arg_scope_id, |bcx, dst| {
debug!("FunctionContext::bind_args: {:?}: {:?}", hir_arg, arg_ty);
let b = &bcx.build();
if common::type_is_fat_ptr(bcx.tcx(), arg_ty) {
// Fat pointers arrive as two ABI args: data ptr + metadata.
let meta = &self.fn_ty.args[idx];
idx += 1;
arg.store_fn_arg(b, &mut llarg_idx, expr::get_dataptr(bcx, dst));
meta.store_fn_arg(b, &mut llarg_idx, expr::get_meta(bcx, dst));
} else {
arg.store_fn_arg(b, &mut llarg_idx, dst);
}
bcx
}))
}
} else {
// FIXME(pcwalton): Reduce the amount of code bloat this is responsible for.
let tupled_arg_tys = match arg_ty.sty {
ty::TyTuple(ref tys) => tys,
_ => bug!("last argument of `rust-call` fn isn't a tuple?!")
};
unpack_datum!(bcx, datum::lvalue_scratch_datum(bcx,
arg_ty,
"tupled_args",
uninit_reason,
arg_scope_id,
|bcx, llval| {
debug!("FunctionContext::bind_args: tupled {:?}: {:?}", hir_arg, arg_ty);
// Store each tuple element into its own field of the scratch slot.
for (j, &tupled_arg_ty) in tupled_arg_tys.iter().enumerate() {
let dst = StructGEP(bcx, llval, j);
let arg = &self.fn_ty.args[idx];
idx += 1;
let b = &bcx.build();
if common::type_is_fat_ptr(bcx.tcx(), tupled_arg_ty) {
let meta = &self.fn_ty.args[idx];
idx += 1;
arg.store_fn_arg(b, &mut llarg_idx, expr::get_dataptr(bcx, dst));
meta.store_fn_arg(b, &mut llarg_idx, expr::get_meta(bcx, dst));
} else {
arg.store_fn_arg(b, &mut llarg_idx, dst);
}
}
bcx
}))
};
let pat = &hir_arg.pat;
bcx = if let Some(name) = simple_name(pat) {
// Generate nicer LLVM for the common case of fn a pattern
// like `x: T`
set_value_name(arg_datum.val, &bcx.name(name));
self.lllocals.borrow_mut().insert(pat.id, arg_datum);
bcx
} else {
// General path. Copy out the values that are used in the
// pattern.
_match::bind_irrefutable_pat(bcx, pat, arg_datum.match_input(), arg_scope_id)
};
debuginfo::create_argument_metadata(bcx, hir_arg);
}
bcx
}
/// Ties up the llstaticallocas -> llloadenv -> lltop edges,
/// and builds the return block.
pub fn finish(&'blk self, last_bcx: Block<'blk, 'tcx>,
              ret_debug_loc: DebugLoc) {
    let _icx = push_ctxt("FunctionContext::finish");
    // If a dedicated return block exists, branch to it (unless the last
    // block already ended with a terminator) and emit the return there;
    // otherwise the return is emitted directly into the last block.
    let ret_cx = if let Some(llreturn) = self.llreturn.get() {
        if !last_bcx.terminated.get() {
            Br(last_bcx, llreturn, DebugLoc::None);
        }
        raw_block(self, llreturn)
    } else {
        last_bcx
    };
    self.build_return_block(ret_cx, ret_debug_loc);
    DebugLoc::None.apply(self);
    self.cleanup();
}
// Builds the return block for a function.
//
// Emits the final `ret`/`ret void`, loading the value from the return
// slot when one exists and applying the ABI cast type if any.
pub fn build_return_block(&self, ret_cx: Block<'blk, 'tcx>,
ret_debug_location: DebugLoc) {
// No slot, unreachable exit, or a plain out-pointer return: `ret void`.
if self.llretslotptr.get().is_none() ||
ret_cx.unreachable.get() ||
(!self.needs_ret_allocas && self.fn_ty.ret.is_indirect()) {
return RetVoid(ret_cx, ret_debug_location);
}
// With nested-return allocas, llretslotptr is a pointer to the slot;
// otherwise it is the slot itself.
let retslot = if self.needs_ret_allocas {
Load(ret_cx, self.llretslotptr.get().unwrap())
} else {
self.llretslotptr.get().unwrap()
};
let retptr = Value(retslot);
let llty = self.fn_ty.ret.original_ty;
match (retptr.get_dominating_store(ret_cx), self.fn_ty.ret.cast) {
// If there's only a single store to the ret slot, we can directly return
// the value that was stored and omit the store and the alloca.
// However, we only want to do this when there is no cast needed.
(Some(s), None) => {
let mut retval = s.get_operand(0).unwrap().get();
s.erase_from_parent();
if retptr.has_no_uses() {
retptr.erase_from_parent();
}
if self.fn_ty.ret.is_indirect() {
Store(ret_cx, retval, get_param(self.llfn, 0));
RetVoid(ret_cx, ret_debug_location)
} else {
if llty == Type::i1(self.ccx) {
// The stored value may be wider than the declared i1
// return type; truncate before returning.
retval = Trunc(ret_cx, retval, llty);
}
Ret(ret_cx, retval, ret_debug_location)
}
}
(_, cast_ty) if self.fn_ty.ret.is_indirect() => {
// Otherwise, copy the return value to the ret slot.
assert_eq!(cast_ty, None);
let llsz = llsize_of(self.ccx, self.fn_ty.ret.ty);
let llalign = llalign_of_min(self.ccx, self.fn_ty.ret.ty);
call_memcpy(&B(ret_cx), get_param(self.llfn, 0),
retslot, llsz, llalign as u32);
RetVoid(ret_cx, ret_debug_location)
}
(_, Some(cast_ty)) => {
// The ABI wants the return value in a different (cast) type:
// reload the slot through a pointer of the cast type.
let load = Load(ret_cx, PointerCast(ret_cx, retslot, cast_ty.ptr_to()));
let llalign = llalign_of_min(self.ccx, self.fn_ty.ret.ty);
unsafe {
llvm::LLVMSetAlignment(load, llalign);
}
Ret(ret_cx, load, ret_debug_location)
}
(_, None) => {
let retval = if llty == Type::i1(self.ccx) {
// Load with a 0..2 range assertion, then truncate to i1.
let val = LoadRangeAssert(ret_cx, retslot, 0, 2, llvm::False);
Trunc(ret_cx, val, llty)
} else {
Load(ret_cx, retslot)
};
Ret(ret_cx, retval, ret_debug_location)
}
}
}
}
/// Builds an LLVM function out of a source function.
///
/// If the function closes over its environment a closure will be returned.
pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
decl: &hir::FnDecl,
body: &hir::Block,
llfndecl: ValueRef,
instance: Instance<'tcx>,
inlined_id: ast::NodeId,
sig: &ty::FnSig<'tcx>,
abi: Abi,
closure_env: closure::ClosureEnv) {
ccx.stats().n_closures.set(ccx.stats().n_closures.get() + 1);
if collector::collecting_debug_information(ccx.shared()) {
ccx.record_translation_item_as_generated(TransItem::Fn(instance));
}
let _icx = push_ctxt("trans_closure");
if !ccx.sess().no_landing_pads() {
attributes::emit_uwtable(llfndecl, true);
}
debug!("trans_closure(..., {})", instance);
let fn_ty = FnType::new(ccx, abi, sig, &[]);
let (arena, fcx): (TypedArena<_>, FunctionContext);
arena = TypedArena::new();
fcx = FunctionContext::new(ccx, llfndecl, fn_ty, Some((instance, sig, abi)), &arena);
// If a MIR body is available, translate from MIR and skip the AST path.
if fcx.mir.is_some() {
return mir::trans_mir(&fcx);
}
debuginfo::fill_scope_map_for_function(&fcx, decl, body, inlined_id);
// cleanup scope for the incoming arguments
let fn_cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(
ccx, inlined_id, body.span, true);
let arg_scope = fcx.push_custom_cleanup_scope_with_debug_loc(fn_cleanup_debug_loc);
// Set up arguments to the function.
debug!("trans_closure: function: {:?}", Value(fcx.llfn));
let bcx = fcx.bind_args(&decl.inputs, abi, inlined_id, closure_env, arg_scope);
// Up until here, IR instructions for this function have explicitly not been annotated with
// source code location, so we don't step into call setup code. From here on, source location
// emitting should be enabled.
debuginfo::start_emitting_source_locations(&fcx);
let dest = if fcx.fn_ty.ret.is_ignore() {
expr::Ignore
} else {
expr::SaveIn(fcx.get_ret_slot(bcx, "iret_slot"))
};
// This call to trans_block is the place where we bridge between
// translation calls that don't have a return value (trans_crate,
// trans_mod, trans_item, et cetera) and those that do
// (trans_block, trans_expr, et cetera).
let mut bcx = controlflow::trans_block(bcx, body, dest);
match dest {
expr::SaveIn(slot) if fcx.needs_ret_allocas => {
Store(bcx, slot, fcx.llretslotptr.get().unwrap());
}
_ => {}
}
match fcx.llreturn.get() {
Some(_) => {
Br(bcx, fcx.return_exit_block(), DebugLoc::None);
fcx.pop_custom_cleanup_scope(arg_scope);
}
None => {
// Microoptimization writ large: avoid creating a separate
// llreturn basic block
bcx = fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_scope);
}
};
// Put return block after all other blocks.
// This somewhat improves single-stepping experience in debugger.
unsafe {
let llreturn = fcx.llreturn.get();
if let Some(llreturn) = llreturn {
llvm::LLVMMoveBasicBlockAfter(llreturn, bcx.llbb);
}
}
// Insert the mandatory first few basic blocks before lltop.
fcx.finish(bcx, fn_cleanup_debug_loc.debug_loc());
}
/// Creates an LLVM function corresponding to a source language function.
pub fn trans_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                          decl: &hir::FnDecl,
                          body: &hir::Block,
                          llfndecl: ValueRef,
                          param_substs: &'tcx Substs<'tcx>,
                          id: ast::NodeId) {
    let _s = StatRecorder::new(ccx, ccx.tcx().node_path_str(id));
    debug!("trans_fn(param_substs={:?})", param_substs);
    let _icx = push_ctxt("trans_fn");
    // Items inlined from another crate keep their original DefId.
    let def_id = match ccx.external_srcs().borrow().get(&id) {
        Some(&external_did) => external_did,
        None => ccx.tcx().map.local_def_id(id),
    };
    // Monomorphize the declared type, then extract a normalized signature.
    let fn_ty = ccx.tcx().lookup_item_type(def_id).ty;
    let fn_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &fn_ty);
    let sig = ccx.tcx().erase_late_bound_regions(fn_ty.fn_sig());
    let sig = ccx.tcx().normalize_associated_type(&sig);
    let abi = fn_ty.fn_abi();
    trans_closure(ccx,
                  decl,
                  body,
                  llfndecl,
                  Instance::new(def_id, param_substs),
                  id,
                  &sig,
                  abi,
                  closure::ClosureEnv::NotClosure);
}
/// Translates a call to a tuple-struct or tuple-variant constructor:
/// evaluates `args` and writes them as the fields of a `result_ty` value
/// with discriminant `disr` into `dest` (or a temporary when ignored).
pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
ctor_ty: Ty<'tcx>,
disr: Disr,
args: CallArgs,
dest: expr::Dest,
debug_loc: DebugLoc)
-> Result<'blk, 'tcx> {
let ccx = bcx.fcx.ccx;
let sig = ccx.tcx().erase_late_bound_regions(&ctor_ty.fn_sig());
let sig = ccx.tcx().normalize_associated_type(&sig);
let result_ty = sig.output.unwrap();
// Get location to store the result. If the user does not care about
// the result, just make a stack slot
let llresult = match dest {
expr::SaveIn(d) => d,
expr::Ignore => {
if !type_is_zero_size(ccx, result_ty) {
let llresult = alloc_ty(bcx, result_ty, "constructor_result");
call_lifetime_start(bcx, llresult);
llresult
} else {
// Zero-size result: a typed undef pointer stands in for a slot.
C_undef(type_of::type_of(ccx, result_ty).ptr_to())
}
}
};
if !type_is_zero_size(ccx, result_ty) {
match args {
ArgExprs(exprs) => {
let fields = exprs.iter().map(|x| &**x).enumerate().collect::<Vec<_>>();
bcx = expr::trans_adt(bcx,
result_ty,
disr,
&fields[..],
None,
expr::SaveIn(llresult),
debug_loc);
}
_ => bug!("expected expr as arguments for variant/struct tuple constructor"),
}
} else {
// Just eval all the expressions (if any). Since expressions in Rust can have arbitrary
// contents, there could be side-effects we need from them.
match args {
ArgExprs(exprs) => {
for expr in exprs {
bcx = expr::trans_into(bcx, expr, expr::Ignore);
}
}
_ => (),
}
}
// If the caller doesn't care about the result
// drop the temporary we made
let bcx = match dest {
expr::SaveIn(_) => bcx,
expr::Ignore => {
let bcx = glue::drop_ty(bcx, llresult, result_ty, debug_loc);
if !type_is_zero_size(ccx, result_ty) {
call_lifetime_end(bcx, llresult);
}
bcx
}
};
Result::new(bcx, llresult)
}
/// Builds the body of a tuple-struct/tuple-variant constructor function:
/// stores each incoming ABI argument into the corresponding field of the
/// return slot and then sets the discriminant to `disr`.
pub fn trans_ctor_shim<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
ctor_id: ast::NodeId,
disr: Disr,
param_substs: &'tcx Substs<'tcx>,
llfndecl: ValueRef) {
let ctor_ty = ccx.tcx().node_id_to_type(ctor_id);
let ctor_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &ctor_ty);
let sig = ccx.tcx().erase_late_bound_regions(&ctor_ty.fn_sig());
let sig = ccx.tcx().normalize_associated_type(&sig);
let fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]);
let (arena, fcx): (TypedArena<_>, FunctionContext);
arena = TypedArena::new();
fcx = FunctionContext::new(ccx, llfndecl, fn_ty, None, &arena);
let bcx = fcx.init(false, None);
assert!(!fcx.needs_ret_allocas);
if !fcx.fn_ty.ret.is_ignore() {
let dest = fcx.get_ret_slot(bcx, "eret_slot");
let dest_val = adt::MaybeSizedValue::sized(dest); // Can return unsized value
let repr = adt::represent_type(ccx, sig.output.unwrap());
// `llarg_idx` skips the return pointer when the return is indirect;
// `arg_idx` walks the ABI-level argument descriptions.
let mut llarg_idx = fcx.fn_ty.ret.is_indirect() as usize;
let mut arg_idx = 0;
for (i, arg_ty) in sig.inputs.into_iter().enumerate() {
let lldestptr = adt::trans_field_ptr(bcx, &repr, dest_val, Disr::from(disr), i);
let arg = &fcx.fn_ty.args[arg_idx];
arg_idx += 1;
let b = &bcx.build();
if common::type_is_fat_ptr(bcx.tcx(), arg_ty) {
// Fat pointers arrive as two ABI args: data pointer + metadata.
let meta = &fcx.fn_ty.args[arg_idx];
arg_idx += 1;
arg.store_fn_arg(b, &mut llarg_idx, expr::get_dataptr(bcx, lldestptr));
meta.store_fn_arg(b, &mut llarg_idx, expr::get_meta(bcx, lldestptr));
} else {
arg.store_fn_arg(b, &mut llarg_idx, lldestptr);
}
}
adt::trans_set_discr(bcx, &repr, dest, disr);
}
fcx.finish(bcx, DebugLoc::None);
}
/// Emits the `variant_size_differences` lint (and the `print-enum-sizes`
/// diagnostics) for an enum item: computes per-variant sizes and warns when
/// the largest variant is more than three times the second largest.
fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &hir::EnumDef, sp: Span, id: ast::NodeId) {
let mut sizes = Vec::new(); // does no allocation if no pushes, thankfully
let print_info = ccx.sess().print_enum_sizes();
let levels = ccx.tcx().node_lint_levels.borrow();
let lint_id = lint::LintId::of(lint::builtin::VARIANT_SIZE_DIFFERENCES);
let lvlsrc = levels.get(&(id, lint_id));
let is_allow = lvlsrc.map_or(true, |&(lvl, _)| lvl == lint::Allow);
if is_allow && !print_info {
// we're not interested in anything here
return;
}
let ty = ccx.tcx().node_id_to_type(id);
let avar = adt::represent_type(ccx, ty);
match *avar {
adt::General(_, ref variants, _) => {
for var in variants {
let mut size = 0;
for field in var.fields.iter().skip(1) {
// skip the discriminant
size += llsize_of_real(ccx, sizing_type_of(ccx, *field));
}
sizes.push(size);
}
},
_ => { /* its size is either constant or unimportant */ }
}
// One pass tracking (largest size, second-largest size, index of largest).
let (largest, slargest, largest_index) = sizes.iter().enumerate().fold((0, 0, 0),
|(l, s, li), (idx, &size)|
if size > l {
(size, l, idx)
} else if size > s {
(l, size, li)
} else {
(l, s, li)
}
);
// FIXME(#30505) Should use logging for this.
if print_info {
let llty = type_of::sizing_type_of(ccx, ty);
let sess = &ccx.tcx().sess;
sess.span_note_without_error(sp,
&format!("total size: {} bytes", llsize_of_real(ccx, llty)));
match *avar {
adt::General(..) => {
for (i, var) in enum_def.variants.iter().enumerate() {
ccx.tcx()
.sess
.span_note_without_error(var.span,
&format!("variant data: {} bytes", sizes[i]));
}
}
_ => {}
}
}
// we only warn if the largest variant is at least thrice as large as
// the second-largest.
if !is_allow && largest > slargest * 3 && slargest > 0 {
// Use lint::raw_emit_lint rather than sess.add_lint because the lint-printing
// pass for the latter already ran.
lint::raw_struct_lint(&ccx.tcx().sess,
&ccx.tcx().sess.lint_store.borrow(),
lint::builtin::VARIANT_SIZE_DIFFERENCES,
*lvlsrc.unwrap(),
Some(sp),
&format!("enum variant is more than three times larger ({} bytes) \
than the next largest (ignoring padding)",
largest))
.span_note(enum_def.variants[largest_index].span,
"this variant is the largest")
.emit();
}
}
/// Maps an LLVM linkage name (as written in a `#[linkage = "..."]`
/// attribute) to the corresponding `Linkage` value, or `None` when the
/// name is unknown or unsupported.
pub fn llvm_linkage_by_name(name: &str) -> Option<Linkage> {
    // Use the names from src/llvm/docs/LangRef.rst here. Most types are only
    // applicable to variable declarations and may not really make sense for
    // Rust code in the first place but whitelist them anyway and trust that
    // the user knows what s/he's doing. Who knows, unanticipated use cases
    // may pop up in the future.
    //
    // ghost, dllimport, dllexport and linkonce_odr_autohide are not supported
    // and don't have to be, LLVM treats them as no-ops.
    let linkage = match name {
        "appending" => llvm::AppendingLinkage,
        "available_externally" => llvm::AvailableExternallyLinkage,
        "common" => llvm::CommonLinkage,
        "extern_weak" => llvm::ExternalWeakLinkage,
        "external" => llvm::ExternalLinkage,
        "internal" => llvm::InternalLinkage,
        "linkonce" => llvm::LinkOnceAnyLinkage,
        "linkonce_odr" => llvm::LinkOnceODRLinkage,
        "private" => llvm::PrivateLinkage,
        "weak" => llvm::WeakAnyLinkage,
        "weak_odr" => llvm::WeakODRLinkage,
        _ => return None,
    };
    Some(linkage)
}
/// Enum describing the origin of an LLVM `Value`, for linkage purposes.
// Copy/Clone: this is a small tag that is passed by value (see
// `update_linkage`'s `llval_origin` parameter).
#[derive(Copy, Clone)]
pub enum ValueOrigin {
/// The LLVM `Value` is in this context because the corresponding item was
/// assigned to the current compilation unit.
OriginalTranslation,
/// The `Value`'s corresponding item was assigned to some other compilation
/// unit, but the `Value` was translated in this context anyway because the
/// item is marked `#[inline]`.
InlinedCopy,
}
/// Set the appropriate linkage for an LLVM `ValueRef` (function or global).
/// If the `llval` is the direct translation of a specific Rust item, `id`
/// should be set to the `NodeId` of that item. (This mapping should be
/// 1-to-1, so monomorphizations and drop/visit glue should have `id` set to
/// `None`.) `llval_origin` indicates whether `llval` is the translation of an
/// item assigned to `ccx`'s compilation unit or an inlined copy of an item
/// assigned to a different compilation unit.
pub fn update_linkage(ccx: &CrateContext,
llval: ValueRef,
id: Option<ast::NodeId>,
llval_origin: ValueOrigin) {
match llval_origin {
InlinedCopy => {
// `llval` is a translation of an item defined in a separate
// compilation unit. This only makes sense if there are at least
// two compilation units.
assert!(ccx.sess().opts.cg.codegen_units > 1 ||
ccx.sess().opts.debugging_opts.incremental.is_some());
// `llval` is a copy of something defined elsewhere, so use
// `AvailableExternallyLinkage` to avoid duplicating code in the
// output.
llvm::SetLinkage(llval, llvm::AvailableExternallyLinkage);
return;
},
OriginalTranslation => {},
}
// An explicit `#[linkage = "..."]` attribute on the item overrides the
// defaults computed below.
if let Some(id) = id {
let item = ccx.tcx().map.get(id);
if let hir_map::NodeItem(i) = item {
if let Some(name) = attr::first_attr_value_str_by_name(&i.attrs, "linkage") {
if let Some(linkage) = llvm_linkage_by_name(&name) {
llvm::SetLinkage(llval, linkage);
} else {
ccx.sess().span_fatal(i.span, "invalid linkage specified");
}
return;
}
}
}
// Values without a NodeId (monomorphizations/glue per the doc comment
// above) are treated as generic and never directly reachable by id.
let (is_reachable, is_generic) = if let Some(id) = id {
(ccx.reachable().contains(&id), false)
} else {
(false, true)
};
// We need external linkage for items reachable from other translation units, this include
// other codegen units in case of parallel compilations.
if is_reachable || ccx.sess().opts.cg.codegen_units > 1 {
if is_generic {
// This only happens with multiple codegen units, in which case we need to use weak_odr
// linkage because other crates might expose the same symbol. We cannot use
// linkonce_odr here because the symbol might then get dropped before the other codegen
// units get to link it.
llvm::SetUniqueComdat(ccx.llmod(), llval);
llvm::SetLinkage(llval, llvm::WeakODRLinkage);
} else {
llvm::SetLinkage(llval, llvm::ExternalLinkage);
}
} else {
llvm::SetLinkage(llval, llvm::InternalLinkage);
}
}
/// Applies the item's `#[link_section = "..."]` attribute, if present, to
/// the LLVM global or function `llval`. Section names containing an
/// interior NUL byte are rejected with a fatal error, since LLVM receives
/// the name as a C string.
//
// NOTE: the extracted source contained `§` where `&sect` belongs — an
// HTML-entity (`&sect;`) decoding corruption; restored here.
fn set_global_section(ccx: &CrateContext, llval: ValueRef, i: &hir::Item) {
    if let Some(sect) = attr::first_attr_value_str_by_name(&i.attrs, "link_section") {
        if contains_null(&sect) {
            ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`",
                                      &sect));
        }
        unsafe {
            // The NUL check above guarantees CString::new cannot fail.
            let buf = CString::new(sect.as_bytes()).unwrap();
            llvm::LLVMSetSection(llval, buf.as_ptr());
        }
    }
}
/// Translates a single HIR item: non-generic functions and impl methods,
/// statics, the enum-size lint for enums, and symbol registration for
/// foreign modules. Generic items are skipped here.
pub fn trans_item(ccx: &CrateContext, item: &hir::Item) {
let _icx = push_ctxt("trans_item");
let tcx = ccx.tcx();
let from_external = ccx.external_srcs().borrow().contains_key(&item.id);
match item.node {
hir::ItemFn(ref decl, _, _, _, ref generics, ref body) => {
if !generics.is_type_parameterized() {
let trans_everywhere = attr::requests_inline(&item.attrs);
// Ignore `trans_everywhere` for cross-crate inlined items
// (`from_external`). `trans_item` will be called once for each
// compilation unit that references the item, so it will still get
// translated everywhere it's needed.
for (ref ccx, is_origin) in ccx.maybe_iter(!from_external && trans_everywhere) {
let def_id = tcx.map.local_def_id(item.id);
let empty_substs = ccx.empty_substs_for_def_id(def_id);
let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
trans_fn(ccx, &decl, &body, llfn, empty_substs, item.id);
set_global_section(ccx, llfn, item);
update_linkage(ccx,
llfn,
Some(item.id),
if is_origin {
OriginalTranslation
} else {
InlinedCopy
});
if is_entry_fn(ccx.sess(), item.id) {
create_entry_wrapper(ccx, item.span, llfn);
// check for the #[rustc_error] annotation, which forces an
// error in trans. This is used to write compile-fail tests
// that actually test that compilation succeeds without
// reporting an error.
if tcx.has_attr(def_id, "rustc_error") {
tcx.sess.span_fatal(item.span, "compilation successful");
}
}
}
}
}
hir::ItemImpl(_, _, ref generics, _, _, ref impl_items) => {
// Both here and below with generic methods, be sure to recurse and look for
// items that we need to translate.
if !generics.ty_params.is_empty() {
return;
}
for impl_item in impl_items {
if let hir::ImplItemKind::Method(ref sig, ref body) = impl_item.node {
if sig.generics.ty_params.is_empty() {
let trans_everywhere = attr::requests_inline(&impl_item.attrs);
for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
let def_id = tcx.map.local_def_id(impl_item.id);
let empty_substs = ccx.empty_substs_for_def_id(def_id);
let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
trans_fn(ccx, &sig.decl, body, llfn, empty_substs, impl_item.id);
update_linkage(ccx, llfn, Some(impl_item.id),
if is_origin {
OriginalTranslation
} else {
InlinedCopy
});
}
}
}
}
}
hir::ItemEnum(ref enum_definition, ref gens) => {
if gens.ty_params.is_empty() {
// sizes only make sense for non-generic types
enum_variant_size_lint(ccx, enum_definition, item.span, item.id);
}
}
hir::ItemStatic(_, m, ref expr) => {
let g = match consts::trans_static(ccx, m, expr, item.id, &item.attrs) {
Ok(g) => g,
Err(err) => ccx.tcx().sess.span_fatal(expr.span, &err.description()),
};
set_global_section(ccx, g, item);
update_linkage(ccx, g, Some(item.id), OriginalTranslation);
}
hir::ItemForeignMod(ref m) => {
// Intrinsics are handled at their call sites, not declared here.
if m.abi == Abi::RustIntrinsic || m.abi == Abi::PlatformIntrinsic {
return;
}
// Record the link name of each foreign item for symbol lookup.
for fi in &m.items {
let lname = imported_name(fi.name, &fi.attrs).to_string();
ccx.item_symbols().borrow_mut().insert(fi.id, lname);
}
}
_ => {}
}
}
/// Returns true when `node_id` is the crate's entry function, as recorded
/// in the session by the driver.
pub fn is_entry_fn(sess: &Session, node_id: ast::NodeId) -> bool {
    if let Some((entry_id, _)) = *sess.entry_fn.borrow() {
        node_id == entry_id
    } else {
        false
    }
}
/// Create the `main` function which will initialise the rust runtime and call users’ main
/// function.
pub fn create_entry_wrapper(ccx: &CrateContext, sp: Span, main_llfn: ValueRef) {
let et = ccx.sess().entry_type.get().unwrap();
match et {
config::EntryMain => {
// Ordinary `fn main`: route through the `start` lang item.
create_entry_fn(ccx, sp, main_llfn, true);
}
config::EntryStart => create_entry_fn(ccx, sp, main_llfn, false),
config::EntryNone => {} // Do nothing.
}
// Emits a C-ABI `main(argc, argv)` returning the platform int type that
// calls either `start(rust_main, argc, argv)` or the `#[start]` fn.
fn create_entry_fn(ccx: &CrateContext,
sp: Span,
rust_main: ValueRef,
use_start_lang_item: bool) {
let llfty = Type::func(&[ccx.int_type(), Type::i8p(ccx).ptr_to()], &ccx.int_type());
// A user-defined symbol named `main` would clash with the wrapper.
if declare::get_defined_value(ccx, "main").is_some() {
// FIXME: We should be smart and show a better diagnostic here.
ccx.sess().struct_span_err(sp, "entry symbol `main` defined multiple times")
.help("did you use #[no_mangle] on `fn main`? Use #[start] instead")
.emit();
ccx.sess().abort_if_errors();
bug!();
}
let llfn = declare::declare_cfn(ccx, "main", llfty);
let llbb = unsafe {
llvm::LLVMAppendBasicBlockInContext(ccx.llcx(), llfn, "top\0".as_ptr() as *const _)
};
let bld = ccx.raw_builder();
unsafe {
llvm::LLVMPositionBuilderAtEnd(bld, llbb);
debuginfo::gdb::insert_reference_to_gdb_debug_scripts_section_global(ccx);
let (start_fn, args) = if use_start_lang_item {
let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) {
Ok(id) => id,
Err(s) => ccx.sess().fatal(&s)
};
let empty_substs = ccx.tcx().mk_substs(Substs::empty());
let start_fn = Callee::def(ccx, start_def_id, empty_substs).reify(ccx).val;
let args = {
// `start` receives the user main as an opaque i8 pointer.
let opaque_rust_main =
llvm::LLVMBuildPointerCast(bld,
rust_main,
Type::i8p(ccx).to_ref(),
"rust_main\0".as_ptr() as *const _);
vec![opaque_rust_main, get_param(llfn, 0), get_param(llfn, 1)]
};
(start_fn, args)
} else {
debug!("using user-defined start fn");
let args = vec![get_param(llfn, 0 as c_uint), get_param(llfn, 1 as c_uint)];
(rust_main, args)
};
let result = llvm::LLVMRustBuildCall(bld,
start_fn,
args.as_ptr(),
args.len() as c_uint,
0 as *mut _,
noname());
llvm::LLVMBuildRet(bld, result);
}
}
}
/// Computes the exported symbol name for `instance`, honoring (in order)
/// a cross-crate source, an explicit export-name attribute, `#[no_mangle]`,
/// a weak lang-item link name, and finally the usual mangling scheme.
pub fn exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                               instance: Instance<'tcx>,
                               attrs: &[ast::Attribute])
                               -> String {
    let id = ccx.tcx().map.as_local_node_id(instance.def).unwrap();
    // Items inlined from another crate reuse that crate's symbol.
    if let Some(&did) = ccx.external_srcs().borrow().get(&id) {
        let sym = ccx.sess().cstore.item_symbol(did);
        debug!("found item {} in other crate...", sym);
        return sym;
    }
    // Use provided name
    if let Some(name) = attr::find_export_name_attr(ccx.sess().diagnostic(), attrs) {
        return name.to_string();
    }
    if attr::contains_name(attrs, "no_mangle") {
        // Don't mangle
        return ccx.tcx().map.name(id).as_str().to_string();
    }
    if let Some(name) = weak_lang_items::link_name(attrs) {
        return name.to_string();
    }
    // Usual name mangling
    symbol_names::exported_name(ccx, &instance)
}
/// Determines the symbol to link a foreign item against: an explicit
/// `#[link_name]` attribute wins, then a weak lang-item link name, and
/// otherwise the item's own name.
pub fn imported_name(name: ast::Name, attrs: &[ast::Attribute]) -> InternedString {
    if let Some(ln) = attr::first_attr_value_str_by_name(attrs, "link_name") {
        return ln.clone();
    }
    weak_lang_items::link_name(attrs).unwrap_or_else(|| name.as_str())
}
/// Returns true when `s` contains an interior NUL byte, which would make
/// it unusable as a C string (e.g. for LLVM section names).
fn contains_null(s: &str) -> bool {
    s.as_bytes().contains(&0u8)
}
/// Encodes the crate metadata, embeds a compressed copy as a global in the
/// metadata LLVM module, and returns the *uncompressed* encoded bytes.
/// Returns an empty vector when only executables are being built.
pub fn write_metadata<'a, 'tcx>(cx: &SharedCrateContext<'a, 'tcx>,
krate: &hir::Crate,
reachable: &NodeSet,
mir_map: &MirMap<'tcx>)
-> Vec<u8> {
use flate;
// Metadata is only needed when some library crate type is produced.
let any_library = cx.sess()
.crate_types
.borrow()
.iter()
.any(|ty| *ty != config::CrateTypeExecutable);
if !any_library {
return Vec::new();
}
let cstore = &cx.tcx().sess.cstore;
let metadata = cstore.encode_metadata(cx.tcx(),
cx.export_map(),
cx.item_symbols(),
cx.link_meta(),
reachable,
mir_map,
krate);
// Stored form: encoding-version header followed by deflated payload.
let mut compressed = cstore.metadata_encoding_version().to_vec();
compressed.extend_from_slice(&flate::deflate_bytes(&metadata));
let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]);
let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
let name = format!("rust_metadata_{}_{}",
cx.link_meta().crate_name,
cx.link_meta().crate_hash);
let buf = CString::new(name).unwrap();
let llglobal = unsafe {
llvm::LLVMAddGlobal(cx.metadata_llmod(), val_ty(llconst).to_ref(), buf.as_ptr())
};
unsafe {
llvm::LLVMSetInitializer(llglobal, llconst);
// Place the global in the target-specific metadata section.
let name =
cx.tcx().sess.cstore.metadata_section_name(&cx.sess().target.target);
let name = CString::new(name).unwrap();
llvm::LLVMSetSection(llglobal, name.as_ptr())
}
return metadata;
}
/// Find any symbols that are defined in one compilation unit, but not declared
/// in any other compilation unit. Give these symbols internal linkage.
fn internalize_symbols(cx: &CrateContextList, reachable: &HashSet<&str>) {
unsafe {
let mut declared = HashSet::new();
// Collect all external declarations in all compilation units.
for ccx in cx.iter() {
for val in iter_globals(ccx.llmod()).chain(iter_functions(ccx.llmod())) {
let linkage = llvm::LLVMGetLinkage(val);
// We only care about external declarations (not definitions)
// and available_externally definitions.
if !(linkage == llvm::ExternalLinkage as c_uint &&
llvm::LLVMIsDeclaration(val) != 0) &&
!(linkage == llvm::AvailableExternallyLinkage as c_uint) {
continue;
}
let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
.to_bytes()
.to_vec();
declared.insert(name);
}
}
// Examine each external definition. If the definition is not used in
// any other compilation unit, and is not reachable from other crates,
// then give it internal linkage.
for ccx in cx.iter() {
for val in iter_globals(ccx.llmod()).chain(iter_functions(ccx.llmod())) {
let linkage = llvm::LLVMGetLinkage(val);
// We only care about external definitions.
if !((linkage == llvm::ExternalLinkage as c_uint ||
linkage == llvm::WeakODRLinkage as c_uint) &&
llvm::LLVMIsDeclaration(val) == 0) {
continue;
}
let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
.to_bytes()
.to_vec();
if !declared.contains(&name) &&
!reachable.contains(str::from_utf8(&name).unwrap()) {
// Internalize; also reset the DLL storage class and drop
// any comdat alongside the linkage change.
llvm::SetLinkage(val, llvm::InternalLinkage);
llvm::SetDLLStorageClass(val, llvm::DefaultStorageClass);
llvm::UnsetComdat(val);
}
}
}
}
}
// Create a `__imp_<symbol> = &symbol` global for every public static `symbol`.
// This is required to satisfy `dllimport` references to static data in .rlibs
// when using MSVC linker. We do this only for data, as linker can fix up
// code references on its own.
// See #26591, #27438
fn create_imps(cx: &CrateContextList) {
// The x86 ABI seems to require that leading underscores are added to symbol
// names, so we need an extra underscore on 32-bit. There's also a leading
// '\x01' here which disables LLVM's symbol mangling (e.g. no extra
// underscores added in front).
let prefix = if cx.shared().sess().target.target.target_pointer_width == "32" {
"\x01__imp__"
} else {
"\x01__imp_"
};
unsafe {
for ccx in cx.iter() {
// Snapshot the external, defined globals up front (new globals are
// added to the same module below).
let exported: Vec<_> = iter_globals(ccx.llmod())
.filter(|&val| {
llvm::LLVMGetLinkage(val) ==
llvm::ExternalLinkage as c_uint &&
llvm::LLVMIsDeclaration(val) == 0
})
.collect();
let i8p_ty = Type::i8p(&ccx);
for val in exported {
let name = CStr::from_ptr(llvm::LLVMGetValueName(val));
let mut imp_name = prefix.as_bytes().to_vec();
imp_name.extend(name.to_bytes());
let imp_name = CString::new(imp_name).unwrap();
let imp = llvm::LLVMAddGlobal(ccx.llmod(),
i8p_ty.to_ref(),
imp_name.as_ptr() as *const _);
// The __imp_ global is initialized with the address of `val`.
let init = llvm::LLVMConstBitCast(val, i8p_ty.to_ref());
llvm::LLVMSetInitializer(imp, init);
llvm::SetLinkage(imp, llvm::ExternalLinkage);
}
}
}
}
/// Iterator over an LLVM module's values (globals or functions), driven by a
/// pair of LLVM-C "first"/"next" accessors. `cur` is the value to yield next;
/// `step` advances to its successor (returning null at the end of the list).
struct ValueIter {
    cur: ValueRef,
    step: unsafe extern "C" fn(ValueRef) -> ValueRef,
}
impl Iterator for ValueIter {
    type Item = ValueRef;

    fn next(&mut self) -> Option<ValueRef> {
        // A null cursor marks the end of the LLVM value list.
        if self.cur.is_null() {
            return None;
        }
        let current = self.cur;
        // Advance to the successor before yielding the current value.
        self.cur = unsafe { (self.step)(current) };
        Some(current)
    }
}
/// Returns an iterator over all global variables of the given LLVM module.
fn iter_globals(llmod: llvm::ModuleRef) -> ValueIter {
    // Only the initial lookup requires an FFI call; stepping happens lazily
    // inside the iterator.
    let first = unsafe { llvm::LLVMGetFirstGlobal(llmod) };
    ValueIter {
        cur: first,
        step: llvm::LLVMGetNextGlobal,
    }
}
/// Returns an iterator over all functions of the given LLVM module.
fn iter_functions(llmod: llvm::ModuleRef) -> ValueIter {
    // Mirror of `iter_globals`, but walking the function list.
    let first = unsafe { llvm::LLVMGetFirstFunction(llmod) };
    ValueIter {
        cur: first,
        step: llvm::LLVMGetNextFunction,
    }
}
/// The context provided lists a set of reachable ids as calculated by
/// middle::reachable, but this contains far more ids and symbols than we're
/// actually exposing from the object file. This function will filter the set in
/// the context to the set of ids which correspond to symbols that are exposed
/// from the object file being generated.
///
/// This list is later used by linkers to determine the set of symbols needed to
/// be exposed from a dynamic library and it's also encoded into the metadata.
pub fn filter_reachable_ids(scx: &SharedCrateContext) -> NodeSet {
    scx.reachable()
        .iter()
        .cloned()
        // Only nodes that actually have a symbol name can be exported.
        .filter(|id| scx.item_symbols().borrow().contains_key(id))
        .filter(|&id| {
            // Ignore some FFI functions that are not exposed from this crate.
            // Reachable FFI functions fall into two categories:
            //
            // 1. Those that are included statically via a static library
            // 2. Those included otherwise (e.g. dynamically or via a framework)
            //
            // Although our LLVM module is not literally emitting code for the
            // statically included symbols, it's an export of our library which
            // needs to be passed on to the linker and encoded in the metadata.
            //
            // So a foreign item is kept only when it is statically included;
            // everything else passes through unconditionally.
            match scx.tcx().map.get(id) {
                hir_map::NodeForeignItem(..) => {
                    scx.sess().cstore.is_statically_included_foreign_item(id)
                }
                _ => true,
            }
        })
        .collect()
}
/// Translates the whole crate to LLVM IR, producing one `ModuleTranslation`
/// per codegen unit plus a dedicated metadata module. Also computes the final
/// set of reachable symbols handed to the linker.
pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             mir_map: &MirMap<'tcx>,
                             analysis: ty::CrateAnalysis)
                             -> CrateTranslation {
    let _task = tcx.dep_graph.in_task(DepNode::TransCrate);
    // Be careful with this krate: obviously it gives access to the
    // entire contents of the krate. So if you push any subtasks of
    // `TransCrate`, you need to be careful to register "reads" of the
    // particular items that will be processed.
    let krate = tcx.map.krate();
    let ty::CrateAnalysis { export_map, reachable, name, .. } = analysis;
    // Overflow/dropflag checks default to the debug-assertions setting unless
    // explicitly forced via -Z flags.
    let check_overflow = if let Some(v) = tcx.sess.opts.debugging_opts.force_overflow_checks {
        v
    } else {
        tcx.sess.opts.debug_assertions
    };
    let check_dropflag = if let Some(v) = tcx.sess.opts.debugging_opts.force_dropflag_checks {
        v
    } else {
        tcx.sess.opts.debug_assertions
    };
    let link_meta = link::build_link_meta(tcx, name);
    let shared_ccx = SharedCrateContext::new(tcx,
                                             &mir_map,
                                             export_map,
                                             Sha256::new(),
                                             link_meta.clone(),
                                             reachable,
                                             check_overflow,
                                             check_dropflag);
    let codegen_units = collect_and_partition_translation_items(&shared_ccx);
    let codegen_unit_count = codegen_units.len();
    // With incremental compilation the partitioning is per-module, so the
    // requested -C codegen-units count need not match.
    assert!(tcx.sess.opts.cg.codegen_units == codegen_unit_count ||
            tcx.sess.opts.debugging_opts.incremental.is_some());
    let crate_context_list = CrateContextList::new(&shared_ccx, codegen_units);
    {
        let ccx = crate_context_list.get_ccx(0);
        // Translate all items. See `TransModVisitor` for
        // details on why we walk in this particular way.
        {
            let _icx = push_ctxt("text");
            intravisit::walk_mod(&mut TransItemsWithinModVisitor { ccx: &ccx }, &krate.module);
            krate.visit_all_items(&mut TransModVisitor { ccx: &ccx });
        }
        collector::print_collection_results(ccx.shared());
        symbol_names_test::report_symbol_names(&ccx);
    }
    for ccx in crate_context_list.iter() {
        if ccx.sess().opts.debuginfo != NoDebugInfo {
            debuginfo::finalize(&ccx);
        }
        // Replace-all-uses-with for statics whose final global ended up with a
        // different LLVM type than the placeholder; the old global is deleted
        // after rewriting its uses.
        for &(old_g, new_g) in ccx.statics_to_rauw().borrow().iter() {
            unsafe {
                let bitcast = llvm::LLVMConstPointerCast(new_g, llvm::LLVMTypeOf(old_g));
                llvm::LLVMReplaceAllUsesWith(old_g, bitcast);
                llvm::LLVMDeleteGlobal(old_g);
            }
        }
    }
    let reachable_symbol_ids = filter_reachable_ids(&shared_ccx);
    // Translate the metadata.
    let metadata = time(tcx.sess.time_passes(), "write metadata", || {
        write_metadata(&shared_ccx, krate, &reachable_symbol_ids, mir_map)
    });
    // Optional statistics dump (-Z trans-stats): glue/function/mono counts and
    // per-function instruction counts sorted descending.
    if shared_ccx.sess().trans_stats() {
        let stats = shared_ccx.stats();
        println!("--- trans stats ---");
        println!("n_glues_created: {}", stats.n_glues_created.get());
        println!("n_null_glues: {}", stats.n_null_glues.get());
        println!("n_real_glues: {}", stats.n_real_glues.get());
        println!("n_fns: {}", stats.n_fns.get());
        println!("n_monos: {}", stats.n_monos.get());
        println!("n_inlines: {}", stats.n_inlines.get());
        println!("n_closures: {}", stats.n_closures.get());
        println!("fn stats:");
        stats.fn_stats.borrow_mut().sort_by(|&(_, insns_a), &(_, insns_b)| {
            insns_b.cmp(&insns_a)
        });
        for tuple in stats.fn_stats.borrow().iter() {
            match *tuple {
                (ref name, insns) => {
                    println!("{} insns, {}", insns, *name);
                }
            }
        }
    }
    if shared_ccx.sess().count_llvm_insns() {
        for (k, v) in shared_ccx.stats().llvm_insns.borrow().iter() {
            println!("{:7} {}", *v, *k);
        }
    }
    let modules = crate_context_list.iter()
        .map(|ccx| ModuleTranslation { llcx: ccx.llcx(), llmod: ccx.llmod() })
        .collect();
    let sess = shared_ccx.sess();
    let mut reachable_symbols = reachable_symbol_ids.iter().map(|id| {
        shared_ccx.item_symbols().borrow()[id].to_string()
    }).collect::<Vec<_>>();
    // `main` is not in the reachable set but must survive linking when an
    // entry function exists.
    if sess.entry_fn.borrow().is_some() {
        reachable_symbols.push("main".to_string());
    }
    // For the purposes of LTO, we add to the reachable set all of the upstream
    // reachable extern fns. These functions are all part of the public ABI of
    // the final product, so LTO needs to preserve them.
    if sess.lto() {
        for cnum in sess.cstore.crates() {
            let syms = sess.cstore.reachable_ids(cnum);
            reachable_symbols.extend(syms.into_iter().filter(|did| {
                sess.cstore.is_extern_item(shared_ccx.tcx(), *did)
            }).map(|did| {
                sess.cstore.item_symbol(did)
            }));
        }
    }
    // With multiple codegen units, demote cross-unit-only symbols to internal
    // linkage where safe (see `internalize_symbols`).
    if codegen_unit_count > 1 {
        internalize_symbols(&crate_context_list,
                            &reachable_symbols.iter().map(|x| &x[..]).collect());
    }
    // MSVC + rlib: emit `__imp_` pointer globals so dllimport references to
    // static data resolve (see `create_imps`).
    if sess.target.target.options.is_like_msvc &&
       sess.crate_types.borrow().iter().any(|ct| *ct == config::CrateTypeRlib) {
        create_imps(&crate_context_list);
    }
    let metadata_module = ModuleTranslation {
        llcx: shared_ccx.metadata_llcx(),
        llmod: shared_ccx.metadata_llmod(),
    };
    let no_builtins = attr::contains_name(&krate.attrs, "no_builtins");
    CrateTranslation {
        modules: modules,
        metadata_module: metadata_module,
        link: link_meta,
        metadata: metadata,
        reachable: reachable_symbols,
        no_builtins: no_builtins,
    }
}
/// We visit all the items in the krate and translate them. We do
/// this in two walks. The first walk just finds module items. It then
/// walks the full contents of those module items and translates all
/// the items within. Note that this entire process is O(n). The
/// reason for this two phased walk is that each module is
/// (potentially) placed into a distinct codegen-unit. This walk also
/// ensures that the immediate contents of each module is processed
/// entirely before we proceed to find more modules, helping to ensure
/// an equitable distribution amongst codegen-units.
pub struct TransModVisitor<'a, 'tcx: 'a> {
    // Crate context whose rotation assigns each visited module to a
    // (potentially) fresh codegen unit.
    pub ccx: &'a CrateContext<'a, 'tcx>,
}
impl<'a, 'tcx, 'v> Visitor<'v> for TransModVisitor<'a, 'tcx> {
    fn visit_item(&mut self, i: &hir::Item) {
        // Only module items are of interest in this first walk; everything
        // else is handled by `TransItemsWithinModVisitor`.
        if let hir::ItemMod(_) = i.node {
            // Rotate to a (potentially) different codegen unit for this
            // module, then translate its immediate contents.
            let item_ccx = self.ccx.rotate();
            intravisit::walk_item(&mut TransItemsWithinModVisitor { ccx: &item_ccx }, i);
        }
    }
}
/// Translates all the items within a given module. Expects owner to
/// invoke `walk_item` on a module item. Ignores nested modules.
pub struct TransItemsWithinModVisitor<'a, 'tcx: 'a> {
    // Crate context (codegen unit) that all items of the current module
    // are translated into.
    pub ccx: &'a CrateContext<'a, 'tcx>,
}
impl<'a, 'tcx, 'v> Visitor<'v> for TransItemsWithinModVisitor<'a, 'tcx> {
    fn visit_nested_item(&mut self, item_id: hir::ItemId) {
        self.visit_item(self.ccx.tcx().map.expect_item(item_id.id));
    }

    fn visit_item(&mut self, i: &hir::Item) {
        // Nested modules are skipped here; they will be uncovered by the
        // TransModVisitor.
        if let hir::ItemMod(..) = i.node {
            return;
        }
        let tcx = self.ccx.tcx();
        let def_id = tcx.map.local_def_id(i.id);
        // Create a subtask for trans'ing a particular item. We are
        // giving `trans_item` access to this item, so also record a read.
        tcx.dep_graph.with_task(DepNode::TransCrateItem(def_id), || {
            tcx.dep_graph.read(DepNode::Hir(def_id));

            // We are going to be accessing various tables
            // generated by TypeckItemBody; we also assume
            // that the body passes type check. These tables
            // are not individually tracked, so just register
            // a read here.
            tcx.dep_graph.read(DepNode::TypeckItemBody(def_id));

            trans_item(self.ccx, i);
        });
        intravisit::walk_item(self, i);
    }
}
/// Collects all translation items of the crate and partitions them into
/// codegen units. When `-Z print-trans-items` is set, additionally prints
/// each item together with the codegen units (and linkage) it landed in, and
/// records the predicted items for later cross-checking against what was
/// actually generated.
fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>)
                                                     -> Vec<CodegenUnit<'tcx>> {
    let time_passes = scx.sess().time_passes();
    // `-Z print-trans-items=eager|lazy` doubles as the collection-mode
    // selector; anything unrecognized warns and falls back to lazy.
    let collection_mode = match scx.sess().opts.debugging_opts.print_trans_items {
        Some(ref s) => {
            let mode_string = s.to_lowercase();
            let mode_string = mode_string.trim();
            if mode_string == "eager" {
                TransItemCollectionMode::Eager
            } else {
                if mode_string != "lazy" {
                    let message = format!("Unknown codegen-item collection mode '{}'. \
                                           Falling back to 'lazy' mode.",
                                          mode_string);
                    scx.sess().warn(&message);
                }
                TransItemCollectionMode::Lazy
            }
        }
        None => TransItemCollectionMode::Lazy
    };
    let (items, reference_map) = time(time_passes, "translation item collection", || {
        collector::collect_crate_translation_items(scx, collection_mode)
    });
    // Incremental compilation needs a stable per-module partitioning;
    // otherwise honor the requested fixed codegen-unit count.
    let strategy = if scx.sess().opts.debugging_opts.incremental.is_some() {
        PartitioningStrategy::PerModule
    } else {
        PartitioningStrategy::FixedUnitCount(scx.sess().opts.cg.codegen_units)
    };
    let codegen_units = time(time_passes, "codegen unit partitioning", || {
        partitioning::partition(scx.tcx(),
                                items.iter().cloned(),
                                strategy,
                                &reference_map)
    });
    if scx.sess().opts.debugging_opts.print_trans_items.is_some() {
        // Invert the cgu -> items mapping so each item can report every
        // codegen unit (and linkage) it was instantiated in.
        let mut item_to_cgus = HashMap::new();
        for cgu in &codegen_units {
            for (&trans_item, &linkage) in &cgu.items {
                item_to_cgus.entry(trans_item)
                            .or_insert(Vec::new())
                            .push((cgu.name.clone(), linkage));
            }
        }
        let mut item_keys: Vec<_> = items
            .iter()
            .map(|i| {
                let mut output = i.to_string(scx.tcx());
                output.push_str(" @@");
                let mut empty = Vec::new();
                let mut cgus = item_to_cgus.get_mut(i).unwrap_or(&mut empty);
                // Sort + dedup so the output is deterministic across runs.
                cgus.as_mut_slice().sort_by_key(|&(ref name, _)| name.clone());
                cgus.dedup();
                for &(ref cgu_name, linkage) in cgus.iter() {
                    output.push_str(" ");
                    output.push_str(&cgu_name[..]);

                    let linkage_abbrev = match linkage {
                        InstantiationMode::Def(llvm::ExternalLinkage) => "External",
                        InstantiationMode::Def(llvm::AvailableExternallyLinkage) => "Available",
                        InstantiationMode::Def(llvm::LinkOnceAnyLinkage) => "OnceAny",
                        InstantiationMode::Def(llvm::LinkOnceODRLinkage) => "OnceODR",
                        InstantiationMode::Def(llvm::WeakAnyLinkage) => "WeakAny",
                        InstantiationMode::Def(llvm::WeakODRLinkage) => "WeakODR",
                        InstantiationMode::Def(llvm::AppendingLinkage) => "Appending",
                        InstantiationMode::Def(llvm::InternalLinkage) => "Internal",
                        InstantiationMode::Def(llvm::PrivateLinkage) => "Private",
                        InstantiationMode::Def(llvm::ExternalWeakLinkage) => "ExternalWeak",
                        InstantiationMode::Def(llvm::CommonLinkage) => "Common",
                        InstantiationMode::Decl => "Declaration",
                    };

                    output.push_str("[");
                    output.push_str(linkage_abbrev);
                    output.push_str("]");
                }
                output
            })
            .collect();

        item_keys.sort();

        for item in item_keys {
            println!("TRANS_ITEM {}", item);
        }
        // Remember every predicted item so generation can later verify that
        // each one was actually emitted.
        let mut ccx_map = scx.translation_items().borrow_mut();

        for cgi in items {
            ccx_map.insert(cgi, TransItemState::PredictedButNotGenerated);
        }
    }
    codegen_units
}
| 39.729739 | 99 | 0.522701 |
e2e1e9f539224c05d477e3464dcc18b9f7179d3b | 29,416 | use crate::data_structures::{Accumulator, AccumulatorRef, InputRef};
use crate::error::{ASError, BoxedError};
use crate::ConstraintF;
use crate::{AccumulationScheme, MakeZK};
use ark_ec::AffineCurve;
use ark_ff::{to_bytes, One, Zero};
use ark_poly::polynomial::univariate::DensePolynomial;
use ark_poly_commit::trivial_pc::TrivialPC;
use ark_poly_commit::{
trivial_pc, Error as PCError, LabeledCommitment, LabeledPolynomial, PCCommitterKey,
PolynomialCommitment, PolynomialLabel, UVPolynomial,
};
use ark_sponge::{absorb, Absorbable, CryptographicSponge, FieldElementSize};
use ark_std::format;
use ark_std::marker::PhantomData;
use ark_std::ops::{Add, Div, Mul};
use ark_std::rand::RngCore;
use ark_std::string::ToString;
use ark_std::vec;
use ark_std::vec::Vec;
// Data structures (instances, proofs) for this accumulation scheme,
// re-exported at the module root.
mod data_structures;
pub use data_structures::*;
/// The verifier constraints of [`ASForTrivialPC`].
#[cfg(feature = "r1cs")]
pub mod constraints;
/// Sizes of squeezed challenges in terms of number of bits.
// Truncated size of each linear-combination challenge squeezed from the sponge.
pub(self) const LINEAR_COMBINATION_CHALLENGE_SIZE: usize = 126;
// Truncated size of the evaluation (challenge) point squeezed from the sponge.
pub(self) const CHALLENGE_POINT_SIZE: usize = 184;
/// An accumulation scheme for a trivial homomorphic commitment schemes.
/// This implementation is specialized for [`TrivialPC`][trivial-pc].
/// The construction is described in detail in Section A of [\[BCLMS20\]][bclms20].
///
/// The implementation substitutes power challenges with multiple independent challenges when
/// possible to lower constraint costs for the verifier.
/// See Remark 9.1 in [\[BCLMS20\]][bclms20] for more details.
///
/// [trivial-pc]: ark_poly_commit::trivial_pc::TrivialPC
/// [bclms20]: https://eprint.iacr.org/2020/1618
///
/// # Example Input
/// ```
///
/// use ark_accumulation::trivial_pc_as::{ASForTrivialPC, InputInstance};
/// use ark_accumulation::Input;
/// use ark_ec::AffineCurve;
/// use ark_ff::Field;
/// use ark_poly::univariate::DensePolynomial;
/// use ark_poly_commit::{trivial_pc, LabeledCommitment, LabeledPolynomial};
/// use ark_sponge::{Absorbable, CryptographicSponge};
///
/// type ConstraintF<G> = <<G as AffineCurve>::BaseField as Field>::BasePrimeField;
///
/// // An accumulation input for this scheme is formed from:
/// // 1. A TrivialPC commitment to a polynomial: `comm`
/// // 2. A point where the polynomial will be evaluated at: `point`
/// // 3. The evaluation of the polynomial at the point: `eval`
/// // 4. The TrivialPC opening proof: `proof`
/// fn new_accumulation_input<G, S>(
/// comm: LabeledCommitment<trivial_pc::Commitment<G>>,
/// point: G::ScalarField,
/// eval: G::ScalarField,
/// proof: trivial_pc::Proof<G::ScalarField, DensePolynomial<G::ScalarField>>,
/// ) -> Input<ConstraintF<G>, S, ASForTrivialPC<G, S>>
/// where
/// G: AffineCurve + Absorbable<ConstraintF<G>>,
/// ConstraintF<G>: Absorbable<ConstraintF<G>>,
/// S: CryptographicSponge<ConstraintF<G>>,
/// {
/// let instance = InputInstance {
/// commitment: comm,
/// point,
/// eval,
/// };
///
/// let witness = proof.polynomial;
///
/// Input::<_, _, ASForTrivialPC<G, S>> { instance, witness }
/// }
/// ```
pub struct ASForTrivialPC<G, S>
where
    G: AffineCurve + Absorbable<ConstraintF<G>>,
    ConstraintF<G>: Absorbable<ConstraintF<G>>,
    S: CryptographicSponge<ConstraintF<G>>,
{
    // Zero-sized markers tying the scheme to its curve and sponge types;
    // the struct itself carries no runtime state.
    _curve: PhantomData<G>,
    _sponge: PhantomData<S>,
}
impl<G, S> ASForTrivialPC<G, S>
where
    G: AffineCurve + Absorbable<ConstraintF<G>>,
    ConstraintF<G>: Absorbable<ConstraintF<G>>,
    S: CryptographicSponge<ConstraintF<G>>,
{
    /// Check that the input instance is properly structured.
    ///
    /// Returns the instance unchanged on success; `is_accumulator` only
    /// selects which error variant is reported on failure.
    fn check_input_instance_structure(
        instance: &InputInstance<G>,
        is_accumulator: bool,
    ) -> Result<&InputInstance<G>, BoxedError> {
        // Accumulating commitments with degree bounds are unsupported.
        if instance.commitment.degree_bound().is_some() {
            if is_accumulator {
                return Err(BoxedError::new(ASError::MalformedAccumulator(
                    "Degree bounds on accumulator instances are unsupported.".to_string(),
                )));
            }

            return Err(BoxedError::new(ASError::MalformedInput(
                "Degree bounds on input instances are unsupported.".to_string(),
            )));
        }

        Ok(instance)
    }

    /// Check that the input witness is properly structured.
    ///
    /// Rejects degree bounds, hiding bounds, and polynomials whose degree
    /// exceeds what the prover key supports.
    fn check_input_witness_structure<'a>(
        witness: &'a LabeledPolynomial<G::ScalarField, DensePolynomial<G::ScalarField>>,
        prover_key: &trivial_pc::CommitterKey<G>,
        is_accumulator: bool,
    ) -> Result<&'a LabeledPolynomial<G::ScalarField, DensePolynomial<G::ScalarField>>, BoxedError>
    {
        // Accumulating polynomials with degree bounds are unsupported.
        if witness.degree_bound().is_some() {
            if is_accumulator {
                return Err(BoxedError::new(ASError::MalformedAccumulator(
                    "Degree bounds on accumulator witnesses are unsupported.".to_string(),
                )));
            }

            return Err(BoxedError::new(ASError::MalformedInput(
                "Degree bounds on input witnesses are unsupported.".to_string(),
            )));
        }

        // Accumulating polynomials with hiding bounds are unsupported.
        if witness.hiding_bound().is_some() {
            if is_accumulator {
                return Err(BoxedError::new(ASError::MalformedAccumulator(
                    "Hiding bounds on accumulator witnesses are unsupported.".to_string(),
                )));
            }

            return Err(BoxedError::new(ASError::MalformedInput(
                "Hiding bounds on input witnesses are unsupported.".to_string(),
            )));
        }

        // The polynomial to be accumulated must have a degree that is supported by the prover key.
        if witness.degree() > prover_key.supported_degree() {
            if is_accumulator {
                return Err(BoxedError::new(ASError::MalformedAccumulator(format!(
                    "An accumulator witness of degree {} is unsupported for this prover key",
                    witness.degree()
                ))));
            }

            return Err(BoxedError::new(ASError::MalformedInput(format!(
                "An input witness of degree {} is unsupported for this prover key",
                witness.degree()
            ))));
        }

        Ok(witness)
    }

    /// Check that the proof is properly structured.
    fn check_proof_structure(proof: &Proof<G>, num_inputs: usize) -> bool {
        // Each proof must correspond to an input.
        return proof.len() == num_inputs;
    }

    /// Compute the witness polynomials and witness commitments from the inputs.
    /// For a claim (p, z, v), the witness polynomial is w(X) = (p(X) - v)/(X - z).
    ///
    /// Returns the witness polynomials and their TrivialPC commitments, in
    /// input order (one pair per input).
    fn compute_witness_polynomials_and_commitments<'a>(
        ck: &trivial_pc::CommitterKey<G>,
        inputs: impl IntoIterator<Item = &'a InputRef<'a, ConstraintF<G>, S, Self>>,
    ) -> Result<
        (
            Vec<LabeledPolynomial<G::ScalarField, DensePolynomial<G::ScalarField>>>,
            Vec<LabeledCommitment<trivial_pc::Commitment<G>>>,
        ),
        PCError,
    > {
        let mut witness_polynomials = Vec::new();
        let mut witness_commitments = Vec::new();

        for input in inputs.into_iter() {
            let point = input.instance.point;
            let eval = input.instance.eval;

            // numerator = p(X) - v, denominator = X - z.
            let numerator = (&DensePolynomial::from_coefficients_vec(vec![-eval]))
                .add(input.witness.polynomial());
            let denominator =
                DensePolynomial::from_coefficients_vec(vec![-point, G::ScalarField::one()]);
            let witness_polynomial = (&numerator).div(&denominator);

            // No degree bound and no hiding bound on the witness (mirrors the
            // structural checks above).
            let labeled_witness_polynomial = LabeledPolynomial::new(
                PolynomialLabel::new(),
                witness_polynomial.clone(),
                None,
                None,
            );

            let witness_commitment =
                TrivialPC::commit(ck, vec![&labeled_witness_polynomial], None)?
                    .0
                    .pop()
                    .unwrap();

            witness_polynomials.push(labeled_witness_polynomial);
            witness_commitments.push(witness_commitment);
        }

        Ok((witness_polynomials, witness_commitments))
    }

    /// Compute the linear combination of polynomials p = \sum challenge_i * p_i.
    fn combine_polynomials<'a>(
        labeled_polynomials: impl IntoIterator<
            Item = &'a LabeledPolynomial<G::ScalarField, DensePolynomial<G::ScalarField>>,
        >,
        challenges: &[G::ScalarField],
    ) -> DensePolynomial<G::ScalarField> {
        let mut combined_polynomial = DensePolynomial::zero();
        for (i, p) in labeled_polynomials.into_iter().enumerate() {
            combined_polynomial += (challenges[i], p.polynomial());
        }

        combined_polynomial
    }

    /// Compute the linear combination of evaluations v = \sum challenge_i * v_i.
    fn combine_evaluations<'a>(
        evaluations: impl IntoIterator<Item = &'a G::ScalarField>,
        challenges: &[G::ScalarField],
    ) -> G::ScalarField {
        let mut combined_eval = G::ScalarField::zero();
        for (i, eval) in evaluations.into_iter().enumerate() {
            combined_eval += &eval.mul(&challenges[i]);
        }

        combined_eval
    }

    /// Compute the linear combination of commitments C = \sum challenge_i * C_i.
    fn combine_commitments<'a>(
        commitments: impl IntoIterator<Item = &'a LabeledCommitment<trivial_pc::Commitment<G>>>,
        challenges: &[G::ScalarField],
    ) -> trivial_pc::Commitment<G> {
        let mut scalar_commitment_pairs = Vec::new();
        for (i, c) in commitments.into_iter().enumerate() {
            scalar_commitment_pairs.push((challenges[i], c.commitment().clone()));
        }

        // The (scalar, commitment) pairs are summed into a single commitment.
        scalar_commitment_pairs.into_iter().sum()
    }
}
impl<G, S> AccumulationScheme<ConstraintF<G>, S> for ASForTrivialPC<G, S>
where
    G: AffineCurve + Absorbable<ConstraintF<G>>,
    ConstraintF<G>: Absorbable<ConstraintF<G>>,
    S: CryptographicSponge<ConstraintF<G>>,
{
    type PublicParameters = ();
    type PredicateParams = trivial_pc::UniversalParams<G>;

    // The predicate index is the supported polynomial degree.
    type PredicateIndex = usize;
    type ProverKey = trivial_pc::CommitterKey<G>;
    type VerifierKey = usize;
    type DeciderKey = trivial_pc::CommitterKey<G>;

    type InputInstance = InputInstance<G>;
    type InputWitness = LabeledPolynomial<G::ScalarField, DensePolynomial<G::ScalarField>>;

    type AccumulatorInstance = InputInstance<G>;
    type AccumulatorWitness = LabeledPolynomial<G::ScalarField, DensePolynomial<G::ScalarField>>;

    type Proof = Proof<G>;

    type Error = BoxedError;

    // This scheme has no public parameters.
    fn setup(_rng: &mut impl RngCore) -> Result<Self::PublicParameters, Self::Error> {
        Ok(())
    }

    /// Trims the TrivialPC universal parameters to the indexed degree; the
    /// verifier key is the degree itself.
    fn index(
        _public_params: &Self::PublicParameters,
        predicate_params: &Self::PredicateParams,
        predicate_index: &Self::PredicateIndex,
    ) -> Result<(Self::ProverKey, Self::VerifierKey, Self::DeciderKey), Self::Error> {
        let (ck, vk) = TrivialPC::<G, DensePolynomial<G::ScalarField>>::trim(
            predicate_params,
            *predicate_index,
            0,
            None,
        )
        .map_err(|e| BoxedError::new(e))?;

        Ok((ck, *predicate_index, vk))
    }

    /// Accumulation prover; step numbering follows BCLMS20 Section A.
    fn prove<'a>(
        prover_key: &Self::ProverKey,
        inputs: impl IntoIterator<Item = InputRef<'a, ConstraintF<G>, S, Self>>,
        old_accumulators: impl IntoIterator<Item = AccumulatorRef<'a, ConstraintF<G>, S, Self>>,
        _make_zk: MakeZK<'_>,
        sponge: Option<S>,
    ) -> Result<(Accumulator<ConstraintF<G>, S, Self>, Self::Proof), Self::Error>
    where
        Self: 'a,
        S: 'a,
    {
        let sponge = sponge.unwrap_or_else(|| S::new());

        let mut inputs = inputs.into_iter().collect::<Vec<_>>();
        let accumulators = old_accumulators.into_iter().collect::<Vec<_>>();

        // Default input in the case there are no provided inputs or accumulators.
        let default_input_instance;
        let default_input_witness;
        if inputs.is_empty() && accumulators.is_empty() {
            default_input_instance = Some(InputInstance::zero());
            default_input_witness = Some(LabeledPolynomial::new(
                PolynomialLabel::new(),
                DensePolynomial::zero(),
                None,
                None,
            ));

            inputs.push(InputRef::<_, _, Self> {
                instance: default_input_instance.as_ref().unwrap(),
                witness: default_input_witness.as_ref().unwrap(),
            });
        }

        // Structural checks: inputs first, then old accumulators, preserving
        // that order for the transcript below.
        let all_input_instances = inputs
            .iter()
            .map(|input| Self::check_input_instance_structure(input.instance, false))
            .chain(accumulators.iter().map(|accumulator| {
                Self::check_input_instance_structure(accumulator.instance, true)
            }))
            .collect::<Result<Vec<_>, BoxedError>>()?;

        let all_input_witnesses = inputs
            .iter()
            .map(|input| Self::check_input_witness_structure(input.witness, prover_key, false))
            .chain(accumulators.iter().map(|accumulator| {
                Self::check_input_witness_structure(accumulator.witness, prover_key, true)
            }))
            .collect::<Result<Vec<_>, BoxedError>>()?;

        // Steps 1c-1d of the scheme's accumulation prover, as detailed in BCLMS20.
        let (witness_polynomials, witness_commitments) =
            Self::compute_witness_polynomials_and_commitments(
                &prover_key,
                inputs.iter().chain(&accumulators),
            )
            .map_err(|e| BoxedError::new(e))?;

        assert_eq!(all_input_witnesses.len(), witness_polynomials.len());
        assert_eq!(all_input_witnesses.len(), witness_commitments.len());

        // Step 2 of the scheme's accumulation prover, as detailed in BCLMS20.
        // The challenge point is derived by absorbing the supported degree,
        // then each (instance, witness commitment) pair, in order.
        let mut challenge_point_sponge = sponge.clone();
        challenge_point_sponge.absorb(&prover_key.supported_degree());

        for (instance, witness_commitment) in all_input_instances.iter().zip(&witness_commitments) {
            absorb![
                &mut challenge_point_sponge,
                instance,
                witness_commitment.commitment().elem
            ];
        }

        let challenge_point = challenge_point_sponge
            .squeeze_nonnative_field_elements_with_sizes(&[FieldElementSize::Truncated(
                CHALLENGE_POINT_SIZE,
            )])
            .pop()
            .unwrap();

        let mut linear_combination_challenges_sponge = sponge;
        // The challenge point is re-absorbed as bytes, padded to the byte
        // length implied by CHALLENGE_POINT_SIZE.
        let mut challenge_point_bytes = to_bytes!(challenge_point).unwrap();
        challenge_point_bytes.resize_with((CHALLENGE_POINT_SIZE + 7) / 8, || 0u8);
        linear_combination_challenges_sponge.absorb(&challenge_point_bytes);

        let mut proof = Vec::new();
        for ((input_witness, witness_polynomial), witness_commitment) in all_input_witnesses
            .iter()
            .zip(&witness_polynomials)
            .zip(&witness_commitments)
        {
            // Step 3 of the scheme's accumulation prover, as detailed in BCLMS20.
            let input_witness_eval = input_witness.evaluate(&challenge_point);
            let witness_eval = witness_polynomial.evaluate(&challenge_point);

            // Step 4 of the scheme's accumulation prover, as detailed in BCLMS20.
            absorb![
                &mut linear_combination_challenges_sponge,
                &to_bytes!(&input_witness_eval).unwrap(),
                &to_bytes!(&witness_eval).unwrap()
            ];

            let single_proof = SingleProof {
                witness_commitment: witness_commitment.clone(),
                witness_eval,
                eval: input_witness_eval,
            };

            proof.push(single_proof);
        }

        // Step 4 of the scheme's accumulation prover, as detailed in BCLMS20.
        // Two challenges per input: one for the input polynomial, one for its
        // witness polynomial.
        let linear_combination_challenges = linear_combination_challenges_sponge
            .squeeze_nonnative_field_elements_with_sizes(
                vec![
                    FieldElementSize::Truncated(LINEAR_COMBINATION_CHALLENGE_SIZE);
                    proof.len() * 2
                ]
                .as_slice(),
            );

        // Step 5 of the scheme's accumulation prover, as detailed in BCLMS20.
        let combined_polynomial = Self::combine_polynomials(
            all_input_witnesses.into_iter().chain(&witness_polynomials),
            linear_combination_challenges.as_slice(),
        );

        let combined_polynomial =
            LabeledPolynomial::new(PolynomialLabel::new(), combined_polynomial, None, None);

        // Step 6 of the scheme's accumulation prover, as detailed in BCLMS20.
        let combined_eval = combined_polynomial.evaluate(&challenge_point);

        // Step 7 of the scheme's accumulation prover, as detailed in BCLMS20.
        let combined_commitment = Self::combine_commitments(
            all_input_instances
                .into_iter()
                .map(|instance| &instance.commitment)
                .chain(&witness_commitments),
            linear_combination_challenges.as_slice(),
        );

        let combined_commitment =
            LabeledCommitment::new(PolynomialLabel::new(), combined_commitment, None);

        // Steps 8-10 of the scheme's accumulation prover, as detailed in BCLMS20.
        let new_accumulator_instance = InputInstance {
            commitment: combined_commitment,
            point: challenge_point,
            eval: combined_eval,
        };

        let new_accumulator = Accumulator::<_, _, Self> {
            instance: new_accumulator_instance,
            witness: combined_polynomial,
        };

        Ok((new_accumulator, proof))
    }

    /// Accumulation verifier; recomputes the Fiat-Shamir transcript and
    /// checks it against the claimed new accumulator instance. Structural
    /// failures yield `Ok(false)` rather than an error.
    fn verify<'a>(
        verifier_key: &Self::VerifierKey,
        input_instances: impl IntoIterator<Item = &'a Self::InputInstance>,
        old_accumulator_instances: impl IntoIterator<Item = &'a Self::AccumulatorInstance>,
        new_accumulator_instance: &Self::AccumulatorInstance,
        proof: &Self::Proof,
        sponge: Option<S>,
    ) -> Result<bool, Self::Error>
    where
        Self: 'a,
    {
        let sponge = sponge.unwrap_or_else(|| S::new());

        // Collect the input and run basic checks on them.
        let all_input_instances = input_instances
            .into_iter()
            .map(|instance| Self::check_input_instance_structure(instance, false))
            .chain(
                old_accumulator_instances
                    .into_iter()
                    .map(|instance| Self::check_input_instance_structure(instance, true)),
            )
            .collect::<Result<Vec<_>, BoxedError>>();

        if all_input_instances.is_err() {
            return Ok(false);
        }

        let mut all_input_instances = all_input_instances.unwrap();

        // Default input in the case there are no provided inputs or accumulators.
        // Mirrors the prover's behavior so the transcripts line up.
        let default_input_instance;
        if all_input_instances.is_empty() {
            default_input_instance = Some(InputInstance::zero());
            all_input_instances.push(default_input_instance.as_ref().unwrap());
        }

        let new_accumulator_instance =
            Self::check_input_instance_structure(new_accumulator_instance, true);

        if new_accumulator_instance.is_err() {
            return Ok(false);
        }

        let new_accumulator_instance = new_accumulator_instance.unwrap();

        if all_input_instances.len() == 0 {
            return Ok(false);
        }

        if !Self::check_proof_structure(proof, all_input_instances.len()) {
            return Ok(false);
        }

        // Step 3 of the scheme's accumulation verifier, as detailed in BCLMS20.
        // The verifier key (the supported degree) seeds the transcript, just
        // as `prover_key.supported_degree()` does on the prover side.
        let mut challenge_point_sponge = sponge.clone();
        challenge_point_sponge.absorb(verifier_key);

        let mut commitments = Vec::new();
        for (input_instance, p) in all_input_instances.into_iter().zip(proof) {
            // Step 3 of the scheme's accumulation verifier, as detailed in BCLMS20.
            absorb![
                &mut challenge_point_sponge,
                input_instance,
                p.witness_commitment.commitment().elem
            ];

            // Step 4 of the scheme's accumulation verifier, as detailed in BCLMS20.
            // Check (claimed_eval - instance_eval) == witness_eval * (z* - z).
            let eval_check_lhs = p.eval - &input_instance.eval;
            let eval_check_rhs = p
                .witness_eval
                .mul(&(new_accumulator_instance.point - &input_instance.point));

            if !eval_check_lhs.eq(&eval_check_rhs) {
                return Ok(false);
            }

            commitments.push(&input_instance.commitment);
        }

        // Step 3 of the scheme's accumulation verifier, as detailed in BCLMS20.
        let challenge_point: G::ScalarField = challenge_point_sponge
            .squeeze_nonnative_field_elements_with_sizes(&[FieldElementSize::Truncated(
                CHALLENGE_POINT_SIZE,
            )])
            .pop()
            .unwrap();

        if !challenge_point.eq(&new_accumulator_instance.point) {
            return Ok(false);
        }

        // Step 5 of the scheme's accumulation verifier, as detailed in BCLMS20.
        let mut linear_combination_challenges_sponge = sponge;
        let mut challenge_point_bytes = to_bytes!(challenge_point).unwrap();
        challenge_point_bytes.resize_with((CHALLENGE_POINT_SIZE + 7) / 8, || 0u8);
        linear_combination_challenges_sponge.absorb(&challenge_point_bytes);

        for single_proof in proof {
            absorb![
                &mut linear_combination_challenges_sponge,
                &to_bytes!(&single_proof.eval).unwrap(),
                &to_bytes!(&single_proof.witness_eval).unwrap()
            ];
        }

        let linear_combination_challenges = linear_combination_challenges_sponge
            .squeeze_nonnative_field_elements_with_sizes(
                vec![
                    FieldElementSize::Truncated(LINEAR_COMBINATION_CHALLENGE_SIZE);
                    proof.len() * 2
                ]
                .as_slice(),
            );

        // Step 6 of the scheme's accumulation verifier, as detailed in BCLMS20.
        let combined_eval = Self::combine_evaluations(
            proof
                .into_iter()
                .map(|p| &p.eval)
                .chain(proof.into_iter().map(|p| &p.witness_eval)),
            linear_combination_challenges.as_slice(),
        );

        if !combined_eval.eq(&new_accumulator_instance.eval) {
            return Ok(false);
        }

        // Step 7 of the scheme's accumulation verifier, as detailed in BCLMS20.
        let combined_commitment = Self::combine_commitments(
            commitments
                .into_iter()
                .chain(proof.into_iter().map(|p| &p.witness_commitment)),
            linear_combination_challenges.as_slice(),
        );

        if !combined_commitment.eq(new_accumulator_instance.commitment.commitment()) {
            return Ok(false);
        }

        Ok(true)
    }

    /// Decider: checks that the accumulator's witness polynomial opens its
    /// commitment to the claimed evaluation at the claimed point, via the
    /// underlying TrivialPC check.
    fn decide<'a>(
        decider_key: &Self::DeciderKey,
        accumulator: AccumulatorRef<'_, ConstraintF<G>, S, Self>,
        _sponge: Option<S>,
    ) -> Result<bool, Self::Error>
    where
        Self: 'a,
    {
        let check = TrivialPC::check_individual_opening_challenges(
            decider_key,
            vec![&accumulator.instance.commitment],
            &accumulator.instance.point,
            vec![accumulator.instance.eval],
            &trivial_pc::Proof {
                polynomial: accumulator.witness.clone(),
            },
            // Opening challenges are unused by TrivialPC; a constant suffices.
            &|_| G::ScalarField::one(),
            None,
        );

        Ok(check.is_ok() && check.ok().unwrap())
    }
}
#[cfg(test)]
pub mod tests {
use crate::data_structures::Input;
use crate::error::BoxedError;
use crate::tests::*;
use crate::trivial_pc_as::{ASForTrivialPC, InputInstance};
use crate::AccumulationScheme;
use crate::ConstraintF;
use ark_ec::AffineCurve;
use ark_ff::ToConstraintField;
use ark_poly::polynomial::univariate::DensePolynomial;
use ark_poly_commit::trivial_pc::TrivialPC;
use ark_poly_commit::{
trivial_pc, LabeledPolynomial, PCCommitterKey, PolynomialCommitment, UVPolynomial,
};
use ark_sponge::poseidon::PoseidonSponge;
use ark_sponge::{Absorbable, CryptographicSponge};
use ark_std::rand::RngCore;
use ark_std::vec::Vec;
use ark_std::UniformRand;
pub struct ASForTrivialPCTestParams {
pub(crate) degree: usize,
}
impl TestParameters for ASForTrivialPCTestParams {
fn make_zk(&self) -> bool {
false
}
}
pub struct ASForTrivialPCTestInput {}
impl<G, S> ASTestInput<ConstraintF<G>, S, ASForTrivialPC<G, S>> for ASForTrivialPCTestInput
where
G: AffineCurve + ToConstraintField<ConstraintF<G>> + Absorbable<ConstraintF<G>>,
ConstraintF<G>: Absorbable<ConstraintF<G>>,
S: CryptographicSponge<ConstraintF<G>>,
{
type TestParams = ASForTrivialPCTestParams;
type InputParams = trivial_pc::CommitterKey<G>;
fn setup(
test_params: &Self::TestParams,
rng: &mut impl RngCore,
) -> (
Self::InputParams,
<ASForTrivialPC<G, S> as AccumulationScheme<ConstraintF<G>, S>>::PredicateParams,
<ASForTrivialPC<G, S> as AccumulationScheme<ConstraintF<G>, S>>::PredicateIndex,
) {
let max_degree = test_params.degree;
let supported_degree = max_degree;
let supported_hiding_bound = 0;
let predicate_params =
TrivialPC::<G, DensePolynomial<G::ScalarField>>::setup(max_degree, None, rng)
.unwrap();
let (ck, _) = TrivialPC::<G, DensePolynomial<G::ScalarField>>::trim(
&predicate_params,
supported_degree,
supported_hiding_bound,
None,
)
.unwrap();
(ck, predicate_params, supported_degree)
}
fn generate_inputs(
input_params: &Self::InputParams,
num_inputs: usize,
rng: &mut impl RngCore,
) -> Vec<Input<ConstraintF<G>, S, ASForTrivialPC<G, S>>> {
let ck = input_params;
let degree = PCCommitterKey::supported_degree(ck);
let labeled_polynomials: Vec<
LabeledPolynomial<G::ScalarField, DensePolynomial<G::ScalarField>>,
> = (0..num_inputs)
.map(|i| {
let label = format!("Input{}", i);
let polynomial = DensePolynomial::rand(degree, rng);
let labeled_polynomial = LabeledPolynomial::new(label, polynomial, None, None);
labeled_polynomial
})
.collect();
let (labeled_commitments, _) = TrivialPC::<G, DensePolynomial<G::ScalarField>>::commit(
ck,
&labeled_polynomials,
Some(rng),
)
.unwrap();
let inputs = labeled_polynomials
.into_iter()
.zip(labeled_commitments)
.map(|(labeled_polynomial, labeled_commitment)| {
let point = G::ScalarField::rand(rng);
let eval = labeled_polynomial.evaluate(&point);
let instance = InputInstance {
commitment: labeled_commitment,
point,
eval,
};
Input::<_, _, ASForTrivialPC<G, S>> {
instance,
witness: labeled_polynomial,
}
})
.collect();
inputs
}
}
type G = ark_pallas::Affine;
type CF = ark_pallas::Fq;
type Sponge = PoseidonSponge<CF>;
type AS = ASForTrivialPC<G, Sponge>;
type I = ASForTrivialPCTestInput;
type Tests = ASTests<CF, Sponge, AS, I>;
#[test]
pub fn single_input_init_test() -> Result<(), BoxedError> {
Tests::single_input_init_test(&ASForTrivialPCTestParams { degree: 11 })
}
#[test]
pub fn multiple_inputs_init_test() -> Result<(), BoxedError> {
Tests::multiple_inputs_init_test(&ASForTrivialPCTestParams { degree: 11 })
}
#[test]
pub fn simple_accumulation_test() -> Result<(), BoxedError> {
Tests::simple_accumulation_test(&ASForTrivialPCTestParams { degree: 11 })
}
#[test]
pub fn multiple_inputs_accumulation_test() -> Result<(), BoxedError> {
Tests::multiple_inputs_accumulation_test(&ASForTrivialPCTestParams { degree: 11 })
}
#[test]
pub fn accumulators_only_test() -> Result<(), BoxedError> {
Tests::accumulators_only_test(&ASForTrivialPCTestParams { degree: 11 })
}
#[test]
pub fn no_inputs_init_test() -> Result<(), BoxedError> {
Tests::no_inputs_init_test(&ASForTrivialPCTestParams { degree: 11 })
}
}
| 36.954774 | 100 | 0.606337 |
use std::io::{Cursor, Read};
use std::sync::Mutex;
use ruzstd::frame_decoder::FrameDecoder;
use ruzstd::streaming_decoder::StreamingDecoder;
use serde::{Deserialize, Serialize};
use shengji_core::{
bidding::{Bid, BidPolicy, JokerBidPolicy},
hands::Hands,
player::Player,
scoring::{
compute_level_deltas, explain_level_deltas, GameScoreResult, GameScoringParameters,
POINTS_PER_DECK,
},
trick::{Trick, TrickDrawPolicy, TrickFormat, TrickUnit, UnitLike},
types::{Card, EffectiveSuit, PlayerID, Trump},
};
use shengji_types::ZSTD_ZSTD_DICT;
use smallvec::SmallVec;
use wasm_bindgen::prelude::*;
// use web_sys::console;
lazy_static::lazy_static! {
static ref ZSTD_DICT: Vec<u8> = {
let mut reader = Cursor::new(ZSTD_ZSTD_DICT);
let mut decoder =
StreamingDecoder::new(&mut reader).map_err(|_| "Failed to construct decoder").unwrap();
let mut v = Vec::new();
decoder
.read_to_end(&mut v)
.map_err(|e| format!("Failed to decode data {:?}", e)).unwrap();
v
};
static ref ZSTD_DECODER: Mutex<Option<FrameDecoder>> = {
let mut fd = FrameDecoder::new();
fd.add_dict(&ZSTD_DICT).unwrap();
Mutex::new(Some(fd))
};
}
#[derive(Deserialize)]
struct FindViablePlaysRequest {
trump: Trump,
cards: Vec<Card>,
}
#[derive(Serialize)]
struct FindViablePlaysResult {
results: Vec<FoundViablePlay>,
}
#[derive(Serialize)]
struct FoundViablePlay {
grouping: SmallVec<[TrickUnit; 4]>,
description: String,
}
#[wasm_bindgen]
pub fn find_viable_plays(req: JsValue) -> Result<JsValue, JsValue> {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();
let FindViablePlaysRequest { trump, cards } = req
.into_serde()
.map_err(|_| "Failed to deserialize request")?;
let results = TrickUnit::find_plays(trump, cards)
.into_iter()
.map(|p| {
let description = UnitLike::multi_description(p.iter().map(UnitLike::from));
FoundViablePlay {
grouping: p,
description,
}
})
.collect::<Vec<_>>();
Ok(JsValue::from_serde(&FindViablePlaysResult { results })
.map_err(|_| "failed to serialize response")?)
}
#[derive(Deserialize)]
struct DecomposeTrickFormatRequest {
trick_format: TrickFormat,
hands: Hands,
player_id: PlayerID,
trick_draw_policy: TrickDrawPolicy,
}
#[derive(Serialize)]
struct DecomposeTrickFormatResponse {
results: Vec<DecomposedTrickFormat>,
}
#[derive(Serialize)]
struct DecomposedTrickFormat {
format: SmallVec<[UnitLike; 4]>,
description: String,
playable: Vec<Card>,
}
#[wasm_bindgen]
pub fn decompose_trick_format(req: JsValue) -> Result<JsValue, JsValue> {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();
let DecomposeTrickFormatRequest {
trick_format,
hands,
player_id,
trick_draw_policy,
} = req
.into_serde()
.map_err(|_| "Failed to deserialize request")?;
let hand = hands
.get(player_id)
.map_err(|_| "Couldn't find hand for player")?;
let available_cards = Card::cards(
hand.iter()
.filter(|(c, _)| trick_format.trump().effective_suit(**c) == trick_format.suit()),
)
.copied()
.collect::<Vec<_>>();
let results = trick_format
.decomposition()
.map(|format| {
let description = UnitLike::multi_description(format.iter().cloned());
let (playable, units) = UnitLike::check_play(
trick_format.trump(),
available_cards.iter().copied(),
format.iter().cloned(),
trick_draw_policy,
);
DecomposedTrickFormat {
format,
description,
playable: if playable {
units
.into_iter()
.flat_map(|u| {
u.into_iter()
.flat_map(|(card, count)| std::iter::repeat(card.card).take(count))
.collect::<Vec<_>>()
})
.collect()
} else {
vec![]
},
}
})
.collect();
Ok(
JsValue::from_serde(&DecomposeTrickFormatResponse { results })
.map_err(|_| "failed to serialize response")?,
)
}
#[derive(Deserialize)]
struct CanPlayCardsRequest {
trick: Trick,
id: PlayerID,
hands: Hands,
cards: Vec<Card>,
trick_draw_policy: TrickDrawPolicy,
}
#[derive(Serialize)]
struct CanPlayCardsResponse {
playable: bool,
}
#[wasm_bindgen]
pub fn can_play_cards(req: JsValue) -> Result<JsValue, JsValue> {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();
let CanPlayCardsRequest {
trick,
id,
hands,
cards,
trick_draw_policy,
} = req
.into_serde()
.map_err(|_| "Failed to deserialize request")?;
Ok(JsValue::from_serde(&CanPlayCardsResponse {
playable: trick
.can_play_cards(id, &hands, &cards, trick_draw_policy)
.is_ok(),
})
.map_err(|_| "failed to serialize response")?)
}
#[derive(Deserialize)]
struct FindValidBidsRequest {
id: PlayerID,
bids: Vec<Bid>,
hands: Hands,
players: Vec<Player>,
landlord: Option<PlayerID>,
epoch: usize,
bid_policy: BidPolicy,
joker_bid_policy: JokerBidPolicy,
num_decks: usize,
}
#[derive(Serialize)]
struct FindValidBidsResult {
results: Vec<Bid>,
}
#[wasm_bindgen]
pub fn find_valid_bids(req: JsValue) -> Result<JsValue, JsValue> {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();
let req: FindValidBidsRequest = req
.into_serde()
.map_err(|_| "Failed to deserialize phase")?;
Ok(JsValue::from_serde(&FindValidBidsResult {
results: Bid::valid_bids(
req.id,
&req.bids,
&req.hands,
&req.players,
req.landlord,
req.epoch,
req.bid_policy,
req.joker_bid_policy,
req.num_decks,
)
.unwrap_or_default(),
})
.map_err(|_| "failed to serialize response")?)
}
#[derive(Deserialize)]
struct SortAndGroupCardsRequest {
trump: Trump,
cards: Vec<Card>,
}
#[derive(Serialize)]
struct SortAndGroupCardsResponse {
results: Vec<SuitGroup>,
}
#[derive(Serialize)]
struct SuitGroup {
suit: EffectiveSuit,
cards: Vec<Card>,
}
#[wasm_bindgen]
pub fn sort_and_group_cards(req: JsValue) -> Result<JsValue, JsValue> {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();
let SortAndGroupCardsRequest { trump, mut cards } = req
.into_serde()
.map_err(|_| "Failed to deserialize request")?;
cards.sort_by(|a, b| trump.compare(*a, *b));
let mut results: Vec<SuitGroup> = vec![];
for card in cards {
let suit = trump.effective_suit(card);
if let Some(group) = results.last_mut() {
if group.suit == suit {
group.cards.push(card);
continue;
}
}
results.push(SuitGroup {
suit,
cards: vec![card],
})
}
Ok(JsValue::from_serde(&SortAndGroupCardsResponse { results })
.map_err(|_| "failed to serialize response")?)
}
#[derive(Deserialize)]
struct ExplainScoringRequest {
num_decks: usize,
params: GameScoringParameters,
smaller_landlord_team_size: bool,
}
#[derive(Serialize)]
struct ExplainScoringResponse {
results: Vec<ScoreSegment>,
step_size: usize,
}
#[derive(Serialize)]
struct ScoreSegment {
point_threshold: isize,
results: GameScoreResult,
}
#[wasm_bindgen]
pub fn explain_scoring(req: JsValue) -> Result<JsValue, JsValue> {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();
let ExplainScoringRequest {
num_decks,
params,
smaller_landlord_team_size,
} = req
.into_serde()
.map_err(|_| "Failed to deserialize request")?;
let deltas = explain_level_deltas(
¶ms,
num_decks,
POINTS_PER_DECK,
smaller_landlord_team_size,
)
.map_err(|e| format!("Failed to explain scores: {:?}", e))?;
Ok(JsValue::from_serde(&ExplainScoringResponse {
results: deltas
.into_iter()
.map(|(pts, res)| ScoreSegment {
point_threshold: pts,
results: res,
})
.collect(),
step_size: params
.step_size(num_decks, 100)
.map_err(|e| format!("Failed to compute step size: {:?}", e))?,
})
.map_err(|_| "failed to serialize response")?)
}
#[derive(Deserialize)]
struct ComputeScoreRequest {
num_decks: usize,
params: GameScoringParameters,
smaller_landlord_team_size: bool,
non_landlord_points: isize,
}
#[derive(Serialize)]
struct ComputeScoreResponse {
score: GameScoreResult,
next_threshold: isize,
}
#[wasm_bindgen]
pub fn compute_score(req: JsValue) -> Result<JsValue, JsValue> {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();
let ComputeScoreRequest {
num_decks,
params,
smaller_landlord_team_size,
non_landlord_points,
} = req
.into_serde()
.map_err(|_| "Failed to deserialize request")?;
let score = compute_level_deltas(
¶ms,
num_decks,
POINTS_PER_DECK,
non_landlord_points,
smaller_landlord_team_size,
)
.map_err(|_| "Failed to compute score")?;
let next_threshold = params
.materialize(num_decks, 100)
.and_then(|n| n.next_relevant_score(non_landlord_points))
.map_err(|_| "Couldn't find next valid score")?
.0;
Ok(JsValue::from_serde(&ComputeScoreResponse {
score,
next_threshold,
})
.map_err(|_| "failed to serialize response")?)
}
#[wasm_bindgen]
pub fn zstd_decompress(req: &[u8]) -> Result<String, JsValue> {
let mut reader = Cursor::new(req);
let mut frame_decoder = ZSTD_DECODER.lock().unwrap();
let mut decoder =
StreamingDecoder::new_with_decoder(&mut reader, frame_decoder.take().unwrap())
.map_err(|_| "Failed to construct decoder")?;
let mut v = Vec::new();
decoder
.read_to_end(&mut v)
.map_err(|e| format!("Failed to decode data {:?}", e))?;
*frame_decoder = Some(decoder.inner());
drop(frame_decoder);
Ok(String::from_utf8(v).map_err(|_| "Failed to parse utf-8")?)
}
| 26.917706 | 99 | 0.602742 |
//! A lowering for `use`-paths (more generally, paths without angle-bracketed segments).
use std::{
fmt::{self, Display},
iter,
};
use crate::{
db::AstDatabase,
hygiene::Hygiene,
name::{known, Name},
};
use base_db::CrateId;
use either::Either;
use syntax::{ast, AstNode};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ModPath {
pub kind: PathKind,
segments: Vec<Name>,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum PathKind {
Plain,
/// `self::` is `Super(0)`
Super(u8),
Crate,
/// Absolute path (::foo)
Abs,
/// `$crate` from macro expansion
DollarCrate(CrateId),
}
impl ModPath {
pub fn from_src(db: &dyn AstDatabase, path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
convert_path(db, None, path, hygiene)
}
pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
let segments = segments.into_iter().collect::<Vec<_>>();
ModPath { kind, segments }
}
/// Creates a `ModPath` from a `PathKind`, with no extra path segments.
pub const fn from_kind(kind: PathKind) -> ModPath {
ModPath { kind, segments: Vec::new() }
}
pub fn segments(&self) -> &[Name] {
&self.segments
}
pub fn push_segment(&mut self, segment: Name) {
self.segments.push(segment);
}
pub fn pop_segment(&mut self) -> Option<Name> {
self.segments.pop()
}
/// Returns the number of segments in the path (counting special segments like `$crate` and
/// `super`).
pub fn len(&self) -> usize {
self.segments.len()
+ match self.kind {
PathKind::Plain => 0,
PathKind::Super(i) => i as usize,
PathKind::Crate => 1,
PathKind::Abs => 0,
PathKind::DollarCrate(_) => 1,
}
}
pub fn is_ident(&self) -> bool {
self.as_ident().is_some()
}
pub fn is_self(&self) -> bool {
self.kind == PathKind::Super(0) && self.segments.is_empty()
}
/// If this path is a single identifier, like `foo`, return its name.
pub fn as_ident(&self) -> Option<&Name> {
if self.kind != PathKind::Plain {
return None;
}
match &*self.segments {
[name] => Some(name),
_ => None,
}
}
}
impl Display for ModPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut first_segment = true;
let mut add_segment = |s| -> fmt::Result {
if !first_segment {
f.write_str("::")?;
}
first_segment = false;
f.write_str(s)?;
Ok(())
};
match self.kind {
PathKind::Plain => {}
PathKind::Super(0) => add_segment("self")?,
PathKind::Super(n) => {
for _ in 0..n {
add_segment("super")?;
}
}
PathKind::Crate => add_segment("crate")?,
PathKind::Abs => add_segment("")?,
PathKind::DollarCrate(_) => add_segment("$crate")?,
}
for segment in &self.segments {
if !first_segment {
f.write_str("::")?;
}
first_segment = false;
segment.fmt(f)?;
}
Ok(())
}
}
impl From<Name> for ModPath {
fn from(name: Name) -> ModPath {
ModPath::from_segments(PathKind::Plain, iter::once(name))
}
}
fn convert_path(
db: &dyn AstDatabase,
prefix: Option<ModPath>,
path: ast::Path,
hygiene: &Hygiene,
) -> Option<ModPath> {
let prefix = match path.qualifier() {
Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
None => prefix,
};
let segment = path.segment()?;
let mut mod_path = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
match hygiene.name_ref_to_name(db, name_ref) {
Either::Left(name) => {
// no type args in use
let mut res = prefix.unwrap_or_else(|| {
ModPath::from_kind(
segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
)
});
res.segments.push(name);
res
}
Either::Right(crate_id) => {
return Some(ModPath::from_segments(
PathKind::DollarCrate(crate_id),
iter::empty(),
))
}
}
}
ast::PathSegmentKind::SelfTypeKw => {
if prefix.is_some() {
return None;
}
ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE))
}
ast::PathSegmentKind::CrateKw => {
if prefix.is_some() {
return None;
}
ModPath::from_segments(PathKind::Crate, iter::empty())
}
ast::PathSegmentKind::SelfKw => {
if prefix.is_some() {
return None;
}
ModPath::from_segments(PathKind::Super(0), iter::empty())
}
ast::PathSegmentKind::SuperKw => {
let nested_super_count = match prefix.map(|p| p.kind) {
Some(PathKind::Super(n)) => n,
Some(_) => return None,
None => 0,
};
ModPath::from_segments(PathKind::Super(nested_super_count + 1), iter::empty())
}
ast::PathSegmentKind::Type { .. } => {
// not allowed in imports
return None;
}
};
// handle local_inner_macros :
// Basically, even in rustc it is quite hacky:
// https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
// We follow what it did anyway :)
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
mod_path.kind = PathKind::DollarCrate(crate_id);
}
}
}
Some(mod_path)
}
pub use crate::name as __name;
#[macro_export]
macro_rules! __known_path {
(core::iter::IntoIterator) => {};
(core::iter::Iterator) => {};
(core::result::Result) => {};
(core::option::Option) => {};
(core::ops::Range) => {};
(core::ops::RangeFrom) => {};
(core::ops::RangeFull) => {};
(core::ops::RangeTo) => {};
(core::ops::RangeToInclusive) => {};
(core::ops::RangeInclusive) => {};
(core::future::Future) => {};
(core::ops::Try) => {};
($path:path) => {
compile_error!("Please register your known path in the path module")
};
}
#[macro_export]
macro_rules! __path {
($start:ident $(:: $seg:ident)*) => ({
$crate::__known_path!($start $(:: $seg)*);
$crate::mod_path::ModPath::from_segments($crate::mod_path::PathKind::Abs, vec![
$crate::mod_path::__name![$start], $($crate::mod_path::__name![$seg],)*
])
});
}
pub use crate::__path as path;
| 29.638554 | 122 | 0.515989 |
//! Matrixa is a simple matrix manipulation library which supports
//! row and column matrix manipulations, scalar manipulation,
//! mathematical manipulation, filtering and parsing mechanisms for
//! various type of data stored in vector of vector (Vec<Vec<T>>).
//!
//! すべての行列で、すべての場合において可能な演算については値ないしは参照を返却し、
//! 行列と引数によっては演算が定義されないものについてはResult型を返却する。
//!
//! ```rust
//! use matrixa::core::Matrix;
//! use matrixa::mat;
//!
//! let mut im = Matrix::<i32>::new();
//! let mut fm = mat![f32: [1.0,2.0,3.0]];
//! im.push(vec![1,2,3,4,5])
//! .unwrap()
//! .push(vec![5,6,7,8,9])
//! .unwrap();
//! fm.debug()
//! .push(vec![1.23,4.56,7.89])
//! .unwrap();
//! im.add(1).print();
//! im.mul(3).print();
//! fm.print();
//! ```
//!
use std::fmt::Debug;
use PartialEq;
pub struct Matrix<T> {
pub data: Vec<Vec<T>>,
pub debug: bool,
current: usize,
max: usize,
}
/// イテレータ実装 / Iterator
///
/// Matrix はIterator を実装しており、for文等での数え上げに使える。
///
/// ```rust
/// use matrixa::core::Matrix;
/// use matrixa::mat;
///
/// let m = mat![i32: [1,2,3],[4,5,6]];
/// let v = vec![vec![1,2,3],vec![4,5,6]];
/// let mut i = 0;
/// for row in m {
/// let mut j = 0;
/// for datum in row {
/// assert_eq!(datum, v[i][j]);
/// j += 1;
/// }
/// i += 1;
/// }
/// ```
///
impl<T> Iterator for Matrix<T>
where T: Clone,
{
type Item = Vec<T>;
fn next(&mut self) -> Option<Vec<T>> {
self.current += 1;
if self.current - 1 < self.max {
let data = &self.data[self.current - 1];
Some(data.to_vec())
} else {
None
}
}
}
/// 行列の完全一致・不一致 / PartialEq
///
/// 行列の要素ごとの比較を行い、結果をboolで返却する。
/// 行および列の数が一致しない行列が指定された場合はパニックする。
///
/// ```rust
/// use matrixa::core::Matrix;
/// use matrixa::mat;
///
/// let m = mat![i32:[1,2],[3,4]];
/// let n = mat![i32:[1,2],[3,4]];
/// let o = mat![i32:[1,1],[3,4]];
/// assert_eq!(m == n, true);
/// assert_eq!(n == n, true);
/// assert_eq!(m == o, false);
/// assert_eq!(n == o, false);
/// ```
///
impl<T: Clone + std::cmp::PartialEq + std::fmt::Debug> PartialEq for Matrix<T>
where T: PartialEq
{
fn eq(&self, other: &Self) -> bool {
if !self.has_same_size_with(other.clone()) {
return false
}
for i in 0..self.data.len() {
for j in 0..self.data[i].len() {
if self.data[i][j] != other.data[i][j] {
return false
}
}
}
true
}
fn ne(&self, other: &Self) -> bool {
for i in 0..self.data.len() {
for j in 0..self.data[i].len() {
if self.data[i][j] != other.data[i][j] {
return true
}
}
}
false
}
}
/// 行列インスンタンス初期化用マクロ / initialization macro
///
/// ```rust
/// use matrixa::core::Matrix;
/// use matrixa::mat;
///
/// let mut im = mat![i32];
/// assert_eq!(im.rows(), 0);
/// im.print();
///
/// let fm = mat![
/// f32:
/// [1.0,2.0,3.0]
/// ];
/// assert_eq!(fm.cols(), 3);
/// assert_eq!(fm.row(0)[2], 3.0);
///
/// fm.print();
/// ```
#[macro_export]
macro_rules! mat {
( $t:ty : $( [ $( $x:expr ),+ ] ),* ) => {
{
let mut matrix: Matrix<$t> = Matrix::new();
let mut vec_len = 0;
$(
let mut t_vec = Vec::new();
$(
t_vec.push($x);
)*
if vec_len == 0 {
vec_len = t_vec.len();
}
if vec_len != t_vec.len() {
panic!("invalid vector length for {:?}!", t_vec)
}
matrix.push(t_vec).expect("failed to push new vector into the matrix");
)*
matrix
}
};
( $x:ty ) => {
{
Matrix::<$x>::new()
}
};
}
impl<T> Matrix<T> {
/// 行列生成
///
/// 新規の行列インスタンスを生成し、空行列として返却する
///
pub fn new() -> Self {
let v: Vec<Vec<T>> = Vec::new();
Matrix {
data: v,
debug: false,
current: 0,
max: 0,
}
}
}
impl<T: Debug> Matrix<T>
{
/// サイズ検証 / size matcher
///
/// 行列のサイズを引数行列のサイズと比較し、結果をResult型にくるんで返却する。
/// 一致する場合にはSelf型、一致しない場合にはエラーメッセージを返却する。
///
pub fn has_same_size_with(&self, other: &Self) -> bool {
match self {
_ if self.data.len() == 0 => {
println!("zero length origin for addition");
false
},
_ if self.data.len() != other.data.len() => {
println!("column number not matched {}, {}", self.data.len(),other.data.len());
false
},
_ if self.data[0].len() != other.data[0].len() => {
println!("row number not matched {}, {}", self.data[0].len(), other.data[0].len());
for i in 0..self.data.len() {
for j in 0..self.data[i].len() {
print!("d: {:?}, o: {:?}", self.data[i][j], other.data[i][j]);
}
}
false
}
_ => true,
}
}
}
impl<T: Copy> Clone for Matrix<T> {
/// 複製 / Clone
///
/// selfと同一のデータを有する新規インスタンスを生成する。
/// TがCopyを実装する場合、Matrix構造体のデータはVec<Vec<T>>であるためCloneを実装する。
/// selfの保持するデータと同一のデータを保持する新規インスタンスを生成することができる。
///
/// ```rust
/// use matrixa::core::Matrix;
/// use matrixa::mat;
///
/// let x = mat![i32:[1,2,3],[1,5,6]];
/// let y = x.clone();
///
/// for i in 0..y.rows() {
/// for j in 0..y.cols() {
/// assert_eq!(y.row(i)[j], x.row(i)[j]);
/// }
/// }
///
/// // xはxを所有しており、この時点でも自身のデータにアクセスできる
/// x.print();
///
/// ```
///
fn clone(&self) -> Matrix<T> {
let mut res = Matrix::new();
for i in 0..self.data.len() {
res.data.push(self.data[i].clone());
}
res
}
}
impl<T: std::fmt::Debug> Matrix<T> {
/// データ表示関数
///
pub fn print(&self) {
println!("{:?}", self.data)
}
/// 行列データ取得
///
/// データペイロードとしてベクトルベクトルを返却する。
pub fn dump(&self) -> &Vec<Vec<T>> {
&self.data
}
/// デバッガ
///
/// デバッグフラグを有効化したインスタンスを返す。
///
pub fn debug(&mut self) -> &mut Self {
self.debug = true;
println!("debugging for: {:?}", self.data);
self
}
/// データ追加
///
/// データ末尾にVec<T>型で指定した新規列を追加。
/// マクロ実装の関係上、pushメソッドについてはMatrix型に直に記述している。
///
pub fn push(&mut self, data: Vec<T>) -> Result<&mut Self, &str> {
self.max += 1;
if self.data.len() != 0 {
if self.data[0].len() != data.len() {
//println!("Invalid vector length: {}, expected: {}",data.len(), self.data.len());
return Err("Invalid vector length");
}
}
self.data.push(data);
Ok(self)
}
}
/// [行列一般] 基本メソッド群
///
/// 数値行列および文字行列のいずれにも対応したメソッドを定義。
///
impl<T> Matrix<T>
where
T: std::fmt::Debug + Copy,
{
/// 行抽出関数
///
/// Vec<T>として行を返却
///
pub fn row(&self, num: usize) -> Vec<T> {
self.integrity_check().unwrap();
if num >= self.data.len() {
panic!(
"row number {} is out of order: must be less than {}",
num,
self.data.len()
);
} else {
let mut res: Vec<T> = Vec::new();
for i in 0..self.data[num].len() {
res.push(self.data[num][i]);
}
res
}
}
/// 列抽出関数
///
/// Vec<T>として列を返却
///
pub fn col(&self, num: usize) -> Vec<T> {
self.integrity_check().unwrap();
let mut res: Vec<T> = Vec::new();
for i in 0..self.data.len() {
res.push(self.data[i][num]);
}
res
}
/// 正方行列判定
///
/// 正方行列であるかどうかを判定し、Result型に格納したオブジェクト参照を返却する
///
/// ```rust
/// use matrixa::core::Matrix;
/// use matrixa::mat;
///
/// let m = mat![i32: [1,2,3], [2,3,4],[3,4,5]];
/// m.is_square().unwrap();
/// ```
///
pub fn is_square(&self) -> Result<&Self, &str> {
if self.data.len() != self.data[0].len() {
Err("not a square matrix")
} else {
Ok(&self)
}
}
/// 行数表示関数
///
pub fn rows(&self) -> usize {
self.data.len()
}
/// 列数表示関数
///
pub fn cols(&self) -> usize {
if self.data.len() == 0 {
0
} else {
self.data[0].len()
}
}
/// 行置換操作
///
pub fn row_replace(&mut self, src: usize, dst: usize) -> Result<&mut Self, &str> {
self.integrity_check()
.expect("data corrupted before row replacement");
self.row_check(src)
.expect("src row is out of order for the replacement")
.row_check(dst)
.expect("dst row is out of order for the replacement");
let mut src_data = Vec::new();
let mut dst_data = Vec::new();
for i in 0..self.data[0].len() {
src_data.push(self.data[src][i]);
dst_data.push(self.data[dst][i]);
}
self.data[src] = dst_data;
self.data[dst] = src_data;
if self.debug {
println!("matrix row replacement: {} with {}", src, dst);
println!("{:?}", self.data);
}
self.integrity_check()
.expect("data corrupted after row replacement");
Ok(self)
}
/// 列置換操作
///
pub fn col_replace(&mut self, src: usize, dst: usize) -> Result<&mut Self, &str> {
self.integrity_check()
.expect("data corrupted before row replacement");
self.row_check(src)
.expect("src column is out of order for the replacement")
.row_check(dst)
.expect("dst column is out of order for the replacement");
for i in 0..self.data.len() {
println!("col_rep try for row[{}]: {:?}", i, self.data[i]);
let src_data = self.data[i][src];
self.data[i][src] = self.data[i][dst];
self.data[i][dst] = src_data;
}
if self.debug {
println!("matrix column replacement: {} with {}", src, dst);
println!("{:?}", self.data);
}
self.integrity_check()
.expect("data corrupted after row replacement");
Ok(self)
}
/// 転置
///
/// 転置行列でデータを更新し、オブジェクト参照を返却する。
///
///```rust
/// use matrixa::core::Matrix;
/// use matrixa::mat;
/// let mut m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
/// assert_eq!(m.rows(),3);
/// assert_eq!(m.cols(),5);
/// let res = mat![
/// i32:
/// [1,2,3],
/// [2,3,4],
/// [3,4,5],
/// [4,5,6],
/// [5,6,7]
/// ];
/// m.transpose();
/// assert_eq!(m.rows(),5);
/// assert_eq!(m.cols(),3);
/// for i in 0..m.rows(){
/// for j in 0..m.cols(){
/// assert_eq!(m.dump()[i][j], res.dump()[i][j]);
/// }
/// }
///
///```
pub fn transpose(&mut self) -> &mut Self {
self.integrity_check().unwrap();
let mut res: Vec<Vec<T>> = Vec::new();
let cols = self.data[0].len();
for i in 0..cols {
res.push(Vec::new());
let col = self.col(i);
for j in 0..self.data.len() {
res[i].push(col[j]);
}
if self.debug {
println!("res[{}]: {:?}", i, res[i]);
}
}
while self.data.len() != 0 {
self.data.pop();
}
for i in 0..cols {
self.data.push(Vec::new());
for j in 0..res[0].len() {
self.data[i].push(res[i][j]);
}
}
if self.debug {
println!("matrix transpose");
println!("{:?}", self.data);
}
self
}
/// 行列データ整合性検証
///
/// 長さ0もしくは長さの一致しないデータを検出した時点で強制終了
///
pub fn integrity_check(&self) -> Result<&Self, &str> {
if self.data.len() == 0 {
return Err("zero matrix length detected");
}
for i in 0..self.data.len() {
let len = self.data[0].len();
if self.data[i].len() != len {
println!(
"matrix corrupted at column {} (data: {:?}, length: {}, expected {})",
i,
self.data[i],
self.data[i].len(),
len
);
return Err("matrix corrupted");
}
}
Ok(&self)
}
/// 行の存在性検証
///
/// 行の値をusizeで指定し、行列の高さに収まるかどうかを検証。
/// 結果をResult型にオブジェクト参照を格納して返却
///
fn row_check(&self, row: usize) -> Result<&Self, &str> {
match row < self.data.len() {
true => Ok(&self),
false => Err("row is out of order"),
}
}
/// 列の存在性検証
///
/// 列の値をusizeで指定し、行列の幅に収まるかどうかを検証。
/// 結果をResult型にオブジェクト参照を格納して返却
///
fn col_check(&self, row: usize) -> Result<&Self, &str> {
match row < self.data.len() {
true => Ok(&self),
false => Err("column is out of order"),
}
}
/// 行・列の存在性検証
///
/// 行および列の値をusizeで指定し、行列の幅・高さに収まるかどうかを検証。
/// 結果をResult型にオブジェクト参照を格納して返却
///
pub fn range_check(&self, row: usize, col: usize) -> Result<&Self, &str> {
self.row_check(row)?.col_check(col)
}
/// 行列セッタ / data setter
///
/// Vec<Vec<T>>への参照として行列データをセットする関数。
///
/// ```rust
/// use matrixa::core::Matrix;
/// use matrixa::mat;
///
/// let mut m = mat![i32:[1]];
/// let v = vec![
/// vec![1,2,3],
/// vec![1,2,3],
/// ];
///
/// m.set(&v);
/// for i in 0..1 {
/// for j in 0..2 {
/// assert_eq!(m.dump()[i][j],v[i][j]);
/// }
/// }
/// m.print();
/// ```
///
pub fn set(&mut self, m: &Vec<Vec<T>>) {
if m.len() == 0 {
panic!("argument has zero length");
}
if self.debug {
println!("new data set: {:?}", m);
}
self.data = m.clone();
}
/// 行列ゲッタ / data getter
///
/// 設定済みの行列データを新規 Vec<Vec<T>> インスタンスとして返却する。
///
/// ```rust
/// use matrixa::core::Matrix;
/// use matrixa::mat;
///
/// let mut m = mat![i32];
/// let v = vec![
/// vec![1,2,3],
/// vec![1,2,3],
/// ];
/// m.data = v.clone();
/// m.print();
///
/// let e = m.get();
/// for i in 0..1 {
/// for j in 0..2 {
/// assert_eq!(e[i][j], v[i][j]);
/// }
/// }
/// ```
///
pub fn get(&self) -> Vec<Vec<T>> {
self.data.clone()
}
}
/// 変換系メソッド群 / conversion methods
///
/// 異なる型を元とする行列への型変換を行う関数群。
///
impl<T: Copy + ToString> Matrix<T> {
/// 文字列行列への変換 / conversion to String matrix
///
/// ToStringを実装する元を有する行列について、全要素をString型に変換したMatrix<String>を返却。
///
pub fn to_string(&self) -> Matrix<String> {
let mut res = mat![String];
for i in 0..self.data.len() {
res.data.push(Vec::new());
for j in 0..self.data.len() {
res.data[i].push(self.data[i][j].to_string());
}
}
res
}
}
#[cfg(test)]
mod tests_matrix {
use crate::core::Matrix;
use crate::mat;
#[test]
fn test_new_i32() {
let m = Matrix::<i32>::new();
assert_eq!(m.data.len(), 0);
}
#[test]
fn test_new_f32() {
let m = Matrix::<f32>::new();
assert_eq!(m.data.len(), 0);
}
#[test]
fn test_macro_with_type() {
let m = mat![f32];
assert_eq!(m.data.len(), 0);
//m.data.push(vec![1.234,5.678]);
}
#[test]
fn test_macro_with_values() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
assert_eq!(m.data.len(), 3);
assert_eq!(m.data[0].len(), 5);
assert_eq!(m.data[2][2], 5);
}
#[test]
fn test_macro_with_str() {
let m = mat![
&str:
["abcde","fghij","klmn0"],
["bbcde","matched","olmn0"]
];
assert_eq!(m.data.len(), 2);
assert_eq!(m.data[0].len(), 3);
assert_eq!(m == m, true);
}
#[test]
fn test_macro_with_string() {
let m = mat![
String:
[String::from("abcde"),String::from("fghij"),String::from("klmn0")],
[String::from("bbcde"),String::from("matched"),String::from("olmn0")]
];
assert_eq!(m.data.len(), 2);
assert_eq!(m.data[0].len(), 3);
}
#[test]
fn test_macro_with_bool() {
let m = mat![
bool:
[true,true,false,true,false],
[true,false,false,true,true],
[false,false,true,true,true],
[false,true,true,false,true]
];
assert_eq!(m.data.len(), 4);
assert_eq!(m.data[0].len(), 5);
}
#[test]
#[should_panic]
fn test_macro_invalid_len() {
mat![i32: [1,23],[4,5,6]];
}
#[test]
fn test_eq() {
let m = mat![i32: [1,2,3,5,5], [3,6,1,4,2], [3,6,0,1,5]];
let n = mat![i32: [1,2,3,5,5], [3,6,1,4,2], [3,6,0,1,5]];
assert_eq!(m.has_same_size_with(&n), true);
assert_eq!(m == m, true);
assert_eq!(m == n, true);
}
#[test]
fn test_ne() {
let m = mat![i32: [1,2,3,5,5], [3,6,1,4,2], [3,6,0,1,5]];
let n = mat![i32: [1,2,3,5,5], [3,6,199293,4,2], [3,6,0,1,5]];
assert_eq!(m.has_same_size_with(&n), true);
assert_eq!(m != n, true);
}
#[test]
fn test_eq_str() {
let m = mat![
&str:
["abcde","fghij","klmn0"],
["bbcde","matched","olmn0"]
];
let n = mat![
&str:
["abcde","fghij","klmn0"],
["bbcde","matched","olmn0"]
];
let p = mat![
&str:
["abcde","fghij","klmn0"],
["bbcde","NOT matched","olmn0"]
]; assert_eq!(m.data.len(), 2);
assert_eq!(m.data[0].len(), 3);
assert_eq!(m == m, true);
assert_eq!(m.has_same_size_with(&n), true);
assert_eq!(m == n, true);
assert_eq!(m != n, false);
assert_eq!(m == p, false);
assert_eq!(m != p, true);
assert_eq!(n == p, false);
assert_eq!(n != p, true);
println!("{}",m.data[0][0].contains("a"))
}
#[test]
fn test_eq_string() {
let m = mat![
&str:
["abcde","fghij","klmn0"],
["bbcde","matched","olmn0"]
].to_string();
let n = mat![
&str:
["abcde","fghij","klmn0"],
["bbcde","matched","olmn0"]
].to_string();
let p = mat![
&str:
["abcde","fghij","klmn0"],
["bbcde","NOT matched","olmn0"]
].to_string();
assert_eq!(m == m, true);
assert_eq!(m.has_same_size_with(&n), true);
assert_eq!(m == n, true);
assert_eq!(m != n, false);
assert_eq!(m == p, false);
assert_eq!(m != p, true);
assert_eq!(n == p, false);
assert_eq!(n != p, true);
println!("{}",m.data[0][0].contains("a"))
}
#[test]
fn test_assign() {
let m = mat![i32: [1,2,3],[4,5,6],[7,8,9]];
let n = m.clone();
let o = n;
assert_eq!(m == o, true);
}
#[test]
fn test_assign_str() {
let m = mat![
&str:
["新宿","渋谷","代々木","神田"],
["吉祥寺","飯田橋","阿佐ヶ谷","白金"],
["保土ヶ谷","荻窪","墨田","北千住"]
];
let n = m.clone();
let o = n;
assert_eq!(m == o, true);
}
#[test]
fn test_print() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
m.print();
}
#[test]
#[should_panic]
fn test_is_square_error() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
m.is_square().unwrap();
}
#[test]
//正しい行列(矩形)でOk返却
fn test_integrity() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
m.integrity_check().unwrap();
}
#[test]
#[should_panic]
//ゼロ値でエラー返却
fn test_integrity_error_zero() {
let m = Matrix::<i32>::new();
m.integrity_check().unwrap();
}
#[test]
#[should_panic]
//行列でないデータではErrを返却
fn test_integrity_error_corrupted() {
let mut m = Matrix::<i32>::new();
m.data.push(vec![1, 2, 3]);
m.data.push(vec![1, 2, 3, 4, 5]);
m.integrity_check().unwrap();
}
#[test]
fn test_push() {
Matrix::<i32>::new()
.push(vec![1, 2])
.unwrap()
.push(vec![3, 4])
.unwrap();
}
#[test]
#[should_panic]
fn test_push_unmatched_len() {
Matrix::<i32>::new()
.push(vec![1, 2, 3])
.unwrap()
.push(vec![1])
.unwrap();
}
#[test]
fn test_row() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
assert_eq!(m.row(1)[2], 4);
assert_eq!(m.row(0)[1], m.row(1)[0]);
}
#[test]
#[should_panic]
fn test_row_error() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
m.row(5);
}
#[test]
fn test_col() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
assert_eq!(m.col(1)[1], 3);
assert_eq!(m.col(1)[2], m.col(3)[0]);
}
#[test]
fn test_rows() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
assert_eq!(m.rows(), 3);
}
#[test]
fn test_cols() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
assert_eq!(m.cols(), 5);
}
#[test]
#[should_panic]
fn test_col_error() {
let m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
m.row(4);
}
#[test]
fn test_row_replace() {
let mut a = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
let p0 = vec![1, 2, 3, 4, 5];
let p2 = vec![3, 4, 5, 6, 7];
for i in 0..a.data[0].len() {
assert_eq!(a.data[0][i], p0[i]);
assert_eq!(a.data[2][i], p2[i]);
}
a.row_replace(0, 2).unwrap();
for i in 0..a.data[0].len() {
assert_eq!(a.data[0][i], p2[i]);
assert_eq!(a.data[2][i], p0[i]);
}
}
#[test]
fn test_col_replace() {
let mut m = mat![i32: [1,2,3,4,5], [2,3,4,5,6],[3,4,5,6,7]];
let p0 = vec![1, 2, 3];
let p2 = vec![3, 4, 5];
for i in 0..m.data.len() {
assert_eq!(m.data[i][0], p0[i]);
assert_eq!(m.data[i][2], p2[i]);
}
m.col_replace(0, 2).unwrap();
for i in 0..m.data.len() {
assert_eq!(m.data[i][0], p2[i]);
assert_eq!(m.data[i][2], p0[i]);
}
}
#[test]
fn test_transpose() {
let mut m = mat![i32: [1,2,3], [3,4,5],[5,6,7]];
let res = mat![
i32:
[1,3,5],
[2,4,6],
[3,5,7]
];
m.transpose();
for i in 0..m.data.len() {
for j in 0..m.data[0].len() {
assert_eq!(m.data[i][j], res.data[i][j]);
}
}
}
}
#[cfg(test)]
mod tests_matrix_conversion {
    use crate::core::Matrix;
    use crate::mat;
    #[test]
    // `to_string` converts a matrix element-wise into a String matrix:
    // numeric elements stringify to their decimal representation, and
    // converting a &str matrix keeps the same textual contents.
    fn test_to_string() {
        let m = mat![i32: [2,2,3],[4,5,6],[7,8,9]];
        let res = mat![
            &str:
            ["2","2","3"],
            ["4","5","6"],
            ["7","8","9"]
        ];
        let s = m.to_string();
        for i in 0..s.data.len() {
            for j in 0..s.data[i].len() {
                // `assert!` on the comparison itself, not `assert_eq!(…, true)`.
                assert!(s.data[i][j].as_str() == res.data[i][j]);
            }
        }
        // Converting a &str matrix is effectively an element-wise to_owned.
        let t = res.to_string();
        for i in 0..t.data.len() {
            for j in 0..t.data[i].len() {
                assert!(t.data[i][j].as_str() == res.data[i][j]);
            }
        }
    }
}
6a4896cb3f469c2914f1bf30a95b595bc3d0cbd6 | 109,065 | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
mod context;
mod special_class_resolver;
mod write;
pub use context::Context;
pub use write::{Error, IoWrite, Result, Write};
use ffi::{Maybe, Maybe::*, Pair, Quadruple, Slice, Str, Triple};
use indexmap::IndexSet;
use itertools::Itertools;
use core_utils_rust::add_ns;
use escaper::{escape, escape_by, is_lit_printable};
use hhbc_by_ref_emit_type_hint as emit_type_hint;
use hhbc_by_ref_hhas_adata::{HhasAdata, DICT_PREFIX, KEYSET_PREFIX, VEC_PREFIX};
use hhbc_by_ref_hhas_attribute::{self as hhas_attribute, HhasAttribute};
use hhbc_by_ref_hhas_body::{HhasBody, HhasBodyEnv};
use hhbc_by_ref_hhas_class::{self as hhas_class, HhasClass};
use hhbc_by_ref_hhas_coeffects::{HhasCoeffects, HhasCtxConstant};
use hhbc_by_ref_hhas_constant::HhasConstant;
use hhbc_by_ref_hhas_function::HhasFunction;
use hhbc_by_ref_hhas_method::{HhasMethod, HhasMethodFlags};
use hhbc_by_ref_hhas_param::HhasParam;
use hhbc_by_ref_hhas_pos::{HhasPos, HhasSpan};
use hhbc_by_ref_hhas_program::HhasProgram;
use hhbc_by_ref_hhas_property::HhasProperty;
use hhbc_by_ref_hhas_record_def::{Field, HhasRecord};
use hhbc_by_ref_hhas_symbol_refs::{HhasSymbolRefs, IncludePath};
use hhbc_by_ref_hhas_type::{constraint, HhasTypeInfo};
use hhbc_by_ref_hhas_type_const::HhasTypeConstant;
use hhbc_by_ref_hhas_typedef::HhasTypedef;
use hhbc_by_ref_hhbc_ast::*;
use hhbc_by_ref_hhbc_id::{class::ClassType, Id};
use hhbc_by_ref_hhbc_string_utils::{
float, integer, is_class, is_parent, is_self, is_static, is_xhp, lstrip, mangle, quote_string,
quote_string_with_escape, strip_global_ns, strip_ns, triple_quote_string, types,
};
use hhbc_by_ref_instruction_sequence::{Error::Unrecoverable, InstrSeq};
use hhbc_by_ref_iterator::Id as IterId;
use hhbc_by_ref_label::Label;
use hhbc_by_ref_local::Local;
use hhbc_by_ref_runtime::TypedValue;
use lazy_static::lazy_static;
use naming_special_names_rust::classes;
use ocaml_helper::escaped;
use oxidized::{ast, ast_defs, local_id};
use regex::Regex;
use write::*;
use std::{borrow::Cow, io::Write as _, path::Path, write};
pub struct ExprEnv<'arena, 'e> {
pub codegen_env: Option<&'e HhasBodyEnv<'arena>>,
}
pub fn print_program<W: Write>(
ctx: &mut Context,
w: &mut W,
prog: &HhasProgram,
) -> Result<(), W::Error> {
match ctx.path {
Some(p) => {
let abs = p.to_absolute();
let p = escape(abs.to_str().ok_or(Error::InvalidUTF8)?);
concat_str_by(w, " ", ["#", p.as_ref(), "starts here"])?;
newline(w)?;
newline(w)?;
concat_str(w, [".filepath ", format!("\"{}\"", p).as_str(), ";"])?;
newline(w)?;
handle_not_impl(|| print_program_(ctx, w, prog))?;
newline(w)?;
concat_str_by(w, " ", ["#", p.as_ref(), "ends here"])?;
newline(w)
}
None => {
w.write("#starts here")?;
newline(w)?;
handle_not_impl(|| print_program_(ctx, w, prog))?;
newline(w)?;
w.write("#ends here")?;
newline(w)
}
}
}
fn get_fatal_op(f: &FatalOp) -> &str {
match f {
FatalOp::Parse => "Parse",
FatalOp::Runtime => "Runtime",
FatalOp::RuntimeOmitFrame => "RuntimeOmitFrame",
}
}
fn print_program_<W: Write>(
ctx: &mut Context,
w: &mut W,
prog: &HhasProgram,
) -> Result<(), W::Error> {
if let Just(Triple(fop, p, msg)) = &prog.fatal {
newline(w)?;
let HhasPos {
line_begin,
line_end,
col_begin,
col_end,
} = p;
let pos = format!("{}:{},{}:{}", line_begin, col_begin, line_end, col_end);
concat_str(
w,
[
".fatal ",
pos.as_ref(),
" ",
get_fatal_op(fop),
" \"",
escape(msg).as_ref(),
"\";",
],
)?;
}
newline(w)?;
concat(w, &prog.adata, |w, a| print_adata_region(ctx, w, a))?;
concat(w, &prog.functions, |w, f| print_fun_def(ctx, w, f))?;
concat(w, &prog.classes, |w, cd| print_class_def(ctx, w, cd))?;
concat(w, &prog.record_defs, |w, rd| print_record_def(ctx, w, rd))?;
concat(w, &prog.constants, |w, c| print_constant(ctx, w, c))?;
concat(w, &prog.typedefs, |w, td| print_typedef(ctx, w, td))?;
print_file_attributes(ctx, w, prog.file_attributes.as_ref())?;
if ctx.dump_symbol_refs() {
print_include_region(ctx, w, &prog.symbol_refs.includes)?;
print_symbol_ref_regions(ctx, w, &prog.symbol_refs)?;
}
Ok(())
}
fn print_include_region<W: Write>(
ctx: &mut Context,
w: &mut W,
includes: &Slice<IncludePath>,
) -> Result<(), W::Error> {
fn print_path<W: Write>(w: &mut W, p: &Path) -> Result<(), W::Error> {
option(w, p.to_str(), |w, p: &str| write!(w, "\n {}", p))
}
fn print_if_exists<W: Write>(w: &mut W, p: &Path) -> Result<(), W::Error> {
if p.exists() { print_path(w, p) } else { Ok(()) }
}
fn print_include<W: Write>(
ctx: &mut Context,
w: &mut W,
inc: IncludePath,
) -> Result<(), W::Error> {
let include_roots = ctx.include_roots;
let alloc = bumpalo::Bump::new();
match inc.into_doc_root_relative(&alloc, include_roots) {
IncludePath::Absolute(p) => print_if_exists(w, Path::new(&p.as_str())),
IncludePath::SearchPathRelative(p) => {
let path_from_cur_dirname = ctx
.path
.and_then(|p| p.path().parent())
.unwrap_or_else(|| Path::new(""))
.join(&p.as_str());
if path_from_cur_dirname.exists() {
print_path(w, &path_from_cur_dirname)
} else {
let search_paths = ctx.include_search_paths;
for prefix in search_paths.iter() {
let path = Path::new(prefix).join(&p.as_str());
if path.exists() {
return print_path(w, &path);
}
}
Ok(())
}
}
IncludePath::IncludeRootRelative(v, p) => {
if !p.is_empty() {
include_roots
.get(&v.as_str().to_owned())
.iter()
.try_for_each(|ir| {
let doc_root = ctx.doc_root;
let resolved = Path::new(doc_root).join(ir).join(&p.as_str());
print_if_exists(w, &resolved)
})?
}
Ok(())
}
IncludePath::DocRootRelative(p) => {
let doc_root = ctx.doc_root;
let resolved = Path::new(doc_root).join(&p.as_str());
print_if_exists(w, &resolved)
}
}
}
if !includes.is_empty() {
w.write("\n.includes {")?;
for inc in includes.as_ref().iter() {
// TODO(hrust): avoid clone. Rethink onwership of inc in
// hhas_symbol_refs_rust::IncludePath::into_doc_root_relative
print_include(ctx, w, inc.clone())?;
}
w.write("\n}\n")
} else {
Ok(())
}
}
fn print_symbol_ref_regions<'arena, W: Write>(
ctx: &mut Context,
w: &mut W,
symbol_refs: &HhasSymbolRefs<'arena>,
) -> Result<(), W::Error> {
let mut print_region = |name, refs: &Slice<'arena, Str<'arena>>| {
if !refs.is_empty() {
ctx.newline(w)?;
write!(w, ".{} {{", name)?;
ctx.block(w, |c, w| {
for s in refs.as_ref().iter() {
c.newline(w)?;
w.write(s)?;
}
Ok(())
})?;
w.write("\n}\n")
} else {
Ok(())
}
};
print_region("constant_refs", &symbol_refs.constants)?;
print_region("function_refs", &symbol_refs.functions)?;
print_region("class_refs", &symbol_refs.classes)
}
fn print_adata_region<W: Write>(
ctx: &mut Context,
w: &mut W,
adata: &HhasAdata,
) -> Result<(), W::Error> {
concat_str_by(w, " ", [".adata", adata.id.as_str(), "= "])?;
triple_quotes(w, |w| print_adata(ctx, w, &adata.value))?;
w.write(";")?;
ctx.newline(w)
}
fn print_typedef<W: Write>(ctx: &mut Context, w: &mut W, td: &HhasTypedef) -> Result<(), W::Error> {
newline(w)?;
w.write(".alias ")?;
print_typedef_attributes(ctx, w, td)?;
w.write(td.name.to_raw_string())?;
w.write(" = ")?;
print_typedef_info(w, &td.type_info)?;
w.write(" ")?;
print_span(w, &td.span)?;
w.write(" ")?;
triple_quotes(w, |w| print_adata(ctx, w, &td.type_structure))?;
w.write(";")
}
fn print_typedef_attributes<W: Write>(
ctx: &mut Context,
w: &mut W,
td: &HhasTypedef,
) -> Result<(), W::Error> {
let mut specials = vec![];
if ctx.is_system_lib() {
specials.push("persistent");
}
print_special_and_user_attrs(ctx, w, &specials[..], td.attributes.as_ref())
}
fn handle_not_impl<E: std::fmt::Debug, F: FnOnce() -> Result<(), E>>(f: F) -> Result<(), E> {
let r = f();
match &r {
Err(Error::NotImpl(msg)) => {
println!("#### NotImpl: {}", msg);
eprintln!("NotImpl: {}", msg);
Ok(())
}
_ => r,
}
}
fn print_fun_def<W: Write>(
ctx: &mut Context,
w: &mut W,
fun_def: &HhasFunction,
) -> Result<(), W::Error> {
let body = &fun_def.body;
newline(w)?;
w.write(".function ")?;
print_upper_bounds_(w, &body.upper_bounds)?;
w.write(" ")?;
print_fun_attrs(ctx, w, fun_def)?;
print_span(w, &fun_def.span)?;
w.write(" ")?;
option(
w,
&(Option::from(body.return_type_info.clone())),
|w, ti| {
print_type_info(w, ti)?;
w.write(" ")
},
)?;
w.write(fun_def.name.to_raw_string())?;
print_params(ctx, w, fun_def.params())?;
if fun_def.is_generator() {
w.write(" isGenerator")?;
}
if fun_def.is_async() {
w.write(" isAsync")?;
}
if fun_def.is_pair_generator() {
w.write(" isPairGenerator")?;
}
if fun_def.is_readonly_return() {
w.write(" isReadonlyReturn")?;
}
w.write(" ")?;
braces(w, |w| {
ctx.block(w, |c, w| print_body(c, w, body, &fun_def.coeffects))?;
newline(w)
})?;
newline(w)
}
fn print_requirement<W: Write>(
ctx: &mut Context,
w: &mut W,
r: &Pair<ClassType<'_>, hhas_class::TraitReqKind>,
) -> Result<(), W::Error> {
ctx.newline(w)?;
w.write(".require ")?;
match r {
Pair(name, hhas_class::TraitReqKind::MustExtend) => {
write!(w, "extends <{}>;", name.to_raw_string())
}
Pair(name, hhas_class::TraitReqKind::MustImplement) => {
write!(w, "implements <{}>;", name.to_raw_string())
}
}
}
fn print_type_constant<W: Write>(
ctx: &mut Context,
w: &mut W,
c: &HhasTypeConstant,
) -> Result<(), W::Error> {
ctx.newline(w)?;
concat_str_by(w, " ", [".const", c.name.as_str(), "isType"])?;
if c.is_abstract {
w.write(" isAbstract")?;
}
option(w, Option::from(c.initializer.as_ref()), |w, init| {
w.write(" = ")?;
triple_quotes(w, |w| print_adata(ctx, w, init))
})?;
w.write(";")
}
fn print_ctx_constant<W: Write>(
ctx: &mut Context,
w: &mut W,
c: &HhasCtxConstant,
) -> Result<(), W::Error> {
ctx.newline(w)?;
concat_str_by(w, " ", [".ctx", &c.name.as_str()])?;
if c.is_abstract {
w.write(" isAbstract")?;
}
if let Some(coeffects) =
HhasCoeffects::vec_to_string(&c.coeffects.0.as_ref(), |c| c.to_string())
{
w.write(" ")?;
w.write(coeffects)?;
}
if let Some(coeffects) =
HhasCoeffects::vec_to_string(&c.coeffects.1.as_ref(), |c| c.as_str().to_string())
{
w.write(" ")?;
w.write(coeffects)?;
}
w.write(";")?;
Ok(())
}
fn print_property_doc_comment<W: Write>(w: &mut W, p: &HhasProperty) -> Result<(), W::Error> {
if let Just(s) = p.doc_comment.as_ref() {
w.write(triple_quote_string(s.as_str()))?;
w.write(" ")?;
}
Ok(())
}
fn print_property_attributes<W: Write>(
ctx: &mut Context,
w: &mut W,
property: &HhasProperty,
) -> Result<(), W::Error> {
let mut special_attributes = vec![];
if property.is_late_init() {
special_attributes.push("late_init")
};
if property.is_no_bad_redeclare() {
special_attributes.push("no_bad_redeclare")
};
if property.initial_satisfies_tc() {
special_attributes.push("initial_satisfies_tc")
}
if property.no_implicit_null() {
special_attributes.push("no_implicit_null")
}
if property.has_system_initial() {
special_attributes.push("sys_initial_val")
}
if property.is_const() {
special_attributes.push("is_const")
}
if property.is_readonly() {
special_attributes.push("readonly")
}
if property.is_deep_init() {
special_attributes.push("deep_init")
}
if property.is_lsb() {
special_attributes.push("lsb")
}
if property.is_static() {
special_attributes.push("static")
}
special_attributes.push(property.visibility.as_ref());
special_attributes.reverse();
w.write("[")?;
concat_by(w, " ", &special_attributes, |w, a| w.write(a))?;
if !special_attributes.is_empty() && !property.attributes.is_empty() {
w.write(" ")?;
}
print_attributes(ctx, w, &property.attributes)?;
w.write("] ")
}
fn print_property_type_info<W: Write>(w: &mut W, p: &HhasProperty) -> Result<(), W::Error> {
print_type_info(w, &p.type_info)?;
w.write(" ")
}
fn print_property<W: Write>(
ctx: &mut Context,
w: &mut W,
class_def: &HhasClass,
property: &HhasProperty,
) -> Result<(), W::Error> {
newline(w)?;
w.write(" .property ")?;
print_property_attributes(ctx, w, property)?;
print_property_doc_comment(w, property)?;
print_property_type_info(w, property)?;
w.write(property.name.to_raw_string())?;
w.write(" =\n ")?;
let initial_value = property.initial_value.as_ref();
if class_def.is_closure() || initial_value == Just(&TypedValue::Uninit) {
w.write("uninit;")
} else {
triple_quotes(w, |w| match initial_value {
Nothing => w.write("N;"),
Just(value) => print_adata(ctx, w, &value),
})?;
w.write(";")
}
}
fn print_constant<W: Write>(
ctx: &mut Context,
w: &mut W,
c: &HhasConstant,
) -> Result<(), W::Error> {
ctx.newline(w)?;
w.write(".const ")?;
w.write(c.name.to_raw_string())?;
if c.is_abstract {
w.write(" isAbstract")?;
}
match c.value.as_ref() {
Just(TypedValue::Uninit) => w.write(" = uninit")?,
Just(value) => {
w.write(" = ")?;
triple_quotes(w, |w| print_adata(ctx, w, value))?;
}
Nothing => {}
}
w.write(";")
}
fn print_enum_ty<W: Write>(ctx: &mut Context, w: &mut W, c: &HhasClass) -> Result<(), W::Error> {
if let Just(et) = c.enum_type.as_ref() {
ctx.newline(w)?;
w.write(".enum_ty ")?;
print_type_info_(w, true, et)?;
w.write(";")?;
}
Ok(())
}
fn print_doc_comment<'arena, W: Write>(
ctx: &mut Context,
w: &mut W,
doc_comment: &Maybe<Str<'arena>>,
) -> Result<(), W::Error> {
if let Just(cmt) = doc_comment {
ctx.newline(w)?;
write!(w, ".doc {};", triple_quote_string(cmt.as_str()))?;
}
Ok(())
}
fn print_use_precedence<'arena, W: Write>(
ctx: &mut Context,
w: &mut W,
Triple(id1, id2, ids): &Triple<
ClassType<'arena>,
ClassType<'arena>,
Slice<'arena, ClassType<'arena>>,
>,
) -> Result<(), W::Error> {
ctx.newline(w)?;
concat_str(w, [id1.to_raw_string(), "::", id2.to_raw_string()])?;
w.write(" insteadof ")?;
let unique_ids: IndexSet<&str> = ids.as_ref().iter().map(|i| i.to_raw_string()).collect();
concat_str_by(w, " ", unique_ids.iter().collect::<Vec<_>>())?;
w.write(";")
}
fn print_use_as_visibility<W: Write>(w: &mut W, u: UseAsVisibility) -> Result<(), W::Error> {
w.write(match u {
UseAsVisibility::UseAsPublic => "public",
UseAsVisibility::UseAsPrivate => "private",
UseAsVisibility::UseAsProtected => "protected",
UseAsVisibility::UseAsFinal => "final",
})
}
fn print_use_alias<'arena, W: Write>(
ctx: &mut Context,
w: &mut W,
Quadruple(ido1, id, ido2, kindl): &Quadruple<
Maybe<ClassType<'arena>>,
ClassType<'arena>,
Maybe<ClassType<'arena>>,
Slice<'arena, UseAsVisibility>,
>,
) -> Result<(), W::Error> {
ctx.newline(w)?;
let id = id.to_raw_string();
option_or(
w,
ido1.as_ref(),
|w, i: &ClassType<'arena>| concat_str(w, [i.to_raw_string(), "::", id]),
id,
)?;
w.write(" as ")?;
if !kindl.is_empty() {
square(w, |w| {
concat_by(w, " ", kindl, |w, k| print_use_as_visibility(w, *k))
})?;
}
w.write_if(!kindl.is_empty() && ido2.is_just(), " ")?;
option(w, ido2.as_ref(), |w, i: &ClassType<'arena>| {
w.write(i.to_raw_string())
})?;
w.write(";")
}
fn print_uses<'arena, W: Write>(
ctx: &mut Context,
w: &mut W,
c: &HhasClass<'arena>,
) -> Result<(), W::Error> {
if c.uses.is_empty() {
Ok(())
} else {
let unique_ids: IndexSet<&str> = c
.uses
.as_ref()
.iter()
.map(|e| strip_global_ns(e.as_str()))
.collect();
let unique_ids: Vec<_> = unique_ids.into_iter().collect();
newline(w)?;
w.write(" .use ")?;
concat_by(w, " ", unique_ids, |w, id| w.write(id))?;
if c.use_aliases.is_empty() && c.use_precedences.is_empty() {
w.write(";")
} else {
w.write(" {")?;
ctx.block(w, |ctx, w| {
let precs: &[Triple<
ClassType<'arena>,
ClassType<'arena>,
Slice<'arena, ClassType<'arena>>,
>] = c.use_precedences.as_ref();
for x in precs {
print_use_precedence(ctx, w, x)?;
}
let aliases: &[Quadruple<
Maybe<ClassType<'arena>>,
ClassType<'arena>,
Maybe<ClassType<'arena>>,
Slice<'arena, UseAsVisibility>,
>] = c.use_aliases.as_ref();
for x in aliases {
print_use_alias(ctx, w, x)?;
}
Ok(())
})?;
newline(w)?;
w.write(" }")
}
}
}
fn print_class_special_attributes<W: Write>(
ctx: &mut Context,
w: &mut W,
c: &HhasClass,
) -> Result<(), W::Error> {
let user_attrs = c.attributes.as_ref();
let is_system_lib = ctx.is_system_lib();
let mut special_attributes: Vec<&str> = vec![];
if c.needs_no_reifiedinit() {
special_attributes.push("noreifiedinit")
}
if c.no_dynamic_props() {
special_attributes.push("no_dynamic_props")
}
if c.is_const() {
special_attributes.push("is_const")
}
if hhas_attribute::has_foldable(user_attrs) {
special_attributes.push("foldable")
}
if hhas_attribute::has_enum_class(user_attrs) {
special_attributes.push("enum_class")
}
if is_system_lib {
special_attributes.extend(&["persistent", "builtin", "unique"])
}
if hhas_attribute::has_dynamically_constructible(user_attrs) {
special_attributes.push("dyn_constructible");
}
if c.is_closure() && !is_system_lib {
special_attributes.push("unique");
}
if c.is_closure() {
special_attributes.push("no_override");
}
if c.is_trait() {
special_attributes.push("trait");
}
if c.is_interface() {
special_attributes.push("interface");
}
if c.is_final() {
special_attributes.push("final");
}
if c.is_sealed() {
special_attributes.push("sealed");
}
if c.enum_type.is_just() && !hhbc_by_ref_hhas_attribute::has_enum_class(user_attrs) {
special_attributes.push("enum");
}
if c.is_abstract() {
special_attributes.push("abstract");
}
if special_attributes.is_empty() && user_attrs.is_empty() {
return Ok(());
}
special_attributes.reverse();
wrap_by_(w, "[", "] ", |w| {
concat_by(w, " ", &special_attributes, |w, a| w.write(a))?;
w.write_if(
!special_attributes.is_empty() && !user_attrs.is_empty(),
" ",
)?;
print_attributes(ctx, w, &user_attrs)
})
}
fn print_implements<W: Write>(w: &mut W, implements: &[ClassType<'_>]) -> Result<(), W::Error> {
if implements.is_empty() {
return Ok(());
}
w.write(" implements (")?;
concat_str_by(
w,
" ",
implements
.iter()
.map(|x| x.to_raw_string())
.collect::<Vec<_>>(),
)?;
w.write(")")
}
fn print_enum_includes<W: Write>(
w: &mut W,
enum_includes: &[ClassType<'_>],
) -> Result<(), W::Error> {
if enum_includes.is_empty() {
return Ok(());
}
w.write(" enum_includes (")?;
concat_str_by(
w,
" ",
enum_includes
.iter()
.map(|x| x.to_raw_string())
.collect::<Vec<_>>(),
)?;
w.write(")")
}
fn print_shadowed_tparams<'arena, W: Write>(
w: &mut W,
shadowed_tparams: impl AsRef<[Str<'arena>]>,
) -> Result<(), W::Error> {
braces(w, |w| concat_str_by(w, ", ", shadowed_tparams))
}
fn print_method_def<W: Write>(
ctx: &mut Context,
w: &mut W,
method_def: &HhasMethod,
) -> Result<(), W::Error> {
let body = &method_def.body;
newline(w)?;
w.write(" .method ")?;
print_shadowed_tparams(w, &body.shadowed_tparams)?;
print_upper_bounds_(w, &body.upper_bounds)?;
w.write(" ")?;
print_method_attrs(ctx, w, method_def)?;
print_span(w, &method_def.span)?;
w.write(" ")?;
option(w, &(Option::from(body.return_type_info.clone())), |w, t| {
print_type_info(w, t)?;
w.write(" ")
})?;
w.write(method_def.name.to_raw_string())?;
print_params(ctx, w, &body.params)?;
if method_def.flags.contains(HhasMethodFlags::IS_GENERATOR) {
w.write(" isGenerator")?;
}
if method_def.flags.contains(HhasMethodFlags::IS_ASYNC) {
w.write(" isAsync")?;
}
if method_def
.flags
.contains(HhasMethodFlags::IS_PAIR_GENERATOR)
{
w.write(" isPairGenerator")?;
}
if method_def.flags.contains(HhasMethodFlags::IS_CLOSURE_BODY) {
w.write(" isClosureBody")?;
}
if method_def
.flags
.contains(HhasMethodFlags::IS_READONLY_RETURN)
{
w.write(" isReadonlyReturn")?;
}
w.write(" ")?;
braces(w, |w| {
ctx.block(w, |c, w| print_body(c, w, body, &method_def.coeffects))?;
newline(w)?;
w.write(" ")
})
}
fn print_method_attrs<W: Write>(
ctx: &mut Context,
w: &mut W,
m: &HhasMethod,
) -> Result<(), W::Error> {
use hhas_attribute::*;
let user_attrs = m.attributes.as_ref();
let mut special_attrs = vec![];
if has_provenance_skip_frame(user_attrs) {
special_attrs.push("prov_skip_frame")
}
if m.is_interceptable() {
special_attrs.push("interceptable");
}
let visibility = m.visibility.as_ref().to_string();
special_attrs.push(&visibility);
if m.flags.contains(HhasMethodFlags::IS_STATIC) {
special_attrs.push("static");
}
if m.flags.contains(HhasMethodFlags::IS_FINAL) {
special_attrs.push("final");
}
if m.flags.contains(HhasMethodFlags::IS_ABSTRACT) {
special_attrs.push("abstract");
}
if has_foldable(user_attrs) {
special_attrs.push("foldable");
}
if m.is_no_injection() {
special_attrs.push("no_injection");
}
if ctx.is_system_lib() && has_native(user_attrs) && !is_native_opcode_impl(user_attrs) {
special_attrs.push("unique");
}
if ctx.is_system_lib() {
special_attrs.push("builtin");
}
if ctx.is_system_lib() && has_native(user_attrs) && !is_native_opcode_impl(user_attrs) {
special_attrs.push("persistent");
}
if ctx.is_system_lib() || (has_dynamically_callable(user_attrs) && !m.is_memoize_impl()) {
special_attrs.push("dyn_callable")
}
print_special_and_user_attrs(ctx, w, &special_attrs, user_attrs)
}
fn print_class_def<'arena, W: Write>(
ctx: &mut Context,
w: &mut W,
class_def: &HhasClass<'arena>,
) -> Result<(), W::Error> {
newline(w)?;
w.write(".class ")?;
print_upper_bounds(w, class_def.upper_bounds.as_ref())?;
w.write(" ")?;
print_class_special_attributes(ctx, w, class_def)?;
w.write(class_def.name.to_raw_string())?;
w.write(" ")?;
print_span(w, &class_def.span)?;
print_extends(
w,
Option::from(class_def.base.as_ref()).map(|x: &ClassType<'arena>| x.to_raw_string()),
)?;
print_implements(w, class_def.implements.as_ref())?;
print_enum_includes(w, class_def.enum_includes.as_ref())?;
w.write(" {")?;
ctx.block(w, |c, w| {
print_doc_comment(c, w, &class_def.doc_comment)?;
print_uses(c, w, class_def)?;
print_enum_ty(c, w, class_def)?;
for x in class_def.requirements.as_ref() {
print_requirement(c, w, x)?;
}
for x in class_def.constants.as_ref() {
print_constant(c, w, x)?;
}
for x in class_def.type_constants.as_ref() {
print_type_constant(c, w, x)?;
}
for x in class_def.ctx_constants.as_ref() {
print_ctx_constant(c, w, x)?;
}
for x in class_def.properties.as_ref() {
print_property(c, w, class_def, x)?;
}
for m in class_def.methods.as_ref() {
print_method_def(c, w, m)?;
}
Ok(())
})?;
newline(w)?;
w.write("}")?;
newline(w)
}
fn print_pos_as_prov_tag<W: Write>(
ctx: &Context,
w: &mut W,
loc: &Option<ast_defs::Pos>,
) -> Result<(), W::Error> {
match loc {
Some(l) if ctx.array_provenance => {
let (line, ..) = l.info_pos();
let filename = l.filename().to_absolute();
let filename = match filename.to_str().unwrap() {
"" => "(unknown hackc filename)",
x => x,
};
write!(
w,
"p:i:{};s:{}:{};",
line,
filename.len(),
quote_string_with_escape(filename)
)
}
_ => Ok(()),
}
}
fn print_hhbc_id<'a, W: Write>(w: &mut W, id: &impl Id<'a>) -> Result<(), W::Error> {
quotes(w, |w| w.write(escape(id.to_raw_string())))
}
fn print_function_id<W: Write>(w: &mut W, id: &FunctionId) -> Result<(), W::Error> {
print_hhbc_id(w, id)
}
fn print_class_id<W: Write>(w: &mut W, id: &ClassId) -> Result<(), W::Error> {
print_hhbc_id(w, id)
}
fn print_method_id<W: Write>(w: &mut W, id: &MethodId) -> Result<(), W::Error> {
print_hhbc_id(w, id)
}
fn print_const_id<W: Write>(w: &mut W, id: &ConstId) -> Result<(), W::Error> {
print_hhbc_id(w, id)
}
fn print_prop_id<W: Write>(w: &mut W, id: &PropId) -> Result<(), W::Error> {
print_hhbc_id(w, id)
}
fn print_adata_id<W: Write>(w: &mut W, id: &AdataId) -> Result<(), W::Error> {
concat_str(w, ["@", id.as_str()])
}
fn print_adata_mapped_argument<W: Write, F, V>(
ctx: &mut Context,
w: &mut W,
col_type: &str,
loc: &Option<ast_defs::Pos>,
values: &[V],
f: F,
) -> Result<(), W::Error>
where
F: Fn(&mut Context, &mut W, &V) -> Result<(), W::Error>,
{
write!(w, "{}:{}:{{", col_type, values.len(),)?;
print_pos_as_prov_tag(ctx, w, loc)?;
for v in values {
f(ctx, w, v)?
}
write!(w, "}}")
}
fn print_adata_collection_argument<W: Write>(
ctx: &mut Context,
w: &mut W,
col_type: &str,
loc: &Option<ast_defs::Pos>,
values: &[TypedValue],
) -> Result<(), W::Error> {
print_adata_mapped_argument(ctx, w, col_type, loc, values, &print_adata)
}
fn print_adata_dict_collection_argument<W: Write>(
ctx: &mut Context,
w: &mut W,
col_type: &str,
loc: &Option<ast_defs::Pos>,
pairs: &[Pair<TypedValue, TypedValue>],
) -> Result<(), W::Error> {
print_adata_mapped_argument(ctx, w, col_type, loc, pairs, |ctx, w, Pair(v1, v2)| {
print_adata(ctx, w, v1)?;
print_adata(ctx, w, v2)
})
}
fn print_adata<W: Write>(ctx: &mut Context, w: &mut W, tv: &TypedValue) -> Result<(), W::Error> {
match tv {
TypedValue::Uninit => w.write("uninit"),
TypedValue::Null => w.write("N;"),
TypedValue::String(s) => {
write!(w, "s:{}:{};", s.len(), quote_string_with_escape(s.as_str()))
}
TypedValue::LazyClass(s) => {
write!(w, "l:{}:{};", s.len(), quote_string_with_escape(s.as_str()))
}
TypedValue::Float(f) => write!(w, "d:{};", float::to_string(*f)),
TypedValue::Int(i) => write!(w, "i:{};", i),
// TODO: The False case seems to sometimes be b:0 and sometimes i:0. Why?
TypedValue::Bool(false) => w.write("b:0;"),
TypedValue::Bool(true) => w.write("b:1;"),
TypedValue::Vec(values) => {
print_adata_collection_argument(ctx, w, VEC_PREFIX, &None, values.as_ref())
}
TypedValue::Dict(pairs) => {
print_adata_dict_collection_argument(ctx, w, DICT_PREFIX, &None, pairs.as_ref())
}
TypedValue::Keyset(values) => {
print_adata_collection_argument(ctx, w, KEYSET_PREFIX, &None, values.as_ref())
}
TypedValue::HhasAdata(s) => w.write(escaped(s.as_str())),
}
}
fn print_attribute<W: Write>(
ctx: &mut Context,
w: &mut W,
a: &HhasAttribute,
) -> Result<(), W::Error> {
write!(
w,
"\"{}\"(\"\"\"{}:{}:{{",
a.name.as_str(),
VEC_PREFIX,
a.arguments.len()
)?;
concat(w, &a.arguments, |w, arg| print_adata(ctx, w, arg))?;
w.write("}\"\"\")")
}
fn print_attributes<'a, W: Write>(
ctx: &mut Context,
w: &mut W,
al: impl AsRef<[HhasAttribute<'a>]>,
) -> Result<(), W::Error> {
// Adjust for underscore coming before alphabet
let al: Vec<&HhasAttribute> = al
.as_ref()
.iter()
.sorted_by_key(|a| (!a.name.as_str().starts_with("__"), a.name.as_str()))
.collect();
concat_by(w, " ", &al, |w, a| print_attribute(ctx, w, a))
}
fn print_file_attributes<W: Write>(
ctx: &mut Context,
w: &mut W,
al: &[HhasAttribute],
) -> Result<(), W::Error> {
if al.is_empty() {
return Ok(());
}
newline(w)?;
w.write(".file_attributes [")?;
print_attributes(ctx, w, al)?;
w.write("] ;")?;
newline(w)
}
fn is_bareword_char(c: &u8) -> bool {
match *c {
b'_' | b'.' | b'$' | b'\\' => true,
c => (b'0'..=b'9').contains(&c) || (b'a'..=b'z').contains(&c) || (b'A'..=b'Z').contains(&c),
}
}
fn print_body<W: Write>(
ctx: &mut Context,
w: &mut W,
body: &HhasBody,
coeffects: &HhasCoeffects,
) -> Result<(), W::Error> {
print_doc_comment(ctx, w, &body.doc_comment)?;
if body.is_memoize_wrapper {
ctx.newline(w)?;
w.write(".ismemoizewrapper;")?;
}
if body.is_memoize_wrapper_lsb {
ctx.newline(w)?;
w.write(".ismemoizewrapperlsb;")?;
}
if body.num_iters > 0 {
ctx.newline(w)?;
write!(w, ".numiters {};", body.num_iters)?;
}
if !body.decl_vars.is_empty() {
ctx.newline(w)?;
w.write(".declvars ")?;
concat_by(w, " ", &body.decl_vars, |w, var| {
if var.as_str().as_bytes().iter().all(is_bareword_char) {
w.write(var)
} else {
quotes(w, |w| w.write(escaper::escape(var.as_str())))
}
})?;
w.write(";")?;
}
if body.num_closures > 0 {
ctx.newline(w)?;
write!(w, ".numclosures {};", body.num_closures)?;
}
for s in HhasCoeffects::coeffects_to_hhas(&coeffects).iter() {
ctx.newline(w)?;
w.write(s)?;
}
print_instructions(ctx, w, &body.body_instrs)
}
fn print_instructions<W: Write>(
ctx: &mut Context,
w: &mut W,
instr_seq: &InstrSeq,
) -> Result<(), W::Error> {
use Instruct::*;
use InstructTry::*;
for instr in instr_seq.compact_iter() {
match instr {
ISpecialFlow(_) => return Err(Error::fail("Cannot break/continue 1 level")),
IComment(_) => {
// indetation = 0
newline(w)?;
print_instr(w, instr)?;
}
ILabel(_) => ctx.unblock(w, |c, w| {
c.newline(w)?;
print_instr(w, instr)
})?,
ITry(TryCatchBegin) => {
ctx.newline(w)?;
print_instr(w, instr)?;
ctx.indent_inc();
}
ITry(TryCatchMiddle) => ctx.unblock(w, |c, w| {
c.newline(w)?;
print_instr(w, instr)
})?,
ITry(TryCatchEnd) => {
ctx.indent_dec();
ctx.newline(w)?;
print_instr(w, instr)?;
}
_ => {
ctx.newline(w)?;
print_instr(w, instr)?;
}
}
}
Ok(())
}
fn if_then<F: FnOnce() -> R, R>(cond: bool, f: F) -> Option<R> {
if cond { Some(f()) } else { None }
}
/// Print the operand block shared by all FCall* opcodes:
/// `<flags> numArgs numRets "inout-mask" "readonly-mask" asyncEagerLabel "context"`.
/// Field order and single-space separators must match the HHAS assembler.
fn print_fcall_args<W: Write>(
    w: &mut W,
    FcallArgs(fls, num_args, num_rets, inouts, readonly, async_eager_label, context): &FcallArgs,
) -> Result<(), W::Error> {
    use FcallFlags as F;
    let mut flags = vec![];
    if_then(fls.contains(F::HAS_UNPACK), || flags.push("Unpack"));
    if_then(fls.contains(F::HAS_GENERICS), || flags.push("Generics"));
    if_then(fls.contains(F::LOCK_WHILE_UNWINDING), || {
        flags.push("LockWhileUnwinding")
    });
    if_then(fls.contains(F::ENFORCE_MUTABLE_RETURN), || {
        flags.push("EnforceMutableReturn")
    });
    angle(w, |w| concat_str_by(w, " ", flags))?;
    w.write(" ")?;
    print_int(w, num_args)?;
    w.write(" ")?;
    print_int(w, num_rets)?;
    w.write(" ")?;
    // Per-parameter inout/readonly positions are encoded as quoted
    // 0/1 bitmask strings, one digit per argument.
    quotes(w, |w| {
        concat_by(w, "", inouts, |w, i| w.write(if *i { "1" } else { "0" }))
    })?;
    w.write(" ")?;
    quotes(w, |w| {
        concat_by(w, "", readonly, |w, i| w.write(if *i { "1" } else { "0" }))
    })?;
    w.write(" ")?;
    // "-" marks the absence of an async eager target label.
    option_or(w, async_eager_label.as_ref(), print_label, "-")?;
    w.write(" ")?;
    match context {
        Just(s) => quotes(w, |w| w.write(s)),
        Nothing => w.write("\"\""),
    }
}
/// Print the token for a special class reference (`Static`/`Self`/`Parent`).
fn print_special_cls_ref<W: Write>(w: &mut W, cls_ref: &SpecialClsRef) -> Result<(), W::Error> {
    match cls_ref {
        SpecialClsRef::Static => w.write("Static"),
        SpecialClsRef::Self_ => w.write("Self"),
        SpecialClsRef::Parent => w.write("Parent"),
    }
}
/// Print the null-handling flavor of an object method call.
fn print_null_flavor<W: Write>(w: &mut W, f: &ObjNullFlavor) -> Result<(), W::Error> {
    match f {
        ObjNullFlavor::NullThrows => w.write("NullThrows"),
        ObjNullFlavor::NullSafe => w.write("NullSafe"),
    }
}
/// Print a single instruction (no trailing newline), dispatching on the
/// instruction family to its dedicated printer.
fn print_instr<W: Write>(w: &mut W, instr: &Instruct) -> Result<(), W::Error> {
    /// Print call-family opcodes. The literal `""` tails after the fcall
    /// args are empty string operand slots required by the FCall encodings.
    fn print_call<W: Write>(w: &mut W, call: &InstructCall) -> Result<(), W::Error> {
        use InstructCall as I;
        match call {
            I::NewObj => w.write("NewObj"),
            I::NewObjR => w.write("NewObjR"),
            I::NewObjD(cid) => {
                w.write("NewObjD ")?;
                print_class_id(w, cid)
            }
            I::NewObjRD(cid) => {
                w.write("NewObjRD ")?;
                print_class_id(w, cid)
            }
            I::NewObjS(r) => {
                w.write("NewObjS ")?;
                print_special_cls_ref(w, r)
            }
            I::FCall(fcall_args) => {
                w.write("FCall ")?;
                print_fcall_args(w, &fcall_args)?;
                w.write(r#" "" """#)
            }
            I::FCallClsMethod(fcall_args, is_log_as_dynamic_call) => {
                w.write("FCallClsMethod ")?;
                print_fcall_args(w, fcall_args)?;
                w.write(r#" "" "#)?;
                w.write(match is_log_as_dynamic_call {
                    IsLogAsDynamicCallOp::LogAsDynamicCall => "LogAsDynamicCall",
                    IsLogAsDynamicCallOp::DontLogAsDynamicCall => "DontLogAsDynamicCall",
                })
            }
            I::FCallClsMethodD(fcall_args, cid, mid) => {
                w.write("FCallClsMethodD ")?;
                print_fcall_args(w, fcall_args)?;
                w.write(r#" "" "#)?;
                print_class_id(w, cid)?;
                w.write(" ")?;
                print_method_id(w, mid)
            }
            I::FCallClsMethodS(fcall_args, r) => {
                w.write("FCallClsMethodS ")?;
                print_fcall_args(w, fcall_args)?;
                w.write(r#" "" "#)?;
                print_special_cls_ref(w, r)
            }
            I::FCallClsMethodSD(fcall_args, r, mid) => {
                w.write("FCallClsMethodSD ")?;
                print_fcall_args(w, fcall_args)?;
                w.write(r#" "" "#)?;
                print_special_cls_ref(w, r)?;
                w.write(" ")?;
                print_method_id(w, mid)
            }
            I::FCallCtor(fcall_args) => {
                w.write("FCallCtor ")?;
                print_fcall_args(w, fcall_args)?;
                w.write(r#" """#)
            }
            I::FCallFunc(fcall_args) => {
                w.write("FCallFunc ")?;
                print_fcall_args(w, fcall_args)
            }
            I::FCallFuncD(fcall_args, id) => {
                w.write("FCallFuncD ")?;
                print_fcall_args(w, fcall_args)?;
                w.write(" ")?;
                print_function_id(w, id)
            }
            I::FCallObjMethod(fcall_args, nf) => {
                w.write("FCallObjMethod ")?;
                print_fcall_args(w, fcall_args)?;
                w.write(r#" "" "#)?;
                print_null_flavor(w, nf)
            }
            I::FCallObjMethodD(fcall_args, nf, id) => {
                w.write("FCallObjMethodD ")?;
                print_fcall_args(w, fcall_args)?;
                w.write(r#" "" "#)?;
                print_null_flavor(w, nf)?;
                w.write(" ")?;
                print_method_id(w, id)
            }
        }
    }
    /// Print get-family opcodes (local/global/static/class reads).
    fn print_get<W: Write>(w: &mut W, get: &InstructGet) -> Result<(), W::Error> {
        use InstructGet as IG;
        match get {
            IG::CGetL(id) => {
                w.write("CGetL ")?;
                print_local(w, id)
            }
            IG::CGetQuietL(id) => {
                w.write("CGetQuietL ")?;
                print_local(w, id)
            }
            IG::CGetL2(id) => {
                w.write("CGetL2 ")?;
                print_local(w, id)
            }
            IG::CUGetL(id) => {
                w.write("CUGetL ")?;
                print_local(w, id)
            }
            IG::PushL(id) => {
                w.write("PushL ")?;
                print_local(w, id)
            }
            IG::CGetG => w.write("CGetG"),
            IG::CGetS(op) => {
                w.write("CGetS ")?;
                print_readonly_op(w, op)
            }
            IG::ClassGetC => w.write("ClassGetC"),
            IG::ClassGetTS => w.write("ClassGetTS"),
        }
    }
    use Instruct::*;
    use InstructBasic as IB;
    match instr {
        IIterator(i) => print_iterator(w, i),
        IBasic(b) => w.write(match b {
            IB::Nop => "Nop",
            IB::EntryNop => "EntryNop",
            IB::PopC => "PopC",
            IB::PopU => "PopU",
            IB::Dup => "Dup",
        }),
        ILitConst(lit) => print_lit_const(w, lit),
        IOp(op) => print_op(w, op),
        IContFlow(cf) => print_control_flow(w, cf),
        ICall(c) => print_call(w, c),
        IMisc(misc) => print_misc(w, misc),
        IGet(get) => print_get(w, get),
        IMutator(mutator) => print_mutator(w, mutator),
        ILabel(l) => {
            // Labels are printed as `L<n>:` / `DV<n>:`.
            print_label(w, l)?;
            w.write(":")
        }
        IIsset(i) => print_isset(w, i),
        IBase(i) => print_base(w, i),
        IFinal(i) => print_final(w, i),
        ITry(itry) => print_try(w, itry),
        IComment(s) => concat_str_by(w, " ", ["#", s.as_str()]),
        ISrcLoc(p) => write!(
            w,
            ".srcloc {}:{},{}:{};",
            p.line_begin, p.col_begin, p.line_end, p.col_end
        ),
        IAsync(a) => print_async(w, a),
        IGenerator(gen) => print_gen_creation_execution(w, gen),
        IIncludeEvalDefine(ed) => print_include_eval_define(w, ed),
        // Any family without a printer is a bug in the caller.
        _ => Err(Error::fail("invalid instruction")),
    }
}
/// Print a member-base opcode with its operands: stack index or local,
/// member op-mode, and (for BaseSC/BaseL) a readonly op.
fn print_base<W: Write>(w: &mut W, i: &InstructBase) -> Result<(), W::Error> {
    use InstructBase as I;
    match i {
        I::BaseGC(idx, mode) => {
            w.write("BaseGC ")?;
            print_stack_index(w, idx)?;
            w.write(" ")?;
            print_member_opmode(w, mode)
        }
        I::BaseGL(loc, mode) => {
            w.write("BaseGL ")?;
            print_local(w, loc)?;
            w.write(" ")?;
            print_member_opmode(w, mode)
        }
        I::BaseSC(idx1, idx2, mode, ro) => {
            w.write("BaseSC ")?;
            print_stack_index(w, idx1)?;
            w.write(" ")?;
            print_stack_index(w, idx2)?;
            w.write(" ")?;
            print_member_opmode(w, mode)?;
            w.write(" ")?;
            print_readonly_op(w, ro)
        }
        I::BaseL(loc, mode, ro) => {
            w.write("BaseL ")?;
            print_local(w, loc)?;
            w.write(" ")?;
            print_member_opmode(w, mode)?;
            w.write(" ")?;
            print_readonly_op(w, ro)
        }
        I::BaseC(idx, mode) => {
            w.write("BaseC ")?;
            print_stack_index(w, idx)?;
            w.write(" ")?;
            print_member_opmode(w, mode)
        }
        I::BaseH => w.write("BaseH"),
        I::Dim(mode, key) => {
            w.write("Dim ")?;
            print_member_opmode(w, mode)?;
            w.write(" ")?;
            print_member_key(w, key)
        }
    }
}
/// Print a stack index in decimal.
fn print_stack_index<W: Write>(w: &mut W, si: &StackIndex) -> Result<(), W::Error> {
    write!(w, "{}", si)
}
/// Print a member-operation mode name.
fn print_member_opmode<W: Write>(w: &mut W, m: &MemberOpMode) -> Result<(), W::Error> {
    use MemberOpMode as M;
    match m {
        M::ModeNone => w.write("None"),
        M::Warn => w.write("Warn"),
        M::Define => w.write("Define"),
        M::Unset => w.write("Unset"),
        M::InOut => w.write("InOut"),
    }
}
/// Print a member key as `<kind>:<operand> <readonly-op>` (e.g. `EC:0 Any`,
/// `ET:"k" Any`). `W` (new element) is the only variant with no operands.
fn print_member_key<W: Write>(w: &mut W, mk: &MemberKey) -> Result<(), W::Error> {
    use MemberKey as M;
    match mk {
        M::EC(si, op) => {
            w.write("EC:")?;
            print_stack_index(w, si)?;
            w.write(" ")?;
            print_readonly_op(w, op)
        }
        M::EL(local, op) => {
            w.write("EL:")?;
            print_local(w, local)?;
            w.write(" ")?;
            print_readonly_op(w, op)
        }
        M::ET(s, op) => {
            // String keys are escaped and double-quoted.
            w.write("ET:")?;
            quotes(w, |w| w.write(escape(s.as_str())))?;
            w.write(" ")?;
            print_readonly_op(w, op)
        }
        M::EI(i, op) => {
            concat_str(w, ["EI:", i.to_string().as_ref()])?;
            w.write(" ")?;
            print_readonly_op(w, op)
        }
        M::PC(si, op) => {
            w.write("PC:")?;
            print_stack_index(w, si)?;
            w.write(" ")?;
            print_readonly_op(w, op)
        }
        M::PL(local, op) => {
            w.write("PL:")?;
            print_local(w, local)?;
            w.write(" ")?;
            print_readonly_op(w, op)
        }
        M::PT(id, op) => {
            w.write("PT:")?;
            print_prop_id(w, id)?;
            w.write(" ")?;
            print_readonly_op(w, op)
        }
        M::QT(id, op) => {
            w.write("QT:")?;
            print_prop_id(w, id)?;
            w.write(" ")?;
            print_readonly_op(w, op)
        }
        M::W => w.write("W"),
    }
}
/// Print an iterator opcode: Init/Next carry iterator args plus a jump
/// target label; Free carries only the iterator id.
fn print_iterator<W: Write>(w: &mut W, i: &InstructIterator) -> Result<(), W::Error> {
    use InstructIterator as I;
    match i {
        I::IterInit(args, target) => {
            w.write("IterInit ")?;
            print_iter_args(w, args)?;
            w.write(" ")?;
            print_label(w, target)
        }
        I::IterNext(args, target) => {
            w.write("IterNext ")?;
            print_iter_args(w, args)?;
            w.write(" ")?;
            print_label(w, target)
        }
        I::IterFree(iter) => {
            w.write("IterFree ")?;
            print_iterator_id(w, iter)
        }
    }
}
/// Print iterator args as `<id> K:<key>|NK V:<val>`; `NK` means no key local.
fn print_iter_args<W: Write>(w: &mut W, iter_args: &IterArgs) -> Result<(), W::Error> {
    print_iterator_id(w, &iter_args.iter_id)?;
    w.write(" ")?;
    if let Just(key) = &iter_args.key_id {
        w.write("K:")?;
        print_local(w, key)?;
    } else {
        w.write("NK")?;
    }
    w.write(" V:")?;
    print_local(w, &iter_args.val_id)
}
/// Print an iterator id via its `Display` impl.
fn print_iterator_id<W: Write>(w: &mut W, i: &IterId) -> Result<(), W::Error> {
    w.write(i.to_string())
}
/// Print an async opcode. `AwaitAll` encodes a contiguous run of unnamed
/// locals as `L:<first>+<count>` (`L:0+0` when empty); the catch-all arm
/// rejects `AwaitAll` over a named local, which codegen should never emit.
fn print_async<W: Write>(w: &mut W, a: &AsyncFunctions) -> Result<(), W::Error> {
    use AsyncFunctions as A;
    match a {
        A::WHResult => w.write("WHResult"),
        A::Await => w.write("Await"),
        A::AwaitAll(Just(Pair(Local::Unnamed(id), count))) => {
            write!(w, "AwaitAll L:{}+{}", id, count)
        }
        A::AwaitAll(Nothing) => w.write("AwaitAll L:0+0"),
        _ => Err(Error::fail("AwaitAll needs an unnamed local")),
    }
}
/// Print a QueryM operation kind.
fn print_query_op<W: Write>(w: &mut W, q: QueryOp) -> Result<(), W::Error> {
    match q {
        QueryOp::CGet => w.write("CGet"),
        QueryOp::CGetQuiet => w.write("CGetQuiet"),
        QueryOp::Isset => w.write("Isset"),
        QueryOp::InOut => w.write("InOut"),
    }
}
/// Print a final member operation: opcode, stack-element count, an optional
/// sub-op (query/setop/incdec/range direction), and the member key.
fn print_final<W: Write>(w: &mut W, f: &InstructFinal) -> Result<(), W::Error> {
    use InstructFinal as F;
    match f {
        F::QueryM(n, op, mk) => {
            w.write("QueryM ")?;
            print_int(w, n)?;
            w.write(" ")?;
            print_query_op(w, *op)?;
            w.write(" ")?;
            print_member_key(w, mk)
        }
        F::UnsetM(n, mk) => {
            w.write("UnsetM ")?;
            print_int(w, n)?;
            w.write(" ")?;
            print_member_key(w, mk)
        }
        F::SetM(i, mk) => {
            w.write("SetM ")?;
            print_int(w, i)?;
            w.write(" ")?;
            print_member_key(w, mk)
        }
        F::SetOpM(i, op, mk) => {
            w.write("SetOpM ")?;
            print_int(w, i)?;
            w.write(" ")?;
            print_eq_op(w, &op)?;
            w.write(" ")?;
            print_member_key(w, mk)
        }
        F::IncDecM(i, op, mk) => {
            w.write("IncDecM ")?;
            print_int(w, i)?;
            w.write(" ")?;
            print_incdec_op(w, &op)?;
            w.write(" ")?;
            print_member_key(w, mk)
        }
        F::SetRangeM(i, s, op) => {
            w.write("SetRangeM ")?;
            print_int(w, i)?;
            w.write(" ")?;
            // The size operand is widened to usize purely for printing.
            print_int(w, &(*s as usize))?;
            w.write(" ")?;
            w.write(match op {
                SetrangeOp::Forward => "Forward",
                SetrangeOp::Reverse => "Reverse",
            })
        }
    }
}
/// Print an isset/istype-family opcode with its local and type-op operands.
fn print_isset<W: Write>(w: &mut W, isset: &InstructIsset) -> Result<(), W::Error> {
    use InstructIsset as I;
    match isset {
        I::IssetC => w.write("IssetC"),
        I::IssetL(loc) => {
            w.write("IssetL ")?;
            print_local(w, loc)
        }
        I::IsUnsetL(loc) => {
            w.write("IsUnsetL ")?;
            print_local(w, loc)
        }
        I::IssetG => w.write("IssetG"),
        I::IssetS => w.write("IssetS"),
        I::IsTypeC(ty) => {
            w.write("IsTypeC ")?;
            print_istype_op(w, ty)
        }
        I::IsTypeL(loc, ty) => {
            w.write("IsTypeL ")?;
            print_local(w, loc)?;
            w.write(" ")?;
            print_istype_op(w, ty)
        }
    }
}
/// Print an IsType operation name (the `Op` prefix is dropped in HHAS).
fn print_istype_op<W: Write>(w: &mut W, op: &IstypeOp) -> Result<(), W::Error> {
    use IstypeOp as Op;
    w.write(match op {
        Op::OpNull => "Null",
        Op::OpBool => "Bool",
        Op::OpInt => "Int",
        Op::OpDbl => "Dbl",
        Op::OpStr => "Str",
        Op::OpObj => "Obj",
        Op::OpRes => "Res",
        Op::OpScalar => "Scalar",
        Op::OpKeyset => "Keyset",
        Op::OpDict => "Dict",
        Op::OpVec => "Vec",
        Op::OpArrLike => "ArrLike",
        Op::OpLegacyArrLike => "LegacyArrLike",
        Op::OpClsMeth => "ClsMeth",
        Op::OpFunc => "Func",
        Op::OpClass => "Class",
    })
}
/// Print the source tokens delimiting a try/catch region.
fn print_try<W: Write>(w: &mut W, itry: &InstructTry) -> Result<(), W::Error> {
    use InstructTry as T;
    w.write(match itry {
        T::TryCatchBegin => ".try {",
        T::TryCatchMiddle => "} .catch {",
        T::TryCatchEnd => "}",
    })
}
/// Print a mutator opcode (Set*/IncDec*/Unset*/prop initialization).
fn print_mutator<W: Write>(w: &mut W, mutator: &InstructMutator) -> Result<(), W::Error> {
    use InstructMutator as M;
    match mutator {
        M::SetL(local) => {
            w.write("SetL ")?;
            print_local(w, local)
        }
        M::PopL(id) => {
            w.write("PopL ")?;
            print_local(w, id)
        }
        M::SetG => w.write("SetG"),
        M::SetS(op) => {
            w.write("SetS ")?;
            print_readonly_op(w, op)
        }
        M::SetOpL(id, op) => {
            w.write("SetOpL ")?;
            print_local(w, id)?;
            w.write(" ")?;
            print_eq_op(w, op)
        }
        M::SetOpG(op) => {
            w.write("SetOpG ")?;
            print_eq_op(w, op)
        }
        M::SetOpS(op) => {
            w.write("SetOpS ")?;
            print_eq_op(w, op)
        }
        M::IncDecL(id, op) => {
            w.write("IncDecL ")?;
            print_local(w, id)?;
            w.write(" ")?;
            print_incdec_op(w, op)
        }
        M::IncDecG(op) => {
            w.write("IncDecG ")?;
            print_incdec_op(w, op)
        }
        M::IncDecS(op) => {
            w.write("IncDecS ")?;
            print_incdec_op(w, op)
        }
        M::UnsetL(id) => {
            w.write("UnsetL ")?;
            print_local(w, id)
        }
        M::UnsetG => w.write("UnsetG"),
        M::CheckProp(id) => {
            w.write("CheckProp ")?;
            print_prop_id(w, id)
        }
        M::InitProp(id, op) => {
            w.write("InitProp ")?;
            print_prop_id(w, id)?;
            w.write(" ")?;
            match op {
                InitpropOp::Static => w.write("Static"),
                InitpropOp::NonStatic => w.write("NonStatic"),
            }?;
            // NOTE(review): InitProp emits a trailing space — presumably to
            // match the reference HHAS output byte-for-byte; confirm before
            // "cleaning it up".
            w.write(" ")
        }
    }
}
/// Print a compound-assignment (SetOp) operator name.
fn print_eq_op<W: Write>(w: &mut W, op: &EqOp) -> Result<(), W::Error> {
    match op {
        EqOp::PlusEqual => w.write("PlusEqual"),
        EqOp::MinusEqual => w.write("MinusEqual"),
        EqOp::MulEqual => w.write("MulEqual"),
        EqOp::ConcatEqual => w.write("ConcatEqual"),
        EqOp::DivEqual => w.write("DivEqual"),
        EqOp::PowEqual => w.write("PowEqual"),
        EqOp::ModEqual => w.write("ModEqual"),
        EqOp::AndEqual => w.write("AndEqual"),
        EqOp::OrEqual => w.write("OrEqual"),
        EqOp::XorEqual => w.write("XorEqual"),
        EqOp::SlEqual => w.write("SlEqual"),
        EqOp::SrEqual => w.write("SrEqual"),
        EqOp::PlusEqualO => w.write("PlusEqualO"),
        EqOp::MinusEqualO => w.write("MinusEqualO"),
        EqOp::MulEqualO => w.write("MulEqualO"),
    }
}
/// Print a readonly-enforcement op name.
fn print_readonly_op<W: Write>(w: &mut W, op: &ReadonlyOp) -> Result<(), W::Error> {
    match op {
        ReadonlyOp::Readonly => w.write("Readonly"),
        ReadonlyOp::Mutable => w.write("Mutable"),
        ReadonlyOp::Any => w.write("Any"),
        ReadonlyOp::CheckROCOW => w.write("CheckROCOW"),
        ReadonlyOp::CheckMutROCOW => w.write("CheckMutROCOW"),
    }
}
/// Print an increment/decrement op name.
fn print_incdec_op<W: Write>(w: &mut W, op: &IncdecOp) -> Result<(), W::Error> {
    match op {
        IncdecOp::PreInc => w.write("PreInc"),
        IncdecOp::PostInc => w.write("PostInc"),
        IncdecOp::PreDec => w.write("PreDec"),
        IncdecOp::PostDec => w.write("PostDec"),
        IncdecOp::PreIncO => w.write("PreIncO"),
        IncdecOp::PostIncO => w.write("PostIncO"),
        IncdecOp::PreDecO => w.write("PreDecO"),
        IncdecOp::PostDecO => w.write("PostDecO"),
    }
}
/// Print a generator creation/execution opcode as its HHAS token.
fn print_gen_creation_execution<W: Write>(
    w: &mut W,
    gen: &GenCreationExecution,
) -> Result<(), W::Error> {
    use GenCreationExecution as G;
    w.write(match gen {
        G::CreateCont => "CreateCont",
        G::ContEnter => "ContEnter",
        G::ContRaise => "ContRaise",
        G::Yield => "Yield",
        G::YieldK => "YieldK",
        G::ContCheck(CheckStarted::IgnoreStarted) => "ContCheck IgnoreStarted",
        G::ContCheck(CheckStarted::CheckStarted) => "ContCheck CheckStarted",
        G::ContValid => "ContValid",
        G::ContKey => "ContKey",
        G::ContGetReturn => "ContGetReturn",
        G::ContCurrent => "ContCurrent",
    })
}
/// Print a miscellaneous opcode. Most variants print a bare name; the
/// Memo* opcodes encode their captured contiguous run of unnamed locals as
/// `L:<first>+<count>` (or `L:0+0` when none), and reject a named local as
/// a codegen error.
fn print_misc<W: Write>(w: &mut W, misc: &InstructMisc) -> Result<(), W::Error> {
    use InstructMisc as M;
    match misc {
        M::This => w.write("This"),
        M::CheckThis => w.write("CheckThis"),
        M::FuncNumArgs => w.write("FuncNumArgs"),
        M::ChainFaults => w.write("ChainFaults"),
        M::VerifyRetTypeC => w.write("VerifyRetTypeC"),
        M::VerifyRetTypeTS => w.write("VerifyRetTypeTS"),
        M::Self_ => w.write("Self"),
        M::Parent => w.write("Parent"),
        M::LateBoundCls => w.write("LateBoundCls"),
        M::ClassName => w.write("ClassName"),
        M::LazyClassFromClass => w.write("LazyClassFromClass"),
        M::RecordReifiedGeneric => w.write("RecordReifiedGeneric"),
        M::CheckReifiedGenericMismatch => w.write("CheckReifiedGenericMismatch"),
        M::NativeImpl => w.write("NativeImpl"),
        M::AKExists => w.write("AKExists"),
        M::Idx => w.write("Idx"),
        M::ArrayIdx => w.write("ArrayIdx"),
        M::ArrayMarkLegacy => w.write("ArrayMarkLegacy"),
        M::ArrayUnmarkLegacy => w.write("ArrayUnmarkLegacy"),
        M::BreakTraceHint => w.write("BreakTraceHint"),
        M::CGetCUNop => w.write("CGetCUNop"),
        M::UGetCUNop => w.write("UGetCUNop"),
        M::LockObj => w.write("LockObj"),
        M::ThrowNonExhaustiveSwitch => w.write("ThrowNonExhaustiveSwitch"),
        M::RaiseClassStringConversionWarning => w.write("RaiseClassStringConversionWarning"),
        M::VerifyParamType(id) => {
            w.write("VerifyParamType ")?;
            print_param_id(w, id)
        }
        M::VerifyParamTypeTS(id) => {
            w.write("VerifyParamTypeTS ")?;
            print_param_id(w, id)
        }
        M::Silence(local, op) => {
            w.write("Silence ")?;
            print_local(w, local)?;
            w.write(" ")?;
            match op {
                OpSilence::Start => w.write("Start"),
                OpSilence::End => w.write("End"),
            }
        }
        M::VerifyOutType(id) => {
            w.write("VerifyOutType ")?;
            print_param_id(w, id)
        }
        M::CreateCl(n, cid) => concat_str_by(
            w,
            " ",
            ["CreateCl", n.to_string().as_str(), cid.to_string().as_str()],
        ),
        M::BareThis(op) => concat_str_by(
            w,
            " ",
            [
                "BareThis",
                match op {
                    BareThisOp::Notice => "Notice",
                    BareThisOp::NoNotice => "NoNotice",
                    BareThisOp::NeverNull => "NeverNull",
                },
            ],
        ),
        M::MemoGet(label, Just(Pair(Local::Unnamed(first), local_count))) => {
            w.write("MemoGet ")?;
            print_label(w, label)?;
            write!(w, " L:{}+{}", first, local_count)
        }
        M::MemoGet(label, Nothing) => {
            w.write("MemoGet ")?;
            print_label(w, label)?;
            w.write(" L:0+0")
        }
        M::MemoGet(_, _) => Err(Error::fail("MemoGet needs an unnamed local")),
        M::MemoSet(Just(Pair(Local::Unnamed(first), local_count))) => {
            write!(w, "MemoSet L:{}+{}", first, local_count)
        }
        M::MemoSet(Nothing) => w.write("MemoSet L:0+0"),
        M::MemoSet(_) => Err(Error::fail("MemoSet needs an unnamed local")),
        M::MemoGetEager(label1, label2, Just(Pair(Local::Unnamed(first), local_count))) => {
            w.write("MemoGetEager ")?;
            print_label(w, label1)?;
            w.write(" ")?;
            print_label(w, label2)?;
            write!(w, " L:{}+{}", first, local_count)
        }
        M::MemoGetEager(label1, label2, Nothing) => {
            w.write("MemoGetEager ")?;
            print_label(w, label1)?;
            w.write(" ")?;
            print_label(w, label2)?;
            w.write(" L:0+0")
        }
        M::MemoGetEager(_, _, _) => Err(Error::fail("MemoGetEager needs an unnamed local")),
        M::MemoSetEager(Just(Pair(Local::Unnamed(first), local_count))) => {
            write!(w, "MemoSetEager L:{}+{}", first, local_count)
        }
        M::MemoSetEager(Nothing) => w.write("MemoSetEager L:0+0"),
        M::MemoSetEager(_) => Err(Error::fail("MemoSetEager needs an unnamed local")),
        M::OODeclExists(k) => concat_str_by(
            w,
            " ",
            [
                "OODeclExists",
                match k {
                    ClassishKind::Class => "Class",
                    ClassishKind::Interface => "Interface",
                    ClassishKind::Trait => "Trait",
                    ClassishKind::Enum => "Enum",
                    ClassishKind::EnumClass => "EnumClass",
                },
            ],
        ),
        M::AssertRATL(local, s) => {
            w.write("AssertRATL ")?;
            print_local(w, local)?;
            w.write(" ")?;
            w.write(s)
        }
        M::AssertRATStk(n, s) => {
            concat_str_by(w, " ", ["AssertRATStk", n.to_string().as_str(), s.as_str()])
        }
        M::GetMemoKeyL(local) => {
            w.write("GetMemoKeyL ")?;
            print_local(w, local)
        }
    }
}
/// Print an include/require/eval opcode name.
fn print_include_eval_define<W: Write>(
    w: &mut W,
    ed: &InstructIncludeEvalDefine,
) -> Result<(), W::Error> {
    use InstructIncludeEvalDefine::*;
    w.write(match ed {
        Incl => "Incl",
        InclOnce => "InclOnce",
        Req => "Req",
        ReqOnce => "ReqOnce",
        ReqDoc => "ReqDoc",
        Eval => "Eval",
    })
}
/// Print a control-flow opcode. `SSwitch` requires at least one case; the
/// slice pattern splits off the last case, which is printed as the `-:`
/// default target (its string key is discarded).
fn print_control_flow<W: Write>(w: &mut W, cf: &InstructControlFlow) -> Result<(), W::Error> {
    use InstructControlFlow as CF;
    match cf {
        CF::Jmp(l) => {
            w.write("Jmp ")?;
            print_label(w, l)
        }
        CF::JmpNS(l) => {
            w.write("JmpNS ")?;
            print_label(w, l)
        }
        CF::JmpZ(l) => {
            w.write("JmpZ ")?;
            print_label(w, l)
        }
        CF::JmpNZ(l) => {
            w.write("JmpNZ ")?;
            print_label(w, l)
        }
        CF::RetC => w.write("RetC"),
        CF::RetCSuspended => w.write("RetCSuspended"),
        CF::RetM(p) => concat_str_by(w, " ", ["RetM", p.to_string().as_str()]),
        CF::Throw => w.write("Throw"),
        CF::Switch(kind, base, labels) => print_switch(w, kind, base, labels.as_ref()),
        CF::SSwitch(cases) => match cases.as_ref() {
            [] => Err(Error::fail("sswitch should have at least one case")),
            [rest @ .., Pair(_, lastlabel)] => {
                w.write("SSwitch ")?;
                angle(w, |w| {
                    // Non-default cases print as "key":label; the final
                    // case prints as the -:label default.
                    concat_by(w, " ", rest, |w, Pair(s, l)| {
                        concat_str(w, [quote_string(s.as_ref()).as_str(), ":"])?;
                        print_label(w, l)
                    })?;
                    w.write(" -:")?;
                    print_label(w, &lastlabel)
                })
            }
        },
    }
}
/// Print a `Switch <kind> <base> <labels...>` opcode.
fn print_switch<W: Write>(
    w: &mut W,
    kind: &Switchkind,
    base: &isize,
    labels: &[Label],
) -> Result<(), W::Error> {
    // Kind tokens carry their own trailing space.
    let kind_token = match kind {
        Switchkind::Bounded => "Bounded ",
        Switchkind::Unbounded => "Unbounded ",
    };
    w.write("Switch ")?;
    w.write(kind_token)?;
    w.write(base.to_string())?;
    w.write(" ")?;
    angle(w, |w| concat_by(w, " ", labels, print_label))
}
/// Print a literal/constant opcode. Array-like literals (Dict/Keyset/Vec)
/// reference pre-assembled adata ids; `TypedValue` has no textual form here
/// and is reported as a failure.
fn print_lit_const<W: Write>(w: &mut W, lit: &InstructLitConst) -> Result<(), W::Error> {
    use InstructLitConst as LC;
    match lit {
        LC::Null => w.write("Null"),
        LC::Int(i) => concat_str_by(w, " ", ["Int", i.to_string().as_str()]),
        LC::String(s) => {
            w.write("String ")?;
            quotes(w, |w| w.write(escape(s.as_str())))
        }
        LC::LazyClass(id) => {
            w.write("LazyClass ")?;
            print_class_id(w, id)
        }
        LC::True => w.write("True"),
        LC::False => w.write("False"),
        // Doubles are kept as their original source text, not re-formatted.
        LC::Double(d) => concat_str_by(w, " ", ["Double", d.as_str()]),
        LC::AddElemC => w.write("AddElemC"),
        LC::AddNewElemC => w.write("AddNewElemC"),
        LC::NewPair => w.write("NewPair"),
        LC::File => w.write("File"),
        LC::Dir => w.write("Dir"),
        LC::Method => w.write("Method"),
        LC::FuncCred => w.write("FuncCred"),
        LC::Dict(id) => {
            w.write("Dict ")?;
            print_adata_id(w, id)
        }
        LC::Keyset(id) => {
            w.write("Keyset ")?;
            print_adata_id(w, id)
        }
        LC::Vec(id) => {
            w.write("Vec ")?;
            print_adata_id(w, id)
        }
        LC::NewDictArray(i) => concat_str_by(w, " ", ["NewDictArray", i.to_string().as_str()]),
        LC::NewVec(i) => concat_str_by(w, " ", ["NewVec", i.to_string().as_str()]),
        LC::NewKeysetArray(i) => concat_str_by(w, " ", ["NewKeysetArray", i.to_string().as_str()]),
        LC::NewStructDict(l) => {
            let ls: Vec<&str> = l.as_ref().into_iter().map(|s| s.as_str()).collect();
            w.write("NewStructDict ")?;
            angle(w, |w| print_shape_fields(w, &ls[0..]))
        }
        LC::NewRecord(cid, l) => {
            let ls: Vec<&str> = l.as_ref().into_iter().map(|s| s.as_str()).collect();
            w.write("NewRecord ")?;
            print_class_id(w, cid)?;
            w.write(" ")?;
            angle(w, |w| print_shape_fields(w, &ls[0..]))
        }
        LC::CnsE(id) => {
            w.write("CnsE ")?;
            print_const_id(w, id)
        }
        LC::ClsCns(id) => {
            w.write("ClsCns ")?;
            print_const_id(w, id)
        }
        LC::ClsCnsD(const_id, cid) => {
            w.write("ClsCnsD ")?;
            print_const_id(w, const_id)?;
            w.write(" ")?;
            print_class_id(w, cid)
        }
        LC::ClsCnsL(id) => {
            w.write("ClsCnsL ")?;
            print_local(w, id)
        }
        LC::NewCol(ct) => {
            w.write("NewCol ")?;
            print_collection_type(w, ct)
        }
        LC::ColFromArray(ct) => {
            w.write("ColFromArray ")?;
            print_collection_type(w, ct)
        }
        LC::NullUninit => w.write("NullUninit"),
        LC::TypedValue(_) => Err(Error::fail("print_lit_const: TypedValue")),
    }
}
/// Print a collection type name. Object collections are capitalized;
/// the value types (dict/array/keyset/vec) are lowercase, as in the
/// original output.
fn print_collection_type<W: Write>(w: &mut W, ct: &CollectionType) -> Result<(), W::Error> {
    use CollectionType as CT;
    w.write(match ct {
        CT::Vector => "Vector",
        CT::Map => "Map",
        CT::Set => "Set",
        CT::Pair => "Pair",
        CT::ImmVector => "ImmVector",
        CT::ImmMap => "ImmMap",
        CT::ImmSet => "ImmSet",
        CT::Dict => "dict",
        CT::Array => "array",
        CT::Keyset => "keyset",
        CT::Vec => "vec",
    })
}
/// Print shape field names as space-separated, escaped, double-quoted strings.
fn print_shape_fields<W: Write>(w: &mut W, sf: &[&str]) -> Result<(), W::Error> {
    concat_by(w, " ", sf, |w, name| {
        quotes(w, |w| w.write(escape(*name)))
    })
}
/// Print an operator opcode. Most are bare names; a few carry a class,
/// function, or method operand, or a sub-op selector.
fn print_op<W: Write>(w: &mut W, op: &InstructOperator) -> Result<(), W::Error> {
    use InstructOperator as I;
    match op {
        I::Concat => w.write("Concat"),
        I::ConcatN(n) => concat_str_by(w, " ", ["ConcatN", n.to_string().as_str()]),
        I::Add => w.write("Add"),
        I::Sub => w.write("Sub"),
        I::Mul => w.write("Mul"),
        I::AddO => w.write("AddO"),
        I::SubO => w.write("SubO"),
        I::MulO => w.write("MulO"),
        I::Div => w.write("Div"),
        I::Mod => w.write("Mod"),
        I::Pow => w.write("Pow"),
        I::Not => w.write("Not"),
        I::Same => w.write("Same"),
        I::NSame => w.write("NSame"),
        I::Eq => w.write("Eq"),
        I::Neq => w.write("Neq"),
        I::Lt => w.write("Lt"),
        I::Lte => w.write("Lte"),
        I::Gt => w.write("Gt"),
        I::Gte => w.write("Gte"),
        I::Cmp => w.write("Cmp"),
        I::BitAnd => w.write("BitAnd"),
        I::BitOr => w.write("BitOr"),
        I::BitXor => w.write("BitXor"),
        I::BitNot => w.write("BitNot"),
        I::Shl => w.write("Shl"),
        I::Shr => w.write("Shr"),
        I::CastBool => w.write("CastBool"),
        I::CastInt => w.write("CastInt"),
        I::CastDouble => w.write("CastDouble"),
        I::CastString => w.write("CastString"),
        I::CastVec => w.write("CastVec"),
        I::CastDict => w.write("CastDict"),
        I::CastKeyset => w.write("CastKeyset"),
        I::InstanceOf => w.write("InstanceOf"),
        I::InstanceOfD(id) => {
            w.write("InstanceOfD ")?;
            print_class_id(w, id)
        }
        I::IsLateBoundCls => w.write("IsLateBoundCls"),
        I::IsTypeStructC(op) => concat_str_by(
            w,
            " ",
            [
                "IsTypeStructC",
                match op {
                    TypestructResolveOp::Resolve => "Resolve",
                    TypestructResolveOp::DontResolve => "DontResolve",
                },
            ],
        ),
        I::ThrowAsTypeStructException => w.write("ThrowAsTypeStructException"),
        I::CombineAndResolveTypeStruct(n) => concat_str_by(
            w,
            " ",
            ["CombineAndResolveTypeStruct", n.to_string().as_str()],
        ),
        I::Print => w.write("Print"),
        I::Clone => w.write("Clone"),
        I::Exit => w.write("Exit"),
        I::ResolveFunc(id) => {
            w.write("ResolveFunc ")?;
            print_function_id(w, id)
        }
        I::ResolveRFunc(id) => {
            w.write("ResolveRFunc ")?;
            print_function_id(w, id)
        }
        I::ResolveMethCaller(id) => {
            w.write("ResolveMethCaller ")?;
            print_function_id(w, id)
        }
        I::ResolveClsMethod(mid) => {
            w.write("ResolveClsMethod ")?;
            print_method_id(w, mid)
        }
        I::ResolveClsMethodD(cid, mid) => {
            w.write("ResolveClsMethodD ")?;
            print_class_id(w, cid)?;
            w.write(" ")?;
            print_method_id(w, mid)
        }
        I::ResolveClsMethodS(r, mid) => {
            w.write("ResolveClsMethodS ")?;
            print_special_cls_ref(w, r)?;
            w.write(" ")?;
            print_method_id(w, mid)
        }
        I::ResolveRClsMethod(mid) => {
            w.write("ResolveRClsMethod ")?;
            print_method_id(w, mid)
        }
        I::ResolveRClsMethodD(cid, mid) => {
            w.write("ResolveRClsMethodD ")?;
            print_class_id(w, cid)?;
            w.write(" ")?;
            print_method_id(w, mid)
        }
        I::ResolveRClsMethodS(r, mid) => {
            w.write("ResolveRClsMethodS ")?;
            print_special_cls_ref(w, r)?;
            w.write(" ")?;
            print_method_id(w, mid)
        }
        I::ResolveClass(id) => {
            w.write("ResolveClass ")?;
            print_class_id(w, id)
        }
        I::Fatal(fatal_op) => print_fatal_op(w, fatal_op),
    }
}
/// Print a `Fatal` opcode together with its sub-op.
fn print_fatal_op<W: Write>(w: &mut W, f: &FatalOp) -> Result<(), W::Error> {
    w.write(match f {
        FatalOp::Parse => "Fatal Parse",
        FatalOp::Runtime => "Fatal Runtime",
        FatalOp::RuntimeOmitFrame => "Fatal RuntimeOmitFrame",
    })
}
/// Print a parenthesized, comma-separated parameter list.
fn print_params<'arena, W: Write>(
    ctx: &mut Context,
    w: &mut W,
    params: impl AsRef<[HhasParam<'arena>]>,
) -> Result<(), W::Error> {
    paren(w, |w| {
        concat_by(w, ", ", params, |w, param| print_param(ctx, w, param))
    })
}
/// Print one parameter: user attributes, `inout`/`readonly`/`...` markers,
/// optional type info, the name, and an optional default-value initializer.
fn print_param<'arena, W: Write>(
    ctx: &mut Context,
    w: &mut W,
    param: &HhasParam<'arena>,
) -> Result<(), W::Error> {
    print_param_user_attributes(ctx, w, param)?;
    w.write_if(param.is_inout, "inout ")?;
    w.write_if(param.is_readonly, "readonly ")?;
    w.write_if(param.is_variadic, "...")?;
    // `Option::from` converts the FFI Maybe into a std Option; the clone is
    // needed because the conversion takes ownership.
    option(w, &Option::from(param.type_info.clone()), |w, ty| {
        print_type_info(w, ty)?;
        w.write(" ")
    })?;
    w.write(&param.name)?;
    option(
        w,
        &Option::from(param.default_value.map(|x| (x.0, x.1))),
        |w, i| print_param_default_value(w, i),
    )
}
/// Print a parameter id: unnamed ones in decimal, named ones verbatim.
fn print_param_id<W: Write>(w: &mut W, param_id: &ParamId) -> Result<(), W::Error> {
    match param_id {
        ParamId::ParamUnnamed(i) => write!(w, "{}", i),
        ParamId::ParamNamed(s) => w.write(s),
    }
}
/// Print a default-value initializer: ` = <label>("""<text>""")`.
fn print_param_default_value<'arena, W: Write>(
    w: &mut W,
    default_val: &(Label, Str<'arena>),
) -> Result<(), W::Error> {
    let (label, text) = default_val;
    w.write(" = ")?;
    print_label(w, label)?;
    paren(w, |w| triple_quotes(w, |w| w.write(text.as_str())))
}
/// Print a label: `L<id>` for regular labels, `DV<id>` for default-arg labels.
fn print_label<W: Write>(w: &mut W, label: &Label) -> Result<(), W::Error> {
    let (prefix, id) = match label {
        Label::Regular(id) => ("L", id),
        Label::DefaultArg(id) => ("DV", id),
    };
    w.write(prefix)?;
    print_int(w, id)
}
/// Print a local: unnamed locals as `_<id>`, named ones verbatim.
fn print_local<W: Write>(w: &mut W, local: &Local) -> Result<(), W::Error> {
    match local {
        Local::Unnamed(id) => write!(w, "_{}", id),
        Local::Named(name) => w.write(name),
    }
}
/// Print an unsigned integer in decimal.
fn print_int<W: Write>(w: &mut W, i: &usize) -> Result<(), W::Error> {
    w.write(i.to_string())
}
/// Print `key => value`, using the default expression printer for the key.
/// Thin wrapper over `print_key_value_`.
fn print_key_value<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    k: &ast::Expr,
    v: &ast::Expr,
) -> Result<(), W::Error> {
    print_key_value_(ctx, w, env, k, print_expr, v)
}
/// Print `key => value`, delegating the key to the supplied printer and the
/// value to `print_expr`.
fn print_key_value_<W: Write, K, KeyPrinter>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    key: K,
    mut print_key: KeyPrinter,
    value: &ast::Expr,
) -> Result<(), W::Error>
where
    KeyPrinter: FnMut(&mut Context, &mut W, &ExprEnv, K) -> Result<(), W::Error>,
{
    print_key(ctx, w, env, key)?;
    w.write(" => ")?;
    print_expr(ctx, w, env, value)
}
/// Print one collection field: either a bare value or a `key => value` pair.
fn print_afield<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    afield: &ast::Afield,
) -> Result<(), W::Error> {
    use ast::Afield as A;
    match afield {
        A::AFvalue(value) => print_expr(ctx, w, env, &value),
        A::AFkvalue(key, value) => print_key_value(ctx, w, env, &key, &value),
    }
}
/// Print a comma-separated list of collection fields.
fn print_afields<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    afields: impl AsRef<[ast::Afield]>,
) -> Result<(), W::Error> {
    concat_by(w, ", ", afields, |w, field| {
        print_afield(ctx, w, env, field)
    })
}
/// Print the source token for a unary operator.
///
/// Postfix `++`/`--` (`Upincr`/`Updecr`) have no prefix token and should
/// have been rewritten before reaching the printer, so hitting them here is
/// reported as an error.
fn print_uop<W: Write>(w: &mut W, op: ast::Uop) -> Result<(), W::Error> {
    use ast::Uop as U;
    w.write(match op {
        U::Utild => "~",
        U::Unot => "!",
        U::Uplus => "+",
        U::Uminus => "-",
        U::Uincr => "++",
        U::Udecr => "--",
        U::Usilence => "@",
        U::Upincr | U::Updecr => {
            // Fixed diagnostic grammar: "captures" -> "captured".
            return Err(Error::fail(
                "string_of_uop - should have been captured earlier",
            ));
        }
    })
}
/// Print comma-separated `key => value` pairs using the default expression
/// printer for keys. Thin wrapper over `print_key_values_`.
fn print_key_values<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    kvs: impl AsRef<[(ast::Expr, ast::Expr)]>,
) -> Result<(), W::Error> {
    print_key_values_(ctx, w, env, print_expr, kvs)
}
/// Print comma-separated `key => value` pairs, delegating each key to the
/// supplied printer.
fn print_key_values_<W: Write, K, KeyPrinter>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    mut print_key: KeyPrinter,
    kvs: impl AsRef<[(K, ast::Expr)]>,
) -> Result<(), W::Error>
where
    KeyPrinter: Fn(&mut Context, &mut W, &ExprEnv, &K) -> Result<(), W::Error>,
{
    concat_by(w, ", ", kvs, |w, (key, value)| {
        print_key_value_(ctx, w, env, key, &mut print_key, value)
    })
}
/// Print a `darray[k => v, ...]` literal, delegating each key to `kp`.
fn print_expr_darray<W: Write, K, KeyPrinter>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    kp: KeyPrinter,
    kvs: impl AsRef<[(K, ast::Expr)]>,
) -> Result<(), W::Error>
where
    KeyPrinter: Fn(&mut Context, &mut W, &ExprEnv, &K) -> Result<(), W::Error>,
{
    wrap_by_(w, "darray[", "]", |w| {
        print_key_values_(ctx, w, env, kp, kvs)
    })
}
/// Print a `varray[v, ...]` literal.
fn print_expr_varray<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    varray: &[ast::Expr],
) -> Result<(), W::Error> {
    wrap_by_(w, "varray[", "]", |w| {
        concat_by(w, ", ", varray, |w, elem| print_expr(ctx, w, env, elem))
    })
}
/// Print a shape field name: int-like names as decimal integers, string and
/// class-const names as quoted string literals. Context/env are unused but
/// kept so the function fits the KeyPrinter shape.
fn print_shape_field_name<W: Write>(
    _: &mut Context,
    w: &mut W,
    _: &ExprEnv,
    field: &ast::ShapeFieldName,
) -> Result<(), W::Error> {
    use ast::ShapeFieldName as S;
    match field {
        S::SFlitInt((_, s)) => print_expr_int(w, s.as_ref()),
        S::SFlitStr((_, s)) => print_expr_string(w, s),
        S::SFclassConst(_, (_, s)) => print_expr_string(w, s.as_bytes()),
    }
}
fn print_expr_int<W: Write>(w: &mut W, i: &str) -> Result<(), W::Error> {
match integer::to_decimal(i) {
Ok(s) => w.write(s),
Err(_) => Err(Error::fail("ParseIntError")),
}
}
/// Print a string literal wrapped in escaped quotes (`\"..\"`). Each escape
/// sequence is itself double-escaped (e.g. `\\n`) because the output is
/// embedded inside another quoted context; non-printable bytes become
/// double-escaped octal (`\\ooo`).
fn print_expr_string<W: Write>(w: &mut W, s: &[u8]) -> Result<(), W::Error> {
    // Maps a byte to its (already double-escaped) replacement, or None if
    // the byte is printable as-is.
    fn escape_char(c: u8) -> Option<Cow<'static, [u8]>> {
        match c {
            b'\n' => Some((&b"\\\\n"[..]).into()),
            b'\r' => Some((&b"\\\\r"[..]).into()),
            b'\t' => Some((&b"\\\\t"[..]).into()),
            b'\\' => Some((&b"\\\\\\\\"[..]).into()),
            b'"' => Some((&b"\\\\\\\""[..]).into()),
            b'$' => Some((&b"\\\\$"[..]).into()),
            c if is_lit_printable(c) => None,
            c => {
                let mut r = vec![];
                write!(r, "\\\\{:03o}", c).unwrap();
                Some(r.into())
            }
        }
    }
    // FIXME: This is not safe--string literals are binary strings.
    // There's no guarantee that they're valid UTF-8.
    let s = unsafe { std::str::from_utf8_unchecked(s) };
    wrap_by(w, "\\\"", |w| w.write(escape_by(s.into(), escape_char)))
}
/// Render an expression into an owned `String`. `W` only fixes the error
/// type; the actual writing goes to a local `String` buffer. `NotImpl`
/// errors pass through unchanged; everything else is wrapped as `Fail`.
fn print_expr_to_string<W: Write>(
    ctx: &mut Context,
    env: &ExprEnv,
    expr: &ast::Expr,
) -> Result<String, W::Error> {
    let mut buf = String::new();
    print_expr(ctx, &mut buf, env, expr).map_err(|e| match e {
        Error::NotImpl(m) => Error::NotImpl(m),
        _ => Error::Fail(format!("Failed: {}", e)),
    })?;
    Ok(buf)
}
/// Print the (approximate) Hack source text of `expr`.
///
/// Used when emitting default values and similar snippets into HHAS output,
/// so strings are escaped for that embedding (note the doubled backslashes in
/// many literals below). Returns `Error::NotImpl` / `Error::Fail` for
/// expression forms this printer does not support.
pub fn print_expr<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    ast::Expr(_, _, expr): &ast::Expr,
) -> Result<(), W::Error> {
    // Strip or add the namespace prefix on an identifier depending on the
    // codegen environment, then escape it for output.
    fn adjust_id<'a>(env: &ExprEnv, id: &'a str) -> Cow<'a, str> {
        let s: Cow<'a, str> = match env.codegen_env {
            Some(env) => {
                if env.is_namespaced
                    && id
                        .as_bytes()
                        .iter()
                        .rposition(|c| *c == b'\\')
                        .map_or(true, |i| i < 1)
                {
                    strip_global_ns(id).into()
                } else {
                    add_ns(id)
                }
            }
            _ => id.into(),
        };
        escaper::escape(s)
    }
    fn print_expr_id<'a, W: Write>(w: &mut W, env: &ExprEnv, s: &'a str) -> Result<(), W::Error> {
        w.write(adjust_id(env, s))
    }
    // Format a class name: XHP names are mangled first; class constants get a
    // leading (escaped) backslash.
    fn fmt_class_name<'a>(is_class_constant: bool, id: Cow<'a, str>) -> Cow<'a, str> {
        let cn: Cow<'a, str> = if is_xhp(strip_global_ns(&id)) {
            escaper::escape(strip_global_ns(&mangle(id.into())))
                .to_string()
                .into()
        } else {
            escaper::escape(strip_global_ns(&id)).to_string().into()
        };
        if is_class_constant {
            format!("\\\\{}", cn).into()
        } else {
            cn
        }
    }
    // Resolve `self`/`parent`/`static` via the special class resolver;
    // otherwise optionally format the literal class id.
    fn get_class_name_from_id<'e>(
        ctx: &mut Context,
        env: Option<&'e HhasBodyEnv>,
        should_format: bool,
        is_class_constant: bool,
        id: &'e str,
    ) -> Cow<'e, str> {
        if id == classes::SELF || id == classes::PARENT || id == classes::STATIC {
            let name = ctx.special_class_resolver.resolve(env, id);
            return fmt_class_name(is_class_constant, name);
        }
        fn get<'a>(should_format: bool, is_class_constant: bool, id: &'a str) -> Cow<'a, str> {
            if should_format {
                fmt_class_name(is_class_constant, id.into())
            } else {
                id.into()
            }
        }
        if env.is_some() {
            let alloc = bumpalo::Bump::new();
            let class_id = ClassType::from_ast_name(&alloc, id);
            let id = class_id.to_raw_string();
            get(should_format, is_class_constant, id)
                .into_owned()
                .into()
        } else {
            get(should_format, is_class_constant, id)
        }
    }
    // Special-case `Foo::class` (but not self/parent/static::class): print the
    // class name itself, either as an id (array index position) or a string.
    // Returns Ok(None) when the expression is not of that shape.
    fn handle_possible_colon_colon_class_expr<W: Write>(
        ctx: &mut Context,
        w: &mut W,
        env: &ExprEnv,
        is_array_get: bool,
        e_: &ast::Expr_,
    ) -> Result<Option<()>, W::Error> {
        match e_.as_class_const() {
            Some((
                ast::ClassId(_, _, ast::ClassId_::CIexpr(ast::Expr(_, _, ast::Expr_::Id(id)))),
                (_, s2),
            )) if is_class(&s2) && !(is_self(&id.1) || is_parent(&id.1) || is_static(&id.1)) => {
                Ok(Some({
                    let s1 = get_class_name_from_id(ctx, env.codegen_env, false, false, &id.1);
                    if is_array_get {
                        print_expr_id(w, env, s1.as_ref())?
                    } else {
                        print_expr_string(w, s1.as_bytes())?
                    }
                }))
            }
            _ => Ok(None),
        }
    }
    use ast::Expr_ as E_;
    match expr {
        E_::Id(id) => print_expr_id(w, env, id.1.as_ref()),
        E_::Lvar(lid) => w.write(escaper::escape(&(lid.1).1)),
        E_::Float(f) => {
            // Scientific notation is normalized to HHVM's shape: uppercase E,
            // explicit sign, and a 2-digit minimum exponent.
            if f.contains('E') || f.contains('e') {
                let s = format!(
                    "{:.1E}",
                    f.parse::<f64>()
                        .map_err(|_| Error::fail(format!("ParseFloatError: {}", f)))?
                )
                // to_uppercase() here because s might be "inf" or "nan"
                .to_uppercase();
                lazy_static! {
                    static ref UNSIGNED_EXP: Regex =
                        Regex::new(r"(?P<first>E)(?P<second>\d+)").unwrap();
                    static ref SIGNED_SINGLE_DIGIT_EXP: Regex =
                        Regex::new(r"(?P<first>E[+-])(?P<second>\d$)").unwrap();
                }
                // turn mEn into mE+n
                let s = UNSIGNED_EXP.replace(&s, "${first}+${second}");
                // turn mE+n or mE-n into mE+0n or mE-0n (where n is a single digit)
                let s = SIGNED_SINGLE_DIGIT_EXP.replace(&s, "${first}0${second}");
                w.write(s)
            } else {
                w.write(f)
            }
        }
        E_::Int(i) => print_expr_int(w, i.as_ref()),
        E_::String(s) => print_expr_string(w, s),
        E_::Null => w.write("NULL"),
        E_::True => w.write("true"),
        E_::False => w.write("false"),
        // For arrays and collections, we are making a conscious decision to not
        // match HHVM, as HHVM's emitter has inconsistencies in the pretty printer
        // https://fburl.com/tzom2qoe
        E_::Collection(c) if (c.0).1 == "vec" || (c.0).1 == "dict" || (c.0).1 == "keyset" => {
            w.write(&(c.0).1)?;
            square(w, |w| print_afields(ctx, w, env, &c.2))
        }
        E_::Collection(c) => {
            let name = strip_ns((c.0).1.as_str());
            let name = types::fix_casing(&name);
            match name {
                "Set" | "Pair" | "Vector" | "Map" | "ImmSet" | "ImmVector" | "ImmMap" => {
                    w.write("HH\\\\")?;
                    w.write(name)?;
                    wrap_by_(w, " {", "}", |w| {
                        Ok(if !c.2.is_empty() {
                            w.write(" ")?;
                            print_afields(ctx, w, env, &c.2)?;
                            w.write(" ")?;
                        })
                    })
                }
                // NOTE: message text ("unknow") kept verbatim; it is part of
                // the emitted error string.
                _ => Err(Error::fail(format!(
                    "Default value for an unknow collection - {}",
                    name
                ))),
            }
        }
        E_::Shape(fl) => print_expr_darray(ctx, w, env, print_shape_field_name, fl),
        E_::Binop(x) => {
            let (bop, e1, e2) = &**x;
            print_expr(ctx, w, env, e1)?;
            w.write(" ")?;
            print_bop(w, bop)?;
            w.write(" ")?;
            print_expr(ctx, w, env, e2)
        }
        E_::Call(c) => {
            let (e, _, es, unpacked_element) = &**c;
            // Print the callee: either a bare id or an arbitrary expression,
            // with any leading backslash stripped.
            match e.as_id() {
                Some(ast_defs::Id(_, call_id)) => {
                    w.write(lstrip(adjust_id(env, &call_id).as_ref(), "\\\\"))?
                }
                None => {
                    let buf = print_expr_to_string::<W>(ctx, env, e)?;
                    w.write(lstrip(&buf, "\\\\"))?
                }
            };
            paren(w, |w| {
                concat_by(w, ", ", &es, |w, e| print_expr(ctx, w, env, e))?;
                match unpacked_element {
                    None => Ok(()),
                    Some(e) => {
                        if !es.is_empty() {
                            w.write(", ")?;
                        }
                        // TODO: Should probably have ... also but we are not doing that in ocaml
                        print_expr(ctx, w, env, e)
                    }
                }
            })
        }
        E_::New(x) => {
            let (cid, _, es, unpacked_element, _) = &**x;
            match cid.2.as_ciexpr() {
                Some(ci_expr) => {
                    w.write("new ")?;
                    match ci_expr.2.as_id() {
                        Some(ast_defs::Id(_, cname)) => w.write(lstrip(
                            &adjust_id(
                                env,
                                &ClassType::from_ast_name(&bumpalo::Bump::new(), cname)
                                    .to_raw_string(),
                            ),
                            "\\\\",
                        ))?,
                        None => {
                            let buf = print_expr_to_string::<W>(ctx, env, ci_expr)?;
                            w.write(lstrip(&buf, "\\\\"))?
                        }
                    }
                    paren(w, |w| {
                        concat_by(w, ", ", es, |w, e| print_expr(ctx, w, env, e))?;
                        match unpacked_element {
                            None => Ok(()),
                            Some(e) => {
                                w.write(", ")?;
                                print_expr(ctx, w, env, e)
                            }
                        }
                    })
                }
                None => {
                    match cid.2.as_ci() {
                        Some(id) => {
                            // Xml exprs rewritten as New exprs come
                            // through here.
                            print_xml(ctx, w, env, &id.1, es)
                        }
                        None => not_impl!(),
                    }
                }
            }
        }
        E_::Record(r) => {
            w.write(lstrip(adjust_id(env, &(r.0).1).as_ref(), "\\\\"))?;
            print_key_values(ctx, w, env, &r.1)
        }
        E_::ClassGet(cg) => {
            // Print `Class::$prop` / `Class::{expr}` style static accesses.
            match &(cg.0).2 {
                ast::ClassId_::CIexpr(e) => match e.as_id() {
                    Some(id) => w.write(&get_class_name_from_id(
                        ctx,
                        env.codegen_env,
                        true,  /* should_format */
                        false, /* is_class_constant */
                        &id.1,
                    ))?,
                    _ => print_expr(ctx, w, env, e)?,
                },
                _ => return Err(Error::fail("TODO Unimplemented unexpected non-CIexpr")),
            }
            w.write("::")?;
            match &cg.1 {
                ast::ClassGetExpr::CGstring((_, litstr)) => w.write(escaper::escape(litstr)),
                ast::ClassGetExpr::CGexpr(e) => print_expr(ctx, w, env, e),
            }
        }
        E_::ClassConst(cc) => {
            if let Some(e1) = (cc.0).2.as_ciexpr() {
                // `Foo::class` is handled specially; otherwise print
                // `Class::CONST`.
                handle_possible_colon_colon_class_expr(ctx, w, env, false, expr)?.map_or_else(
                    || {
                        let s2 = &(cc.1).1;
                        match e1.2.as_id() {
                            Some(ast_defs::Id(_, s1)) => {
                                let s1 =
                                    get_class_name_from_id(ctx, env.codegen_env, true, true, s1);
                                concat_str_by(w, "::", [&s1.into(), s2])
                            }
                            _ => {
                                print_expr(ctx, w, env, e1)?;
                                w.write("::")?;
                                w.write(s2)
                            }
                        }
                    },
                    Ok,
                )
            } else {
                Err(Error::fail("TODO: Only expected CIexpr in class_const"))
            }
        }
        E_::Unop(u) => match u.0 {
            // Increment/decrement are postfix here; all other unary operators
            // are prefix.
            ast::Uop::Upincr => {
                print_expr(ctx, w, env, &u.1)?;
                w.write("++")
            }
            ast::Uop::Updecr => {
                print_expr(ctx, w, env, &u.1)?;
                w.write("--")
            }
            _ => {
                print_uop(w, u.0)?;
                print_expr(ctx, w, env, &u.1)
            }
        },
        E_::ObjGet(og) => {
            print_expr(ctx, w, env, &og.0)?;
            w.write(match og.2 {
                ast::OgNullFlavor::OGNullthrows => "->",
                ast::OgNullFlavor::OGNullsafe => "?->",
            })?;
            print_expr(ctx, w, env, &og.1)
        }
        E_::Clone(e) => {
            w.write("clone ")?;
            print_expr(ctx, w, env, e)
        }
        E_::ArrayGet(ag) => {
            print_expr(ctx, w, env, &ag.0)?;
            square(w, |w| {
                option(w, &ag.1, |w, e: &ast::Expr| {
                    // `$a[Foo::class]` prints the class name as an id rather
                    // than a string.
                    handle_possible_colon_colon_class_expr(ctx, w, env, true, &e.2)
                        .transpose()
                        .unwrap_or_else(|| print_expr(ctx, w, env, e))
                })
            })
        }
        E_::String2(ss) => concat_by(w, " . ", ss, |w, s| print_expr(ctx, w, env, s)),
        E_::PrefixedString(s) => {
            w.write(&s.0)?;
            w.write(" . ")?;
            print_expr(ctx, w, env, &s.1)
        }
        E_::Eif(eif) => {
            print_expr(ctx, w, env, &eif.0)?;
            w.write(" ? ")?;
            option(w, &eif.1, |w, etrue| print_expr(ctx, w, env, etrue))?;
            w.write(" : ")?;
            print_expr(ctx, w, env, &eif.2)
        }
        E_::Cast(c) => {
            paren(w, |w| print_hint(w, false, &c.0))?;
            print_expr(ctx, w, env, &c.1)
        }
        E_::Pipe(p) => {
            print_expr(ctx, w, env, &p.1)?;
            w.write(" |> ")?;
            print_expr(ctx, w, env, &p.2)
        }
        E_::Is(i) => {
            print_expr(ctx, w, env, &i.0)?;
            w.write(" is ")?;
            print_hint(w, true, &i.1)
        }
        E_::As(a) => {
            print_expr(ctx, w, env, &a.0)?;
            w.write(if a.2 { " ?as " } else { " as " })?;
            print_hint(w, true, &a.1)
        }
        E_::Varray(va) => print_expr_varray(ctx, w, env, &va.1),
        E_::Darray(da) => print_expr_darray(ctx, w, env, print_expr, &da.1),
        E_::Tuple(t) => wrap_by_(w, "varray[", "]", |w| {
            // A tuple is represented by a varray when using reflection.
            concat_by(w, ", ", t, |w, i| print_expr(ctx, w, env, i))
        }),
        E_::List(l) => wrap_by_(w, "list(", ")", |w| {
            concat_by(w, ", ", l, |w, i| print_expr(ctx, w, env, i))
        }),
        E_::Yield(y) => {
            w.write("yield ")?;
            print_afield(ctx, w, env, y)
        }
        E_::Await(e) => {
            w.write("await ")?;
            print_expr(ctx, w, env, e)
        }
        E_::Import(i) => {
            print_import_flavor(w, &i.0)?;
            w.write(" ")?;
            print_expr(ctx, w, env, &i.1)
        }
        E_::Xml(_) => Err(Error::fail(
            "expected Xml to be converted to New during rewriting",
        )),
        E_::Efun(f) => print_efun(ctx, w, env, &f.0, &f.1),
        E_::FunctionPointer(fp) => {
            let (fp_id, targs) = &**fp;
            match fp_id {
                ast::FunctionPtrId::FPId(ast::Id(_, sid)) => {
                    w.write(lstrip(adjust_id(env, &sid).as_ref(), "\\\\"))?
                }
                ast::FunctionPtrId::FPClassConst(ast::ClassId(_, _, class_id), (_, meth_name)) => {
                    match class_id {
                        ast::ClassId_::CIexpr(e) => match e.as_id() {
                            Some(id) => w.write(&get_class_name_from_id(
                                ctx,
                                env.codegen_env,
                                true,  /* should_format */
                                false, /* is_class_constant */
                                &id.1,
                            ))?,
                            _ => print_expr(ctx, w, env, e)?,
                        },
                        _ => {
                            return Err(Error::fail(
                                "TODO Unimplemented unexpected non-CIexpr in function pointer",
                            ));
                        }
                    }
                    w.write("::")?;
                    w.write(meth_name)?
                }
            };
            // Type arguments are erased: each prints as `_`.
            wrap_by_(w, "<", ">", |w| {
                concat_by(w, ", ", targs, |w, _targ| w.write("_"))
            })
        }
        E_::Omitted => Ok(()),
        E_::Lfun(lfun) => {
            // Lambdas are only printable in debug dumps; normally closure
            // conversion rewrites them to Efun before we get here.
            if ctx.dump_lambdas {
                let fun_ = &lfun.0;
                paren(w, |w| {
                    paren(w, |w| {
                        concat_by(w, ", ", &fun_.params, |w, param| {
                            print_fparam(ctx, w, env, param)
                        })
                    })?;
                    w.write(" ==> ")?;
                    print_block_(ctx, w, env, &fun_.body.fb_ast, None)
                })
            } else {
                Err(Error::fail(
                    "expected Lfun to be converted to Efun during closure conversion print_expr",
                ))
            }
        }
        E_::Callconv(_) => Err(Error::fail("illegal default value")),
        E_::ETSplice(splice) => {
            w.write("${")?;
            print_expr(ctx, w, env, splice)?;
            w.write("}")
        }
        _ => Err(Error::fail(format!(
            "TODO Unimplemented: Cannot print: {:?}",
            expr
        ))),
    }
}
// Print an XHP literal that was rewritten into a `new` expression:
// `new :tag(darray[attrs], varray[children], __FILE__, __LINE__)`.
// `es` is expected to hold exactly [Shape(attrs), Varray(children)].
fn print_xml<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    id: &str,
    es: &[ast::Expr],
) -> Result<(), W::Error> {
    use ast::{Expr as E, Expr_ as E_};
    fn syntax_error<W: Write>(_: &W) -> crate::write::Error<<W as crate::write::Write>::Error> {
        Error::NotImpl(String::from("print_xml: unexpected syntax"))
    }
    // Print one XHP attribute as a `"name" => value` pair; only
    // string-literal attribute names are supported.
    fn print_xhp_attr<W: Write>(
        ctx: &mut Context,
        w: &mut W,
        env: &ExprEnv,
        attr: &(ast_defs::ShapeFieldName, ast::Expr),
    ) -> Result<(), W::Error> {
        match attr {
            (ast_defs::ShapeFieldName::SFlitStr(s), e) => print_key_value_(
                ctx,
                w,
                env,
                &s.1,
                |_, w, _, k| print_expr_string(w, k.as_slice()),
                e,
            ),
            _ => Err(syntax_error(w)),
        }
    }
    let (attrs, children) = if es.len() < 2 {
        Err(syntax_error(w))
    } else {
        match (&es[0], &es[1]) {
            (E(_, _, E_::Shape(attrs)), E(_, _, E_::Varray(children))) => Ok((attrs, &children.1)),
            _ => Err(syntax_error(w)),
        }
    }?;
    let env = ExprEnv {
        codegen_env: env.codegen_env,
    };
    // XHP class names are mangled (e.g. `:foo:bar` -> `xhp_foo__bar`).
    write!(w, "new {}", mangle(id.into()))?;
    paren(w, |w| {
        wrap_by_(w, "darray[", "]", |w| {
            concat_by(w, ", ", attrs, |w, attr| print_xhp_attr(ctx, w, &env, attr))
        })?;
        w.write(", ")?;
        print_expr_varray(ctx, w, &env, children)?;
        w.write(", __FILE__, __LINE__")
    })
}
// Print a closure expression:
// `[async ]function (params) [use (vars) ]{ body }`.
fn print_efun<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    f: &ast::Fun_,
    use_list: &[ast::Lid],
) -> Result<(), W::Error> {
    // `async` prefix for async functions and async generators alike.
    w.write_if(
        f.fun_kind.is_fasync() || f.fun_kind.is_fasync_generator(),
        "async ",
    )?;
    w.write("function ")?;
    paren(w, |w| {
        concat_by(w, ", ", &f.params, |w, p| print_fparam(ctx, w, env, p))
    })?;
    w.write(" ")?;
    // The `use (...)` clause is only emitted when variables are captured.
    if !use_list.is_empty() {
        w.write("use ")?;
        paren(w, |w| {
            concat_by(w, ", ", use_list, |w: &mut W, ast::Lid(_, id)| {
                w.write(local_id::get_name(id))
            })
        })?;
        w.write(" ")?;
    }
    print_block_(ctx, w, env, &f.body.fb_ast, None)
}
// Print a statement block, eliding braces where possible.
// Arm order matters here:
//   - empty block or lone Noop: print nothing;
//   - a single nested one-statement block: flatten into a braced block;
//   - two or more statements: braced block;
//   - exactly one statement (all other shapes): print it bare, no braces.
fn print_block<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    block: &[ast::Stmt],
    ident: Option<&str>,
) -> Result<(), W::Error> {
    match &block[..] {
        [] | [ast::Stmt(_, ast::Stmt_::Noop)] => Ok(()),
        [ast::Stmt(_, ast::Stmt_::Block(b))] if b.len() == 1 => print_block_(ctx, w, env, b, ident),
        [_, _, ..] => print_block_(ctx, w, env, block, ident),
        [stmt] => print_statement(ctx, w, env, stmt, None),
    }
}
// Print a braced statement block. The braces and newlines are emitted in
// escaped form ("\n") because the output is embedded in an HHAS string.
// `ident` is an optional indentation prefix written before each statement.
fn print_block_<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    block: &[ast::Stmt],
    ident: Option<&str>,
) -> Result<(), W::Error> {
    wrap_by_(w, "{\\n", "}\\n", |w| {
        concat(w, block, |w, stmt| {
            option(w, ident, |w, i: &str| w.write(i))?;
            print_statement(ctx, w, env, stmt, Some("  "))
        })?;
        option(w, ident, |w, i: &str| w.write(i))
    })
}
// Print a single statement, prefixed by the optional `ident` indentation
// string. Statement terminators use the escaped ";\n" form because the
// output is embedded in an HHAS string. Unsupported statements print a
// placeholder rather than failing.
fn print_statement<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    stmt: &ast::Stmt,
    ident: Option<&str>,
) -> Result<(), W::Error> {
    use ast::Stmt_ as S_;
    match &stmt.1 {
        S_::Return(expr) => {
            option(w, ident, |w, i: &str| w.write(i))?;
            wrap_by_(w, "return", ";\\n", |w| {
                // `return;` with no value omits the expression entirely.
                option(w, &**expr, |w, e| {
                    w.write(" ")?;
                    print_expr(ctx, w, env, e)
                })
            })
        }
        S_::Expr(expr) => {
            option(w, ident, |w, i: &str| w.write(i))?;
            print_expr(ctx, w, env, &**expr)?;
            w.write(";\\n")
        }
        S_::Throw(expr) => {
            option(w, ident, |w, i: &str| w.write(i))?;
            wrap_by_(w, "throw ", ";\\n", |w| print_expr(ctx, w, env, &**expr))
        }
        S_::Break => {
            option(w, ident, |w, i: &str| w.write(i))?;
            w.write("break;\\n")
        }
        S_::Continue => {
            option(w, ident, |w, i: &str| w.write(i))?;
            w.write("continue;\\n")
        }
        S_::While(x) => {
            let (cond, block) = &**x;
            option(w, ident, |w, i: &str| w.write(i))?;
            wrap_by_(w, "while (", ") ", |w| print_expr(ctx, w, env, cond))?;
            print_block(ctx, w, env, block.as_ref(), ident)
        }
        S_::If(x) => {
            let (cond, if_block, else_block) = &**x;
            option(w, ident, |w, i: &str| w.write(i))?;
            wrap_by_(w, "if (", ") ", |w| print_expr(ctx, w, env, cond))?;
            print_block(ctx, w, env, if_block, ident)?;
            // The else branch is printed into a side buffer first so the
            // ` else ` keyword can be suppressed when the branch is empty.
            let mut buf = String::new();
            print_block(ctx, &mut buf, env, else_block, ident).map_err(|e| match e {
                Error::NotImpl(m) => Error::NotImpl(m),
                _ => Error::Fail(format!("Failed: {}", e)),
            })?;
            w.write_if(!buf.is_empty(), " else ")?;
            w.write_if(!buf.is_empty(), buf)
        }
        S_::Block(block) => {
            option(w, ident, |w, i: &str| w.write(i))?;
            print_block_(ctx, w, env, block, ident)
        }
        S_::Noop => Ok(()),
        /* TODO(T29869930) */
        _ => w.write("TODO Unimplemented NYI: Default value printing"),
    }
}
// Print a function parameter: `[inout ][...][Hint ]$name[ = default]`.
//
// Bug fix: the original text contained mojibake — `w.write(¶m.name)` and
// `option(w, ¶m.expr, ...)` — where `&para` of `&param` had been folded
// into the `¶` character by a bad encoding round-trip, so the code did not
// compile. Restored to `&param.name` / `&param.expr`.
fn print_fparam<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    env: &ExprEnv,
    param: &ast::FunParam,
) -> Result<(), W::Error> {
    // `inout` calling convention prefix.
    if let Some(ast_defs::ParamKind::Pinout) = param.callconv {
        w.write("inout ")?;
    }
    // Variadic parameters are prefixed with `...`.
    if param.is_variadic {
        w.write("...")?;
    }
    // Optional type hint, followed by a space.
    option(w, &(param.type_hint).1, |w, h| {
        print_hint(w, true, h)?;
        w.write(" ")
    })?;
    w.write(&param.name)?;
    // Optional default value.
    option(w, &param.expr, |w, e| {
        w.write(" = ")?;
        print_expr(ctx, w, env, e)
    })
}
/// Print the source token for a binary operator.
///
/// Compound assignments (`+=`, `.=`, ...) are the only recursive case:
/// they print `=` followed by the inner operator's token.
fn print_bop<W: Write>(w: &mut W, bop: &ast_defs::Bop) -> Result<(), W::Error> {
    use ast_defs::Bop;
    let token = match bop {
        Bop::Plus => "+",
        Bop::Minus => "-",
        Bop::Star => "*",
        Bop::Slash => "/",
        Bop::Eqeq => "==",
        Bop::Eqeqeq => "===",
        Bop::Starstar => "**",
        Bop::Eq(None) => "=",
        Bop::Eq(Some(inner)) => {
            w.write("=")?;
            return print_bop(w, inner);
        }
        Bop::Ampamp => "&&",
        Bop::Barbar => "||",
        Bop::Lt => "<",
        Bop::Lte => "<=",
        Bop::Cmp => "<=>",
        Bop::Gt => ">",
        Bop::Gte => ">=",
        Bop::Dot => ".",
        Bop::Amp => "&",
        Bop::Bar => "|",
        Bop::Ltlt => "<<",
        Bop::Gtgt => ">>",
        Bop::Percent => "%",
        Bop::Xor => "^",
        Bop::Diff => "!=",
        Bop::Diff2 => "!==",
        Bop::QuestionQuestion => "??",
    };
    w.write(token)
}
// Print a type hint, formatted via the emitter's hint formatter and then
// escaped. When `ns` is false the namespace prefix is stripped.
fn print_hint<W: Write>(w: &mut W, ns: bool, hint: &ast::Hint) -> Result<(), W::Error> {
    let alloc = bumpalo::Bump::new();
    // Map the emitter's error type into this printer's error type.
    let h = emit_type_hint::fmt_hint(&alloc, &[], false, hint).map_err(|e| match e {
        Unrecoverable(s) => Error::fail(s),
        _ => Error::fail("Error printing hint"),
    })?;
    if ns {
        w.write(escaper::escape(h))
    } else {
        w.write(escaper::escape(strip_ns(&h)))
    }
}
/// Print the keyword for an `include`/`require` import flavor.
fn print_import_flavor<W: Write>(w: &mut W, flavor: &ast::ImportFlavor) -> Result<(), W::Error> {
    use ast::ImportFlavor;
    let keyword = match flavor {
        ImportFlavor::Include => "include",
        ImportFlavor::Require => "require",
        ImportFlavor::IncludeOnce => "include_once",
        ImportFlavor::RequireOnce => "require_once",
    };
    w.write(keyword)
}
// Print a parameter's user attributes inside square brackets, or nothing
// when the parameter has none.
//
// Bug fix: the original text contained mojibake — `print_attributes(ctx, w,
// ¶m.user_attributes)` — where `&para` of `&param` had been folded into
// the `¶` character by a bad encoding round-trip, so the code did not
// compile. Restored to `&param.user_attributes`.
fn print_param_user_attributes<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    param: &HhasParam,
) -> Result<(), W::Error> {
    match param.user_attributes.as_ref()[..] {
        [] => Ok(()),
        _ => square(w, |w| print_attributes(ctx, w, &param.user_attributes)),
    }
}
fn print_span<W: Write>(
w: &mut W,
&HhasSpan(line_begin, line_end): &HhasSpan,
) -> Result<(), W::Error> {
write!(w, "({},{})", line_begin, line_end)
}
// Print a function's attribute list: special (runtime) attributes derived
// from the function's flags, followed by its user attributes. The push
// order below determines the emitted order, so it must be preserved.
fn print_fun_attrs<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    f: &HhasFunction,
) -> Result<(), W::Error> {
    use hhas_attribute::*;
    let user_attrs = f.attributes.as_ref();
    let mut special_attrs = vec![];
    if has_meth_caller(user_attrs) {
        special_attrs.push("builtin");
        special_attrs.push("is_meth_caller");
    }
    if f.is_interceptable() {
        special_attrs.push("interceptable");
    }
    if has_foldable(user_attrs) {
        special_attrs.push("foldable");
    }
    if has_provenance_skip_frame(user_attrs) {
        special_attrs.push("prov_skip_frame");
    }
    if f.is_no_injection() {
        special_attrs.push("no_injection");
    }
    // Systemlib functions are always dynamically callable; user code only
    // when explicitly attributed (and not a memoize implementation).
    if ctx.is_system_lib() || (has_dynamically_callable(user_attrs) && !f.is_memoize_impl()) {
        special_attrs.push("dyn_callable")
    }
    if ctx.is_system_lib() {
        special_attrs.push("unique");
        special_attrs.push("builtin");
        special_attrs.push("persistent");
    }
    print_special_and_user_attrs(ctx, w, &special_attrs, user_attrs)
}
/// Print `[special... user...] ` — the bracketed attribute list followed by
/// a trailing space — or nothing at all when both lists are empty.
fn print_special_and_user_attrs<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    specials: &[&str],
    users: &[HhasAttribute],
) -> Result<(), W::Error> {
    // Nothing to emit when there are no attributes of either kind.
    if specials.is_empty() && users.is_empty() {
        return Ok(());
    }
    square(w, |w| {
        concat_str_by(w, " ", specials)?;
        // Separator between the special and user attribute groups.
        if !specials.is_empty() && !users.is_empty() {
            w.write(" ")?;
        }
        print_attributes(ctx, w, users)
    })?;
    w.write(" ")
}
// Print a brace-wrapped, comma-separated list of generic upper bounds.
fn print_upper_bounds<'arena, W: Write>(
    w: &mut W,
    ubs: impl AsRef<[Pair<Str<'arena>, Slice<'arena, HhasTypeInfo<'arena>>>]>,
) -> Result<(), W::Error> {
    braces(w, |w| concat_by(w, ", ", ubs, print_upper_bound))
}
// Print one upper bound as `(T as Ty1, Ty2, ...)`. The trailing "" in the
// concat produces the space after `as`.
fn print_upper_bound<'arena, W: Write>(
    w: &mut W,
    Pair(id, tys): &Pair<Str<'arena>, Slice<'arena, HhasTypeInfo>>,
) -> Result<(), W::Error> {
    paren(w, |w| {
        concat_str_by(w, " ", [id.as_str(), "as", ""])?;
        concat_by(w, ", ", &tys, print_type_info)
    })
}
// Print a brace-wrapped, comma-separated list of generic upper bounds.
// NOTE(review): duplicates `print_upper_bounds` except for the elided
// lifetime in the element printer — candidates for consolidation.
fn print_upper_bounds_<'arena, W: Write>(
    w: &mut W,
    ubs: impl AsRef<[Pair<Str<'arena>, Slice<'arena, HhasTypeInfo<'arena>>>]>,
) -> Result<(), W::Error> {
    braces(w, |w| concat_by(w, ", ", ubs, print_upper_bound_))
}
// Print one upper bound as `(T as Ty1, Ty2, ...)`.
// NOTE(review): duplicates `print_upper_bound` modulo the explicit
// `HhasTypeInfo<'arena>` lifetime — candidates for consolidation.
fn print_upper_bound_<'arena, W: Write>(
    w: &mut W,
    Pair(id, tys): &Pair<Str<'arena>, Slice<'arena, HhasTypeInfo<'arena>>>,
) -> Result<(), W::Error> {
    paren(w, |w| {
        concat_str_by(w, " ", [id.as_str(), "as", ""])?;
        concat_by(w, ", ", &tys, print_type_info)
    })
}
// Print a type info in the non-enum form (user type and constraint name).
fn print_type_info<W: Write>(w: &mut W, ti: &HhasTypeInfo) -> Result<(), W::Error> {
    print_type_info_(w, false, ti)
}
/// Print the names of the constraint flags set in `flag`, space-separated,
/// in a fixed canonical order.
fn print_type_flags<W: Write>(
    w: &mut W,
    flag: constraint::ConstraintFlags,
) -> Result<(), W::Error> {
    use constraint::ConstraintFlags as F;
    // The emitted order is fixed; it must match HHAS expectations.
    let names = [
        (F::DISPLAY_NULLABLE, "display_nullable"),
        (F::EXTENDED_HINT, "extended_hint"),
        (F::NULLABLE, "nullable"),
        (F::SOFT, "soft"),
        (F::TYPE_CONSTANT, "type_constant"),
        (F::TYPE_VAR, "type_var"),
        (F::UPPERBOUND, "upper_bound"),
    ];
    let mut first = true;
    for (bit, name) in names.iter() {
        if flag.contains(*bit) {
            // Single space between flag names, none before the first.
            if !first {
                w.write(" ")?;
            }
            first = false;
            w.write(*name)?;
        }
    }
    Ok(())
}
// Print a type info as `<"user_type" "constraint_name" flags...>`.
// For enums (`is_enum`), the constraint name is omitted. Absent names print
// as a bare `N`.
fn print_type_info_<W: Write>(w: &mut W, is_enum: bool, ti: &HhasTypeInfo) -> Result<(), W::Error> {
    // Print either the quoted, escaped string or `N` for "none".
    let print_quote_str = |w: &mut W, opt: &Option<String>| {
        option_or(
            w,
            opt,
            |w, s: &String| quotes(w, |w| w.write(escape(s))),
            "N",
        )
    };
    angle(w, |w| {
        print_quote_str(
            w,
            &Option::from(ti.user_type.map(|n| n.as_str().to_owned())),
        )?;
        w.write(" ")?;
        if !is_enum {
            print_quote_str(
                w,
                &Option::from(ti.type_constraint.name.map(|n| n.as_str().to_owned())),
            )?;
            w.write(" ")?;
        }
        print_type_flags(w, ti.type_constraint.flags)
    })
}
// Print a typedef's type info as `<"name"[ nullable]>`. Of all constraint
// flags, only NULLABLE is relevant for typedefs, so the rest are masked off.
fn print_typedef_info<W: Write>(w: &mut W, ti: &HhasTypeInfo) -> Result<(), W::Error> {
    angle(w, |w| {
        w.write(quote_string(
            ti.type_constraint.name.as_ref().map_or("", |n| n.as_str()),
        ))?;
        let flags = ti.type_constraint.flags & constraint::ConstraintFlags::NULLABLE;
        if !flags.is_empty() {
            wrap_by(w, " ", |w| {
                print_type_flags(
                    w,
                    ti.type_constraint.flags & constraint::ConstraintFlags::NULLABLE,
                )
            })?;
        }
        Ok(())
    })
}
/// Print ` extends Base` when a base class is present; otherwise nothing.
fn print_extends<W: Write>(w: &mut W, base: Option<&str>) -> Result<(), W::Error> {
    if let Some(b) = base {
        concat_str_by(w, " ", [" extends", b])
    } else {
        Ok(())
    }
}
/// Print one record field as a `.property` directive followed by its
/// indented value block (the literal value in triple quotes, or `uninit`
/// when the field has no initializer).
fn print_record_field<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    Field(name, type_info, initial_value): &Field,
) -> Result<(), W::Error> {
    ctx.newline(w)?;
    w.write(".property ")?;
    // Fields without an initializer are flagged for system initialization.
    let qualifier = match initial_value {
        Just(_) => "[public] ",
        Nothing => "[public sys_initial_val] ",
    };
    w.write(qualifier)?;
    print_type_info(w, type_info)?;
    concat_str_by(w, " ", ["", name.as_str(), "="])?;
    ctx.block(w, |c, w| {
        c.newline(w)?;
        match initial_value {
            Just(value) => triple_quotes(w, |w| print_adata(c, w, value))?,
            Nothing => w.write("uninit")?,
        }
        w.write(";")
    })
}
// Print a record definition: the `.record` header (with `[final]` for
// non-abstract records), its span, optional base, and a braced, indented
// field list.
fn print_record_def<W: Write>(
    ctx: &mut Context,
    w: &mut W,
    record: &HhasRecord,
) -> Result<(), W::Error> {
    newline(w)?;
    if record.is_abstract {
        concat_str_by(w, " ", [".record", record.name.to_raw_string()])?;
    } else {
        concat_str_by(w, " ", [".record", "[final]", record.name.to_raw_string()])?;
    }
    w.write(" ")?;
    print_span(w, &record.span)?;
    print_extends(
        w,
        Option::from(record.base.as_ref().map(|b| b.to_raw_string())),
    )?;
    w.write(" ")?;
    braces(w, |w| {
        ctx.block(w, |c, w| {
            concat(w, &record.fields, |w, rf| print_record_field(c, w, rf))
        })?;
        ctx.newline(w)
    })?;
    newline(w)
}
/// Convert an `Expr` to a `String` of the equivalent source code.
///
/// This is a debugging tool abusing a printer written for bytecode
/// emission. It does not support all Hack expressions, and panics
/// on unsupported syntax.
///
/// If you have an `Expr` with positions, you are much better off
/// getting the source code at those positions rather than using this.
pub fn expr_to_string_lossy(mut ctx: Context, expr: &ast::Expr) -> String {
    // Lambdas are normally rejected by the printer; enable the debug path.
    ctx.dump_lambdas = true;
    let env = ExprEnv { codegen_env: None };
    // The printer produces escaped output; unescape it back to raw bytes,
    // then convert lossily since literals need not be valid UTF-8.
    let mut escaped_src = String::new();
    print_expr(&mut ctx, &mut escaped_src, &env, expr).expect("Printing failed");
    let bytes = escaper::unescape_double(&escaped_src).expect("Unescaping failed");
    String::from_utf8_lossy(&bytes).into_owned()
}
| 31.658926 | 100 | 0.482694 |
48f97abb49268eec95be25baa135edfaca7aa074 | 33,600 | //! Helper macros and traits built around
//! [tokio-postgres](https://docs.rs/tokio-postgres/0.5.1/tokio_postgres/index.html) to define
//! queries with human readable parameters and return values.
//!
//! # Example
//!
//! ```
//! # use tokio_postgres::Client;
//! # use postgres_query::{query, FromSqlRow, Result};
//! # fn connect() -> Client { unimplemented!() }
//! # async fn foo() -> Result<()> {
//! // Connect to the database
//! let client: Client = connect(/* ... */);
//!
//! // Construct the query
//! let query = query!(
//! "SELECT age, name FROM people WHERE age >= $min_age",
//! min_age = 18
//! );
//!
//! // Define the structure of the data returned from the query
//! #[derive(FromSqlRow)]
//! struct Person {
//! age: i32,
//! name: String,
//! }
//!
//! // Execute the query
//! let people: Vec<Person> = query.fetch(&client).await?;
//!
//! for person in people {
//! println!("{} is {} years young", person.name, person.age);
//! }
//! # Ok(())
//! # }
//! ```
//!
//! # Queries
//!
//! The preferred way of constructing a new [`Query`] is through the [`query!`] macro. It uses a
//! syntax similar to the `format!(...)` family of macros from the standard library. The first
//! parameter is the SQL query and is always given as a string literal (this might be relaxed in the
//! future). This string literal may contain parameter bindings on the form `$ident` where `ident`
//! is any valid Rust identifier (`$abc`, `$value_123`, etc.).
//!
//! ```
//! # use postgres_query::query;
//! let age = 42;
//! let insert_person = query!(
//! "INSERT INTO people VALUES ($age, $name)",
//! name = "John Wick", // Binds "$name" to "John Wick"
//! age, // Binds "$age" to the value of `age`
//! );
//! ```
//!
//! During compilation the query is converted into the format expected by PostgreSQL: parameter
//! bindings are converted to using numbers ($1, $2, etc.) and the actual parameter values are put
//! into a 1-indexed array. The code snippet above would be expanded into the following:
//!
//! ```
//! # use postgres_query::*;
//! let age = 42;
//! let insert_person = Query::new_static(
//! "INSERT INTO people VALUES ($1, $2)",
//! vec![&age, &"John Wick"],
//! );
//! ```
//!
//!
//! ## Dynamic Queries
//!
//! If necessary, queries may be constructed from `&str`s at runtime instead of the usual
//! compile-time string literals expected by the `query!` macro. This is achieved by using the
//! [`query_dyn!`] macro instead. In addition to dynamic queries, parameter bindings may also be
//! dynamically:
//!
//! ```
//! # use postgres_query::*;
//! let mut sql = "SELECT * FROM people WHERE name = $name".to_string();
//! let mut bindings = Vec::new();
//!
//! // Add a filter at runtime
//! sql += " AND age > $min_age";
//! bindings.push(("min_age", &42 as Parameter));
//!
//! let query: Result<Query> = query_dyn!(
//! &sql,
//! name = "John",
//! ..bindings,
//! );
//! ```
//!
//! Using dynamic queries does introduce some errors that cannot be caught at compile time, such
//! as a parameter in the query not having a matching binding. Because of this the value returned by the
//! [`query_dyn!`] macro is not a `Query` but a `Result<Query>` which carries an error you must
//! handle:
//!
//! ```
//! # use postgres_query::*;
//! let mut sql = "SELECT * FROM people".to_string();
//! sql += " WHERE age <= $max_age AND name = $name";
//!
//! let query: Result<Query> = query_dyn!(
//! &sql,
//! name = "John",
//! // Forgot to bind the parameter `max_age`.
//! // Will result in an error.
//! );
//!
//! assert!(query.is_err());
//! ```
//!
//!
//! # Data Extraction
//!
//! In addition to helping you define new queries this crate provides the [`FromSqlRow`] trait which
//! makes it easy to extract typed values from the resulting rows. The easiest way to implement this
//! trait for new `struct`s is to use the included [`derive(FromSqlRow)`] macro.
//!
//! - If used on a tuple struct, values will be extracted from the corresponding columns based on
//! their position in the tuple.
//! - If used on a struct with named fields, values will be extracted from the column with the same
//! name as the field.
//!
//! ```
//! # use postgres_query::*;
//! #[derive(FromSqlRow)]
//! struct TupleData(i32, String);
//!
//! #[derive(FromSqlRow)]
//! struct NamedData {
//! age: i32,
//! name: String,
//! };
//! ```
//!
//! ## Multi-mapping
//!
//! If you query the same table multiple times it gets tedious to have to redefine structs with the
//! same fields over and over. Preferably we would like to reuse the same definition multiple times.
//! We can do this be utilizing "multi-mapping".
//!
//!
//! ### Partitions
//!
//! Multi-mapping works by splitting the columns of rows returned by a query into multiple
//! partitions (or slices). For example, if we had the query `SELECT books.*, authors.* FROM ...`,
//! we would like to extract the data into two structs: `Book` and `Author`. We accomplish this by
//! looking at the columns returned by the database and splitting them into partitions:
//!
//! ```text
//! Columns: id, title, release_date, genre, id, name, birthyear
//! Partitions: +------------Book-------------+ +------Author-----+
//! ```
//!
//!
//! ### Partitioning schemes
//!
//! There are two supported ways to partition a row: either we specify the number of columns
//! required to populate each struct (in the example above: 4 columns for Book and 3 for author), or
//! we split on the name of a column. The former should generally only be used when you know the
//! number of columns isn't going to change. The latter is less prone to break provided you choose
//! an appropriate column to split on (a good candidate is usually `id` as almost all tables have
//! this as their first
//! column).
//!
//! You choose which partitioning scheme you want to use by using the provided
//! [attributes](./derive.FromSqlRow.html#attributes). In order to accomplish the partitioning in
//! the example above we could split on the column name `id`:
//!
//! ```
//! # use postgres_query::FromSqlRow;
//! #[derive(FromSqlRow)]
//! struct Book {
//! id: i32,
//! title: String,
//! release_date: String,
//! genre: String,
//! }
//!
//! #[derive(FromSqlRow)]
//! struct Author {
//! id: i32,
//! name: String,
//! birthyear: i32,
//! }
//!
//! #[derive(FromSqlRow)]
//! #[row(split)]
//! struct BookAuthor {
//! #[row(flatten, split = "id")]
//! book: Book,
//! #[row(flatten, split = "id")]
//! author: Author,
//! }
//! ```
//!
//! Alternatively, we can make `Author` a part of the `Book` struct:
//!
//! ```
//! # use postgres_query::FromSqlRow;
//! #[derive(FromSqlRow)]
//! struct Author {
//! id: i32,
//! name: String,
//! birthyear: i32,
//! }
//!
//! #[derive(FromSqlRow)]
//! #[row(split)]
//! struct Book {
//! #[row(split = "id")]
//! id: i32,
//! title: String,
//! release_date: String,
//! genre: String,
//!
//! #[row(flatten, split = "id")]
//! author: Author,
//! }
//! ```
//!
//! ### Many-to-one Relationships
//!
//! In the previous examples we had a `Book` that contained an `Author`. This is what is called a
//! many-to-one relationship, since one book only has one author, but many books may share the same
//! author (or so we assume anyway). What if you instead had `Author` an author that contained many
//! `Book`s? We know that one author may write many books, so that is a one-to-many relationship. We
//! can write an extractor for that case as well:
//!
//! ```
//! # use postgres_query::*;
//! # use tokio_postgres::Client;
//! # async fn foo() -> Result<()> {
//! # let client: Client = unimplemented!();
//! #[derive(FromSqlRow)]
//! #[row(split, group)]
//! struct Author {
//! #[row(split = "id", key)]
//! id: i32,
//! name: String,
//! birthyear: i32,
//!
//! #[row(split = "id", merge)]
//! books: Vec<Book>,
//! }
//!
//! #[derive(FromSqlRow)]
//! struct Book {
//! id: i32,
//! title: String,
//! release_date: String,
//! genre: String,
//! }
//!
//! let authors: Vec<Author> = query!(
//!     "SELECT authors.*, books.*
//!      FROM authors
//!      INNER JOIN books ON books.author = authors.id
//!      GROUP BY authors.id"
//! )
//! .fetch(&client)
//! .await?;
//! # Ok(())
//! # }
//! ```
//!
//! See the section on [attributes](./derive.FromSqlRow.html#attributes) for a more advanced
//! in-depth explanation of multi-mapping.
//!
//!
//! # Caching queries
//!
//! From time to time you probably want to execute the same query multiple times, but with different
//! parameters. In times like these we can decrease the load on the database by preparing our
//! queries before executing them. By wrapping a client in a [`Caching`] struct this behaviour is
//! automatically provided for all queries that originate from this crate:
//!
//! ```
//! # use tokio_postgres::Client;
//! # use postgres_query::{query, Result, Caching};
//! # fn connect() -> Client { unimplemented!() }
//! # async fn foo() -> Result<()> {
//! // Connect to the database
//! let client: Client = connect(/* ... */);
//!
//! // Wrap the client in a query cache
//! let cached_client = Caching::new(client);
//!
//! for age in 0..100i32 {
//! let query = query!("SELECT name, weight FROM people WHERE age = $age", age);
//!
//! // The query is prepared and cached the first time it's executed.
//! // All subsequent fetches will use the cached Statement.
//! let people: Vec<(String, i32)> = query.fetch(&cached_client).await?;
//!
//! /* Do something with people */
//! }
//! # Ok(())
//! # }
//! ```
//!
//! [`Query`]: struct.Query.html
//! [`query!`]: macro.query.html
//! [`query_dyn!`]: macro.query_dyn.html
//! [`FromSqlRow`]: extract/trait.FromSqlRow.html
//! [`derive(FromSqlRow)`]: derive.FromSqlRow.html
//! [`Caching`]: client/struct.Caching.html
pub mod client;
pub mod execute;
pub mod extract;
mod error;
mod parse;
use postgres_types::ToSql;
use proc_macro_hack::proc_macro_hack;
use std::ops::Deref;
pub use crate::client::Caching;
pub use crate::error::{Error, Result};
pub use crate::extract::FromSqlRow;
/// Extract values from a row.
///
/// - If used on a tuple struct, values will be extracted from the corresponding columns based on
/// their position in the tuple.
//! - If used on a struct with named fields, values will be extracted from the column with the same
/// name as the field.
///
/// # Example
///
/// ```
/// # use postgres_query::*;
/// #[derive(FromSqlRow)]
/// struct TupleData(i32, String);
///
/// #[derive(FromSqlRow)]
/// struct NamedData {
/// age: i32,
/// name: String,
/// };
/// ```
///
///
/// # Attributes
///
/// Data extraction can be customized by using the `#[row(...)]` attribute. Attributes can be
/// separated into two categories, those which go on the container itself:
///
/// - [`#[row(exact)]`](#rowexact)
/// - [`#[row(split)]`](#rowsplit)
/// - [`#[row(group)]`](#rowgroup)
/// - [`#[row(hash)]`](#rowhash)
///
/// and those which are placed on the container's fields:
///
/// - [`#[row(rename = "...")]`](#rowrename--)
/// - [`#[row(flatten)]`](#rowflatten)
/// - [`#[row(stride = N)]`](#rowstride--n)
/// - [`#[row(split = "...")]`](#rowsplit--)
/// - [`#[row(key)]`](#rowkey)
/// - [`#[row(merge)]`](#rowmerge)
///
///
/// ## Container attributes
///
/// These attributes are put on the struct itself.
///
///
/// ### `#[row(exact)]`
///
/// [Partition](./index.html#multi-mapping) the row according to the number of columns matched by
/// each group.
///
/// Note that no order is forced upon fields within any group. In the example below, that means that
/// even though the `generation` and `origin` fields are flipped relative to the query, the
/// extraction will be successful:
///
/// ```
/// # use postgres_query::{FromSqlRow, Result, query};
/// # use tokio_postgres::Client;
/// # async fn foo() -> Result<()> {
/// # let client: Client = unimplemented!();
/// #[derive(FromSqlRow)]
/// #[row(exact)]
/// struct Family {
/// generation: i32,
/// origin: String,
/// #[row(flatten)]
/// parent: Person,
/// #[row(flatten)]
/// child: Person,
/// }
///
/// #[derive(FromSqlRow)]
/// struct Person {
/// id: i32,
/// name: String,
/// }
///
/// let family = query!(
/// "SELECT
/// 'Germany' as origin, 7 as generation,
/// 1 as id, 'Bob' as name,
/// 2 as id, 'Ike' as name"
/// )
/// .fetch_one::<Family, _>(&client)
/// .await?;
/// # Ok(())
/// # }
/// ```
///
/// ### `#[row(split)]`
///
/// [Partition](./index.html#multi-mapping) the row according to the field's [split
/// points](extract/fn.split_columns_many.html#split-points).
///
/// Split points are introduced by using the [`#[row(split = "...")]`](#rowsplit---1) attribute on
/// fields.
///
/// ```
/// # use postgres_query::{FromSqlRow, Result, query};
/// # use tokio_postgres::Client;
/// # async fn foo() -> Result<()> {
/// # let client: Client = unimplemented!();
/// #[derive(FromSqlRow)]
/// #[row(split)]
/// struct Family {
/// generation: i32,
/// origin: String,
/// #[row(flatten, split = "id")]
/// parent: Person,
/// #[row(flatten, split = "id")]
/// child: Person,
/// }
///
/// #[derive(FromSqlRow)]
/// struct Person {
/// id: i32,
/// name: String,
/// }
///
/// let family = query!(
/// "SELECT
/// 'Germany' as origin, 7 as generation,
/// 1 as id, 'Bob' as name,
/// 2 as id, 'Ike' as name"
/// )
/// .fetch_one::<Family, _>(&client)
/// .await?;
/// # Ok(())
/// # }
/// ```
///
///
/// ### `#[row(group)]`
///
/// Enables one-to-many mapping for the container. One-to-many mapping requires that at least one
/// field has the `#[row(key)]` attribute and that one other field has the `#[row(merge)]` attribute.
///
/// When extracting values from multiple rows, any two **adjacent** rows that are identical on their
/// fields marked with `#[row(key)]` will have their fields tagged with `#[row(merge)]` merged. This
/// means that in order to get the expected relation back, you may need to include a `GROUP BY`
/// statement in your SQL query, hence the name `group`.
///
/// ```
/// # use postgres_query::*;
/// # use tokio_postgres::Client;
/// # async fn foo() -> Result<()> {
/// # let client: Client = unimplemented!();
/// #[derive(Debug, FromSqlRow)]
/// #[row(group)]
/// struct Author {
/// #[row(key)]
/// name: String,
///
/// #[row(merge)]
/// books: Vec<Book>,
/// }
///
/// #[derive(Debug, FromSqlRow)]
/// struct Book {
/// title: String,
/// }
///
/// let authors = query!(
/// "SELECT 'J.R.R. Tolkien' as name, 'The Fellowship of the Ring' as title
/// UNION ALL SELECT 'J.R.R. Tolkien', 'The Two Towers'
/// UNION ALL SELECT 'Andrzej Sapkowski', 'The Last Wish'
/// UNION ALL SELECT 'J.R.R. Tolkien', 'Return of the King'")
/// .fetch::<Author, _>(&client)
/// .await?;
///
/// assert_eq!(authors[0].name, "J.R.R. Tolkien");
/// assert_eq!(authors[0].books[0].title, "The Fellowship of the Ring");
/// assert_eq!(authors[0].books[1].title, "The Two Towers");
///
/// assert_eq!(authors[1].name, "Andrzej Sapkowski");
/// assert_eq!(authors[1].books[0].title, "The Last Wish");
///
/// assert_eq!(authors[2].name, "J.R.R. Tolkien");
/// assert_eq!(authors[2].books[0].title, "Return of the King");
/// # Ok(())
/// # }
/// ```
///
///
/// ### `#[row(hash)]`
///
/// Like `#[row(group)]`, but all previous rows are considered when merging. This is accomplished by
/// using a `HashMap`, hence the name. This implies that all keys have to implement the `Hash` and
/// `Eq` traits:
///
/// ```
/// # use postgres_query::*;
/// # use tokio_postgres::Client;
/// # async fn foo() -> Result<()> {
/// # let client: Client = unimplemented!();
/// #[derive(Debug, FromSqlRow)]
/// #[row(hash)]
/// struct Author {
/// #[row(key)]
/// name: String,
///
/// #[row(merge)]
/// books: Vec<Book>,
/// }
///
/// #[derive(Debug, FromSqlRow)]
/// struct Book {
/// title: String,
/// }
///
/// let authors = query!(
/// "SELECT 'J.R.R. Tolkien' as name, 'The Fellowship of the Ring' as title
/// UNION ALL SELECT 'J.R.R. Tolkien', 'The Two Towers'
/// UNION ALL SELECT 'Andrzej Sapkowski', 'The Last Wish'
/// UNION ALL SELECT 'J.R.R. Tolkien', 'Return of the King'")
/// .fetch::<Author, _>(&client)
/// .await?;
///
/// assert_eq!(authors[0].name, "J.R.R. Tolkien");
/// assert_eq!(authors[0].books[0].title, "The Fellowship of the Ring");
/// assert_eq!(authors[0].books[1].title, "The Two Towers");
/// assert_eq!(authors[0].books[2].title, "Return of the King");
///
/// assert_eq!(authors[1].name, "Andrzej Sapkowski");
/// assert_eq!(authors[1].books[0].title, "The Last Wish");
/// # Ok(())
/// # }
/// ```
///
/// ## Field attributes
///
/// These attributes are put on the fields of a container.
///
///
/// ### `#[row(rename = "...")]`
///
/// Use a name other than that of the field when looking up the name of the column.
///
/// ```
/// # use postgres_query::FromSqlRow;
/// #[derive(FromSqlRow)]
/// struct Person {
/// age: i32,
/// // matches the column named "first_name" instead of "name"
/// #[row(rename = "first_name")]
/// name: String,
/// }
/// ```
///
/// ### `#[row(flatten)]`
///
/// Flatten the contents of this field into its container by recursively calling `FromSqlRow` on the
/// field's type. This removes one level of nesting:
///
/// ```
/// # use postgres_query::{FromSqlRow, query, Result};
/// # use tokio_postgres::Client;
/// # async fn foo() -> Result<()> {
/// # let client: Client = unimplemented!();
/// #[derive(FromSqlRow)]
/// struct Customer {
/// id: i32,
/// #[row(flatten)]
/// info: Person,
/// }
///
/// #[derive(FromSqlRow)]
/// struct Person {
/// name: String,
/// age: i32
/// }
///
/// let customer: Customer = query!("SELECT 14 as id, 'Bob' as name, 47 as age")
/// .fetch_one(&client)
/// .await?;
///
/// assert_eq!(customer.id, 14);
/// assert_eq!(customer.info.name, "Bob");
/// assert_eq!(customer.info.age, 47);
/// # Ok(())
/// # }
/// ```
///
/// ### `#[row(stride = N)]`
///
/// Puts this field into a partition with exactly `N` columns. Only available when using the
/// `#[row(exact)]` attribute on the container.
///
/// ```
/// # use postgres_query::{FromSqlRow, query, Result};
/// # use tokio_postgres::Client;
/// # async fn foo() -> Result<()> {
/// # let client: Client = unimplemented!();
/// #[derive(Debug, FromSqlRow)]
/// struct Person {
/// id: i32,
/// name: String,
/// }
///
/// #[derive(Debug, FromSqlRow)]
/// #[row(exact)]
/// struct Family {
/// // Matches first 4 columns
/// #[row(flatten, stride = 4)]
/// parent: Person,
/// // Matches last 3 columns
/// #[row(flatten, stride = 3)]
/// child: Person,
/// }
///
/// let family = query!(
/// "SELECT
/// 11 as generation,
/// 1 as id, 'Bob' as name, 42 as age,
/// 2 as id, 'Ike' as name, 14 as age"
/// )
/// .fetch_one::<Family, _>(&client)
/// .await?;
///
/// assert_eq!(family.parent.id, 1);
/// assert_eq!(family.parent.name, "Bob");
/// assert_eq!(family.child.id, 2);
/// assert_eq!(family.child.name, "Ike");
/// # Ok(())
/// # }
/// ```
///
/// ### `#[row(split = "...")]`
///
/// Introduce an additional [split](extract/fn.split_columns_many.html#split-points) right
/// before this field. Requires that the container has the `split` attribute as well.
///
/// Intuitively this splits the row in two parts: every field before this attribute matches the
/// columns before the split and every field afterwards matches the remaining columns.
///
/// ```
/// # use postgres_query::{FromSqlRow};
/// #[derive(FromSqlRow)]
/// #[row(split)]
/// struct User {
/// // `id` and `name` will only match the columns before `email`
/// id: i32,
/// name: String,
/// #[row(split = "email")]
/// // `email`, `address` and `shoe_size` will only
/// // match the columns after and including `email`
/// email: String,
/// address: String,
/// shoe_size: i32,
/// }
/// ```
///
/// Note that the first split always matches the first occurrence of that column. This can result in some
/// subtle bugs:
///
/// ```
/// # use postgres_query::{FromSqlRow, query};
/// #[derive(FromSqlRow)]
/// #[row(split)]
/// struct Family {
/// #[row(flatten)]
/// parent: Person,
/// #[row(flatten, split = "id")]
/// child: Person,
/// }
///
/// #[derive(FromSqlRow)]
/// struct Person {
/// name: String,
/// age: i32
/// }
///
/// let query = query!("SELECT parent.*, child.* FROM ...");
///
/// // Imagine the query above results in the following columns:
/// //
/// // Columns: id, name, id, name
/// // Splits: |
/// // Partitions: +-parent-+ +-----child------+
/// ```
///
/// The split causes `parent` to match against all columns before the first `id`, ie. an empty
/// partition. This would cause an error when executing the query.
///
/// A correct split would look like this:
///
/// ```
/// # use postgres_query::{FromSqlRow, query};
/// # #[derive(FromSqlRow)] struct Person;
/// #[derive(FromSqlRow)]
/// #[row(split)]
/// struct Family {
/// #[row(flatten, split = "id")]
/// parent: Person,
/// #[row(flatten, split = "id")]
/// child: Person,
/// }
/// ```
///
///
/// ### `#[row(key)]`
///
/// Specifies this field to be a `key` field. `key` fields are compared against each other when
/// extracting values from multiple rows. Rows are merged if the key fields in each row are
/// identical. You may have multiple `key` fields within a single container, but none of them may
/// have the `#[row(merge)]` attribute. Multiple `key` fields will be treated as a tuple in
/// comparisons.
///
///
/// ### `#[row(merge)]`
///
/// Specifies this field to be a `merge` field. This requires that the field's type implements the
/// [`Merge`] trait. When two rows have been deemed to be equal based on the `key` fields, the
/// corresponding `merge` fields in those rows will be merged. You may specify multiple `merge`
/// fields within one container, but none of them may have the `#[row(key)]` attribute.
///
/// [`Merge`]: extract/trait.Merge.html
pub use postgres_query_macro::FromSqlRow;
/// Constructs a new query at compile-time. See also `query_dyn!`.
///
/// # Usage
///
/// This macro expands to an expression with the type `Query`.
///
/// The first parameter is the SQL query and is always given as a string literal. This string
/// literal may contain parameter bindings on the form `$ident` where `ident` is any valid Rust
/// identifier (`$abc`, `$value_123`, etc.). The order of the parameters does not matter.
///
/// ```
/// # use postgres_query::query;
/// let age = 42;
/// let insert_person = query!(
/// "INSERT INTO people VALUES ($age, $name)",
/// name = "John Wick", // Binds "$name" to "John Wick"
/// age, // Binds "$age" to the value of `age`
/// );
/// ```
///
/// During compilation the query is converted into the format expected by PostgreSQL: parameter
/// bindings are converted to using numbers (`$1`, `$2`, etc.) and the actual parameter values are
/// put into a 1-indexed array. The code snippet above would be expanded into the following:
///
/// ```
/// # use postgres_query::*;
/// let age = 42;
/// let insert_person = Query::new_static(
/// "INSERT INTO people VALUES ($1, $2)",
/// vec![&age, &"John Wick"],
/// );
/// ```
#[macro_export]
macro_rules! query {
($($tt:tt)*) => {
$crate::__query_static!($($tt)*)
};
}
/// Constructs a new query dynamically at runtime. See also `query!`.
///
/// # Usage
///
/// This macro expands to an expression with the type `Result<Query>`.
///
/// The first parameter is the SQL query and is always given as a `&str`. This string may contain
/// parameter bindings on the form `$ident` where `ident` is any valid Rust identifier (`$abc`,
/// `$value_123`, etc.). The order of the parameters does not matter.
///
/// ```
/// # use postgres_query::{query_dyn, Result};
/// # fn foo() -> Result<()> {
/// // We can construct the actual query at runtime
/// let mut sql = "INSERT INTO people VALUES".to_owned();
/// sql.push_str("($age, $name)");
///
/// let age = 42;
///
/// let insert_person = query_dyn!(
/// &sql,
/// name = "John Wick", // Binds "$name" to "John Wick"
/// age, // Binds "$age" to the value of `age`
/// )?;
/// # Ok(())
/// # }
/// ```
///
/// The query and all the parameters are passed into `Query::parse`, so the above would be expanded
/// into:
///
/// ```
/// # use postgres_query::Query;
/// // We can construct the actual query at runtime
/// let mut sql = "INSERT INTO people VALUES".to_string();
/// sql.push_str("($age, $name)");
///
/// let age = 42;
///
/// let insert_person = Query::parse(
/// &sql,
/// &[("name", &"John Wick"), ("age", &age)],
/// );
/// ```
///
///
/// ## Dynamic Binding
///
/// Optionally, you may also choose to include additional bindings at runtime by using the
/// `..bindings` syntax. This is supported for any type that implements `IntoIterator<Item = (&str,
/// Parameter)>`, ie. `Vec<(&str, Parameter)>`, `HashMap<&str, Parameter>`, `Option<(&str,
/// Parameter)>`, iterators, and so on.
///
/// Dynamic bindings may be mixed with static bindings:
///
/// ```
/// # use postgres_query::{query_dyn, Parameter, Result};
/// # fn foo() -> Result<()> {
/// let mut bindings = Vec::new();
///
/// // We use the `as Parameter` to please the type checker.
/// // Alternatively, we could specify the type for bindings: `Vec<(&str, Parameter)>`.
/// bindings.push(("age", &42 as Parameter));
/// bindings.push(("name", &"John Wick" as Parameter));
///
/// let sql = "INSERT INTO people VALUES ($age, $name, $height)".to_string();
/// let insert_person = query_dyn!(
/// &sql,
/// height = 192,
/// ..bindings,
/// )?;
/// # Ok(())
/// # }
/// ```
///
///
/// # A larger example
///
/// Let's say that we wanted to dynamically add filters to our query:
///
/// ```
/// # use postgres_query::{query_dyn, Parameter, Query, Result};
/// # fn foo() -> Result<()> {
/// // We have the query we want to execute
/// let mut sql = "SELECT * FROM people".to_string();
///
/// // and some filters we got from the user.
/// let age_filter: Option<i32> = Some(32);
/// let name_filter: Option<&str> = None;
///
/// // Then we dynamically build a list of filters and bindings to use:
/// let mut filters = Vec::new();
/// let mut bindings = Vec::new();
///
/// // We add the filters as needed.
/// if let Some(age) = age_filter.as_ref() {
/// filters.push("age > $min_age");
/// bindings.push(("min_age", age as Parameter));
/// }
///
/// if let Some(name) = name_filter.as_ref() {
/// filters.push("name LIKE $name");
/// bindings.push(("name", name as Parameter));
/// }
///
/// // And add them to the query.
/// if filters.len() > 0 {
/// sql += &format!(" WHERE {}", filters.join(" AND "));
/// }
///
/// // Then we can use it as normal.
/// let query: Query = query_dyn!(&sql, ..bindings)?;
/// # Ok(())
/// # }
/// ```
#[macro_export]
macro_rules! query_dyn {
($($tt:tt)*) => {
$crate::__query_dynamic!($($tt)*)
};
}
#[proc_macro_hack]
#[doc(hidden)]
pub use postgres_query_macro::{query_dynamic as __query_dynamic, query_static as __query_static};
/// A shorthand for types that can be treated as SQL parameters.
///
/// A common use case for this type alias is when using dynamic bindings and you have to please the
/// type checker:
///
/// ```
/// # use postgres_query::{Parameter, query_dyn, Result};
/// # fn foo() -> Result<()> {
/// let mut bindings = Vec::new();
///
/// // Without the `as Parameter` the compiler assumes the type to be `&i32`.
/// bindings.push(("age", &32 as Parameter));
///
/// // Which would cause problems when adding something that is not an integer.
/// bindings.push(("name", &"John" as Parameter));
///
/// let query = query_dyn!(
/// "SELECT * FROM people WHERE age > $age AND name = $name",
/// ..bindings
/// )?;
/// # Ok(())
/// # }
/// ```
///
/// Alternatively we could just set the type on the container explicitly:
///
/// ```
/// # use postgres_query::Parameter;
/// let mut bindings: Vec<(&str, Parameter)> = Vec::new();
/// ```
pub type Parameter<'a> = &'a (dyn ToSql + Sync);
/// A static query with dynamic parameters.
///
/// # Usage
///
/// ## Constructing
///
/// The preferred way of constructing a [`Query`] is by using the [`query!`] and [`query_dyn!`]
/// macros.
///
/// You may also use the `Query::parse`, `Query::new_static` or `Query::new` methods.
///
///
/// ## Executing
///
/// When executing the query you have two options, either:
///
/// 1. use the provided methods: `execute`, `fetch`, `query`, etc.
/// 2. use the `sql` and `parameters` fields as arguments to the standard [`Client`] methods
///
/// ```
/// # use tokio_postgres::{Client, Row};
/// # use postgres_query::{query, FromSqlRow, Result};
/// # fn connect() -> Client { unimplemented!() }
/// # async fn foo() -> Result<(), Box<dyn std::error::Error>> {
/// #[derive(FromSqlRow)]
/// struct Person {
/// age: i32,
/// name: String,
/// }
///
/// let client: Client = connect(/* ... */);
/// let query = query!("SELECT age, name FROM people");
///
/// // Option 1
/// let people: Vec<Person> = query.fetch(&client).await?;
///
/// // Option 2
/// let rows: Vec<Row> = client.query(query.sql(), query.parameters()).await?;
/// let people: Vec<Person> = Person::from_row_multi(&rows)?;
/// # Ok(())
/// # }
/// ```
///
/// [`Query`]: struct.Query.html
/// [`query!`]: macro.query.html
/// [`query_dyn!`]: macro.query_dyn.html
/// [`Client`]: https://docs.rs/tokio-postgres/0.5.1/tokio_postgres/struct.Client.html
#[derive(Debug, Clone)]
pub struct Query<'a> {
sql: Sql,
parameters: Vec<Parameter<'a>>,
}
#[derive(Debug, Clone)]
enum Sql {
Static(&'static str),
Dynamic(String),
}
impl<'a> Query<'a> {
/// Create a new query an already prepared string.
///
/// IMPORTANT: This does not allow you to pass named parameter bindings (`$name`, `$abc_123`,
/// etc.). For that behaviour, refer to the `query!` macro. Instead bindings and parameters are
/// given in the same format required by `tokio_postgres` (`$1`, `$2`, ...).
pub fn new(sql: String, parameters: Vec<Parameter<'a>>) -> Query<'a> {
Query {
sql: Sql::Dynamic(sql),
parameters,
}
}
/// Create a new query with a static query string.
///
/// IMPORTANT: This does not allow you to pass named parameter bindings (`$name`, `$abc_123`,
/// etc.), For that behaviour, refer to the `query_dyn!` macro. Instead bindings and parameters
/// are given in the same format required by `tokio_postgres` (`$1`, `$2`, ...).
pub fn new_static(sql: &'static str, parameters: Vec<Parameter<'a>>) -> Query<'a> {
Query {
sql: Sql::Static(sql),
parameters,
}
}
/// Parses a string that may contain parameter bindings on the form `$abc_123`. This is the same
/// function that is called when passing dynamically generated strings to the `query_dyn!`
/// macro.
///
/// Because this is a function there will some runtime overhead unlike the `query!` macro which
/// has zero overhead when working with string literals.
pub fn parse(text: &str, bindings: &[(&str, Parameter<'a>)]) -> Result<Query<'a>> {
let (sql, parameters) = parse::parse(text, bindings)?;
Ok(Query {
sql: Sql::Dynamic(sql),
parameters,
})
}
/// Get this query as an SQL string.
pub fn sql(&'a self) -> &'a str {
&self.sql
}
/// Get the parameters of this query in the order expected by the query returned by
/// `Query::sql`.
pub fn parameters(&'a self) -> &[Parameter<'a>] {
&self.parameters
}
}
impl Deref for Sql {
type Target = str;
fn deref(&self) -> &Self::Target {
match self {
Sql::Static(text) => text,
Sql::Dynamic(text) => &text,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::error::ParseError;
macro_rules! is_match {
($expr:expr, $pattern:pat) => {
match $expr {
$pattern => true,
_ => false,
}
};
}
#[test]
fn parse_query_without_bindings() {
let query = Query::parse("SELECT 123, 'abc'", &[]).unwrap();
assert_eq!(query.sql(), "SELECT 123, 'abc'");
}
#[test]
fn parse_query_single_binding() {
let query = Query::parse("SELECT $number", &[("number", &123)]).unwrap();
assert_eq!(query.sql(), "SELECT $1");
}
#[test]
fn parse_query_missing_identifier_eof() {
let query = Query::parse("SELECT $", &[]);
assert!(is_match!(
query.unwrap_err(),
Error::Parse(ParseError::EmptyIdentifier { found: None })
));
}
#[test]
fn parse_query_missing_identifier() {
let query = Query::parse("SELECT $ FROM users", &[]);
assert!(is_match!(
query.unwrap_err(),
Error::Parse(ParseError::EmptyIdentifier { found: Some(' ') })
));
}
}
| 30.352304 | 101 | 0.588393 |
6168a95460accc9bfd57352addb1f58c3dc3dc85 | 6,394 | #![allow(non_snake_case)]
use super::test_harness::*;
use migration_connector::*;
#[test_each_connector]
async fn single_watch_migrations_must_work(api: &TestApi) {
let migration_persistence = api.migration_persistence();
let steps = vec![
create_model_step("Test"),
create_field_step("Test", "id", "Int"),
create_id_directive_step("Test", "id"),
];
let db_schema_1 = api.apply_migration(steps.clone(), "watch-0001").await.sql_schema;
let migrations = migration_persistence.load_all().await.unwrap();
assert_eq!(migrations.len(), 1);
assert_eq!(migrations.first().unwrap().name, "watch-0001");
let custom_migration_id = "a-custom-migration-id";
let db_schema_2 = api.apply_migration(steps, custom_migration_id).await.sql_schema;
assert_eq!(db_schema_1, db_schema_2);
let migrations = migration_persistence.load_all().await.unwrap();
assert_eq!(migrations.len(), 2);
assert_eq!(migrations[0].name, "watch-0001");
assert_eq!(migrations[1].name, custom_migration_id);
assert_eq!(migrations[1].status, MigrationStatus::MigrationSuccess);
assert!(migrations[1].finished_at.is_some());
}
#[test_each_connector]
async fn multiple_watch_migrations_must_work(api: &TestApi) {
let migration_persistence = api.migration_persistence();
let steps1 = vec![
create_model_step("Test"),
create_field_step("Test", "id", "Int"),
create_id_directive_step("Test", "id"),
];
api.apply_migration(steps1.clone(), "watch-0001").await;
let migrations = migration_persistence.load_all().await.unwrap();
assert_eq!(migrations.len(), 1);
assert_eq!(migrations[0].name, "watch-0001");
let steps2 = vec![create_field_step("Test", "field", "String")];
let db_schema_2 = api.apply_migration(steps2.clone(), "watch-0002").await.sql_schema;
let migrations = migration_persistence.load_all().await.unwrap();
assert_eq!(migrations.len(), 2);
assert_eq!(migrations[0].name, "watch-0001");
assert_eq!(migrations[1].name, "watch-0002");
let custom_migration_id = "a-custom-migration-id";
let mut final_steps = Vec::new();
final_steps.append(&mut steps1.clone());
final_steps.append(&mut steps2.clone());
let final_db_schema = api.apply_migration(final_steps, custom_migration_id).await.sql_schema;
assert_eq!(db_schema_2, final_db_schema);
let migrations = migration_persistence.load_all().await.unwrap();
assert_eq!(migrations.len(), 3);
assert_eq!(migrations[0].name, "watch-0001");
assert_eq!(migrations[1].name, "watch-0002");
assert_eq!(migrations[2].name, custom_migration_id);
assert_eq!(migrations[2].status, MigrationStatus::MigrationSuccess);
assert!(migrations[2].finished_at.is_some());
}
#[test_each_connector]
async fn steps_equivalence_criteria_is_satisfied_when_leaving_watch_mode(api: &TestApi) {
let migration_persistence = api.migration_persistence();
let steps1 = vec![
create_model_step("Test"),
create_field_step("Test", "id", "Int"),
create_id_directive_step("Test", "id"),
];
let db_schema1 = api.apply_migration(steps1.clone(), "watch-0001").await.sql_schema;
let steps2 = vec![create_field_step("Test", "field", "String")];
api.apply_migration(steps2.clone(), "watch-0002").await;
let steps3 = vec![delete_field_step("Test", "field")];
api.apply_migration(steps3.clone(), "watch-0003").await;
let custom_migration_id = "a-custom-migration-id";
let mut final_steps = Vec::new();
final_steps.append(&mut steps1.clone()); // steps2 and steps3 eliminate each other
let final_db_schema = api.apply_migration(final_steps, custom_migration_id).await.sql_schema;
assert_eq!(db_schema1, final_db_schema);
let migrations = migration_persistence.load_all().await.unwrap();
assert_eq!(migrations[0].name, "watch-0001");
assert_eq!(migrations[1].name, "watch-0002");
assert_eq!(migrations[2].name, "watch-0003");
assert_eq!(migrations[3].name, custom_migration_id);
}
#[test_each_connector]
async fn must_handle_additional_steps_when_transitioning_out_of_watch_mode(api: &TestApi) {
let migration_persistence = api.migration_persistence();
let steps1 = vec![
create_model_step("Test"),
create_field_step("Test", "id", "Int"),
create_id_directive_step("Test", "id"),
];
api.apply_migration(steps1.clone(), "watch-0001").await;
let steps2 = vec![create_field_step("Test", "field1", "String")];
api.apply_migration(steps2.clone(), "watch-0002").await;
let custom_migration_id = "a-custom-migration-id";
let additional_steps = vec![create_field_step("Test", "field2", "String")];
let mut final_steps = Vec::new();
final_steps.append(&mut steps1.clone());
final_steps.append(&mut steps2.clone());
final_steps.append(&mut additional_steps.clone());
let final_db_schema = api.apply_migration(final_steps, custom_migration_id).await.sql_schema;
assert_eq!(final_db_schema.tables.len(), 1);
let table = final_db_schema.table_bang("Test");
assert_eq!(table.columns.len(), 3);
table.column_bang("id");
table.column_bang("field1");
table.column_bang("field2");
let migrations = migration_persistence.load_all().await.unwrap();
assert_eq!(migrations[0].name, "watch-0001");
assert_eq!(migrations[1].name, "watch-0002");
assert_eq!(migrations[2].name, custom_migration_id);
}
#[test_each_connector]
async fn applying_an_already_applied_migration_must_return_an_error(api: &TestApi) -> Result<(), anyhow::Error> {
let steps = vec![
create_model_step("Test"),
create_field_step("Test", "id", "Int"),
create_id_directive_step("Test", "id"),
];
let migration_id = "duplicate-migration";
let input = ApplyBuilder::new()
.migration_id(Some(migration_id.to_owned()))
.steps(Some(steps))
.force(Some(true))
.build();
api.apply_migration_with(&input).await?;
assert_eq!(
api.apply_migration_with(&input)
.await
.map_err(|err| err.to_string())
.unwrap_err(),
"Failure during a migration command: Error in command input. (error: Invariant violation: the migration with id `duplicate-migration` has already been applied.)",
);
Ok(())
}
| 36.124294 | 170 | 0.692368 |
7a61e8c8056870b89807759d178c7f8f77a67566 | 22,755 | // Copyright (C) 2021 Ben Stern
// SPDX-License-Identifier: MIT OR Apache-2.0
#![forbid(unsafe_code)]
use std::cmp::Ordering;
use std::convert::TryFrom;
use std::error::Error;
use std::fmt::{Display, Debug, Error as FmtError, Formatter};
use std::str::FromStr;
use lazy_static::lazy_static;
use rand::{thread_rng, Rng};
use regex::{Captures, Regex};
use serde::{Deserialize, Serialize};
lazy_static! {
// ?x ignores space/comments in the regex, not in the string we're checking
static ref DICE_RE: Regex = Regex::new(r"(?xi) # ignore case
^
(?P<count> [1-9]\d*)?d(?P<sides> f|(?: [1-9]\d*)) # NdM/dM/NdF/dF
(?:/ (?P<updown> [HL]) (?P<amount> [1-9]\d*))?
(?P<exploding> ! (?P<fuse> [0-9]+)?)?
$
").expect("Couldn't compile DICE_RE");
}
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiceParseError {
/// An attempt was made to create zero dice.
ZeroCount,
/// A fuse of 1 was provided; such a fuse would always explode.
ShortFuse,
/// An impossible fuse was provided; such a fuse would never explode and
/// indicates a logic error.
LongFuse(u16),
/// More dice were kept than rolled.
TooManyKept(isize),
/// Fate dice and d1s can't explode.
CannotExplode,
/// Dice were requested with more sides than [Dice::SIDES_LIMIT].
TooManySides(u16),
/// A request for dice with no sides.
ZeroSides,
/// More than [Dice::COUNT_LIMIT] were requested.
TooManyDice(usize),
/// Not in the correct format (\[`n`\]d\[`m`]\[/<H|L>`keep`\]\[!\[`fuse`\]\]).
Unparseable,
/// The dice-matching regular expression matched when it shouldn't have,
/// which is probably a bug in `ndm`.
Regex
}
impl Eq for DiceParseError {}
impl Display for DiceParseError {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {
write!(fmt, "{:?}", self)
}
}
impl Error for DiceParseError {}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct Dice {
count: usize,
sides: u16,
keep: isize,
fuse: u16,
rolls: Vec<u16>,
total: i32,
}
impl Dice {
    /// There's no compelling reason to roll more than 100 dice at a time. I
    /// can't really see why anyone would want to roll more than about 20 at a
    /// time, but 100d6 is a sort of obvious thing for people to do.
    pub const COUNT_LIMIT: usize = 100;

    /// There's no good reason to want anything bigger than a d1000. Even that
    /// is pushing it, but I can think of games that wanted d1000s for no good
    /// reason.
    pub const SIDES_LIMIT: u16 = 1000;

    /// Roll (simple) dice. Dice are rolled at creation time and not
    /// otherwise modified.
    ///
    /// - `n` is the number of dice and must be non-zero
    /// - `m` is the number of sides; zero (0) is treated as a request to create [Fudge/Fate](https://en.wikipedia.org/wiki/Fudge_(role-playing_game_system)#Fudge_dice) dice
    ///
    /// Dice created this way don't explode and are all kept.
    ///
    /// ```
    /// use ndm::Dice;
    ///
    /// // create 1d6
    /// let d6 = Dice::new(1, 6);
    /// ```
    pub fn new(n: usize, m: u16) -> Result<Self, DiceParseError> {
        Dice::new_extended(n, m, 0, 0)
    }

    /// Roll "exploding" dice. Dice are rolled at creation time and not
    /// otherwise modified.
    ///
    /// Any die which is rolls a value greater than or
    /// equal to `fuse` will be added to the total and rolled again.
    ///
    /// Dice created this way are all kept.
    ///
    /// One-sided dice can't explode, because if they did, they would never
    /// stop. [Fudge/Fate](https://en.wikipedia.org/wiki/Fudge_(role-playing_game_system)#Fudge_dice)
    /// dice can't explode either, because it doesn't make sense for them to.
    ///
    /// ```
    /// use ndm::Dice;
    ///
    /// // create 1d6
    /// let d6_explode_5 = Dice::new_exploding(1, 6, 5);
    /// ```
    pub fn new_exploding(count: usize, sides: u16, fuse: u16) -> Result<Self, DiceParseError> {
        Dice::new_extended(count, sides, 0, fuse)
    }

    /// Roll dice, adding only the highest or lowest `n` rolls to the total.
    /// Dice are rolled at creation time and not otherwise modified.
    ///
    /// If `n` < 0, the `n` lowest dice are kept. If `n` > 0, the `n` highest
    /// dice are kept. If `n` == 0, all dice are kept.
    ///
    /// Dice created this way don't explode.
    ///
    /// ```
    /// use ndm::Dice;
    ///
    /// // Roll 4 six-sided dice, keeping the highest 3.
    /// let wis = Dice::new_keep_n(4, 6, 3);
    /// // Roll 2 twenty-sided dice, keeping the lower roll.
    /// let disadvantage = Dice::new_keep_n(2, 20, -1);
    /// ```
    pub fn new_keep_n(count: usize, sides: u16, n: isize) -> Result<Self, DiceParseError> {
        Dice::new_extended(count, sides, n, 0)
    }

    /// Roll dice which may explode and optionally use some of the dice
    /// when calculating the total.
    /// Dice are rolled at creation time and not otherwise modified.
    ///
    /// If `keep` < 0, the `keep` lowest dice are kept. If `keep` > 0, the
    /// `keep` highest dice are kept. If `keep` == 0, all dice are kept.
    ///
    /// Any die which is rolls a value greater than or
    /// equal to `fuse` will be added to the total and rolled again.
    ///
    /// One-sided dice can't explode, because if they did, they would never
    /// stop. [Fudge/Fate](https://en.wikipedia.org/wiki/Fudge_(role-playing_game_system)#Fudge_dice)
    /// dice can't explode either, because it doesn't make sense for them to.
    ///
    /// ```
    /// use ndm::Dice;
    ///
    /// // Roll 8 sixteen-sided dice, keeping the highest 3 but exploding on 4 or
    /// // higher.
    /// let dice = Dice::new_extended(8, 16, 3, 4);
    /// ```
    pub fn new_extended(count: usize, sides: u16, keep: isize, fuse: u16) -> Result<Self, DiceParseError> {
        //println!("Called with count: {}, sides: {}, keep: {}, fuse: {}", count, sides, keep, fuse);
        // ---- Argument validation, before any rolling happens. ----
        if sides > Self::SIDES_LIMIT {
            return Err(DiceParseError::TooManySides(sides));
        } else if (sides < 2) && (fuse != 0) {
            // d1 and Fate (d0) dice are never allowed to explode.
            return Err(DiceParseError::CannotExplode);
        }
        if count > Self::COUNT_LIMIT {
            return Err(DiceParseError::TooManyDice(count));
        } else if count == 0 {
            return Err(DiceParseError::ZeroCount);
        }
        if fuse == 1 {
            // Every roll is >= 1, so a fuse of 1 would re-roll forever.
            return Err(DiceParseError::ShortFuse);
        } else if fuse > sides {
            // A fuse above the maximum face could never trigger.
            return Err(DiceParseError::LongFuse(fuse));
        }
        // Can't keep more dice (in either direction) than were requested.
        if ((keep < 0) && (count < ((-keep) as usize))) || ((keep > 0) && (count < (keep as usize))) {
            return Err(DiceParseError::TooManyKept(keep));
        }
        let mut rolls: Vec<u16>;
        let mut rng = thread_rng();
        let total = match sides {
            0 => {
                // Fate/Fudge dice: store a 3-bucket histogram instead of raw
                // rolls — rolls[0] counts minuses, rolls[1] blanks, rolls[2]
                // pluses. Each die is worth -1, 0, or +1.
                rolls = vec![0; 3];
                for _ in 0 .. count {
                    let last_roll = rng.gen_range(0 ..= 2);
                    rolls[last_roll as usize] += 1;
                }
                // This is an optimization for the way Fate dice are stored and
                // is therefore suspect as premature. On the other hand, it
                // works (and there's a test for it).
                match keep.cmp(&0) {
                    Ordering::Less => {
                        // Keeping the -keep lowest results.
                        if rolls[0] as isize >= -keep {
                            // All kept dice are minuses: total = -(-keep) = keep.
                            keep as i32
                        } else if (rolls[0] + rolls[1]) as isize >= -keep {
                            // All minuses kept, remainder filled with blanks.
                            -(rolls[0] as i32)
                        } else {
                            // All minuses and blanks kept, the rest are pluses:
                            // (-keep - rolls[0] - rolls[1]) pluses minus rolls[0]
                            // minuses simplifies to the expression below.
                            (-keep as i32) - (rolls[1] as i32) - 2 * (rolls[0] as i32)
                        }
                    },
                    Ordering::Greater => {
                        // Keeping the `keep` highest results (mirror image of
                        // the branch above).
                        if rolls[2] as isize >= keep {
                            keep as i32
                        } else if (rolls[1] + rolls[2]) as isize >= keep {
                            rolls[2] as i32
                        } else {
                            (-keep as i32) + (rolls[1] as i32) + 2 * (rolls[2] as i32) as i32
                        }
                    },
                    // Keep everything: pluses minus minuses.
                    Ordering::Equal => (rolls[2] as i32) - (rolls[0] as i32)
                }
            },
            1 => {
                // One-sided dice always roll 1, so nothing is stored; the
                // total is just how many dice were kept.
                rolls = Vec::with_capacity(0);
                match keep.cmp(&0) {
                    Ordering::Less => -keep as i32,
                    Ordering::Greater => keep as i32,
                    Ordering::Equal => count as i32,
                }
            },
            _ => {
                // Ordinary dice: roll each die, appending any explosions
                // immediately after the die that triggered them.
                rolls = Vec::new();
                for _ in 0 .. count {
                    let mut last_roll = rng.gen_range(1 ..= sides);
                    rolls.push(last_roll);
                    if fuse > 1 {
                        // Keep re-rolling while the result is at/above the fuse.
                        while last_roll >= fuse {
                            last_roll = rng.gen_range(1 ..= sides);
                            rolls.push(last_roll);
                        }
                    }
                }
                // Sorting makes the keep-lowest/keep-highest slices below (and
                // in `rolls()`) simple range lookups.
                if keep != 0 {
                    rolls.sort_unstable();
                }
                let range = match keep.cmp(&0) {
                    Ordering::Less => 0 .. (-keep as usize),
                    Ordering::Greater => rolls.len() - (keep as usize) .. rolls.len(),
                    Ordering::Equal => 0 .. rolls.len(),
                };
                rolls[range].iter().map(|&x| x as i32).sum()
            }
        };
        Ok(Dice { count, sides, fuse, rolls, keep, total })
    }

    /// The net value of this roll (after dropping any dice that weren't
    /// [kept](Self::kept) and adding any that met or exceeded the
    /// [fuse](Self::fuse)).
    pub fn total(&self) -> i32 { self.total }

    /// The number of sides on the dice. Fate/Fudge dice have zero (0) sides.
    pub fn sides(&self) -> u16 { self.sides }

    /// The number of dice requested. This may differ from the number of dice
    /// rolled (as returned by [rolls](Self::rolls) and
    /// [all_rolls](Self::all_rolls)) if any dice exploded (see
    /// [fuse](Self::fuse)), or if any weren't kept (see [kept](Self::kept)).
    /// This will always be at least one (1).
    pub fn count(&self) -> usize { self.count }

    /// The [kept](Self::kept) rolls. This may be less than the number of dice
    /// requested if some weren't kept, or more if some rolls met or exceeded
    /// the [fuse](Self::fuse). See [all_rolls](Self::all_rolls) for all dice
    /// rolled.
    ///
    /// There are two special cases:
    /// * Fudge/Fate dice always return a reference to a [Vec] of length 3:
    /// * `[0]` is the number of minuses or failures
    /// * `[1]` is the number of zeroes or neutral results
    /// * `[2]` is the number of pluses or successes
    /// * One-sided dice (d1) always return an empty [Vec], since they can't
    /// roll anything but a one (1).
    ///
    /// Note that neither Fudge/Fate dice nor `d1`s can "explode". Also note
    /// that for these dice, the [total](Self::total) is usually the most
    /// interesting value.
    pub fn rolls(&self) -> &[u16] {
        if self.sides > 1 {
            // `rolls` was sorted at construction whenever keep != 0, so the
            // kept dice are a contiguous slice at one end.
            match self.keep.cmp(&0) {
                Ordering::Less => &self.rolls[0 .. (-self.keep as usize)],
                Ordering::Greater => &self.rolls[self.rolls.len() - (self.keep as usize) .. self.rolls.len()],
                Ordering::Equal => &self.rolls,
            }
        } else {
            // Fate histogram / empty d1 vector: return it unchanged.
            self.all_rolls()
        }
    }

    /// All dice rolled, including any dice that weren't [kept](Self::kept) in
    /// the total. This may differ from the [count](Self::count) if any dice
    /// met or exceeded the [fuse](Self::fuse).
    ///
    /// There are two special cases:
    /// * Fudge/Fate dice always return a reference to a [Vec] of length 3:
    /// * `[0]` is the number of minuses or failures
    /// * `[1]` is the number of zeroes or neutral results
    /// * `[2]` is the number of pluses or successes
    /// * One-sided dice (d1) always return an empty [Vec], since they can't
    /// roll anything but a one (1).
    ///
    /// Note that neither Fudge/Fate dice nor `d1`s can "explode".
    /// Also note that for one-sided dice, the [count](Self::count) and the
    /// [total](Self::total) are usually more useful data.
    pub fn all_rolls(&self) -> &Vec<u16> { &self.rolls }

    /// The number upon which dice "exploded", triggering a re-roll.
    /// Zero (0) means the dice couldn't explode.
    pub fn fuse(&self) -> u16 { self.fuse }

    /// Indicates whether any dice actually exploded. This is a utility method
    /// as the semantics of measuring how many dice were rolled is not trivially
    /// derivable from [all_rolls](Self::all_rolls). Note that neither
    /// Fudge/Fate dice nor `d1`s can explode.
    pub fn exploded(&self) -> bool {
        // More stored rolls than requested dice can only come from explosions.
        (self.sides >= 2) && (self.fuse != 0) && (self.rolls.len() > self.count)
    }

    /// The number of dice kept when calculating the [total](Self::total) of
    /// this roll.
    pub fn kept(&self) -> isize { self.keep }
}
impl Display for Dice {
    /// The output format for [Dice] is currently unstable but considered
    /// human-readable.
    fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {
        // "NdM", plus "!" (and the fuse, when it isn't simply the maximum
        // face) for exploding dice.
        write!(fmt, "{}d{}", self.count, self.sides)?;
        if self.fuse > 1 {
            write!(fmt, "!")?;
            if self.fuse != self.sides {
                write!(fmt, "{}", self.fuse)?;
            }
        }
        let count = self.rolls.len();
        if self.sides == 0 {
            // NOTE(review): for Fate dice `rolls` is always the 3-bucket
            // histogram, so `count` is always 3 and the `else` branch below
            // looks unreachable; was `self.count > 1` (number of dice)
            // intended? TODO confirm before relying on the single-die format.
            if count > 1 {
                // Tally of minuses, blanks, and pluses, then the signed total.
                write!(fmt, " ([-\u{00d7}{}, _\u{00d7}{}, +\u{00d7}{}] \u{21e8} {:+})",
                    self.rolls[0], self.rolls[1], self.rolls[2], self.total)
            } else {
                write!(fmt, " [{:+}]", self.total)
            }
        } else if self.sides == 1 {
            // d1: the total says everything there is to say.
            write!(fmt, " [{}]", self.total)
        } else {
            // TODO if there are a jillion rolls, only print summaries (a la the Fate dice above)
            if self.rolls.len() > 1 {
                write!(fmt, " ({:?} \u{21e8} {})", self.rolls, self.total)
            } else {
                write!(fmt, " [{}]", self.total)
            }
        }
    }
}
impl Dice {
    /// Alias of [`Dice::SIDES_LIMIT`], kept for backward compatibility
    /// (`TryFrom<Captures>` still reads it). Defined in terms of the
    /// canonical constant instead of repeating the literal `1000`, so the
    /// two limits can never silently drift apart.
    pub const SIDE_LIMIT: u16 = Self::SIDES_LIMIT;
}
impl FromStr for Dice {
    type Err = DiceParseError;

    /// Parses a dice expression (e.g. `"4d6/h3!5"`): the first match of
    /// `DICE_RE` within `line` is handed to the `TryFrom<Captures>`
    /// conversion; anything the regex can't find at all is `Unparseable`.
    fn from_str(line: &str) -> Result<Self, Self::Err> {
        DICE_RE
            .captures_iter(line)
            .next()
            .ok_or(DiceParseError::Unparseable)
            .and_then(Dice::try_from)
    }
}
impl TryFrom<Captures<'_>> for Dice {
    type Error = DiceParseError;

    /// Builds a `Dice` from the named capture groups of `DICE_RE`
    /// (`count`, `sides`, `updown`/`amount`, `exploding`/`fuse`), then
    /// delegates final validation and the actual rolling to
    /// `Dice::new_extended`.
    fn try_from(cap: Captures) -> Result<Self, Self::Error> {
        // A missing count (e.g. "d6") defaults to a single die.
        let count = cap.name("count")
            .map(|m| m.as_str())
            .unwrap_or("1")
            .parse::<usize>().map_err(|_| DiceParseError::Unparseable)?;
        // `sides` must be present in any match; its absence means the regex
        // itself is broken, hence `Regex` rather than `Unparseable`.
        let sides_s = cap.name("sides")
            .map(|m| m.as_str())
            .ok_or(DiceParseError::Regex)?;
        // "f"/"F" selects Fudge/Fate dice, encoded internally as 0 sides;
        // an explicit 0 is rejected as distinct user error.
        let sides = if sides_s.to_lowercase() == "f" {
            0
        } else {
            let val = sides_s.parse().map_err(|_| DiceParseError::Unparseable)?;
            if val == 0 {
                return Err(DiceParseError::ZeroSides);
            }
            val
        };
        if sides > Self::SIDE_LIMIT {
            return Err(DiceParseError::TooManySides(sides));
        }
        // Disabled draft support for high/low range filters, preserved for
        // reference:
        //let hilo = if cap.name("hilo").is_some() {
        // Self::get_lo_hi("hll", "hlh", &cap)?
        //} else if cap.name("lohi").is_some() {
        // Self::get_lo_hi("lhl", "lhh", &cap)?
        //} else if cap.name("hihi").is_some() || cap.name("lolo").is_some() {
        // Self::get_lo_hi("lo", "hi", &cap)?
        //} else {
        // (Some(1), Some(sides))
        //};
        // "/hN" keeps the N highest (positive keep); "/lN" the N lowest
        // (negative keep); absent means keep everything.
        let keep = if let Some(updown) = cap.name("updown") {
            let amt = cap.name("amount").ok_or(DiceParseError::Regex)?
                .as_str()
                .parse::<isize>().map_err(|_| DiceParseError::Unparseable)?;
            match &*updown.as_str().to_lowercase() {
                "h" => amt,
                "l" => -amt,
                _ => return Err(DiceParseError::Regex),
            }
        } else {
            0
        };
        // "!" explodes on the maximum face; "!N" explodes on N or higher.
        // A fuse of 0 or 1 would re-roll forever, and d1/Fate dice can't
        // explode at all.
        let fuse = if cap.name("exploding").is_some() {
            if let Some(fuse) = cap.name("fuse") {
                let fuse_s = fuse.as_str();
                let fuse_val = fuse_s.parse().map_err(|_| DiceParseError::Unparseable)?;
                if fuse_val <= 1 {
                    return Err(DiceParseError::ShortFuse);
                }
                fuse_val
            } else if sides > 1 {
                sides
            } else {
                return Err(DiceParseError::CannotExplode);
            }
        } else {
            0
        };
        Self::new_extended(count, sides, keep, fuse)
    }
}
#[cfg(test)]
pub mod test {
    use super::{Dice, DiceParseError};

    /// Asserts that two rolls have the same shape: count, sides, fuse, and —
    /// when nothing can explode — the same number of rolls. Totals are not
    /// compared, because rolling is random.
    #[macro_export]
    macro_rules! expect_dice_similar {
        ($text: literal, $expect: expr) => {
            let parsed = $text.parse::<Dice>().unwrap();
            let provided = $expect.unwrap();
            expect_dice_similar!(parsed, provided)
        };
        ($d1: expr, $d2: expr) => {
            //println!("Comparing:\n\t{:?}\n\t{:?}", $d1, $d2);
            assert_eq!($d1.count(), $d2.count());
            assert_eq!($d1.sides(), $d2.sides());
            assert_eq!($d1.fuse(), $d2.fuse());
            if $d1.fuse() == 0 {
                assert_eq!($d1.rolls().len(), $d2.rolls().len());
            }
        };
    }

    /// Parsed strings should agree with the equivalent constructor calls.
    #[test]
    fn build_vs_new() {
        expect_dice_similar!("1d6", Dice::new_extended(1, 6, 0, 0));
        expect_dice_similar!("1d6!", Dice::new_exploding(1, 6, 6));
        expect_dice_similar!("1d6/H1", Dice::new_keep_n(1, 6, 1));
        expect_dice_similar!("1d6/L1", Dice::new_keep_n(1, 6, -1));
        expect_dice_similar!("1d6/L1!2", Dice::new_extended(1, 6, -1, 2));
        expect_dice_similar!("1d6", Some(Dice { count: 1, sides: 6, fuse: 0, rolls: vec![1], total: 1, keep: 0 }));
    }

    #[test]
    fn r_1d6() {
        expect_dice_similar!("1d6", Dice::new_extended(1, 6, 0, 0));
    }

    #[test]
    fn r_1d6_exploding() {
        expect_dice_similar!("1d6!", Dice::new_extended(1, 6, 0, 6));
    }

    // An omitted count should parse as one die.
    #[test]
    fn r_d6() {
        expect_dice_similar!("1d6", Dice::new_extended(1, 6, 0, 0));
    }

    // Fate/Fudge dice parse with "f" (either case) as the sides.
    #[test]
    fn r_1df() {
        expect_dice_similar!("1df", Dice::new_extended(1, 0, 0, 0));
    }

    #[test]
    fn r_1df_caps() {
        expect_dice_similar!("1DF", Dice::new_extended(1, 0, 0, 0));
    }

    // d1 and Fate dice must reject the exploding flag.
    #[test]
    fn no_explode() {
        assert_eq!("1d1!".parse::<Dice>(), Err(DiceParseError::CannotExplode));
        assert_eq!("1df!".parse::<Dice>(), Err(DiceParseError::CannotExplode));
    }

    // d1 totals are deterministic: every die contributes exactly 1.
    #[test]
    fn roll_d1() {
        let roll = "3d1".parse::<Dice>().unwrap();
        assert_eq!(roll.sides, 1);
        assert_eq!(roll.total, 3);
    }

    // Values just past SIDES_LIMIT / COUNT_LIMIT must be rejected.
    #[test]
    fn big_dice_bad() {
        assert_eq!("d1001".parse::<Dice>(), Err(DiceParseError::TooManySides(1001)));
        assert_eq!("1d1001".parse::<Dice>(), Err(DiceParseError::TooManySides(1001)));
        assert_eq!("101d10".parse::<Dice>(), Err(DiceParseError::TooManyDice(101)));
    }

    // Rolls are sorted when keep != 0, so "/h3" keeps the trailing slice.
    #[test]
    fn keep_three_high() {
        let d = "4d6/h3".parse::<Dice>().unwrap();
        assert_eq!(d.rolls.len(), 4);
        assert_eq!(d.total, d.rolls[1..].to_vec().iter().fold(0, |acc, x| acc + (*x as i32)));
    }

    // ... and "/L3" keeps the leading slice.
    #[test]
    fn keep_three_low() {
        let d = "4d6/L3".parse::<Dice>().unwrap();
        assert_eq!(d.rolls.len(), 4);
        assert_eq!(d.total, d.rolls[..3].to_vec().iter().fold(0, |acc, x| acc + (*x as i32)));
    }

    // Can't keep more dice than were rolled.
    #[test]
    fn two_keep_three_bad() {
        assert_eq!("2d20/H3".parse::<Dice>(), Err(DiceParseError::TooManyKept(3)));
    }

    // d1 is fully deterministic, so whole-value equality works here.
    #[test]
    fn keep_d1() {
        assert_eq!("12d1/l3".parse::<Dice>(), Dice::new_keep_n(12, 1, -3));
        assert_eq!("12d1/h3".parse::<Dice>(), Dice::new_keep_n(12, 1, 3));
        let d = "12d1/h3".parse::<Dice>().unwrap();
        assert_eq!(d.total, 3);
        let d = "12d1/l3".parse::<Dice>().unwrap();
        assert_eq!(d.total, 3);
    }

    #[test]
    fn keep_df() {
        expect_dice_similar!("12df/l3", Dice::new_keep_n(12, 0, -3));
        expect_dice_similar!("12dF/h3", Dice::new_keep_n(12, 0, 3));
        let d = "12df/h3".parse::<Dice>().unwrap();
        assert_eq!(d.kept(), 3);
    }

    // A fuse of 1 would explode forever; 2 is the smallest legal fuse.
    #[test]
    fn no_explode_1() {
        assert_eq!("1d5!1".parse::<Dice>(), Err(DiceParseError::ShortFuse));
        assert!("1d5!2".parse::<Dice>().is_ok());
    }

    /// Mirror of the histogram shortcut used for Fate dice inside
    /// `Dice::new_extended` (rolls = [minuses, blanks, pluses]).
    fn calc_short_keep(rolls: &Vec<i32>, keep: isize) -> i32 {
        use std::cmp::Ordering;
        match keep.cmp(&0) {
            Ordering::Less => {
                if rolls[0] as isize >= -keep {
                    keep as i32
                } else if (rolls[0] + rolls[1]) as isize >= -keep {
                    -(rolls[0] as i32)
                } else {
                    (-keep as i32) - (rolls[1] as i32) - 2 * (rolls[0] as i32)
                }
            },
            Ordering::Greater => {
                if rolls[2] as isize >= keep {
                    keep as i32
                } else if (rolls[1] + rolls[2]) as isize >= keep {
                    rolls[2] as i32
                } else {
                    (-keep as i32) + (rolls[1] as i32) + 2 * (rolls[2] as i32) as i32
                }
            },
            Ordering::Equal => (rolls[2] as i32) - (rolls[0] as i32)
        }
    }

    /// Reference implementation over the expanded, sorted list of individual
    /// Fate results (-1 / 0 / +1 per die): slice out the kept range and sum.
    fn calc_long_keep(rolls: &Vec<i32>, keep: isize) -> i32 {
        use std::cmp::Ordering;
        let range = match keep.cmp(&0) {
            Ordering::Less => 0 .. (-keep as usize),
            Ordering::Greater => rolls.len() - (keep as usize) .. rolls.len(),
            Ordering::Equal => 0 .. rolls.len(),
        };
        rolls[range].iter().map(|&x| x as i32).sum()
    }

    /// Exhaustively checks the Fate histogram shortcut against the reference
    /// implementation over every possible combination of EXP Fate dice and
    /// every legal keep value.
    #[test]
    fn fate_keepers() {
        // N.B., doubling EXP increases the runtime of this test by a factor of
        // 10 (the runtime is exponential over LIMIT)
        const EXP: u32 = 5;
        let limit = 3usize.pow(EXP);
        for i in 0 .. limit {
            let mut short_rolls = vec![0i32; 3];
            let mut long_rolls = vec![0i32; EXP as usize];
            // Decode `i` as a base-3 number; each digit is one die's result.
            let mut ctr = i;
            for j in 0 .. EXP as usize {
                short_rolls[ctr % 3] += 1;
                long_rolls[j] = (ctr % 3) as i32 - 1;
                ctr /= 3;
            }
            long_rolls.sort_unstable();
            for keep in -(EXP as isize) ..= EXP as isize {
                assert_eq!(calc_short_keep(&short_rolls, keep), calc_long_keep(&long_rolls, keep));
            }
        }
    }
}
| 36.176471 | 173 | 0.514041 |
3a7123d5d67e2c46105f1d13d0584fa9ede15dab | 3,758 | // Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::str::FromStr;
use common_ast::ast::CreateStageStmt;
use common_exception::ErrorCode;
use common_exception::Result;
use common_meta_types::OnErrorMode;
use common_meta_types::StageType;
use common_meta_types::UserStageInfo;
use common_planners::CreateUserStagePlan;
use common_planners::ListPlan;
use common_planners::RemoveUserStagePlan;
use crate::sql::binder::Binder;
use crate::sql::plans::Plan;
use crate::sql::statements::parse_copy_file_format_options;
use crate::sql::statements::parse_stage_location;
use crate::sql::statements::parse_uri_location;
impl<'a> Binder {
    /// Binds a `LIST @stage[/path]` statement into [`Plan::ListStage`].
    ///
    /// `location` is the stage reference without the leading `@`; `pattern`
    /// is the (possibly empty) file-name filter passed through to the plan.
    pub(in crate::sql::planner::binder) async fn bind_list_stage(
        &mut self,
        location: &str,
        pattern: &str,
    ) -> Result<Plan> {
        // `parse_stage_location` expects the "@name[/path]" form, so restore
        // the sigil before resolving the stage and its internal path.
        let stage_name = format!("@{location}");
        let (stage, path) = parse_stage_location(&self.ctx, stage_name.as_str()).await?;
        let plan_node = ListPlan {
            path,
            stage,
            pattern: pattern.to_string(),
        };
        Ok(Plan::ListStage(Box::new(plan_node)))
    }

    /// Binds a `REMOVE @stage[/path]` statement into [`Plan::RemoveStage`].
    ///
    /// Parameters mirror [`bind_list_stage`](Self::bind_list_stage): stage
    /// reference without `@` plus an optional file-name pattern.
    pub(in crate::sql::planner::binder) async fn bind_remove_stage(
        &mut self,
        location: &str,
        pattern: &str,
    ) -> Result<Plan> {
        let stage_name = format!("@{location}");
        let (stage, path) = parse_stage_location(&self.ctx, stage_name.as_str()).await?;
        let plan_node = RemoveUserStagePlan {
            path,
            stage,
            pattern: pattern.to_string(),
        };
        Ok(Plan::RemoveStage(Box::new(plan_node)))
    }

    /// Binds a `CREATE STAGE` statement into [`Plan::CreateStage`].
    ///
    /// An empty `location` produces an internal stage; otherwise the URI and
    /// the credential/encryption options are parsed into external storage
    /// parameters. File-format and copy options from the statement are then
    /// applied onto the resulting stage info.
    pub(in crate::sql::planner::binder) async fn bind_create_stage(
        &mut self,
        stmt: &CreateStageStmt,
    ) -> Result<Plan> {
        let CreateStageStmt {
            if_not_exists,
            stage_name,
            location,
            credential_options,
            encryption_options,
            file_format_options,
            on_error,
            size_limit,
            validation_mode: _,
            comments: _,
        } = stmt;
        // Internal stage when no location is given; external otherwise.
        let mut stage_info = match location.is_empty() {
            true => UserStageInfo {
                stage_type: StageType::Internal,
                ..Default::default()
            },
            false => {
                let (stage_storage, _) =
                    parse_uri_location(location, credential_options, encryption_options)?;
                stage_storage
            }
        };
        stage_info.stage_name = stage_name.clone();
        // Only override the default file format when options were supplied.
        if !file_format_options.is_empty() {
            stage_info.file_format_options = parse_copy_file_format_options(file_format_options)?;
        }
        // Copy options.
        {
            // on_error.
            if !on_error.is_empty() {
                stage_info.copy_options.on_error =
                    OnErrorMode::from_str(on_error).map_err(ErrorCode::SyntaxException)?;
            }
            stage_info.copy_options.size_limit = *size_limit;
        }
        Ok(Plan::CreateStage(Box::new(CreateUserStagePlan {
            if_not_exists: *if_not_exists,
            tenant: self.ctx.get_tenant(),
            user_stage_info: stage_info,
        })))
    }
}
| 31.847458 | 98 | 0.611495 |
db20e8d71186f9b520e4335fd6273ff7c04e5d83 | 56 | pub mod tick;
pub mod time;
pub mod format;
pub mod sys; | 14 | 15 | 0.732143 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.