file_name (stringlengths 3–137) | prefix (stringlengths 0–918k) | suffix (stringlengths 0–962k) | middle (stringlengths 0–812k)
---|---|---|---
subscribe-user-agent-client.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SubscribeUserAgentClient = void 0;
var tslib_1 = require("tslib");
var subscription_dialog_1 = require("../dialogs/subscription-dialog");
var subscription_1 = require("../subscription");
var timers_1 = require("../timers");
var transactions_1 = require("../transactions");
var user_agent_client_1 = require("./user-agent-client");
/**
* SUBSCRIBE UAC.
* @remarks
* 4.1. Subscriber Behavior
* https://tools.ietf.org/html/rfc6665#section-4.1
*
* User agent client for installation of a single subscription per SUBSCRIBE request.
* TODO: Support for installation of multiple subscriptions on forked SUBSCRIBE requests.
* @public | function SubscribeUserAgentClient(core, message, delegate) {
var _this = this;
// Get event from request message.
var event = message.getHeader("Event");
if (!event) {
throw new Error("Event undefined");
}
// Get expires from request message.
var expires = message.getHeader("Expires");
if (!expires) {
throw new Error("Expires undefined");
}
_this = _super.call(this, transactions_1.NonInviteClientTransaction, core, message, delegate) || this;
_this.delegate = delegate;
// FIXME: Subscriber id should also be matching on event id.
_this.subscriberId = message.callId + message.fromTag + event;
_this.subscriptionExpiresRequested = _this.subscriptionExpires = Number(expires);
_this.subscriptionEvent = event;
_this.subscriptionState = subscription_1.SubscriptionState.NotifyWait;
// Start waiting for a NOTIFY we can use to create a subscription.
_this.waitNotifyStart();
return _this;
}
/**
* Destructor.
* Note that Timer N may live on waiting for an initial NOTIFY and
* the delegate may still receive that NOTIFY. If you don't want
* that behavior then either clear the delegate so the delegate
* doesn't get called (a 200 will be sent in response to the NOTIFY)
* or call `waitNotifyStop` which will clear Timer N and remove this
* UAC from the core (a 481 will be sent in response to the NOTIFY).
*/
SubscribeUserAgentClient.prototype.dispose = function () {
_super.prototype.dispose.call(this);
};
/**
* Handle out of dialog NOTIFY associated with SUBSCRIBE request.
* This is the first NOTIFY received after the SUBSCRIBE request.
* @param uas - User agent server handling the subscription creating NOTIFY.
*/
SubscribeUserAgentClient.prototype.onNotify = function (uas) {
// NOTIFY requests are matched to such SUBSCRIBE requests if they
// contain the same "Call-ID", a "To" header field "tag" parameter that
// matches the "From" header field "tag" parameter of the SUBSCRIBE
// request, and the same "Event" header field. Rules for comparisons of
// the "Event" header fields are described in Section 8.2.1.
// https://tools.ietf.org/html/rfc6665#section-4.4.1
var event = uas.message.parseHeader("Event").event;
if (!event || event !== this.subscriptionEvent) {
this.logger.warn("Failed to parse event.");
uas.reject({ statusCode: 489 });
return;
}
// NOTIFY requests MUST contain "Subscription-State" header fields that
// indicate the status of the subscription.
// https://tools.ietf.org/html/rfc6665#section-4.1.3
var subscriptionState = uas.message.parseHeader("Subscription-State");
if (!subscriptionState || !subscriptionState.state) {
this.logger.warn("Failed to parse subscription state.");
uas.reject({ statusCode: 489 });
return;
}
// Validate subscription state.
var state = subscriptionState.state;
switch (state) {
case "pending":
break;
case "active":
break;
case "terminated":
break;
default:
this.logger.warn("Invalid subscription state " + state);
uas.reject({ statusCode: 489 });
return;
}
// Dialogs usages are created upon completion of a NOTIFY transaction
// for a new subscription, unless the NOTIFY request contains a
// "Subscription-State" of "terminated."
// https://tools.ietf.org/html/rfc6665#section-4.4.1
if (state !== "terminated") {
// The Contact header field MUST be present and contain exactly one SIP
// or SIPS URI in any request that can result in the establishment of a
// dialog.
// https://tools.ietf.org/html/rfc3261#section-8.1.1.8
var contact = uas.message.parseHeader("contact");
if (!contact) {
this.logger.warn("Failed to parse contact.");
uas.reject({ statusCode: 489 });
return;
}
}
// In accordance with the rules for proxying non-INVITE requests as
// defined in [RFC3261], successful SUBSCRIBE requests will receive only
// one 200-class response; however, due to forking, the subscription may
// have been accepted by multiple nodes. The subscriber MUST therefore
// be prepared to receive NOTIFY requests with "From:" tags that differ
// from the "To:" tag received in the SUBSCRIBE 200-class response.
//
// If multiple NOTIFY requests are received in different dialogs in
// response to a single SUBSCRIBE request, each dialog represents a
// different destination to which the SUBSCRIBE request was forked.
// Subscriber handling in such situations varies by event package; see
// Section 5.4.9 for details.
// https://tools.ietf.org/html/rfc6665#section-4.1.4
// Each event package MUST specify whether forked SUBSCRIBE requests are
// allowed to install multiple subscriptions.
//
// If such behavior is not allowed, the first potential dialog-
// establishing message will create a dialog. All subsequent NOTIFY
// requests that correspond to the SUBSCRIBE request (i.e., have
// matching "To", "From", "Call-ID", and "Event" header fields, as well
// as "From" header field "tag" parameter and "Event" header field "id"
// parameter) but that do not match the dialog would be rejected with a
// 481 response. Note that the 200-class response to the SUBSCRIBE
// request can arrive after a matching NOTIFY request has been received;
// such responses might not correlate to the same dialog established by
// the NOTIFY request. Except as required to complete the SUBSCRIBE
// transaction, such non-matching 200-class responses are ignored.
//
// If installing of multiple subscriptions by way of a single forked
// SUBSCRIBE request is allowed, the subscriber establishes a new dialog
// towards each notifier by returning a 200-class response to each
// NOTIFY request. Each dialog is then handled as its own entity and is
// refreshed independently of the other dialogs.
//
// In the case that multiple subscriptions are allowed, the event
// package MUST specify whether merging of the notifications to form a
// single state is required, and how such merging is to be performed.
// Note that it is possible that some event packages may be defined in
// such a way that each dialog is tied to a mutually exclusive state
// that is unaffected by the other dialogs; this MUST be clearly stated
// if it is the case.
// https://tools.ietf.org/html/rfc6665#section-5.4.9
// *** NOTE: This implementation is only for event packages which
// do not allow forked requests to install multiple subscriptions.
// As such and in accordance with the specification, we stop waiting
// and any future NOTIFY requests will be rejected with a 481.
if (this.dialog) {
throw new Error("Dialog already created. This implementation only supports install of single subscriptions.");
}
this.waitNotifyStop();
// Update expires.
this.subscriptionExpires =
subscriptionState.expires ?
Math.min(this.subscriptionExpires, Math.max(subscriptionState.expires, 0)) :
this.subscriptionExpires;
// Update subscription state.
switch (state) {
case "pending":
this.subscriptionState = subscription_1.SubscriptionState.Pending;
break;
case "active":
this.subscriptionState = subscription_1.SubscriptionState.Active;
break;
case "terminated":
this.subscriptionState = subscription_1.SubscriptionState.Terminated;
break;
default:
throw new Error("Unrecognized state " + state + ".");
}
// Dialogs usages are created upon completion of a NOTIFY transaction
// for a new subscription, unless the NOTIFY request contains a
// "Subscription-State" of "terminated."
// https://tools.ietf.org/html/rfc6665#section-4.4.1
if (this.subscriptionState !== subscription_1.SubscriptionState.Terminated) {
// Because the dialog usage is established by the NOTIFY request, the
// route set at the subscriber is taken from the NOTIFY request itself,
// as opposed to the route set present in the 200-class response to the
// SUBSCRIBE request.
// https://tools.ietf.org/html/rfc6665#section-4.4.1
var dialogState = subscription_dialog_1.SubscriptionDialog.initialDialogStateForSubscription(this.message, uas.message);
// Subscription Initiated! :)
this.dialog = new subscription_dialog_1.SubscriptionDialog(this.subscriptionEvent, this.subscriptionExpires, this.subscriptionState, this.core, dialogState);
}
// Delegate.
if (this.delegate && this.delegate.onNotify) {
var request = uas;
var subscription = this.dialog;
this.delegate.onNotify({ request: request, subscription: subscription });
}
else {
uas.accept();
}
};
SubscribeUserAgentClient.prototype.waitNotifyStart = function () {
var _this = this;
if (!this.N) {
// Add ourselves to the core's subscriber map.
// This allows the core to route out of dialog NOTIFY messages to us.
this.core.subscribers.set(this.subscriberId, this);
this.N = setTimeout(function () { return _this.timer_N(); }, timers_1.Timers.TIMER_N);
}
};
SubscribeUserAgentClient.prototype.waitNotifyStop = function () {
if (this.N) {
// Remove ourselves from the core's subscriber map.
// Any future out of dialog NOTIFY messages will be rejected with a 481.
this.core.subscribers.delete(this.subscriberId);
clearTimeout(this.N);
this.N = undefined;
}
};
/**
* Receive a response from the transaction layer.
* @param message - Incoming response message.
*/
SubscribeUserAgentClient.prototype.receiveResponse = function (message) {
if (!this.authenticationGuard(message)) {
return;
}
if (message.statusCode && message.statusCode >= 200 && message.statusCode < 300) {
// The "Expires" header field in a 200-class response to SUBSCRIBE
// request indicates the actual duration for which the subscription will
// remain active (unless refreshed). The received value might be
// smaller than the value indicated in the SUBSCRIBE request but cannot
// be larger; see Section 4.2.1 for details.
// https://tools.ietf.org/html/rfc6665#section-4.1.2.1
// The "Expires" values present in SUBSCRIBE 200-class responses behave
// in the same way as they do in REGISTER responses: the server MAY
// shorten the interval but MUST NOT lengthen it.
//
// If the duration specified in a SUBSCRIBE request is unacceptably
// short, the notifier may be able to send a 423 response, as
// described earlier in this section.
//
// 200-class responses to SUBSCRIBE requests will not generally contain
// any useful information beyond subscription duration; their primary
// purpose is to serve as a reliability mechanism. State information
// will be communicated via a subsequent NOTIFY request from the
// notifier.
// https://tools.ietf.org/html/rfc6665#section-4.2.1.1
var expires = message.getHeader("Expires");
if (!expires) {
this.logger.warn("Expires header missing in a 200-class response to SUBSCRIBE");
}
else {
var subscriptionExpiresReceived = Number(expires);
if (subscriptionExpiresReceived > this.subscriptionExpiresRequested) {
this.logger.warn("Expires header in a 200-class response to SUBSCRIBE with a higher value than the one in the request");
}
if (subscriptionExpiresReceived < this.subscriptionExpires) {
this.subscriptionExpires = subscriptionExpiresReceived;
}
}
// If a NOTIFY arrived before the 200-class response, a dialog may have been created.
// Update the dialog's expiration only if this indicates an earlier expiration.
if (this.dialog) {
if (this.dialog.subscriptionExpires > this.subscriptionExpires) {
this.dialog.subscriptionExpires = this.subscriptionExpires;
}
}
}
if (message.statusCode && message.statusCode >= 300 && message.statusCode < 700) {
this.waitNotifyStop(); // No NOTIFY will be sent after a negative final response.
}
_super.prototype.receiveResponse.call(this, message);
};
/**
* To ensure that subscribers do not wait indefinitely for a
* subscription to be established, a subscriber starts a Timer N, set to
* 64*T1, when it sends a SUBSCRIBE request. If this Timer N expires
* prior to the receipt of a NOTIFY request, the subscriber considers
* the subscription failed, and cleans up any state associated with the
* subscription attempt.
* https://tools.ietf.org/html/rfc6665#section-4.1.2.4
*/
SubscribeUserAgentClient.prototype.timer_N = function () {
this.logger.warn("Timer N expired for SUBSCRIBE user agent client. Timed out waiting for NOTIFY.");
this.waitNotifyStop();
if (this.delegate && this.delegate.onNotifyTimeout) {
this.delegate.onNotifyTimeout();
}
};
return SubscribeUserAgentClient;
}(user_agent_client_1.UserAgentClient));
exports.SubscribeUserAgentClient = SubscribeUserAgentClient; | */
var SubscribeUserAgentClient = /** @class */ (function (_super) {
tslib_1.__extends(SubscribeUserAgentClient, _super); |
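The subscriber routing above hinges on two pieces: a map keyed by `callId + fromTag + event` that lets the core hand out-of-dialog NOTIFY requests to the waiting SUBSCRIBE UAC, and Timer N (64*T1) that bounds the wait. A minimal Python sketch of that idea, for illustration only — `SubscriberRegistry`, `TIMER_N`, and the callback names are hypothetical, not part of SIP.js:

```python
import threading

TIMER_N = 64 * 0.5  # 64 * T1, assuming the RFC 3261 default T1 of 500 ms

class SubscriberRegistry:
    """Routes out-of-dialog NOTIFYs to the SUBSCRIBE UAC waiting for them."""

    def __init__(self):
        self._waiting = {}  # key -> Timer; mirrors core.subscribers above

    def wait_notify_start(self, call_id, from_tag, event, on_timeout):
        key = call_id + from_tag + event  # same matching key as subscriberId
        timer = threading.Timer(TIMER_N, self._expire, args=(key, on_timeout))
        self._waiting[key] = timer
        timer.start()
        return key

    def wait_notify_stop(self, key):
        # After removal, a NOTIFY with this key would be answered with a 481.
        timer = self._waiting.pop(key, None)
        if timer is not None:
            timer.cancel()

    def _expire(self, key, on_timeout):
        # Timer N fired before any NOTIFY arrived: the subscription failed.
        if self._waiting.pop(key, None) is not None:
            on_timeout()
```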
LauncherWindow.ts | import { app, BrowserWindow, ipcMain } from 'electron'
import * as path from 'path'
import { format as formatUrl } from 'url'
const windowConfig = require('@config').window
// Until the package is updated and new typings ship, we work around it with require
// import installExtension, { VUEJS_DEVTOOLS } from 'electron-devtools-installer'
const { default: installExtension, VUEJS_DEVTOOLS } = require('electron-devtools-installer')
export default class LauncherWindow {
mainWindow: BrowserWindow | null = null
/**
* Launcher initialization
*/
constructor() {
// quit application when all windows are closed
app.on('window-all-closed', () => {
// on macOS it is common for applications to stay open until the user explicitly quits
if (process.platform !== 'darwin') { | app.on('activate', () => {
// on macOS it is common to re-create a window even after all windows have been closed
if (this.mainWindow === null) {
this.mainWindow = this.createMainWindow()
}
})
// create main window when electron is ready
app.on('ready', () => {
this.mainWindow = this.createMainWindow()
if (process.env.DEV || false) {
installExtension(VUEJS_DEVTOOLS, {
loadExtensionOptions: { allowFileAccess: true },
})
.then((name: any) => console.log(`Added Extension: ${name}`))
.catch((err: any) => console.log('An error occurred: ', err));
}
})
// hide the main window when the minimize button is pressed
ipcMain.on('window-hide', () => {
this.mainWindow?.minimize()
})
// close the main window when the close button is pressed
ipcMain.on('window-close', () => {
this.mainWindow?.close()
})
}
/**
* Create launcher window
*/
createMainWindow() {
// creating and configuring a window
const launcherWindow = new BrowserWindow({
width: windowConfig.width || 900,
height: windowConfig.height || 550,
frame: windowConfig.frame || false,
resizable: windowConfig.resizable || false,
maximizable: windowConfig.maximizable || false,
fullscreenable: windowConfig.fullscreenable || false,
title: windowConfig.title || "Aurora Launcher",
icon: path.join(__dirname, '../renderer/logo.png'),
webPreferences: {
nodeIntegration: true,
// TODO Fix this
// Temporary fix, details:
// https://github.com/AuroraTeam/Launcher/issues/3
// https://github.com/electron/electron/issues/28034
// https://github.com/electron/electron/blob/master/docs/breaking-changes.md#default-changed-contextisolation-defaults-to-true
contextIsolation: false
}
})
// loading renderer code (runtime)
launcherWindow.loadURL(formatUrl({
pathname: path.join(__dirname, '../renderer/index.html'),
protocol: 'file',
slashes: true
}))
launcherWindow.on('closed', () => {
this.mainWindow = null
})
// open developer tools when using development mode
launcherWindow.webContents.on('did-frame-finish-load', () => {
if (process.env.DEV || false) launcherWindow.webContents.openDevTools()
})
// focus on development tools when opening
launcherWindow.webContents.on('devtools-opened', () => {
launcherWindow.focus()
setImmediate(() => {
launcherWindow.focus()
})
})
return launcherWindow
}
sendEvent(channel: string, ...args: any[]): void {
return this.mainWindow?.webContents.send(channel, ...args)
}
} | app.quit()
}
})
|
rootless.rs | use std::{env, path::PathBuf};
use anyhow::{bail, Result};
use nix::sched::CloneFlags;
use oci_spec::{Linux, LinuxIdMapping, Mount, Spec};
use crate::namespaces::Namespaces;
#[derive(Debug, Clone)]
pub struct Rootless {
/// Location of the newuidmap binary
pub newuidmap: Option<PathBuf>,
/// Location of the newgidmap binary
pub newgidmap: Option<PathBuf>,
/// Mappings for user ids
pub uid_mappings: Vec<LinuxIdMapping>,
/// Mappings for group ids
pub gid_mappings: Vec<LinuxIdMapping>,
}
impl From<&Linux> for Rootless {
fn from(linux: &Linux) -> Self {
Self {
newuidmap: None,
newgidmap: None,
uid_mappings: linux.uid_mappings.clone(),
gid_mappings: linux.gid_mappings.clone(),
}
}
}
pub fn detect_rootless(spec: &Spec) -> Result<Option<Rootless>> {
let rootless = if should_use_rootless() {
log::debug!("rootless container should be created");
log::warn!(
"resource constraints and multi id mapping is unimplemented for rootless containers"
);
validate(spec)?;
let mut rootless = Rootless::from(&spec.linux);
if let Some((uid_binary, gid_binary)) = lookup_map_binaries(&spec.linux)? {
rootless.newuidmap = Some(uid_binary);
rootless.newgidmap = Some(gid_binary);
}
Some(rootless)
} else {
None
};
Ok(rootless)
}
/// Checks if rootless mode should be used
pub fn should_use_rootless() -> bool {
if !nix::unistd::geteuid().is_root() {
return true;
}
if let Ok("true") = std::env::var("YOUKI_USE_ROOTLESS").as_deref() {
return true;
}
false
}
/// Validates that the spec contains the required information for
/// running in rootless mode
pub fn validate(spec: &Spec) -> Result<()> {
let linux = &spec.linux;
if linux.uid_mappings.is_empty() {
bail!("rootless containers require at least one uid mapping");
}
if linux.gid_mappings.is_empty() {
bail!("rootless containers require at least one gid mapping")
}
let namespaces: Namespaces = linux.namespaces.clone().into();
if !namespaces.clone_flags.contains(CloneFlags::CLONE_NEWUSER) {
bail!("rootless containers require the specification of a user namespace");
}
validate_mounts(&spec.mounts, &linux.uid_mappings, &linux.gid_mappings)?;
Ok(())
}
fn validate_mounts(
mounts: &[Mount],
uid_mappings: &[LinuxIdMapping],
gid_mappings: &[LinuxIdMapping],
) -> Result<()> {
for mount in mounts {
for opt in &mount.options {
if opt.starts_with("uid=") && !is_id_mapped(&opt[4..], uid_mappings)? {
bail!("Mount {:?} specifies option {} which is not mapped inside the rootless container", mount, opt);
}
if opt.starts_with("gid=") && !is_id_mapped(&opt[4..], gid_mappings)? {
bail!("Mount {:?} specifies option {} which is not mapped inside the rootless container", mount, opt);
}
}
}
Ok(())
}
fn is_id_mapped(id: &str, mappings: &[LinuxIdMapping]) -> Result<bool> {
let id = id.parse::<u32>()?;
Ok(mappings
.iter()
.any(|m| id >= m.container_id && id <= m.container_id + m.size))
}
/// Looks up the location of the newuidmap and newgidmap binaries which
/// are required to write multiple user/group mappings
pub fn lookup_map_binaries(spec: &Linux) -> Result<Option<(PathBuf, PathBuf)>> {
if spec.uid_mappings.len() == 1 && spec.gid_mappings.len() == 1 {
return Ok(None);
}
let uidmap = lookup_map_binary("newuidmap")?;
let gidmap = lookup_map_binary("newgidmap")?;
match (uidmap, gidmap) {
(Some(newuidmap), Some(newgidmap)) => Ok(Some((newuidmap, newgidmap))),
_ => bail!("newuidmap/newgidmap binaries could not be found in path. This is required if multiple id mappings are specified"),
}
}
fn | (binary: &str) -> Result<Option<PathBuf>> {
let paths = env::var("PATH")?;
Ok(paths
.split_terminator(':')
.find(|p| PathBuf::from(p).join(binary).exists())
.map(PathBuf::from))
}
| lookup_map_binary |
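`validate_mounts` above rejects `uid=`/`gid=` mount options that fall outside the container's id mappings. A small Python sketch of the same check, assuming each mapping is a `(container_id, size)` pair; note the sketch uses the half-open range `[container_id, container_id + size)` implied by the size semantics, whereas the Rust code above accepts an inclusive upper bound:

```python
def is_id_mapped(id_str, mappings):
    """mappings: iterable of (container_id, size) pairs, mirroring LinuxIdMapping."""
    uid = int(id_str)
    # A mapping of size N covers container ids container_id .. container_id + N - 1.
    return any(cid <= uid < cid + size for cid, size in mappings)

assert is_id_mapped("0", [(0, 65536)])          # first mapped id
assert is_id_mapped("65535", [(0, 65536)])      # last mapped id
assert not is_id_mapped("65536", [(0, 65536)])  # one past the range
```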
common-toolbar.js | import React from 'react';
import PropTypes from 'prop-types';
import { isPro, gettext } from '../../utils/constants';
import Search from '../search/search';
import Notification from '../common/notification';
import Account from '../common/account';
const propTypes = {
repoID: PropTypes.string,
onSearchedClick: PropTypes.func.isRequired,
searchPlaceholder: PropTypes.string
};
class | extends React.Component {
render() {
let searchPlaceholder = this.props.searchPlaceholder || gettext('Search Files');
return (
<div className="common-toolbar">
{isPro && (
<Search
repoID={this.props.repoID}
placeholder={searchPlaceholder}
onSearchedClick={this.props.onSearchedClick}
/>
)}
<Notification />
<Account />
</div>
);
}
}
CommonToolbar.propTypes = propTypes;
export default CommonToolbar;
| CommonToolbar |
abstract_curve_segment.py | from dataclasses import dataclass
from bindings.gmd.abstract_curve_segment_type import AbstractCurveSegmentType |
__NAMESPACE__ = "http://www.opengis.net/gml"
@dataclass
class AbstractCurveSegment(AbstractCurveSegmentType):
"""A curve segment defines a homogeneous segment of a curve.
The attributes numDerivativesAtStart, numDerivativesAtEnd and
numDerivativesInterior specify the type of continuity as specified
in ISO 19107:2003, 6.4.9.3. The AbstractCurveSegment element is the
abstract head of the substitution group for all curve segment
elements, i.e. continuous segments of the same interpolation
mechanism. All curve segments shall have an attribute interpolation
with type gml:CurveInterpolationType specifying the curve
interpolation mechanism used for this segment. This mechanism uses
the control points and control parameters to determine the position
of this curve segment.
"""
class Meta:
namespace = "http://www.opengis.net/gml" | |
backbone.py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
Backbone modules.
"""
from collections import OrderedDict
import torch
import torch.nn.functional as F
import torchvision
from torch import nn
from torchvision.models._utils import IntermediateLayerGetter
from typing import Dict, List
from util.misc import NestedTensor, is_main_process
from .position_encoding import build_position_encoding
class FrozenBatchNorm2d(torch.nn.Module):
"""
BatchNorm2d where the batch statistics and the affine parameters are fixed.
Copy-paste from torchvision.misc.ops with added eps before rsqrt,
without which any other models than torchvision.models.resnet[18,34,50,101]
produce nans.
"""
def __init__(self, n):
super(FrozenBatchNorm2d, self).__init__()
self.register_buffer("weight", torch.ones(n))
self.register_buffer("bias", torch.zeros(n))
self.register_buffer("running_mean", torch.zeros(n))
self.register_buffer("running_var", torch.ones(n))
# Frozen-parameter batch norm: drop the num_batches_tracked entry when loading this layer's state dict
def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
missing_keys, unexpected_keys, error_msgs):
num_batches_tracked_key = prefix + 'num_batches_tracked'
if num_batches_tracked_key in state_dict:
del state_dict[num_batches_tracked_key]
super(FrozenBatchNorm2d, self)._load_from_state_dict(
state_dict, prefix, local_metadata, strict,
missing_keys, unexpected_keys, error_msgs)
def forward(self, x):
# move reshapes to the beginning
# to make it fuser-friendly
w = self.weight.reshape(1, -1, 1, 1)
b = self.bias.reshape(1, -1, 1, 1)
rv = self.running_var.reshape(1, -1, 1, 1)
rm = self.running_mean.reshape(1, -1, 1, 1)
eps = 1e-5
scale = w * (rv + eps).rsqrt()
bias = b - rm * scale
return x * scale + bias
class BackboneBase(nn.Module):
def __init__(self, backbone: nn.Module, train_backbone: bool, num_channels: int, return_interm_layers: bool):
super().__init__()
for name, parameter in backbone.named_parameters():
if not train_backbone or 'layer2' not in name and 'layer3' not in name and 'layer4' not in name: # the stem and layer1 never train
parameter.requires_grad_(False)
if return_interm_layers: # which layers to take features from
return_layers = {"layer1": "0", "layer2": "1", "layer3": "2", "layer4": "3"}
else:
return_layers = {'layer4': "0"}
self.body = IntermediateLayerGetter(backbone, return_layers=return_layers) # returns a new model whose outputs are the outputs of the named layers
self.num_channels = num_channels
def forward(self, tensor_list: NestedTensor):
xs = self.body(tensor_list.tensors) # backbone outputs
out: Dict[str, NestedTensor] = {}
for name, x in xs.items():
m = tensor_list.mask
assert m is not None
mask = F.interpolate(m[None].float(), size=x.shape[-2:]).to(torch.bool)[0]
out[name] = NestedTensor(x, mask)
return out
class Backbone(BackboneBase):
"""ResNet backbone with frozen BatchNorm."""
def __init__(self, name: str,
train_backbone: bool,
return_interm_layers: bool,
dilation: bool):
backbone = getattr(torchvision.models, name)( | replace_stride_with_dilation=[False, False, dilation],
pretrained=is_main_process(), norm_layer=FrozenBatchNorm2d)
num_channels = 512 if name in ('resnet18', 'resnet34') else 2048
super().__init__(backbone, train_backbone, num_channels, return_interm_layers)
class Joiner(nn.Sequential):
def __init__(self, backbone, position_embedding):
super().__init__(backbone, position_embedding)
def forward(self, tensor_list: NestedTensor):
xs = self[0](tensor_list) # backbone output
out: List[NestedTensor] = []
pos = []
for name, x in xs.items():
out.append(x)
# position encoding
pos.append(self[1](x).to(x.tensors.dtype)) # position embedding
return out, pos
def build_backbone(args):
position_embedding = build_position_encoding(args) # build the positional encoding over feature-map pixel coordinates
train_backbone = args.lr_backbone > 0 # whether to train the backbone
return_interm_layers = args.masks
backbone = Backbone(args.backbone, train_backbone, return_interm_layers, args.dilation) # build the backbone
model = Joiner(backbone, position_embedding) # join the backbone with the position embedding
model.num_channels = backbone.num_channels
return model | |
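`BackboneBase` above relies on `IntermediateLayerGetter` to expose intermediate feature maps. A self-contained sketch of that mechanism alone (ResNet-18 and a 224x224 input are arbitrary choices for illustration):

```python
import torch
import torchvision
from torchvision.models._utils import IntermediateLayerGetter

# Wrap a ResNet so a forward pass returns the outputs of the named layers,
# exactly as self.body is built in BackboneBase above.
resnet = torchvision.models.resnet18(pretrained=False)
body = IntermediateLayerGetter(resnet, return_layers={"layer2": "1", "layer4": "3"})

x = torch.randn(1, 3, 224, 224)
features = body(x)  # OrderedDict of {name: feature map}
for name, feat in features.items():
    print(name, tuple(feat.shape))
# "1" -> (1, 128, 28, 28) and "3" -> (1, 512, 7, 7) for this input size
```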
typescript.rs | #![feature(test)]
#![feature(bench_black_box)]
extern crate swc_node_base;
extern crate test;
use std::{
hint::black_box,
io::{self, stderr},
sync::Arc,
};
use swc::config::{Config, IsModule, JscConfig, Options, SourceMapsConfig};
use swc_common::{errors::Handler, FileName, FilePathMapping, Mark, SourceFile, SourceMap};
use swc_ecma_ast::{EsVersion, Program};
use swc_ecma_parser::{Syntax, TsConfig};
use swc_ecma_transforms::{fixer, hygiene, resolver, resolver_with_mark, typescript};
use swc_ecma_visit::FoldWith;
use test::Bencher;
static SOURCE: &str = include_str!("assets/Observable.ts");
fn mk() -> swc::Compiler {
let cm = Arc::new(SourceMap::new(FilePathMapping::empty()));
swc::Compiler::new(cm)
}
fn parse(c: &swc::Compiler) -> (Arc<SourceFile>, Program) {
let fm = c.cm.new_source_file(
FileName::Real("rxjs/src/internal/Observable.ts".into()),
SOURCE.to_string(),
);
let handler = Handler::with_emitter_writer(Box::new(io::stderr()), Some(c.cm.clone()));
let comments = c.comments().clone();
(
fm.clone(),
c.parse_js(
fm,
&handler,
EsVersion::Es5,
Syntax::Typescript(Default::default()),
IsModule::Bool(true),
Some(&comments),
)
.unwrap(),
)
}
fn as_es(c: &swc::Compiler) -> Program {
let program = parse(c).1;
let mark = Mark::fresh(Mark::root());
program
.fold_with(&mut resolver_with_mark(mark))
.fold_with(&mut typescript::strip(mark))
}
#[bench]
fn base_tr_fixer(b: &mut Bencher) {
let c = mk();
c.run(|| {
let module = as_es(&c);
b.iter(|| {
let handler = Handler::with_emitter_writer(Box::new(stderr()), Some(c.cm.clone()));
black_box(c.run_transform(&handler, true, || {
module.clone().fold_with(&mut fixer(Some(c.comments())))
}))
});
});
}
#[bench]
fn base_tr_resolver_and_hygiene(b: &mut Bencher) {
let c = mk();
c.run(|| {
let module = as_es(&c);
b.iter(|| {
let handler = Handler::with_emitter_writer(Box::new(stderr()), Some(c.cm.clone()));
black_box(c.run_transform(&handler, true, || {
module
.clone()
.fold_with(&mut resolver())
.fold_with(&mut hygiene())
}))
});
})
}
/// This benchmark exists to know exact execution time of each pass.
fn bench_codegen(b: &mut Bencher, _target: EsVersion) {
let c = mk();
c.run(|| {
let module = as_es(&c);
//TODO: Use target
b.iter(|| {
black_box(
c.print(
&module,
None,
None,
false,
EsVersion::Es2020,
SourceMapsConfig::Bool(false),
&Default::default(),
None,
false,
None,
)
.unwrap(),
);
})
});
}
macro_rules! codegen {
($name:ident, $target:expr) => {
#[bench]
fn $name(b: &mut Bencher) {
bench_codegen(b, $target);
}
};
}
codegen!(codegen_es3, EsVersion::Es3);
codegen!(codegen_es5, EsVersion::Es5);
codegen!(codegen_es2015, EsVersion::Es2015);
codegen!(codegen_es2016, EsVersion::Es2016);
codegen!(codegen_es2017, EsVersion::Es2017);
codegen!(codegen_es2018, EsVersion::Es2018);
codegen!(codegen_es2019, EsVersion::Es2019);
codegen!(codegen_es2020, EsVersion::Es2020);
fn bench_full(b: &mut Bencher, opts: &Options) |
macro_rules! compat {
($name:ident, $target:expr) => {
#[bench]
fn $name(b: &mut Bencher) {
bench_full(
b,
&Options {
config: Config {
jsc: JscConfig {
target: Some($target),
syntax: Some(Syntax::Typescript(TsConfig {
..Default::default()
})),
..Default::default()
},
module: None,
..Default::default()
},
swcrc: false,
is_module: IsModule::Bool(true),
..Default::default()
},
);
}
};
}
compat!(full_es3, EsVersion::Es3);
compat!(full_es5, EsVersion::Es5);
compat!(full_es2015, EsVersion::Es2015);
compat!(full_es2016, EsVersion::Es2016);
compat!(full_es2017, EsVersion::Es2017);
compat!(full_es2018, EsVersion::Es2018);
compat!(full_es2019, EsVersion::Es2019);
compat!(full_es2020, EsVersion::Es2020);
#[bench]
fn parser(b: &mut Bencher) {
let c = mk();
//TODO: Use target
b.iter(|| {
black_box(parse(&c));
})
}
| {
let c = mk();
b.iter(|| {
for _ in 0..100 {
let handler = Handler::with_emitter_writer(Box::new(stderr()), Some(c.cm.clone()));
let fm = c.cm.new_source_file(
FileName::Real("rxjs/src/internal/Observable.ts".into()),
SOURCE.to_string(),
);
let _ = c.process_js_file(fm, &handler, opts).unwrap();
}
});
} |
train8_Unet_scSE_hyper_LR2.py | import os
import sys
sys.path.append('../../')
from dependencies import *
from settings import *
from reproducibility import *
from models.TGS_salt.Unet34_scSE_hyper import Unet_scSE_hyper as Net
SIZE = 101
PAD = 27
Y0, Y1, X0, X1 = PAD,PAD+SIZE,PAD,PAD+SIZE,
def time_to_str(t, mode='min'):
if mode == 'min':
return '%.1f min(s)' % (float(t) / 60)
return str(t)
#TODO: Instead of directly printing to stdout, copy it into a txt file
class Logger():
def __init__(self,path=None):
super().__init__()
self.path=path
def write(self, s):
print(s)
def valid_augment(image,mask,index):
cache = Struct(image = image.copy(), mask = mask.copy())
image, mask = do_resize2(image, mask, SIZE, SIZE)
image, mask = do_center_pad_to_factor2(image, mask)
return image,mask,index,cache
def train_augment(image,mask,index):
cache = Struct(image = image.copy(), mask = mask.copy())
if np.random.rand() < 0.5:
image, mask = do_horizontal_flip2(image, mask)
pass
if np.random.rand() < 0.5:
c = np.random.choice(4)
if c==0:
image, mask = do_random_shift_scale_crop_pad2(image, mask, 0.2) #0.125
if c==1:
image, mask = do_horizontal_shear2( image, mask, dx=np.random.uniform(-0.07,0.07) )
pass
if c==2:
image, mask = do_shift_scale_rotate2( image, mask, dx=0, dy=0, scale=1, angle=np.random.uniform(0,15)) #10
if c==3:
image, mask = do_elastic_transform2(image, mask, grid=10, distort=np.random.uniform(0,0.15))#0.10
pass
if np.random.rand() < 0.5:
c = np.random.choice(3)
if c==0:
image = do_brightness_shift(image,np.random.uniform(-0.1,+0.1))
if c==1:
image = do_brightness_multiply(image,np.random.uniform(1-0.08,1+0.08))
if c==2:
image = do_gamma(image,np.random.uniform(1-0.08,1+0.08))
# if c==1:
# image = do_invert_intensity(image)
image, mask = do_resize2(image, mask, SIZE, SIZE)
image, mask = do_center_pad_to_factor2(image, mask)
#print(image.shape)
return image,mask,index,cache
def validation( net, valid_loader ):
valid_num = 0
valid_loss = np.zeros(3,np.float32)
predicts = []
truths = []
for input, truth, index, cache in valid_loader:
input = input.cuda()
truth = truth.cuda()
with torch.no_grad():
logit = data_parallel(net,input) #net(input)
prob = F.sigmoid(logit)
loss = net.criterion(logit, truth)
dice = net.metric(logit, truth)
batch_size = len(index)
valid_loss += batch_size*np.array(( loss.item(), dice.item(), 0))
valid_num += batch_size
prob = prob [:,:,Y0:Y1, X0:X1]
truth = truth[:,:,Y0:Y1, X0:X1]
prob = F.avg_pool2d(prob, kernel_size=2, stride=2)
truth = F.avg_pool2d(truth, kernel_size=2, stride=2)
predicts.append(prob.data.cpu().numpy())
truths.append(truth.data.cpu().numpy())
assert(valid_num == len(valid_loader.sampler))
valid_loss = valid_loss/valid_num
#--------------------------------------------------------
predicts = np.concatenate(predicts).squeeze()
truths = np.concatenate(truths).squeeze()
precision, result, threshold = do_kaggle_metric(predicts, truths)
valid_loss[2] = precision.mean()
return valid_loss
def train():
initial_checkpoint = None
#'checkpoint/00048500_model.pth'\
# None #'/root/share/project/kaggle/tgs/results/resnet34-resize128-focus/fold0-1a/checkpoint/00003500_model.pth'
## setup -----------------
os.makedirs(CHECKPOINTS +'/checkpoint', exist_ok=True)
os.makedirs(CHECKPOINTS +'/train', exist_ok=True)
os.makedirs(CHECKPOINTS +'/backup', exist_ok=True)
#backup_project_as_zip(PROJECT_PATH, RESULT +'/backup/code.train.%s.zip'%IDENTIFIER)
| print('\n--- [START %s] %s\n\n' % (IDENTIFIER, '-' * 64))
print('\tSEED = %u\n' % SEED)
print('\tPROJECT_PATH = %s\n' % CODE)
print('\t__file__ = %s\n' % __file__)
print('\tRESULT = %s\n' % CHECKPOINTS)
print('\n')
print('\t<additional comments>\n')
print('\t ... \n')
print('\n')
## dataset ----------------------------------------
print('Configuring dataset...\n')
batch_size = 16
train_dataset = TGSDataset('list_train8_3600', train_augment, 'train')
os.makedirs(CHECKPOINTS +'/list_train8_3600', exist_ok=True)
train_loader = DataLoader(
train_dataset,
sampler = RandomSampler(train_dataset),
#sampler = ConstantSampler(train_dataset,[31]*batch_size*100),
batch_size = batch_size,
drop_last = True,
num_workers = 8,
pin_memory = True,
collate_fn = null_collate)
valid_dataset = TGSDataset('list_valid8_400', valid_augment, 'train')
valid_loader = DataLoader(
valid_dataset,
sampler = RandomSampler(valid_dataset),
batch_size = batch_size,
drop_last = False,
num_workers = 8,
pin_memory = True,
collate_fn = null_collate)
assert(len(train_dataset)>=batch_size)
print('batch_size = %d\n'%(batch_size))
print('train_dataset.split = %s\n'%(train_dataset.split))
print('valid_dataset.split = %s\n'%(valid_dataset.split))
print('\n')
#debug
if 0: #debug ##-------------------------------
for input, truth, index, cache in train_loader:
images = input.cpu().data.numpy().squeeze()
masks = truth.cpu().data.numpy().squeeze()
batch_size = len(index)
for b in range(batch_size):
image = images[b]*255
image = np.dstack([image,image,image])
mask = masks[b]
image_show('image',image,resize=2)
image_show_norm('mask', mask, max=1,resize=2)
overlay0 = draw_mask_overlay(mask, image, color=[0,0,255])
overlay0 = draw_mask_to_contour_overlay(mask, overlay0, 2, color=[0,0,255])
image_show('overlay0',overlay0,resize=2)
cv2.waitKey(0)
#--------------------------------------
## net ----------------------------------------
print('Configuring neural network...\n')
net = Net().cuda()
if initial_checkpoint is not None:
print('\tinitial_checkpoint = %s\n' % initial_checkpoint)
net.load_state_dict(torch.load(initial_checkpoint, map_location=lambda storage, loc: storage))
print("The net is an instance of {}.".format(type(net)))
print('\n')
## optimiser ----------------------------------
num_iters = 300 *1000
iter_smooth = 20
iter_log = 50
iter_valid = 100
epoch_save = np.arange(0,1500,10)#[0, num_iters-1]\
#+ list(range(0,num_iters,500))#1*1000
FREEZE=False
#------------------------------------------------------
if FREEZE: ##freeze
for p in net.feature_net.parameters():
p.requires_grad = False
#from cls import CyclicLR
#net.set_mode('train',is_freeze_bn=True)
#------------------------------------------------------
scheduler = lambda x: (0.009/2)*(np.cos(PI*(np.mod(x-1,int(11.25*1000))/(int(11.25*1000))))+1)+0.001
print(scheduler(1))
print(scheduler(5000))
print(scheduler(10001))
#scheduler = CyclicLR(base_lr=0.01, max_lr=0.01, step_size=10000, gamma=1., scale_fn=clr_fn, scale_mode='iterations')
#schduler = None #StepLR([ (0, 0.01), (200, 0.001)])
#base_params = list(map(id, net.resnet.parameters()))
#decode_params = filter(lambda p: id(p) not in base_params, net.parameters())
#params = [ {"params": decode_params, "lr": 0.01},
# {"params": net.resnet.parameters(), "lr": 0.005}, ]
#optimizer = torch.optim.SGD(params, momentum=0.9, weight_decay=0.0001)
optimizer = optim.SGD(filter(lambda p: p.requires_grad, net.parameters()),
lr=0.01, momentum=0.9, weight_decay=0.0001)
#scheduler = CyclicLR(optimizer,base_lr=0.01, max_lr=0.01, step_size=10000, gamma=1., scale_fn=clr_fn, scale_mode='iterations')
#scheduler= CyclicLR(optimizer, base_lr=0.001, max_lr=0.01, step_size=10000, gamma=0.99, mode='cos_anneal')
start_iter = 0
start_epoch= 0
if initial_checkpoint is not None:
checkpoint = torch.load(initial_checkpoint.replace('_model.pth','_optimizer.pth'))
start_iter = checkpoint['iter' ]
start_epoch = checkpoint['epoch']
rate = get_learning_rate(optimizer) #load all except learning rate
#optimizer.load_state_dict(checkpoint['optimizer'])
adjust_learning_rate(optimizer, rate)
pass
## start training here! ##############################################
print('Start training...\n')
#print(' samples_per_epoch = %d\n\n'%len(train_dataset))
print(' rate iter epoch | valid_loss | train_loss | batch_loss | time \n')
print('-------------------------------------------------------------------------------------------------------------------------------\n')
train_loss = np.zeros(6,np.float32)
valid_loss = np.zeros(6,np.float32)
batch_loss = np.zeros(6,np.float32)
rate = 0
iter = 0
i = 0
start = timer()
while iter<num_iters: # loop over the dataset multiple times
sum_train_loss = np.zeros(6,np.float32)
sum = 0
optimizer.zero_grad()
for input, truth, index, cache in train_loader:
if 0: #debug ##-------------------------------
image = input.cpu().data.numpy().squeeze()
mask = truth.cpu().data.numpy().squeeze()
batch_size = len(index)
for b in range(batch_size):
image_show_norm('image',image[b],max=1,resize=2)
image_show_norm('mask', mask[b], max=1,resize=2)
cv2.waitKey(0)
#--------------------------------------
len_train_dataset = len(train_dataset)
batch_size = len(index)
iter = i + start_iter
epoch = (iter-start_iter)*batch_size/len_train_dataset + start_epoch
num_samples = epoch*len_train_dataset
if iter % iter_valid==0:
net.set_mode('valid')
valid_loss = validation(net, valid_loader)
net.set_mode('train')
print('\r',end='',flush=True)
print('%0.4f %5.1f %6.1f | %0.3f %0.3f (%0.3f) | %0.3f %0.3f | %0.3f %0.3f | %s \n' % (\
rate, iter/1000, epoch,
valid_loss[0], valid_loss[1], valid_loss[2],
train_loss[0], train_loss[1],
batch_loss[0], batch_loss[1],
time_to_str((timer() - start),'min')))
time.sleep(0.01)
#if 1:
if round(epoch,1) == 0 or round(epoch,1) == 1 or round(epoch,1)+0.1 in epoch_save:
torch.save(net.state_dict(),CHECKPOINTS+"/"+train_dataset.split+'/%08d_model.pth'%(int(round(epoch,1)+0.1)))
torch.save({
'optimizer': optimizer.state_dict(),
'iter' : iter,
'epoch' : epoch,
}, CHECKPOINTS+"/"+train_dataset.split+'/%08d_optimizer.pth'%(int(round(epoch,1)+0.1)))
pass
# learning rate schduler -------------
if scheduler is not None:
#scheduler.batch_step()
lr = scheduler(iter)
if lr<0 : break
adjust_learning_rate(optimizer, lr)
rate = get_learning_rate(optimizer)
#rate = 0.01
# one iteration update -------------
#net.set_mode('train',is_freeze_bn=True)
net.set_mode('train')
input = input.cuda()
truth = truth.cuda()
logit = data_parallel(net,input) #net(input)
loss = net.criterion(logit, truth)
#loss = torch.nn.BCEWithLogitsLoss(logit,truth)
dice = net.metric(logit, truth)
loss.backward()
optimizer.step()
optimizer.zero_grad()
#torch.nn.utils.clip_grad_norm(net.parameters(), 1)
# print statistics ------------
batch_loss = np.array((
loss.item(),
dice.item(),
0, 0, 0, 0,
))
sum_train_loss += batch_loss
sum += 1
if iter%iter_smooth == 0:
train_loss = sum_train_loss/sum
sum_train_loss = np.zeros(6,np.float32)
sum = 0
print('\r%0.4f %5.1f %6.1f | %0.3f %0.3f (%0.3f) | %0.3f %0.3f | %0.3f %0.3f | %s ' % (\
rate, iter/1000, epoch,
valid_loss[0], valid_loss[1], valid_loss[2],
train_loss[0], train_loss[1],
batch_loss[0], batch_loss[1],
time_to_str((timer() - start), 'min')), end='',flush=True)
i=i+1
#<debug> ===================================================================
if 0:
#if iter%200==0:
#voxel, aux, query, link, truth, cache = make_valid_batch(valid_dataset.dataset, batch_size=2)
net.set_mode('test')#
with torch.no_grad():
logit = net(input)
prob = F.sigmoid(logit)
loss = net.criterion(logit, truth)
dice = net.metric(logit, truth)
if 0:
loss = net.criterion(logit, truth)
accuracy,hit_rate,precision_rate = net.metric(logit, truth)
valid_loss[0] = loss.item()
valid_loss[1] = accuracy.item()
valid_loss[2] = hit_rate.item()
valid_loss[3] = precision_rate.item()
#show only b in batch ---
b = 1
prob = prob.data.cpu().numpy()[b].squeeze()
truth = truth.data.cpu().numpy()[b].squeeze()
input = input.data.cpu().numpy()[b].squeeze()
all = np.hstack([input,truth,prob])
image_show_norm('all',all,max=1,resize=3)
cv2.waitKey(100)
net.set_mode('train')
#<debug> ===================================================================
pass #-- end of one data loader --
pass #-- end of all iterations --
if 1: #save last
torch.save(net.state_dict(),CHECKPOINTS +'/checkpoint/'+train_dataset.split+'/%d_model.pth'%(i))
torch.save({
'optimizer': optimizer.state_dict(),
'iter' : i,
'epoch' : epoch,
}, CHECKPOINTS +'/checkpoint/'+train_dataset.split+'/%d_optimizer.pth'%(i))
print('\n')
if __name__ == '__main__':
print("Training U-Net with hypercolumn concatenation and spatial/channel-wise excitation...")
train()
print('\tFinished!') | log = Logger()
#log.open(RESULT+'/log.train.txt',mode='a') |
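The `scheduler` lambda above is a cosine-annealing schedule with warm restarts: within each 11,250-iteration cycle the learning rate decays from 0.010 down to 0.001, then snaps back up. A quick standalone check of those values:

```python
import numpy as np

PI = np.pi
scheduler = lambda x: (0.009 / 2) * (np.cos(PI * (np.mod(x - 1, 11250) / 11250)) + 1) + 0.001

for it in (1, 5625, 11250, 11251):
    print(it, round(float(scheduler(it)), 5))
# 1 -> 0.01 (cycle start), 5625 -> ~0.0055 (midpoint),
# 11250 -> ~0.001 (cycle end), 11251 -> 0.01 (restart)
```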
editorController.js | function getEditor(post) {
var simplemde = new SimpleMDE({
toolbar: [
"bold", "italic", "heading-2", "|",
"quote", "unordered-list", "ordered-list", "|",
"link", "code",
{
name: "insertImg",
action: openFileMgr,
className: "fa fa-folder-open",
title: "File Manager"
},
{
name: "insertYoutube",
action: insertYoutube,
className: "fa fa-youtube",
title: "Insert Youtube Video"
},
"|", "preview", "|", "guide"
],
blockStyles: {
bold: "__",
italic: "_"
},
element: document.getElementById("mdEditor"),
indentWithTabs: false,
insertTexts: {
horizontalRule: ["", "\n\n-----\n\n"],
image: [""],
link: ["[", "](#url#)"],
table: ["", "\n\n| Column 1 | Column 2 | Column 3 |\n| -------- | -------- | -------- |\n| Text | Text | Text |\n\n"]
},
lineWrapping: true, | minHeight: "300px",
parsingConfig: {
allowAtxHeaderWithoutSpace: true,
strikethrough: false,
underscoresBreakWords: true
},
placeholder: "Type here...",
promptURLs: true,
renderingConfig: {
singleLineBreaks: false,
codeSyntaxHighlighting: true
},
shortcuts: {
drawTable: "Cmd-Alt-T"
},
spellChecker: true,
status: ["lines", "words"],
styleSelectedText: false,
syncSideBySidePreviewScroll: false
});
var txt = post.content ? post.content : '';
simplemde.value(txt
.replace(/&#13;&#10;/g, '\r\n')
.replace(/&#13;/g, '')
.replace(/&lt;/g, '<')
.replace(/&gt;/g, '>')
.replace(/&quot;/g, '"'));
return simplemde;
}
var _editor = {};
function openFileMgr(editor) {
_editor = editor;
fileManagerController.open(insertImageCallback);
}
function insertYoutube(editor) {
_editor = editor;
var id = prompt("Please enter video ID", "");
if (id !== null && id !== "") {
var tag = '<iframe width="640" height="480" src="http://www.youtube.com/embed/' + id + '" frameborder="0" allowfullscreen></iframe>';
var cm = _editor.codemirror;
cm.replaceSelection(tag);
}
}
// Create the measurement node for scrollbar
var scrollDiv = document.createElement("div");
scrollDiv.className = "scrollbar-measure";
document.body.appendChild(scrollDiv);
var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth;
document.body.removeChild(scrollDiv); | |
server.js | /* globals Importer */
Importer.CSV = class ImporterCSV extends Importer.Base {
constructor(name, descriptionI18N, mimeType) {
super(name, descriptionI18N, mimeType);
this.logger.debug('Constructed a new CSV Importer.');
this.csvParser = Npm.require('csv-parse/lib/sync');
this.messages = new Map();
}
prepare(dataURI, sentContentType, fileName) {
super.prepare(dataURI, sentContentType, fileName);
const uriResult = RocketChatFile.dataURIParse(dataURI);
const zip = new this.AdmZip(new Buffer(uriResult.image, 'base64'));
const zipEntries = zip.getEntries();
let tempChannels = [];
let tempUsers = [];
const tempMessages = new Map();
for (const entry of zipEntries) {
this.logger.debug(`Entry: ${ entry.entryName }`);
//Ignore anything that has `__MACOSX` in its name, as sadly these things seem to mess everything up
if (entry.entryName.indexOf('__MACOSX') > -1) {
this.logger.debug(`Ignoring the file: ${ entry.entryName }`);
continue;
}
//Directories are ignored, since they are "virtual" in a zip file
if (entry.isDirectory) {
this.logger.debug(`Ignoring the directory entry: ${ entry.entryName }`);
continue;
}
//Parse the channels
if (entry.entryName.toLowerCase() === 'channels.csv') {
super.updateProgress(Importer.ProgressStep.PREPARING_CHANNELS);
const parsedChannels = this.csvParser(entry.getData().toString());
tempChannels = parsedChannels.map((c) => {
return {
id: c[0].trim().replace('.', '_'),
name: c[0].trim(),
creator: c[1].trim(),
isPrivate: c[2].trim().toLowerCase() === 'private',
members: c[3].trim().split(';').map((m) => m.trim())
};
});
continue;
}
//Parse the users
if (entry.entryName.toLowerCase() === 'users.csv') {
super.updateProgress(Importer.ProgressStep.PREPARING_USERS);
const parsedUsers = this.csvParser(entry.getData().toString());
tempUsers = parsedUsers.map((u) => { return { id: u[0].trim().replace('.', '_'), username: u[0].trim(), email: u[1].trim(), name: u[2].trim() }; });
continue;
}
//Parse the messages
if (entry.entryName.indexOf('/') > -1) {
const item = entry.entryName.split('/'); //random/messages.csv
const channelName = item[0]; //random
const msgGroupData = item[1].split('.')[0]; //2015-10-04
if (!tempMessages.get(channelName)) {
tempMessages.set(channelName, new Map());
}
let msgs = [];
try {
msgs = this.csvParser(entry.getData().toString());
} catch (e) {
this.logger.warn(`The file ${ entry.entryName } contains invalid syntax`, e);
continue;
}
tempMessages.get(channelName).set(msgGroupData, msgs.map((m) => { return { username: m[0], ts: m[1], text: m[2] }; }));
continue;
}
}
// Insert the users record, eventually this might have to be split into several ones as well
// if someone tries to import a several thousands users instance
const usersId = this.collection.insert({ 'import': this.importRecord._id, 'importer': this.name, 'type': 'users', 'users': tempUsers });
this.users = this.collection.findOne(usersId);
super.updateRecord({ 'count.users': tempUsers.length });
super.addCountToTotal(tempUsers.length);
// Insert the channels records.
const channelsId = this.collection.insert({ 'import': this.importRecord._id, 'importer': this.name, 'type': 'channels', 'channels': tempChannels });
this.channels = this.collection.findOne(channelsId);
super.updateRecord({ 'count.channels': tempChannels.length });
super.addCountToTotal(tempChannels.length);
// Save the messages records to the import record for `startImport` usage
super.updateProgress(Importer.ProgressStep.PREPARING_MESSAGES);
let messagesCount = 0;
for (const [channel, messagesMap] of tempMessages.entries()) {
if (!this.messages.get(channel)) {
this.messages.set(channel, new Map());
}
for (const [msgGroupData, msgs] of messagesMap.entries()) {
messagesCount += msgs.length;
super.updateRecord({ 'messagesstatus': `${ channel }/${ msgGroupData }` });
if (Importer.Base.getBSONSize(msgs) > Importer.Base.MaxBSONSize) {
Importer.Base.getBSONSafeArraysFromAnArray(msgs).forEach((splitMsg, i) => {
const messagesId = this.collection.insert({ 'import': this.importRecord._id, 'importer': this.name, 'type': 'messages', 'name': `${ channel }/${ msgGroupData }.${ i }`, 'messages': splitMsg });
this.messages.get(channel).set(`${ msgGroupData }.${ i }`, this.collection.findOne(messagesId));
});
} else {
const messagesId = this.collection.insert({ 'import': this.importRecord._id, 'importer': this.name, 'type': 'messages', 'name': `${ channel }/${ msgGroupData }`, 'messages': msgs });
this.messages.get(channel).set(msgGroupData, this.collection.findOne(messagesId));
}
}
}
super.updateRecord({ 'count.messages': messagesCount, 'messagesstatus': null });
super.addCountToTotal(messagesCount);
//Ensure we have at least a single user, channel, or message
if (tempUsers.length === 0 && tempChannels.length === 0 && messagesCount === 0) {
this.logger.error('No users, channels, or messages found in the import file.');
super.updateProgress(Importer.ProgressStep.ERROR);
return super.getProgress();
}
const selectionUsers = tempUsers.map((u) => new Importer.SelectionUser(u.id, u.username, u.email, false, false, true));
const selectionChannels = tempChannels.map((c) => new Importer.SelectionChannel(c.id, c.name, false, true, c.isPrivate));
super.updateProgress(Importer.ProgressStep.USER_SELECTION);
return new Importer.Selection(this.name, selectionUsers, selectionChannels);
}
startImport(importSelection) {
super.startImport(importSelection);
const started = Date.now();
//Ensure we're only going to import the users that the user has selected
for (const user of importSelection.users) {
for (const u of this.users.users) {
if (u.id === user.user_id) {
u.do_import = user.do_import;
}
}
}
this.collection.update({ _id: this.users._id }, { $set: { 'users': this.users.users }});
//Ensure we're only importing the channels the user has selected.
for (const channel of importSelection.channels) {
for (const c of this.channels.channels) {
if (c.id === channel.channel_id) {
c.do_import = channel.do_import;
}
}
}
this.collection.update({ _id: this.channels._id }, { $set: { 'channels': this.channels.channels }});
const startedByUserId = Meteor.userId();
Meteor.defer(() => {
super.updateProgress(Importer.ProgressStep.IMPORTING_USERS);
//Import the users
for (const u of this.users.users) {
if (!u.do_import) {
continue;
}
Meteor.runAsUser(startedByUserId, () => {
let existantUser = RocketChat.models.Users.findOneByEmailAddress(u.email);
//If we couldn't find one by their email address, try to find an existing user by their username
if (!existantUser) {
existantUser = RocketChat.models.Users.findOneByUsername(u.username);
}
if (existantUser) {
//since we have an existing user, let's try a few things
u.rocketId = existantUser._id;
RocketChat.models.Users.update({ _id: u.rocketId }, { $addToSet: { importIds: u.id } });
} else {
const userId = Accounts.createUser({ email: u.email, password: Date.now() + u.name + u.email.toUpperCase() });
Meteor.runAsUser(userId, () => {
Meteor.call('setUsername', u.username, {joinDefaultChannelsSilenced: true});
RocketChat.models.Users.setName(userId, u.name);
RocketChat.models.Users.update({ _id: userId }, { $addToSet: { importIds: u.id } });
u.rocketId = userId;
});
}
super.addCountCompleted(1);
});
}
this.collection.update({ _id: this.users._id }, { $set: { 'users': this.users.users }});
//Import the channels
super.updateProgress(Importer.ProgressStep.IMPORTING_CHANNELS);
for (const c of this.channels.channels) {
if (!c.do_import) {
continue;
}
Meteor.runAsUser(startedByUserId, () => {
const existantRoom = RocketChat.models.Rooms.findOneByName(c.name);
//If the room exists or the name of it is 'general', then we don't need to create it again
if (existantRoom || c.name.toUpperCase() === 'GENERAL') {
c.rocketId = c.name.toUpperCase() === 'GENERAL' ? 'GENERAL' : existantRoom._id;
RocketChat.models.Rooms.update({ _id: c.rocketId }, { $addToSet: { importIds: c.id } });
} else {
//Find the rocketchatId of the user who created this channel
let creatorId = startedByUserId;
for (const u of this.users.users) {
if (u.username === c.creator && u.do_import) {
creatorId = u.rocketId;
} | //Create the channel
Meteor.runAsUser(creatorId, () => {
const roomInfo = Meteor.call(c.isPrivate ? 'createPrivateGroup' : 'createChannel', c.name, c.members);
c.rocketId = roomInfo.rid;
});
RocketChat.models.Rooms.update({ _id: c.rocketId }, { $addToSet: { importIds: c.id } });
}
super.addCountCompleted(1);
});
}
this.collection.update({ _id: this.channels._id }, { $set: { 'channels': this.channels.channels }});
//Import the Messages
super.updateProgress(Importer.ProgressStep.IMPORTING_MESSAGES);
for (const [ch, messagesMap] of this.messages.entries()) {
const csvChannel = this.getChannelFromName(ch);
if (!csvChannel.do_import) {
continue;
}
const room = RocketChat.models.Rooms.findOneById(csvChannel.rocketId, { fields: { usernames: 1, t: 1, name: 1 } });
Meteor.runAsUser(startedByUserId, () => {
for (const [msgGroupData, msgs] of messagesMap.entries()) {
super.updateRecord({ 'messagesstatus': `${ ch }/${ msgGroupData }.${ msgs.messages.length }` });
for (const msg of msgs.messages) {
if (isNaN(new Date(parseInt(msg.ts)))) {
this.logger.warn(`Timestamp on a message in ${ ch }/${ msgGroupData } is invalid`);
super.addCountCompleted(1);
continue;
}
const creator = this.getUserFromUsername(msg.username);
if (creator) {
const msgObj = {
_id: `csv-${ csvChannel.id }-${ msg.ts }`,
ts: new Date(parseInt(msg.ts)),
msg: msg.text,
rid: room._id,
u: {
_id: creator._id,
username: creator.username
}
};
RocketChat.sendMessage(creator, msgObj, room, true);
}
super.addCountCompleted(1);
}
}
});
}
super.updateProgress(Importer.ProgressStep.FINISHING);
super.updateProgress(Importer.ProgressStep.DONE);
const timeTook = Date.now() - started;
this.logger.log(`CSV Import took ${ timeTook } milliseconds.`);
});
return super.getProgress();
}
getSelection() {
const selectionUsers = this.users.users.map((u) => new Importer.SelectionUser(u.id, u.username, u.email, false, false, true));
const selectionChannels = this.channels.channels.map((c) => new Importer.SelectionChannel(c.id, c.name, false, true, c.isPrivate));
return new Importer.Selection(this.name, selectionUsers, selectionChannels);
}
getChannelFromName(channelName) {
for (const ch of this.channels.channels) {
if (ch.name === channelName) {
return ch;
}
}
}
getUserFromUsername(username) {
for (const u of this.users.users) {
if (u.username === username) {
return RocketChat.models.Users.findOneById(u.rocketId, { fields: { username: 1 }});
}
}
}
}; | }
|
AntOcrVehicleplateIdentifyModel.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AntOcrVehicleplateIdentifyModel(object):
def __init__(self):
self._image = None
self._type = None
@property
def | (self):
return self._image
@image.setter
def image(self, value):
self._image = value
@property
def type(self):
return self._type
@type.setter
def type(self, value):
self._type = value
def to_alipay_dict(self):
params = dict()
if self.image:
if hasattr(self.image, 'to_alipay_dict'):
params['image'] = self.image.to_alipay_dict()
else:
params['image'] = self.image
if self.type:
if hasattr(self.type, 'to_alipay_dict'):
params['type'] = self.type.to_alipay_dict()
else:
params['type'] = self.type
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AntOcrVehicleplateIdentifyModel()
if 'image' in d:
o.image = d['image']
if 'type' in d:
o.type = d['type']
return o
| image |
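A short usage sketch for the model above, round-tripping through `to_alipay_dict` and `from_alipay_dict` (the field values are made up for illustration):

```python
model = AntOcrVehicleplateIdentifyModel()
model.image = "base64-encoded-image-bytes"  # hypothetical payload
model.type = "plate"                        # hypothetical type code

params = model.to_alipay_dict()
assert params == {"image": "base64-encoded-image-bytes", "type": "plate"}

clone = AntOcrVehicleplateIdentifyModel.from_alipay_dict(params)
assert clone.image == model.image and clone.type == model.type
```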
setup.in.py | # ==============================================================================
# Copyright 2018-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from platform import system
from setuptools import setup
from wheel.bdist_wheel import bdist_wheel
import os
# https://stackoverflow.com/questions/45150304/how-to-force-a-python-wheel-to-be-platform-specific-when-building-it
class BinaryBdistWheel(bdist_wheel):
def finalize_options(self):
# bdist_wheel is an old-style class in python 2, so we can't use `super`
bdist_wheel.finalize_options(self)
self.root_is_pure = False
def | (self):
_, _, plat = bdist_wheel.get_tag(self)
if system() == 'Linux':
plat = 'manylinux1_x86_64'
return ('py2.py3', 'none', plat)
ext = 'dylib' if system() == 'Darwin' else 'so'
with open(@README_DOC@, "r") as fh:
long_description = fh.read()
# Collect the list of include files, while preserving the tree structure
os.chdir('ngraph_bridge')
include_list = []
for path, dirs, files in os.walk('include'):
for f in files:
include_list.append(path + "/" + f )
os.chdir('..')
# The following is filled in by cmake - essentially a list of library
# and license files
ng_data_list = [
@ngraph_libraries@ @license_files@ @licence_top_level@
]
include_list.extend(ng_data_list)
# This is the contents of the Package Data
package_data_dict = {}
package_data_dict['ngraph_bridge'] = include_list
setup(
name='ngraph_tensorflow_bridge',
version='0.21.0rc1',
description='Intel nGraph compiler and runtime for TensorFlow',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/tensorflow/ngraph-bridge',
packages=['ngraph_bridge'],
author='Intel Nervana',
license='Apache License, Version 2.0',
platforms='Ubuntu 16.04, macOS Sierra',
include_package_data=True,
package_data= package_data_dict,
cmdclass={'bdist_wheel': BinaryBdistWheel},
extras_require={
'plaidml': ["plaidml>=0.6.3"],
},
)
| get_tag |
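# Note: with BinaryBdistWheel registered as the bdist_wheel cmdclass above,
# `python setup.py bdist_wheel` emits a platform-specific wheel (e.g. tagged
# py2.py3-none-manylinux1_x86_64 on Linux) instead of a pure-Python one,
# because root_is_pure is forced to False and get_tag overrides the platform tag.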
builtin.rs | //! Check properties that are required by built-in traits and set
//! up data structures required by type-checking/codegen.
use rustc::infer;
use rustc::infer::outlives::env::OutlivesEnvironment;
use rustc::infer::SuppressRegionErrors;
use rustc::middle::lang_items::UnsizeTraitLangItem;
use rustc::middle::region;
use rustc::traits::misc::{can_type_implement_copy, CopyImplementationError};
use rustc::traits::predicate_for_trait_def;
use rustc::traits::{self, ObligationCause, TraitEngine};
use rustc::ty::adjustment::CoerceUnsizedInfo;
use rustc::ty::TypeFoldable;
use rustc::ty::{self, Ty, TyCtxt};
use rustc_errors::struct_span_err;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::ItemKind;
pub fn check_trait(tcx: TyCtxt<'_>, trait_def_id: DefId) |
struct Checker<'tcx> {
tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
}
impl<'tcx> Checker<'tcx> {
fn check<F>(&self, trait_def_id: Option<DefId>, mut f: F) -> &Self
where
F: FnMut(TyCtxt<'tcx>, DefId),
{
if Some(self.trait_def_id) == trait_def_id {
for &impl_id in self.tcx.hir().trait_impls(self.trait_def_id) {
let impl_def_id = self.tcx.hir().local_def_id(impl_id);
f(self.tcx, impl_def_id);
}
}
self
}
}
fn visit_implementation_of_drop(tcx: TyCtxt<'_>, impl_did: DefId) {
// Destructors only work on nominal types.
if let ty::Adt(..) | ty::Error = tcx.type_of(impl_did).kind {
return;
}
let impl_hir_id = tcx.hir().as_local_hir_id(impl_did).expect("foreign Drop impl on non-ADT");
let sp = match tcx.hir().expect_item(impl_hir_id).kind {
ItemKind::Impl { self_ty, .. } => self_ty.span,
_ => bug!("expected Drop impl item"),
};
struct_span_err!(
tcx.sess,
sp,
E0120,
"the `Drop` trait may only be implemented for structs, enums, and unions",
)
.span_label(sp, "must be a struct, enum, or union")
.emit();
}
fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: DefId) {
debug!("visit_implementation_of_copy: impl_did={:?}", impl_did);
let impl_hir_id = if let Some(n) = tcx.hir().as_local_hir_id(impl_did) {
n
} else {
debug!("visit_implementation_of_copy(): impl not in this crate");
return;
};
let self_type = tcx.type_of(impl_did);
debug!("visit_implementation_of_copy: self_type={:?} (bound)", self_type);
let span = tcx.hir().span(impl_hir_id);
let param_env = tcx.param_env(impl_did);
assert!(!self_type.has_escaping_bound_vars());
debug!("visit_implementation_of_copy: self_type={:?} (free)", self_type);
match can_type_implement_copy(tcx, param_env, self_type) {
Ok(()) => {}
Err(CopyImplementationError::InfrigingFields(fields)) => {
let item = tcx.hir().expect_item(impl_hir_id);
let span = if let ItemKind::Impl { of_trait: Some(ref tr), .. } = item.kind {
tr.path.span
} else {
span
};
let mut err = struct_span_err!(
tcx.sess,
span,
E0204,
"the trait `Copy` may not be implemented for this type"
);
for span in fields.iter().map(|f| tcx.def_span(f.did)) {
err.span_label(span, "this field does not implement `Copy`");
}
err.emit()
}
Err(CopyImplementationError::NotAnAdt) => {
let item = tcx.hir().expect_item(impl_hir_id);
let span =
if let ItemKind::Impl { self_ty, .. } = item.kind { self_ty.span } else { span };
struct_span_err!(
tcx.sess,
span,
E0206,
"the trait `Copy` may not be implemented for this type"
)
.span_label(span, "type is not a structure or enumeration")
.emit();
}
Err(CopyImplementationError::HasDestructor) => {
struct_span_err!(
tcx.sess,
span,
E0184,
"the trait `Copy` may not be implemented for this type; the \
type has a destructor"
)
.span_label(span, "Copy not allowed on types with destructors")
.emit();
}
}
}
fn visit_implementation_of_coerce_unsized(tcx: TyCtxt<'tcx>, impl_did: DefId) {
debug!("visit_implementation_of_coerce_unsized: impl_did={:?}", impl_did);
// Just compute this for the side-effects, in particular reporting
// errors; other parts of the code may demand it for the info of
// course.
if impl_did.is_local() {
let span = tcx.def_span(impl_did);
tcx.at(span).coerce_unsized_info(impl_did);
}
}
fn visit_implementation_of_dispatch_from_dyn(tcx: TyCtxt<'_>, impl_did: DefId) {
debug!("visit_implementation_of_dispatch_from_dyn: impl_did={:?}", impl_did);
if impl_did.is_local() {
let dispatch_from_dyn_trait = tcx.lang_items().dispatch_from_dyn_trait().unwrap();
let impl_hir_id = tcx.hir().as_local_hir_id(impl_did).unwrap();
let span = tcx.hir().span(impl_hir_id);
let source = tcx.type_of(impl_did);
assert!(!source.has_escaping_bound_vars());
let target = {
let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
assert_eq!(trait_ref.def_id, dispatch_from_dyn_trait);
trait_ref.substs.type_at(1)
};
debug!("visit_implementation_of_dispatch_from_dyn: {:?} -> {:?}", source, target);
let param_env = tcx.param_env(impl_did);
let create_err = |msg: &str| struct_span_err!(tcx.sess, span, E0378, "{}", msg);
tcx.infer_ctxt().enter(|infcx| {
let cause = ObligationCause::misc(span, impl_hir_id);
use ty::TyKind::*;
match (&source.kind, &target.kind) {
(&Ref(r_a, _, mutbl_a), Ref(r_b, _, mutbl_b))
if infcx.at(&cause, param_env).eq(r_a, r_b).is_ok() && mutbl_a == *mutbl_b =>
{
()
}
(&RawPtr(tm_a), &RawPtr(tm_b)) if tm_a.mutbl == tm_b.mutbl => (),
(&Adt(def_a, substs_a), &Adt(def_b, substs_b))
if def_a.is_struct() && def_b.is_struct() =>
{
if def_a != def_b {
let source_path = tcx.def_path_str(def_a.did);
let target_path = tcx.def_path_str(def_b.did);
create_err(&format!(
"the trait `DispatchFromDyn` may only be implemented \
for a coercion between structures with the same \
definition; expected `{}`, found `{}`",
source_path, target_path,
))
.emit();
return;
}
if def_a.repr.c() || def_a.repr.packed() {
create_err(
"structs implementing `DispatchFromDyn` may not have \
`#[repr(packed)]` or `#[repr(C)]`",
)
.emit();
}
let fields = &def_a.non_enum_variant().fields;
let coerced_fields = fields
.iter()
.filter_map(|field| {
let ty_a = field.ty(tcx, substs_a);
let ty_b = field.ty(tcx, substs_b);
if let Ok(layout) = tcx.layout_of(param_env.and(ty_a)) {
if layout.is_zst() && layout.details.align.abi.bytes() == 1 {
// ignore ZST fields with alignment of 1 byte
return None;
}
}
if let Ok(ok) = infcx.at(&cause, param_env).eq(ty_a, ty_b) {
if ok.obligations.is_empty() {
create_err(
"the trait `DispatchFromDyn` may only be implemented \
for structs containing the field being coerced, \
ZST fields with 1 byte alignment, and nothing else",
)
.note(&format!(
"extra field `{}` of type `{}` is not allowed",
field.ident, ty_a,
))
.emit();
return None;
}
}
Some(field)
})
.collect::<Vec<_>>();
if coerced_fields.is_empty() {
create_err(
"the trait `DispatchFromDyn` may only be implemented \
for a coercion between structures with a single field \
being coerced, none found",
)
.emit();
} else if coerced_fields.len() > 1 {
create_err(
"implementing the `DispatchFromDyn` trait requires multiple coercions",
)
.note(
"the trait `DispatchFromDyn` may only be implemented \
for a coercion between structures with a single field \
being coerced",
)
.note(&format!(
"currently, {} fields need coercions: {}",
coerced_fields.len(),
coerced_fields
.iter()
.map(|field| {
format!(
"`{}` (`{}` to `{}`)",
field.ident,
field.ty(tcx, substs_a),
field.ty(tcx, substs_b),
)
})
.collect::<Vec<_>>()
.join(", ")
))
.emit();
} else {
let mut fulfill_cx = TraitEngine::new(infcx.tcx);
for field in coerced_fields {
let predicate = predicate_for_trait_def(
tcx,
param_env,
cause.clone(),
dispatch_from_dyn_trait,
0,
field.ty(tcx, substs_a),
&[field.ty(tcx, substs_b).into()],
);
fulfill_cx.register_predicate_obligation(&infcx, predicate);
}
// Check that all transitive obligations are satisfied.
if let Err(errors) = fulfill_cx.select_all_or_error(&infcx) {
infcx.report_fulfillment_errors(&errors, None, false);
}
// Finally, resolve all regions.
let region_scope_tree = region::ScopeTree::default();
let outlives_env = OutlivesEnvironment::new(param_env);
infcx.resolve_regions_and_report_errors(
impl_did,
&region_scope_tree,
&outlives_env,
SuppressRegionErrors::default(),
);
}
}
_ => {
create_err(
"the trait `DispatchFromDyn` may only be implemented \
for a coercion between structures",
)
.emit();
}
}
})
}
}
pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUnsizedInfo {
debug!("compute_coerce_unsized_info(impl_did={:?})", impl_did);
let coerce_unsized_trait = tcx.lang_items().coerce_unsized_trait().unwrap();
let unsize_trait = tcx.lang_items().require(UnsizeTraitLangItem).unwrap_or_else(|err| {
tcx.sess.fatal(&format!("`CoerceUnsized` implementation {}", err));
});
// this provider should only get invoked for local def-ids
let impl_hir_id = tcx.hir().as_local_hir_id(impl_did).unwrap_or_else(|| {
bug!("coerce_unsized_info: invoked for non-local def-id {:?}", impl_did)
});
let source = tcx.type_of(impl_did);
let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
assert_eq!(trait_ref.def_id, coerce_unsized_trait);
let target = trait_ref.substs.type_at(1);
debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (bound)", source, target);
let span = tcx.hir().span(impl_hir_id);
let param_env = tcx.param_env(impl_did);
assert!(!source.has_escaping_bound_vars());
let err_info = CoerceUnsizedInfo { custom_kind: None };
debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (free)", source, target);
tcx.infer_ctxt().enter(|infcx| {
let cause = ObligationCause::misc(span, impl_hir_id);
let check_mutbl = |mt_a: ty::TypeAndMut<'tcx>,
mt_b: ty::TypeAndMut<'tcx>,
mk_ptr: &dyn Fn(Ty<'tcx>) -> Ty<'tcx>| {
if (mt_a.mutbl, mt_b.mutbl) == (hir::Mutability::Not, hir::Mutability::Mut) {
infcx
.report_mismatched_types(
&cause,
mk_ptr(mt_b.ty),
target,
ty::error::TypeError::Mutability,
)
.emit();
}
(mt_a.ty, mt_b.ty, unsize_trait, None)
};
let (source, target, trait_def_id, kind) = match (&source.kind, &target.kind) {
(&ty::Ref(r_a, ty_a, mutbl_a), &ty::Ref(r_b, ty_b, mutbl_b)) => {
infcx.sub_regions(infer::RelateObjectBound(span), r_b, r_a);
let mt_a = ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a };
let mt_b = ty::TypeAndMut { ty: ty_b, mutbl: mutbl_b };
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ref(r_b, ty))
}
(&ty::Ref(_, ty_a, mutbl_a), &ty::RawPtr(mt_b)) => {
let mt_a = ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a };
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ptr(ty))
}
(&ty::RawPtr(mt_a), &ty::RawPtr(mt_b)) => {
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ptr(ty))
}
(&ty::Adt(def_a, substs_a), &ty::Adt(def_b, substs_b))
if def_a.is_struct() && def_b.is_struct() =>
{
if def_a != def_b {
let source_path = tcx.def_path_str(def_a.did);
let target_path = tcx.def_path_str(def_b.did);
struct_span_err!(
tcx.sess,
span,
E0377,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures with the same \
definition; expected `{}`, found `{}`",
source_path,
target_path
)
.emit();
return err_info;
}
// Here we are considering a case of converting
// `S<P0...Pn>` to `S<Q0...Qn>`. As an example, let's imagine a struct `Foo<T, U>`,
// which acts like a pointer to `U`, but carries along some extra data of type `T`:
//
// struct Foo<T, U> {
// extra: T,
// ptr: *mut U,
// }
//
// We might have an impl that allows (e.g.) `Foo<T, [i32; 3]>` to be unsized
// to `Foo<T, [i32]>`. That impl would look like:
//
// impl<T, U: Unsize<V>, V> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}
//
// Here `U = [i32; 3]` and `V = [i32]`. At runtime,
// when this coercion occurs, we would be changing the
// field `ptr` from a thin pointer of type `*mut [i32;
// 3]` to a fat pointer of type `*mut [i32]` (with
// extra data `3`). **The purpose of this check is to
// make sure that we know how to do this conversion.**
//
// To check if this impl is legal, we would walk down
// the fields of `Foo` and consider their types with
// both substitutes. We are looking to find that
// exactly one (non-phantom) field has changed its
// type, which we will expect to be the pointer that
// is becoming fat (we could probably generalize this
// to multiple thin pointers of the same type becoming
// fat, but we don't). In this case:
//
// - `extra` has type `T` before and type `T` after
// - `ptr` has type `*mut U` before and type `*mut V` after
//
// Since just one field changed, we would then check
// that `*mut U: CoerceUnsized<*mut V>` is implemented
// (in other words, that we know how to do this
// conversion). This will work out because `U:
// Unsize<V>`, and we have a builtin rule that `*mut
// U` can be coerced to `*mut V` if `U: Unsize<V>`.
let fields = &def_a.non_enum_variant().fields;
let diff_fields = fields
.iter()
.enumerate()
.filter_map(|(i, f)| {
let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b));
if tcx.type_of(f.did).is_phantom_data() {
// Ignore PhantomData fields
return None;
}
// Ignore fields that aren't changed; it may
// be that we could get away with subtyping or
// something more accepting, but we use
// equality because we want to be able to
// perform this check without computing
// variance where possible. (This is because
// we may have to evaluate constraint
// expressions in the course of execution.)
// See e.g., #41936.
if let Ok(ok) = infcx.at(&cause, param_env).eq(a, b) {
if ok.obligations.is_empty() {
return None;
}
}
// Collect up all fields that were significantly changed
// i.e., those that contain T in coerce_unsized T -> U
Some((i, a, b))
})
.collect::<Vec<_>>();
if diff_fields.is_empty() {
struct_span_err!(
tcx.sess,
span,
E0374,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures with one field \
being coerced, none found"
)
.emit();
return err_info;
} else if diff_fields.len() > 1 {
let item = tcx.hir().expect_item(impl_hir_id);
let span = if let ItemKind::Impl { of_trait: Some(ref t), .. } = item.kind {
t.path.span
} else {
tcx.hir().span(impl_hir_id)
};
struct_span_err!(
tcx.sess,
span,
E0375,
"implementing the trait \
`CoerceUnsized` requires multiple \
coercions"
)
.note(
"`CoerceUnsized` may only be implemented for \
a coercion between structures with one field being coerced",
)
.note(&format!(
"currently, {} fields need coercions: {}",
diff_fields.len(),
diff_fields
.iter()
.map(|&(i, a, b)| {
format!("`{}` (`{}` to `{}`)", fields[i].ident, a, b)
})
.collect::<Vec<_>>()
.join(", ")
))
.span_label(span, "requires multiple coercions")
.emit();
return err_info;
}
let (i, a, b) = diff_fields[0];
let kind = ty::adjustment::CustomCoerceUnsized::Struct(i);
(a, b, coerce_unsized_trait, Some(kind))
}
_ => {
struct_span_err!(
tcx.sess,
span,
E0376,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures"
)
.emit();
return err_info;
}
};
let mut fulfill_cx = TraitEngine::new(infcx.tcx);
// Register an obligation for `A: Trait<B>`.
let cause = traits::ObligationCause::misc(span, impl_hir_id);
let predicate = predicate_for_trait_def(
tcx,
param_env,
cause,
trait_def_id,
0,
source,
&[target.into()],
);
fulfill_cx.register_predicate_obligation(&infcx, predicate);
// Check that all transitive obligations are satisfied.
if let Err(errors) = fulfill_cx.select_all_or_error(&infcx) {
infcx.report_fulfillment_errors(&errors, None, false);
}
// Finally, resolve all regions.
let region_scope_tree = region::ScopeTree::default();
let outlives_env = OutlivesEnvironment::new(param_env);
infcx.resolve_regions_and_report_errors(
impl_did,
&region_scope_tree,
&outlives_env,
SuppressRegionErrors::default(),
);
CoerceUnsizedInfo { custom_kind: kind }
})
}
| {
Checker { tcx, trait_def_id }
.check(tcx.lang_items().drop_trait(), visit_implementation_of_drop)
.check(tcx.lang_items().copy_trait(), visit_implementation_of_copy)
.check(tcx.lang_items().coerce_unsized_trait(), visit_implementation_of_coerce_unsized)
.check(
tcx.lang_items().dispatch_from_dyn_trait(),
visit_implementation_of_dispatch_from_dyn,
);
} |
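// A user-side sketch of the pattern the CoerceUnsized checker above validates,
// mirroring the `Foo<T, U>` example from the comments (nightly-only; the
// `coerce_unsized` and `unsize` features are unstable):
//
// #![feature(coerce_unsized, unsize)]
// use std::marker::Unsize;
// use std::ops::CoerceUnsized;
//
// struct Foo<T, U: ?Sized> {
//     extra: T,
//     ptr: *mut U,
// }
//
// // Exactly one non-PhantomData field (`ptr`) changes type, so the impl is accepted.
// impl<T, U: Unsize<V> + ?Sized, V: ?Sized> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}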
socfbucsc68.py | """
This file offers the methods to automatically retrieve the graph socfb-UCSC68.
The graph is automatically retrieved from the NetworkRepository repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-06 11:50:55.897921
The undirected graph socfb-UCSC68 has 8991 nodes and 224584 unweighted
edges, of which none are self-loops. The graph is sparse as it has a density
of 0.00556 and has 7 connected components, where the component with most
nodes has 8979 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 39, the mean node degree is 49.96, and
the node degree mode is 1. The top 5 most central nodes are 2840 (degree
454), 7542 (degree 400), 4763 (degree 329), 692 (degree 323) and 2949 (degree
315).
References
---------------------
Please cite the following if you use the data:
@inproceedings{nr,
title = {The Network Data Repository with Interactive Graph Analytics and Visualization},
author={Ryan A. Rossi and Nesreen K. Ahmed},
booktitle = {AAAI},
url={http://networkrepository.com},
year={2015}
}
@article{traud2012social,
title={Social structure of {F}acebook networks},
author={Traud, Amanda L and Mucha, Peter J and Porter, Mason A},
journal={Phys. A},
month={Aug},
number={16},
pages={4165--4180},
volume={391}, | title={Comparing Community Structure to Characteristics in Online Collegiate Social Networks},
author={Traud, Amanda L and Kelsic, Eric D and Mucha, Peter J and Porter, Mason A},
journal={SIAM Rev.},
number={3},
pages={526--543},
volume={53},
year={2011}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.networkrepository import SocfbUcsc68
# Then load the graph
graph = SocfbUcsc68()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def SocfbUcsc68(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/networkrepository",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the socfb-UCSC68 graph.
The graph is automatically retrieved from the NetworkRepository repository.
Parameters
-------------------
directed: bool = False,
Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
Whether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
Instance of the socfb-UCSC68 graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-06 11:50:55.897921
The undirected graph socfb-UCSC68 has 8991 nodes and 224584 unweighted
edges, of which none are self-loops. The graph is sparse as it has a density
of 0.00556 and has 7 connected components, where the component with most
nodes has 8979 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 39, the mean node degree is 49.96, and
the node degree mode is 1. The top 5 most central nodes are 2840 (degree
454), 7542 (degree 400), 4763 (degree 329), 692 (degree 323) and 2949 (degree
315).
References
---------------------
Please cite the following if you use the data:
@inproceedings{nr,
title = {The Network Data Repository with Interactive Graph Analytics and Visualization},
author={Ryan A. Rossi and Nesreen K. Ahmed},
booktitle = {AAAI},
url={http://networkrepository.com},
year={2015}
}
@article{traud2012social,
title={Social structure of {F}acebook networks},
author={Traud, Amanda L and Mucha, Peter J and Porter, Mason A},
journal={Phys. A},
month={Aug},
number={16},
pages={4165--4180},
volume={391},
year={2012}
}
@article{Traud:2011fs,
title={Comparing Community Structure to Characteristics in Online Collegiate Social Networks},
author={Traud, Amanda L and Kelsic, Eric D and Mucha, Peter J and Porter, Mason A},
journal={SIAM Rev.},
number={3},
pages={526--543},
volume={53},
year={2011}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.networkrepository import SocfbUcsc68
# Then load the graph
graph = SocfbUcsc68()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="SocfbUcsc68",
dataset="networkrepository",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)() | year={2012}
}
@article{Traud:2011fs, |
fields.py | from __future__ import unicode_literals
from future.builtins import str, super
import os
import datetime
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage
from django.db.models.fields import Field
from django.db.models.fields.files import FileDescriptor
from django.forms.widgets import Input
from django.template.loader import render_to_string
from django.utils.encoding import smart_str
from django.utils.translation import ugettext_lazy as _
from filebrowser_safe.settings import *
from filebrowser_safe.base import FieldFileObject
from filebrowser_safe.functions import get_directory
class FileBrowseWidget(Input):
input_type = 'text'
class Media:
js = (os.path.join(URL_FILEBROWSER_MEDIA, 'js/AddFileBrowser.js'), )
def __init__(self, attrs=None):
self.directory = attrs.get('directory', '')
self.extensions = attrs.get('extensions', '')
self.format = attrs.get('format', '')
if attrs is not None:
self.attrs = attrs.copy()
else:
self.attrs = {}
def | (self, name, value, attrs=None, renderer=None):
if value is None:
value = ""
directory = self.directory
if self.directory:
if callable(self.directory):
directory = self.directory()
directory = os.path.normpath(datetime.datetime.now().strftime(directory))
fullpath = os.path.join(get_directory(), directory)
if not default_storage.isdir(fullpath):
default_storage.makedirs(fullpath)
final_attrs = dict(type=self.input_type, name=name, **(attrs or {}))
final_attrs['search_icon'] = URL_FILEBROWSER_MEDIA + 'img/filebrowser_icon_show.gif'
final_attrs['directory'] = directory
final_attrs['extensions'] = self.extensions
final_attrs['format'] = self.format
final_attrs['DEBUG'] = DEBUG
return render_to_string("filebrowser/custom_field.html", dict(locals(), MEDIA_URL=MEDIA_URL))
class FileBrowseFormField(forms.CharField):
widget = FileBrowseWidget
default_error_messages = {
'extension': _(u'Extension %(ext)s is not allowed. Only %(allowed)s is allowed.'),
}
def __init__(self, max_length=None, min_length=None,
directory=None, extensions=None, format=None,
*args, **kwargs):
self.max_length, self.min_length = max_length, min_length
self.directory = directory
self.extensions = extensions
if format:
self.format = format or ''
self.extensions = extensions or EXTENSIONS.get(format)
super(FileBrowseFormField, self).__init__(*args, **kwargs)
def clean(self, value):
value = super(FileBrowseFormField, self).clean(value)
if value == '':
return value
file_extension = os.path.splitext(value)[1].lower().split("?")[0]
if self.extensions and file_extension not in self.extensions:
raise forms.ValidationError(self.error_messages['extension'] % {'ext': file_extension, 'allowed': ", ".join(self.extensions)})
return value
class FileBrowseField(Field):
# These attributes control how the field is accessed on a model instance.
# Due to contribute_to_class, FileDescriptor will cause this field to be
# represented by a FileFieldObject on model instances.
# Adapted from django.db.models.fields.files.FileField.
attr_class = FieldFileObject
descriptor_class = FileDescriptor
def __init__(self, *args, **kwargs):
self.directory = kwargs.pop('directory', '')
self.extensions = kwargs.pop('extensions', '')
self.format = kwargs.pop('format', '')
self.storage = kwargs.pop('storage', default_storage)
super(FileBrowseField, self).__init__(*args, **kwargs)
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
return smart_str(value)
def get_manipulator_field_objs(self):
return [oldforms.TextField]
def get_internal_type(self):
return "CharField"
def formfield(self, **kwargs):
attrs = {
'directory': self.directory,
'extensions': self.extensions,
'format': self.format,
'storage': self.storage,
}
defaults = {
'form_class': FileBrowseFormField,
'widget': FileBrowseWidget(attrs=attrs),
'directory': self.directory,
'extensions': self.extensions,
'format': self.format
}
defaults.update(kwargs)
return super(FileBrowseField, self).formfield(**defaults)
def contribute_to_class(self, cls, name, **kwargs):
"""
From django.db.models.fields.files.FileField.contribute_to_class
"""
super(FileBrowseField, self).contribute_to_class(cls, name, **kwargs)
setattr(cls, self.name, self.descriptor_class(self))
| render |
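# A minimal usage sketch (the `Document` model is hypothetical; the keyword
# arguments are the ones FileBrowseField.__init__ pops above):
#
#   from django.db import models
#
#   class Document(models.Model):
#       pdf = FileBrowseField("PDF", max_length=200, directory="documents/",
#                             extensions=[".pdf"], blank=True)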
ProfileHeader.tsx | import React, { ReactChild, useState } from "react";
import { ProfileHeaderWrapper } from "./ProfileHeaderWrapper";
import { Button } from "./Button";
import { UserBadge } from "./UserBadge";
import { SingleUser } from "./UserAvatar/SingleUser";
import {
SolidCompass,
SolidFriends,
SolidMessages,
SolidPersonAdd,
} from "../icons";
import { useTypeSafeMutation } from "../shared-hooks/useTypeSafeMutation";
import { UserWithFollowInfo } from "@dogehouse/kebab";
import { useTypeSafeTranslation } from "../shared-hooks/useTypeSafeTranslation";
import { useTypeSafeUpdateQuery } from "../shared-hooks/useTypeSafeUpdateQuery";
import { EditProfileModal } from "../modules/user/EditProfileModal";
import { usePreloadPush } from "../shared-components/ApiPreloadLink";
export interface ProfileHeaderProps {
displayName: string;
username: string;
children?: ReactChild;
pfp?: string;
canDM?: boolean;
isCurrentUser?: boolean;
user: UserWithFollowInfo;
}
export const ProfileHeader: React.FC<ProfileHeaderProps> = ({
displayName,
username,
user,
children,
canDM,
isCurrentUser,
pfp = "https://dogehouse.tv/favicon.ico",
}) => {
const {
mutateAsync,
isLoading: followLoading,
variables,
} = useTypeSafeMutation("follow");
const { t } = useTypeSafeTranslation();
const updater = useTypeSafeUpdateQuery();
const [showEditProfileModal, setShowEditProfileModal] = useState(false);
const preloadPush = usePreloadPush();
const update = useTypeSafeUpdateQuery();
return (
// @TODO: Add the cover api (once it's implemented)}
<ProfileHeaderWrapper
coverUrl={user.bannerUrl || "https://source.unsplash.com/random"}
>
<EditProfileModal
isOpen={showEditProfileModal}
onRequestClose={() => setShowEditProfileModal(false)}
onEdit={(d) => {
update(["getUserProfile", d.username], (x) =>
!x ? x : { ...x, ...d }
);
if (d.username !== username) {
preloadPush({
route: "profile",
data: { username: d.username },
});
}
}}
/>
<div className="flex mr-4 ">
<SingleUser
isOnline={user.online}
className="absolute flex-none -top-5.5 rounded-full shadow-outlineLg"
src={pfp}
/>
</div>
<div className="flex flex-col w-3/6 font-sans">
<h4 className="text-primary-100 font-bold truncate">{displayName}</h4>
<div className="flex flex-row items-center">
<p
className="text-primary-300 mr-2"
data-testid="profile-info-username"
>{`@${username}`}</p>
{user.followsYou ? (
<UserBadge color="grey">{t("pages.viewUser.followsYou")}</UserBadge>
) : (
""
)}
</div>
<div className="mt-2">{children}</div>
</div>
<div className="w-3/6 ">
<div className="flex flex-row justify-end content-end gap-2">
{!isCurrentUser && (
<Button
loading={false}
onClick={async () => { | ? u
: {
...u,
numFollowers:
u.numFollowers + (user.youAreFollowing ? -1 : 1),
youAreFollowing: !user.youAreFollowing,
}
);
}}
size="small"
color={user.youAreFollowing ? "secondary" : "primary"}
icon={user.youAreFollowing ? null : <SolidFriends />}
>
{user.youAreFollowing
? t("pages.viewUser.unfollow")
: t("pages.viewUser.followHim")}
</Button>
)}
{isCurrentUser ? (
<Button
size="small"
color="secondary"
onClick={() => setShowEditProfileModal(true)}
icon={<SolidCompass />}
>
{t("pages.viewUser.editProfile")}
</Button>
) : (
""
)}
{canDM ? (
<Button size="small" color="secondary" icon={<SolidMessages />}>
Send DM
</Button>
) : (
""
)}
</div>
</div>
</ProfileHeaderWrapper>
);
}; | await mutateAsync([user.id, !user.youAreFollowing]);
updater(["getUserProfile", username], (u) =>
!u |
boids.js | // Size of canvas. These get updated to fill the whole browser.
let width = 150;
let height = 150;
var count = 0;
const numBoids = 100;
const visualRange = 75;
var boids = [];
var terrain = [];
var active = false;
var currentX;
var currentY;
var initialX;
var initialY;
var xOffset = 0;
var yOffset = 0;
var headerOffset = 100;
function dragStart(e) {
if (e.type === "touchstart") {
initialX = e.touches[0].clientX;
initialY = e.touches[0].clientY;
addBoid('blue');
} else {
//addBoid('green');
initialX = e.clientX;
initialY = e.clientY;
addBoid('white');
}
document.getElementById('output').innerHTML = initialX;
document.getElementById('output2').innerHTML = initialY;
for(let rect of terrain){
if(initialX < rect.x + rect.width && initialX > rect.x && initialY < rect.y + rect.height && initialY > rect.y){
addBoid('orange');
}
}
active = true;
}
function dragEnd(e) {
initialX = currentX;
initialY = currentY;
addBoid('black');
active = false;
}
function drag(e){
// if(e.clientX < rect.x + rect.width && e.clientX > rect.x && e.clientY < rect.y + rect.height && e.clientY > rect.y){
// addBoid('white');
// }
// //addBoid('green');
if (active){
e.preventDefault();
//addBoid('green');
if(e.type === "touchmove"){
// something soon
deltaX = e.touches[0].clientX - initialX;
deltaY = e.touches[0].clientY - initialY;
hldX = e.touches[0].clientX;
hldY = e.touches[0].clientY;
addBoid('grey');
} else {
deltaX = e.clientX - initialX;
deltaY = e.clientY - initialY;
hldX = e.clientX;
hldY = e.clientY;
}
//something
for(let rect of terrain){
addBoid('green');
//addBoid('green');
if(hldX < rect.x + rect.width && hldX > rect.x && hldY - headerOffset < rect.y + rect.height && hldY - headerOffset > rect.y){
//addBoid('green');
rect.x += deltaX;
rect.y += deltaY;
//document.getElementById('output').innerHTML = e.clientX;
addBoid("purple");
}
}
initialX = hldX;
initialY = hldY;
// track the last pointer position so dragEnd can persist it
currentX = hldX;
currentY = hldY;
xOffset = currentX;
yOffset = currentY;
}
}
function addRectangle(xPos,yPos,wid,heig,col){
terrain.push({
x : xPos,
y : yPos,
width : wid,
height : heig,
color : col,
});
}
function addBoid(col){
boids.push({
x: Math.random() * 10 + 500,
y: Math.random() * 10 + 700,
dx: Math.random() * 10 - 5,
dy: Math.random() * 10 - 5,
history: [],
color: col,
});
}
function addBoid2(e){
addBoid('red');
}
function boidTeam(num,col){
for(var i = 0; i < num; i +=1){
addBoid(col);
}
}
function initBoids(color) {
for (var i = 0; i < numBoids; i += 1) {
boids[boids.length] = {
x: Math.random() * width,
y: Math.random() * height,
dx: Math.random() * 10 - 5,
dy: Math.random() * 10 - 5,
history: [],
color: color,
};
}
}
function distance(boid1, boid2) {
return Math.sqrt(
(boid1.x - boid2.x) * (boid1.x - boid2.x) +
(boid1.y - boid2.y) * (boid1.y - boid2.y),
);
}
// TODO: This is naive and inefficient.
function nClosestBoids(boid, n) {
// Make a copy
const sorted = boids.slice();
// Sort the copy by distance from `boid`
sorted.sort((a, b) => distance(boid, a) - distance(boid, b));
// Return the `n` closest
return sorted.slice(1, n + 1);
}
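// Example (currently unused by animationLoop; `someBoid` is a placeholder):
//   const fiveNearest = nClosestBoids(someBoid, 5);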
// Called initially and whenever the window resizes to update the canvas
// size and width/height variables.
function sizeCanvas() {
const canvas = document.getElementById("boids");
//document.write(canvas)
width = window.innerWidth;
height = window.innerHeight;
canvas.width = width;
canvas.height = height;
}
function collisionDect(boid){
for(let rect of terrain){
if(boid.x < rect.x + rect.width &&
boid.x > rect.x &&
boid.y < rect.y + rect.height &&
boid.y > rect.y){
boid.color = 'green';
//addBoid('orange');
//rect.x += 100;
const index = boids.indexOf(boid);
if(index > -1){
boids.splice(index,1);
}
}
}
}
// Constrain a boid to within the window. If it gets too close to an edge,
// nudge it back in and reverse its direction.
function keepWithinBounds(boid) {
const margin = 200;
const turnFactor = 1;
if (boid.x < margin) {
boid.dx += turnFactor;
}
if (boid.x > width - margin) {
boid.dx -= turnFactor;
}
if (boid.y < margin) {
boid.dy += turnFactor;
}
if (boid.y > height - margin) {
boid.dy -= turnFactor;
}
}
// Find the center of mass of the other boids and adjust velocity slightly to
// point towards the center of mass.
function flyTowardsCenter(boid) {
const centeringFactor = 0.005; // adjust velocity by this %
let centerX = 0;
let centerY = 0;
let numNeighbors = 0;
for (let otherBoid of boids) {
if(otherBoid.color == boid.color){
if (distance(boid, otherBoid) < visualRange) {
centerX += otherBoid.x;
centerY += otherBoid.y;
numNeighbors += 1;
}
}
}
if (numNeighbors) {
centerX = centerX / numNeighbors;
centerY = centerY / numNeighbors;
boid.dx += (centerX - boid.x) * centeringFactor;
boid.dy += (centerY - boid.y) * centeringFactor;
}
}
// Move away from other boids that are too close to avoid colliding
function avoidOthers(boid) {
const minDistance = 20; // The distance to stay away from other boids
const avoidFactor = 0.05; // Adjust velocity by this %
let moveX = 0;
let moveY = 0;
for (let otherBoid of boids) {
if(otherBoid.color == boid.color){
if (otherBoid !== boid) {
if (distance(boid, otherBoid) < minDistance) {
moveX += boid.x - otherBoid.x;
moveY += boid.y - otherBoid.y;
}
}
}
}
boid.dx += moveX * avoidFactor;
boid.dy += moveY * avoidFactor;
}
// Find the average velocity (speed and direction) of the other boids and
// adjust velocity slightly to match.
function matchVelocity(boid) {
const matchingFactor = 0.05; // Adjust by this % of average velocity
let avgDX = 0;
let avgDY = 0;
let numNeighbors = 0;
for (let otherBoid of boids) {
if(otherBoid.color == boid.color){
if (distance(boid, otherBoid) < visualRange) {
avgDX += otherBoid.dx;
avgDY += otherBoid.dy;
numNeighbors += 1;
}
}
}
if (numNeighbors) {
avgDX = avgDX / numNeighbors;
avgDY = avgDY / numNeighbors;
boid.dx += (avgDX - boid.dx) * matchingFactor;
boid.dy += (avgDY - boid.dy) * matchingFactor;
}
}
// Speed will naturally vary in flocking behavior, but real animals can't go
// arbitrarily fast.
function limitSpeed(boid) {
const speedLimit = 15;
const speed = Math.sqrt(boid.dx * boid.dx + boid.dy * boid.dy);
if (speed > speedLimit) {
boid.dx = (boid.dx / speed) * speedLimit;
boid.dy = (boid.dy / speed) * speedLimit;
}
}
const DRAW_TRAIL = false;
function drawBoid(ctx, boid) {
const angle = Math.atan2(boid.dy, boid.dx);
ctx.translate(boid.x, boid.y);
ctx.rotate(angle);
ctx.translate(-boid.x, -boid.y);
//ctx.fillStyle = "#558cf4";
ctx.fillStyle = boid.color;
ctx.beginPath();
ctx.moveTo(boid.x, boid.y);
ctx.lineTo(boid.x - 15, boid.y + 5);
ctx.lineTo(boid.x - 15, boid.y - 5);
ctx.lineTo(boid.x, boid.y);
ctx.fill();
ctx.setTransform(1, 0, 0, 1, 0, 0);
if (DRAW_TRAIL) {
ctx.strokeStyle = "#558cf466";
ctx.beginPath();
ctx.moveTo(boid.history[0][0], boid.history[0][1]);
for (const point of boid.history) {
ctx.lineTo(point[0], point[1]);
}
ctx.stroke();
}
}
function drawRectangle(ctx,rect){
ctx.beginPath();
//ctx.fillStyle('green');
//ctx.rect(rect.x, rect.y, rect.width, rect.height);
ctx.fillStyle = rect.color;
ctx.fillRect(rect.x, rect.y, rect.width, rect.height);
//ctx.fillRect(20, 20, 150, 100);
//ctx.fill(rect.color)
ctx.stroke();
}
// Main animation loop
function animationLoop() {
//document.getElementById("clicker").innerHTML = width.toString();
//count += 1;
// Update each boid
for (let boid of boids) {
// Update the velocities according to each rule
flyTowardsCenter(boid);
avoidOthers(boid);
matchVelocity(boid);
limitSpeed(boid);
keepWithinBounds(boid);
collisionDect(boid);
// Update the position based on the current velocity
boid.x += boid.dx;
boid.y += boid.dy;
boid.history.push([boid.x, boid.y])
boid.history = boid.history.slice(-50);
}
// Clear the canvas and redraw all the boids in their current positions
const ctx = document.getElementById("boids").getContext("2d");
ctx.clearRect(0, 0, width, height);
for (let boid of boids) {
drawBoid(ctx, boid);
}
for (let rect of terrain){
drawRectangle(ctx,rect);
}
// Schedule the next frame
window.requestAnimationFrame(animationLoop);
}
//New stuffs
function myFunction() {
//document.getElementById("demo").innerHTML = "YOU CLICKED ME!";
}
function | (){
count += 1;
//document.getElementById("clicker").innerHTML = count.toString();
}
//End
window.onload = () => {
//document.getElementById("demo").onclick = function() {addBoid('green')};
document.getElementById("dump").onclick = function() {boidTeam(20,'blue')};
//document.getElementById("clicker").onclick = function() {clickerAdd()};
//document.getElementById("clicker").innerHTML = 'test';
document.getElementById('output').innerHTML = 'ending';
//document.addEventListener("mousedown",addBoid2, false);
document.addEventListener("touchstart", dragStart, false);
document.addEventListener("touchend", dragEnd, false);
document.addEventListener("touchmove", drag, false);
document.addEventListener("mousedown", dragStart, false);
document.addEventListener("mouseup", dragEnd, false);
document.addEventListener("mousemove", drag, false);
//count += 1;
// Make sure the canvas always fills the whole window
window.addEventListener("resize", sizeCanvas, false);
sizeCanvas();
// Randomly distribute the boids to start
initBoids("red");
addRectangle(700,50,150,300,"blue")
addRectangle(100,0,150,300,"blue")
// Schedule the main animation loop
window.requestAnimationFrame(animationLoop);
};
| clickerAdd |
routes.ts | import { RouteRenderer } from './helper/route';
const ledger: RouteRenderer<typeof import('./windows/inspectors/money-inspector').MoneyInspector> = {
id: 'ledger',
emoji: '💰',
name: 'Money',
root: () =>
import('./windows/inspectors/money-inspector').then(
(m) => m.MoneyInspector
),
};
| const allEntities: RouteRenderer<typeof import('./windows/all-entities').AllEntitities> = {
id: 'allEntities',
emoji: '👩🔧',
name: 'Staffing',
root: () => import('./windows/all-entities').then((m) => m.AllEntitities),
};
const entity: RouteRenderer<typeof import('./windows/inspectors/entity-inspector').EntityInspector> = {
id: 'entity',
emoji: '🔵',
name: 'Inspector',
root: () =>
import('./windows/inspectors/entity-inspector').then(
(m) => m.EntityInspector
),
};
const system: RouteRenderer<typeof import('./windows/system').SystemMenu> = {
id: 'system',
emoji: '🍔',
name: 'Info & Settings',
root: () => import('./windows/system').then((m) => m.SystemMenu),
};
const hire: RouteRenderer<typeof import('./windows/hire').Hire> = {
id: 'hire',
emoji: '👩🔧',
name: 'Hire',
root: () => import('./windows/hire').then((m) => m.Hire),
};
export const routeRenderers = { ledger, allEntities, system, entity, hire };
export type RouteRenderers = typeof routeRenderers; | |
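// Adding another window follows the same lazy-import pattern; a sketch
// (the './windows/research' module and its Research export are hypothetical):
//
// const research: RouteRenderer<typeof import('./windows/research').Research> = {
//   id: 'research',
//   emoji: '🔬',
//   name: 'Research',
//   root: () => import('./windows/research').then((m) => m.Research),
// };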
list.ts | import { Joi, prefabs, validate } from "@api/middleware/validation";
import { Request, Response } from "express";
import { prisma, Notification, User } from "@db";
async function | (req: Request, res: Response) {
const result = await prisma.$transaction([
User.findUnique({
where: {
id: req.user.id,
},
select: {
hasUnread: true,
},
}),
Notification.count({
where: { userId: req.user.id },
}),
Notification.findMany({
where: { userId: req.user.id },
skip: Number(req.query.page) * 20,
take: 20,
orderBy: {
createdAt: "desc",
},
}),
]);
res.status(200).json({
hasUnread: result[0]?.hasUnread || false,
total: result[1] || 0,
list: result[2] || [],
});
}
export default [
validate({
query: Joi.object({
page: prefabs.page.required(),
}),
}),
listNotifications,
];
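// Note: `page` is zero-based, so e.g. page=2 returns notifications 40-59
// (skip = 2 * 20, take = 20), ordered newest first; `hasUnread` comes from
// the user row and falls back to false when the user is missing.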
| listNotifications |
repo_test.go | // Copyright 2019 George Aristy
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package repo_test
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing/object"
"github.com/llorllale/go-gitlint/internal/repo"
)
func TestFilesystem(t *testing.T) {
msgs := []string{"commit1", "commit2", "commit3"}
r, path := tmpGitRepo(t, msgs...)
test := repo.Filesystem(path)()
head, err := test.Head()
require.NoError(t, err)
iter, err := r.Log(&git.LogOptions{From: head.Hash()})
require.NoError(t, err)
err = iter.ForEach(func(c *object.Commit) error {
assert.Contains(t, msgs, c.Message,
"repo.Filesystem() did not return all commits")
return nil
})
require.NoError(t, err)
}
func tmpGitRepo(t *testing.T, msgs ...string) (r *git.Repository, folder string) {
var err error
folder, err = ioutil.TempDir(
"",
strings.ReplaceAll(uuid.New().String(), "-", ""),
)
require.NoError(t, err)
t.Cleanup(func() {
require.NoError(t, os.RemoveAll(folder))
})
r, err = git.PlainInit(folder, false)
require.NoError(t, err)
wt, err := r.Worktree()
require.NoError(t, err)
for i, msg := range msgs {
file := fmt.Sprintf("msg%d.txt", i)
err = ioutil.WriteFile(filepath.Join(folder, file), []byte(msg), 0600) | require.NoError(t, err)
_, err = wt.Add(file)
require.NoError(t, err)
_, err = wt.Commit(msg, &git.CommitOptions{
Author: &object.Signature{
Name: "John Doe",
Email: "[email protected]",
When: time.Now(),
},
})
require.NoError(t, err)
}
return r, folder
} | |
check.py | import json
from pathlib import Path
from typing import Optional
import typer
from . import utils
from .utils import example
from .utils.iohelper import AltTemporaryDirectory
@example()
def check(
project_dir: Path = Path("."), checkout: Optional[str] = None, strict: bool = True
) -> bool:
"""Checks to see if there have been any updates to the Cookiecutter template
used to generate this project."""
cruft_file = utils.cruft.get_cruft_file(project_dir)
cruft_state = json.loads(cruft_file.read_text())
with AltTemporaryDirectory() as cookiecutter_template_dir:
with utils.cookiecutter.get_cookiecutter_repo(
cruft_state["template"],
Path(cookiecutter_template_dir),
checkout,
filter="blob:none",
no_checkout=True,
) as repo:
last_commit = repo.head.object.hexsha
if utils.cruft.is_project_updated(repo, cruft_state["commit"], last_commit, strict):
typer.secho(
"SUCCESS: Good work! Project's cruft is up to date "
"and as clean as possible :).",
fg=typer.colors.GREEN,
)
return True
typer.secho(
"FAILURE: Project's cruft is out of date! Run `cruft update` to clean this mess up.", | fg=typer.colors.RED,
)
return False |
|
compat.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import django
import six
DJANGO3 = django.VERSION[0] == 3
DJANGO2 = django.VERSION[0] == 2
#
# if DJANGO2 or DJANGO3:
# def is_anonymous(user):
# return user.is_anonymous
#
# else:
# def is_anonymous(user):
# return user.is_anonymous() |
if six.PY2:
from django.utils.lru_cache import lru_cache
else:
from functools import lru_cache | |
data.go | package soyutil;
import (
"container/list"
"fmt"
"strconv"
"reflect"
)
var NilDataInstance = &NilData{}
type Equalser interface {
Equals(other interface{}) bool
}
type Stringer interface {
String() string
}
type SoyDataException struct {
msg string
}
func NewSoyDataException(msg string) *SoyDataException {
return &SoyDataException{msg:msg}
}
func (p *SoyDataException) String() string {
return p.msg
}
func (p *SoyDataException) Error() string {
return p.msg
}
type SoyData interface {
/**
* Converts this data object into a string (e.g. when used in a string context).
* @return The value of this data object if coerced into a string.
*/
String() string
/**
* Converts this data object into a boolean (e.g. when used in a boolean context). In other words,
* this method tells whether this object is truthy.
* @return The value of this data object if coerced into a boolean. I.e. true if this object is
* truthy, false if this object is falsy.
*/
Bool() bool
/**
* Precondition: Only call this method if you know that this SoyData object is a boolean.
* This method gets the boolean value of this boolean object.
* @return The boolean value of this boolean object.
* @throws SoyDataException If this object is not actually a boolean.
*/
BooleanValue() (bool)
/**
* Precondition: Only call this method if you know that this SoyData object is an integer.
* This method gets the integer value of this integer object.
* @return The integer value of this integer object.
* @throws SoyDataException If this object is not actually an integer.
*/
IntegerValue() (int)
/**
* Precondition: Only call this method if you know that this SoyData object is a float.
* This method gets the float value of this float object.
* @return The float value of this float object.
* @throws SoyDataException If this object is not actually a float.
*/
FloatValue() (float32)
/**
* Precondition: Only call this method if you know that this SoyData object is a float64.
* This method gets the float value of this number object (converting integer to float if
* necessary).
* @return The float value of this number object.
* @throws SoyDataException If this object is not actually a number.
*/
Float64Value() (float64)
/**
* Precondition: Only call this method if you know that this SoyData object is a number.
* This method gets the float value of this number object (converting integer to float if
* necessary).
* @return The float value of this number object.
* @throws SoyDataException If this object is not actually a number.
*/
NumberValue() (float64)
/**
* Precondition: Only call this method if you know that this SoyData object is a string.
* This method gets the string value of this string object.
* @return The string value of this string object.
* @throws SoyDataException If this object is not actually a string.
*/
StringValue() (string)
SoyData() SoyData
/**
* Compares this data object against another for equality in the sense of the operator '==' for
* Soy expressions.
*
* @param other The other data object to compare against.
* @return True if the two objects are equal.
*/
Equals(other interface{}) bool
}
/**
* Default function implementations for SoyData types
*/
type soyData struct {}
func defaultBooleanValue() (bool) {
return false
}
func defaultIntegerValue() (int) {
return 0
}
func defaultFloatValue() (float32) {
return 0.0
}
func defaultFloat64Value() (float64) {
return 0.0
}
func defaultNumberValue() (float64) {
return 0.0
}
func defaultStringValue() (string) {
return ""
}
type NilData struct {}
func (p NilData) BooleanValue() (bool) {
return false
}
func (p NilData) IntegerValue() (int) {
return 0
}
func (p NilData) FloatValue() (float32) {
return 0.0
}
func (p NilData) Float64Value() (float64) {
return 0.0
}
func (p NilData) NumberValue() (float64) {
return 0.0
}
func (p NilData) StringValue() (string) {
return "null"
}
func (p NilData) Value() interface{} {
return nil
}
func (p NilData) String() string {
return "null"
}
func (p NilData) Bool() bool {
return false
}
func (p NilData) Equals(other interface{}) bool {
return p == other || other == nil
}
func (p NilData) HashCode() int {
return 827
}
func (p NilData) SoyData() SoyData {
return p
}
func (p NilData) At(index int) SoyData {
return p
}
func (p NilData) Back() *list.Element {
return nil
}
func (p NilData) Front() *list.Element {
return nil
}
func (p NilData) HasElements() bool {
return false
}
func (p NilData) Init() SoyListData {
return p
}
func (p NilData) InsertAfter(value SoyData, mark *list.Element) *list.Element {
return nil
}
func (p NilData) InsertBefore(value SoyData, mark *list.Element) *list.Element {
return nil
}
func (p NilData) IsEmpty() bool {
return true
}
func (p NilData) Len() int {
return 0
}
func (p NilData) MoveToBack(e *list.Element) {
}
func (p NilData) MoveToFront(e *list.Element) {
}
func (p NilData) PushBack(value SoyData) *list.Element {
return nil
}
func (p NilData) PushBackList(ol SoyListData) {
}
func (p NilData) PushFront(value SoyData) *list.Element {
return nil
}
func (p NilData) PushFrontList(ol SoyListData) {
}
func (p NilData) Remove(e *list.Element) SoyData {
return p
}
type BooleanData bool
func NewBooleanData(value bool) BooleanData {
return BooleanData(value)
}
func (p BooleanData) Value() bool {
return bool(p)
}
func (p BooleanData) BooleanValue() (bool) {
return bool(p)
}
func (p BooleanData) IntegerValue() (int) {
if p {
return 1
}
return 0
}
func (p BooleanData) FloatValue() (float32) {
if p {
return 1
}
return 0
}
func (p BooleanData) Float64Value() (float64) {
if p {
return 1
}
return 0
}
func (p BooleanData) NumberValue() (float64) {
if p {
return 1
}
return 0
}
func (p BooleanData) StringValue() (string) {
return p.String()
}
func (p BooleanData) String() string {
if p {
return "true"
}
return "false"
}
func (p BooleanData) Bool() bool {
return bool(p)
}
func (p BooleanData) Equals(other interface{}) bool {
if other == nil {
return false
}
switch o := other.(type) {
case *NilData:
return false;
case bool:
return bool(p) == o
case SoyData:
return bool(p) == o.Bool()
}
return false
}
func (p BooleanData) HashCode() int {
if p {
return 1
}
return 0
}
func (p BooleanData) SoyData() SoyData {
return p
}
type IntegerData int
func NewIntegerData(value int) IntegerData {
return IntegerData(value)
}
func (p IntegerData) Value() int {
return int(p)
}
func (p IntegerData) BooleanValue() (bool) {
return p.Value() != 0
}
func (p IntegerData) IntegerValue() (int) {
return p.Value()
}
func (p IntegerData) FloatValue() (float32) {
return float32(p.Value())
}
func (p IntegerData) Float64Value() (float64) {
return float64(p.Value())
}
func (p IntegerData) NumberValue() (float64) {
return float64(p.Value())
}
func (p IntegerData) StringValue() (string) {
return strconv.Itoa(p.Value())
}
func (p IntegerData) String() string {
return strconv.Itoa(p.Value())
}
func (p IntegerData) Bool() bool {
return p.Value() != 0
}
func (p IntegerData) Equals(other interface{}) bool {
if other == nil {
return false
}
switch o := other.(type) {
case *NilData:
return false;
case int:
return int(p) == o
case int32:
return int(p) == int(o)
case int64:
return int(p) == int(o)
case float32:
return float64(p) == float64(o)
case float64:
return float64(p) == o
case SoyData:
return int(p) == o.IntegerValue()
}
return false
}
func (p IntegerData) HashCode() int {
return int(p)
}
func (p IntegerData) SoyData() SoyData {
return p
}
type Float64Data float64
func NewFloat64Data(value float64) Float64Data {
return Float64Data(value)
}
func (p Float64Data) BooleanValue() (bool) {
return p != 0.0
}
func (p Float64Data) IntegerValue() (int) {
return int(p)
}
func (p Float64Data) Value() float64 {
return float64(p)
}
func (p Float64Data) FloatValue() (float32) {
return float32(p)
}
func (p Float64Data) Float64Value() (float64) {
return float64(p)
}
func (p Float64Data) NumberValue() (float64) {
return float64(p)
}
func (p Float64Data) StringValue() string {
return strconv.FormatFloat(float64(p), 'g', -1, 64)
}
func (p Float64Data) String() string {
return strconv.FormatFloat(float64(p), 'g', -1, 64)
}
func (p Float64Data) Bool() bool {
return p != 0.0
}
func (p Float64Data) Equals(other interface{}) bool {
if other == nil {
return false
}
switch o := other.(type) {
case *NilData:
return false;
case int:
return float64(p) == float64(o)
case int32:
return float64(p) == float64(o)
case int64:
return float64(p) == float64(o)
case float32:
return float64(p) == float64(o)
case float64:
return float64(p) == o
case SoyData:
return float64(p) == o.Float64Value()
}
return false
}
func (p Float64Data) HashCode() int {
return int(p)
}
func (p Float64Data) SoyData() SoyData {
return p
}
type StringData string
func NewStringData(value string) StringData {
return StringData(value)
}
func (p StringData) Value() string {
return string(p)
}
func (p StringData) BooleanValue() (bool) {
return defaultBooleanValue()
}
func (p StringData) IntegerValue() (int) {
return defaultIntegerValue()
}
func (p StringData) FloatValue() (float32) {
return defaultFloatValue()
}
func (p StringData) Float64Value() (float64) {
return defaultFloat64Value()
}
func (p StringData) NumberValue() (float64) {
return defaultNumberValue()
}
func (p StringData) StringValue() (string) {
return string(p)
}
func (p StringData) String() string {
return string(p)
}
func (p StringData) Bool() bool {
return len(p) > 0
}
func (p StringData) Len() int {
return len(p)
}
func (p StringData) Equals(other interface{}) bool {
if other == nil {
return false
}
switch o := other.(type) {
case *NilData:
return false;
case string:
return string(p) == o
case SoyData:
return string(p) == o.StringValue()
}
return false
}
func (p StringData) HashCode() int {
// todo create efficient string hashcode function
return 123
}
func (p StringData) SoyData() SoyData {
return p
}
type SoyListData interface {
SoyData
At(index int) SoyData
Back() *list.Element
Front() *list.Element
HasElements() bool
Init() SoyListData
InsertAfter(value SoyData, mark *list.Element) *list.Element
InsertBefore(value SoyData, mark *list.Element) *list.Element
IsEmpty() bool
Len() int
MoveToBack(e *list.Element)
MoveToFront(e *list.Element)
PushBack(value SoyData) *list.Element
PushBackList(ol SoyListData)
PushFront(value SoyData) *list.Element
PushFrontList(ol SoyListData)
Remove(e *list.Element) SoyData
}
type soyListData struct {
l *list.List
}
func NewSoyListData() SoyListData {
return &soyListData{l:list.New()}
}
func NewSoyListDataFromArgs(args... interface{}) SoyListData {
l := list.New()
for _, v := range args {
s, _ := ToSoyData(v)
l.PushBack(s)
}
o := &soyListData{l:l}
return o
}
func NewSoyListDataFromSoyListData(o SoyListData) SoyListData {
if o == nil {
return &soyListData{l:list.New()}
}
a := &soyListData{l:list.New()}
a.PushBackList(o)
return a
}
func NewSoyListDataFromList(o *list.List) SoyListData {
if o == nil {
return &soyListData{l:list.New()}
}
l := list.New()
l.PushBackList(o)
a := &soyListData{l:l}
return a
}
func NewSoyListDataFromVector(o []SoyData) SoyListData {
if o == nil {
return &soyListData{l:list.New()}
}
l := list.New()
for i := 0; i < len(o); i++ {
l.PushBack(o[i])
}
a := &soyListData{l:l}
return a
}
func (p *soyListData) Bool() bool {
return p.Len() > 0
}
func (p *soyListData) String() string {
return fmt.Sprintf("[%#v]", p.l)
}
func (p *soyListData) BooleanValue() (bool) {
return defaultBooleanValue()
}
func (p *soyListData) IntegerValue() (int) {
return defaultIntegerValue()
}
func (p *soyListData) FloatValue() (float32) {
return defaultFloatValue()
}
func (p *soyListData) Float64Value() (float64) {
return defaultFloat64Value()
}
func (p *soyListData) NumberValue() (float64) {
return defaultNumberValue()
}
func (p *soyListData) StringValue() (string) {
return p.String()
}
func (p *soyListData) Equals(other interface{}) bool {
if p == other {
return true
}
if other == nil {
return false
}
if o, ok := other.(SoyListData); ok {
if p.Len() != o.Len() {
return false
}
for oe, pe := o.Front(), p.Front(); oe != nil && pe != nil; oe, pe = oe.Next(), pe.Next() {
if oe.Value == pe.Value {
continue
}
if oe.Value != nil {
if e, ok := oe.Value.(Equalser); ok {
if e.Equals(pe.Value) {
continue
}
}
}
return false
}
return true
}
return false
}
func (p *soyListData) SoyData() SoyData {
return p
}
func (p *soyListData) At(index int) SoyData {
e := p.l.Front()
for i := 0; i < index && e != nil; i++ {
e = e.Next()
}
if e == nil {
return NilDataInstance
}
return e.Value.(SoyData)
}
func (p *soyListData) Back() *list.Element {
return p.l.Back()
}
func (p *soyListData) Front() *list.Element {
return p.l.Front()
}
func (p *soyListData) HasElements() bool {
return p.l.Len() > 0
}
func (p *soyListData) Init() SoyListData {
p.l.Init()
return p
}
func (p *soyListData) InsertAfter(value SoyData, mark *list.Element) *list.Element {
return p.l.InsertAfter(value, mark)
}
func (p *soyListData) InsertBefore(value SoyData, mark *list.Element) *list.Element {
return p.l.InsertBefore(value, mark)
}
func (p *soyListData) IsEmpty() bool {
return p.l.Len() == 0
}
func (p *soyListData) Len() int {
return p.l.Len()
}
func (p *soyListData) MoveToBack(e *list.Element) {
p.l.MoveToBack(e)
}
func (p *soyListData) MoveToFront(e *list.Element) {
p.l.MoveToFront(e)
}
func (p *soyListData) PushBack(value SoyData) *list.Element {
return p.l.PushBack(value)
}
func (p *soyListData) PushBackList(ol SoyListData) {
if ol == nil {
return
}
if osld, ok := ol.(*soyListData); ok {
p.l.PushBackList(osld.l)
} else {
for e := ol.Front(); e != nil; e = e.Next() {
p.l.PushBack(e.Value)
}
}
}
func (p *soyListData) PushFront(value SoyData) *list.Element {
return p.l.PushFront(value)
}
func (p *soyListData) PushFrontList(ol SoyListData) {
if ol == nil {
return
}
if osld, ok := ol.(*soyListData); ok {
p.l.PushFrontList(osld.l)
} else {
for e := ol.Back(); e != nil; e = e.Prev() {
p.l.PushFront(e.Value)
}
}
}
func (p *soyListData) Remove(e *list.Element) SoyData {
return p.l.Remove(e).(SoyData)
}
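// Usage sketch (illustrative, not from the original source): SoyListData wraps
// container/list and stores SoyData values; NewSoyListDataFromArgs converts
// each argument through ToSoyData.
//
//	l := NewSoyListDataFromArgs(1, "two", 3.0)
//	l.Len()                   // 3
//	l.At(1).StringValue()     // "two"
//	l.At(9)                   // out of range, returns NilDataInstance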
type SoyMapData map[string]SoyData
func NewSoyMapData() SoyMapData {
return make(SoyMapData)
}
func NewSoyMapDataFromArgs(args ...interface{}) SoyMapData {
m := make(map[string]SoyData)
isKey := true
var key string
for _, arg := range args {
if isKey {
sdk, err := ToSoyData(arg)
if err != nil {
return nil
}
key = sdk.String()
} else {
value, err := ToSoyData(arg)
if err != nil {
return nil
}
m[key] = value
}
isKey = !isKey
}
return SoyMapData(m)
}
func NewSoyMapDataFromGenericMap(o map[string]interface{}) SoyMapData {
m := make(map[string]SoyData)
for key, v := range o {
value, err := ToSoyData(v)
if err != nil {
return nil
}
m[key] = value
}
return SoyMapData(m)
}
func NewSoyMapDataFromMap(o map[string]SoyData) SoyMapData {
return SoyMapData(o)
}
func (p SoyMapData) BooleanValue() (bool) {
return defaultBooleanValue()
}
func (p SoyMapData) IntegerValue() (int) {
return defaultIntegerValue()
}
func (p SoyMapData) FloatValue() (float32) {
return defaultFloatValue()
}
func (p SoyMapData) Float64Value() (float64) {
return defaultFloat64Value()
}
func (p SoyMapData) NumberValue() (float64) {
return defaultNumberValue()
}
func (p SoyMapData) StringValue() (string) {
return defaultStringValue()
}
func (p SoyMapData) Len() int {
return len(p)
}
func (p SoyMapData) Get(key string) SoyData {
value, ok := p[key]
if !ok {
return NilDataInstance
}
return value
}
func (p SoyMapData) Contains(key string) bool {
_, ok := p[key]
return ok
}
func (p SoyMapData) Keys() []string {
arr := make([]string, len(p))
i := 0
for k := range p {
arr[i] = k
i++
}
return arr
}
func (p SoyMapData) Set(key string, value SoyData) {
p[key] = value
}
func (p SoyMapData) Bool() bool {
return len(p) > 0
}
func (p SoyMapData) String() string {
return fmt.Sprintf("%#v", map[string]SoyData(p))
}
func (p SoyMapData) Equals(other interface{}) bool {
if other == nil {
return false
}
	if o, ok := other.(SoyMapData); ok {
		if len(p) != len(o) {
			return false
		}
		// Compare entries pairwise via the Equalser interface.
		for k, v := range p {
			ov, present := o[k]
			if !present {
				return false
			}
			if e, isEq := v.(Equalser); !isEq || !e.Equals(ov) {
				return false
			}
		}
		return true
}
return false
}
func (p SoyMapData) SoyData() SoyData {
return p
}
func (p SoyMapData) HasElements() bool {
return len(p) > 0
}
func (p SoyMapData) IsEmpty() bool {
return len(p) == 0
}
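// Usage sketch (illustrative): missing keys come back as NilDataInstance,
// so callers can chain accessors without nil checks.
//
//	m := NewSoyMapData()
//	m.Set("greeting", NewStringData("hi"))
//	m.Get("greeting").StringValue() // "hi"
//	m.Contains("missing")           // false
//	m.Get("missing")                // NilDataInstance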
func ToBooleanData(obj interface{}) BooleanData {
if obj == nil || obj == NilDataInstance {
return NewBooleanData(false)
}
if o, ok := obj.(BooleanData); ok {
return o
}
s := ToSoyDataNoErr(obj)
if o, ok := s.(BooleanData); ok {
return o
}
return NewBooleanData(s.BooleanValue())
}
func ToIntegerData(obj interface{}) IntegerData {
if obj == nil || obj == NilDataInstance {
return NewIntegerData(0)
}
if o, ok := obj.(IntegerData); ok {
return o
}
s := ToSoyDataNoErr(obj)
if o, ok := s.(IntegerData); ok {
return o
}
return NewIntegerData(s.IntegerValue())
}
func ToFloat64Data(obj interface{}) Float64Data {
if obj == nil || obj == NilDataInstance {
return NewFloat64Data(0.0)
}
if o, ok := obj.(Float64Data); ok {
return o
}
s := ToSoyDataNoErr(obj)
if o, ok := s.(Float64Data); ok {
return o
}
return NewFloat64Data(s.Float64Value())
}
func ToStringData(obj interface{}) StringData {
if obj == nil || obj == NilDataInstance {
return NewStringData("")
}
if o, ok := obj.(StringData); ok {
return o
}
s := ToSoyDataNoErr(obj)
if o, ok := s.(StringData); ok {
return o
}
return NewStringData(s.StringValue())
}
func ToSoyListData(obj interface{}) SoyListData {
if obj == nil || obj == NilDataInstance {
return NewSoyListData()
}
if o, ok := obj.(SoyListData); ok {
return o
}
s := ToSoyDataNoErr(obj)
if o, ok := s.(SoyListData); ok {
return o
}
return NewSoyListData()
}
func ToSoyMapData(obj interface{}) SoyMapData {
if obj == nil || obj == NilDataInstance {
return NewSoyMapData()
}
if o, ok := obj.(SoyMapData); ok {
return o
}
s := ToSoyDataNoErr(obj)
if o, ok := s.(SoyMapData); ok {
return o
}
return NewSoyMapData()
}
func ToSoyDataNoErr(obj interface{}) SoyData {
s, _ := ToSoyData(obj)
return s
}
/**
* Creation function for creating a SoyData object out of any existing primitive, data object, or
* data structure.
*
* <p> Important: Avoid using this function if you know the type of the object at compile time.
* For example, if the object is a primitive, it can be passed directly to methods such as
* {@code SoyMapData.put()} or {@code SoyListData.add()}. If the object is a Map or an Iterable,
* you can directly create the equivalent SoyData object using the constructor of
* {@code SoyMapData} or {@code SoyListData}.
*
* <p> If the given object is already a SoyData object, then it is simply returned.
* Otherwise a new SoyData object will be created that is equivalent to the given primitive, data
* object, or data structure (even if the given object is null!).
*
* <p> Note that in order for the conversion process to succeed, the given data structure must
* correspond to a valid SoyData tree. Some requirements include:
* (a) all Maps within your data structure must have string keys that are identifiers,
* (b) all non-leaf nodes must be Maps or Lists,
* (c) all leaf nodes must be null, boolean, int, double, or String (corresponding to Soy
* primitive data types null, boolean, integer, float, string).
*
* @param obj The existing object or data structure to convert.
* @return A SoyData object or tree that corresponds to the given object.
* @throws SoyDataException If the given object cannot be converted to SoyData.
*/
func ToSoyData(obj interface{}) (SoyData, error) {
if obj == nil {
return NilDataInstance, nil
}
if o, ok := obj.(SoyData); ok && o != nil {
return o, nil
}
switch o := obj.(type) {
case nil:
return NilDataInstance, nil
case SoyData:
return o, nil
case string:
return NewStringData(o), nil
case bool:
return NewBooleanData(o), nil
case uint:
return NewIntegerData(int(o)), nil
case int:
return NewIntegerData(o), nil
case int32:
return NewIntegerData(int(o)), nil
case int64:
return NewIntegerData(int(o)), nil
case float32:
return NewFloat64Data(float64(o)), nil
case float64:
return NewFloat64Data(o), nil
case *list.List:
return NewSoyListDataFromList(o), nil
case []SoyData:
return NewSoyListDataFromVector(o), nil
}
rv := reflect.ValueOf(obj)
switch rv.Kind() {
case reflect.Array, reflect.Slice:
l := NewSoyListData()
for i := 0; i < rv.Len(); i++ {
v := rv.Index(i)
var sv SoyData
if v.Interface() == nil {
sv = NilDataInstance
} else {
sv, _ = ToSoyData(v.Interface())
}
l.PushBack(sv)
}
return l, nil
case reflect.Map:
m := NewSoyMapData()
if !rv.IsNil() {
for _, key := range rv.MapKeys() {
var k string
var sv SoyData
if key.Interface() == nil {
k = "null"
} else if st, ok := key.Interface().(Stringer); ok {
k = st.String()
} else if k, ok = key.Interface().(string); ok | else {
s, _ := ToSoyData(key.Interface())
k = s.StringValue()
}
av := rv.MapIndex(key)
if av.Interface() == nil {
sv = NilDataInstance
} else {
sv, _ = ToSoyData(av.Interface())
}
m.Set(k, sv)
}
}
return m, nil
case reflect.Struct:
m := NewSoyMapData()
rt := rv.Type()
for i := 0; i < rt.NumField(); i++ {
f := rt.Field(i)
k := f.Name
v, _ := ToSoyData(rv.Field(i).Interface())
m.Set(k, v)
}
return m, nil
}
	str := fmt.Sprintf("Attempting to convert unrecognized object to Soy data (object type %T).", obj)
return NilDataInstance, NewSoyDataException(str)
}
| {
} |
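// Usage sketch for ToSoyData (illustrative): primitives, maps, slices, and
// structs all convert; unrecognized types return NilDataInstance plus an error.
//
//	data, err := ToSoyData(map[string]interface{}{"name": "world", "count": 3})
//	if err == nil {
//		m := data.(SoyMapData)
//		m.Get("name").StringValue() // "world"
//	}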
monitor_helper_spec.js | import * as monitorHelper from '~/helpers/monitor_helper';
describe('monitor helper', () => {
const defaultConfig = { default: true, name: 'default name' };
const name = 'data name';
const series = [[1, 1], [2, 2], [3, 3]];
const data = ({ metric = { default_name: name }, values = series } = {}) => [{ metric, values }];
describe('makeDataSeries', () => {
const expectedDataSeries = [
{
...defaultConfig,
data: series,
},
];
it('converts query results to data series', () => {
expect(monitorHelper.makeDataSeries(data({ metric: {} }), defaultConfig)).toEqual(
expectedDataSeries,
);
});
it('returns an empty array if no query results exist', () => {
expect(monitorHelper.makeDataSeries([], defaultConfig)).toEqual([]);
});
it('handles multi-series query results', () => {
const expectedData = { ...expectedDataSeries[0], name: 'default name: data name' };
expect(monitorHelper.makeDataSeries([...data(), ...data()], defaultConfig)).toEqual([
expectedData,
expectedData,
]);
});
it('excludes NaN values', () => {
expect(
monitorHelper.makeDataSeries(
data({ metric: {}, values: [[1, 1], [2, NaN]] }),
defaultConfig,
),
).toEqual([{ ...expectedDataSeries[0], data: [[1, 1]] }]);
});
it('updates series name from templates', () => {
const config = {
...defaultConfig,
name: '{{cmd}}',
};
const [result] = monitorHelper.makeDataSeries(
[{ metric: { cmd: 'brpop' }, values: series }],
config,
);
expect(result.name).toEqual('brpop');
});
it('supports a multi metric label template expression', () => {
const config = {
...defaultConfig,
name: '',
};
const [result] = monitorHelper.makeDataSeries(
[
{
metric: {
backend: 'HA Server',
frontend: 'BA Server',
app: 'prometheus',
instance: 'k8 cluster 1',
},
values: series,
},
],
config,
); | );
});
it('supports space-padded template expressions', () => {
const config = {
...defaultConfig,
name: 'backend: {{ backend }}',
};
const [result] = monitorHelper.makeDataSeries(
[{ metric: { backend: 'HA Server' }, values: series }],
config,
);
expect(result.name).toEqual('backend: HA Server');
});
it('supports repeated template variables', () => {
const config = { ...defaultConfig, name: '{{cmd}}, {{cmd}}' };
const [result] = monitorHelper.makeDataSeries(
[{ metric: { cmd: 'brpop' }, values: series }],
config,
);
expect(result.name).toEqual('brpop, brpop');
});
it('supports hyphenated template variables', () => {
const config = { ...defaultConfig, name: 'expired - {{ test-attribute }}' };
const [result] = monitorHelper.makeDataSeries(
[{ metric: { 'test-attribute': 'test-attribute-value' }, values: series }],
config,
);
expect(result.name).toEqual('expired - test-attribute-value');
});
it('updates multiple series names from templates', () => {
const config = {
...defaultConfig,
name: '{{job}}: {{cmd}}',
};
const [result] = monitorHelper.makeDataSeries(
[{ metric: { cmd: 'brpop', job: 'redis' }, values: series }],
config,
);
expect(result.name).toEqual('redis: brpop');
});
it('updates name for each series', () => {
const config = {
...defaultConfig,
name: '{{cmd}}',
};
const [firstSeries, secondSeries] = monitorHelper.makeDataSeries(
[
{ metric: { cmd: 'brpop' }, values: series },
{ metric: { cmd: 'zrangebyscore' }, values: series },
],
config,
);
expect(firstSeries.name).toEqual('brpop');
expect(secondSeries.name).toEqual('zrangebyscore');
});
});
}); |
expect(result.name).toBe(
'backend: HA Server, frontend: BA Server, app: prometheus, instance: k8 cluster 1', |
sftp.js | /**
* ssh2 sftp client
*/
const { Client } = require('@electerm/ssh2')
const proxySock = require('./socks')
const _ = require('lodash')
const { readRemoteFile, writeRemoteFile } = require('./sftp-file')
const alg = require('./ssh2-alg')
const getPort = require('get-port')
class | {
constructor () {
this.client = new Client()
this.tunnelClient = new Client()
}
/**
* connect to server
* @return {Promise} sftp inst
*/
connect (config) {
const { client, tunnelClient } = this
const confs = Object.assign(
{
tryKeyboard: true
},
{
readyTimeout: _.get(config, 'readyTimeout'),
keepaliveInterval: _.get(config, 'keepaliveInterval'),
agent: process.env.SSH_AUTH_SOCK,
algorithms: alg
},
config
)
if (!confs.password) {
delete confs.password
}
if (!confs.passphrase) {
delete confs.passphrase
}
return new Promise((resolve, reject) => {
if (config.tunnelHost) {
const cpParam = Object.assign(
{},
{
readyTimeout: _.get(config, 'sshReadyTimeout'),
keepaliveInterval: _.get(config, 'keepaliveInterval'),
agent: process.env.SSH_AUTH_SOCK
},
_.pick(config, [
'tunnelHost',
'tunnelPort',
'tunnelUsername',
'tunnelPassword',
'tunnelPrivateKey',
'tunnelPassphrase'
])
)
cpParam.host = cpParam['tunnelHost']
cpParam.port = cpParam['tunnelPort']
cpParam.username = cpParam['tunnelUsername']
cpParam.password = cpParam['tunnelPassword']
cpParam.privateKey = cpParam['tunnelPrivateKey']
cpParam.passphrase = cpParam['tunnelPassphrase']
delete cpParam.tunnelHost
delete cpParam.tunnelPort
delete cpParam.tunnelUsername
delete cpParam.tunnelPassword
delete cpParam.tunnelPrivateKey
delete cpParam.tunnelPassphrase
      // getPort() returns a promise for a free local port; resolve it when needed
      const localPortPromise = getPort()
const run = (info) => {
if (info && info.socket) {
delete cpParam.host
delete cpParam.port
delete cpParam.proxy
cpParam.sock = info.socket
}
client
.on('keyboard-interactive', (
name,
instructions,
instructionsLang,
prompts,
finish
) => {
finish([confs.password])
})
          .on('ready', async () => {
            const tunnelOpt = Object.assign({}, config)
            delete tunnelOpt.host
            delete tunnelOpt.port
            // wait for the free local port before opening the forwarded stream
            const localPort = await localPortPromise
            client.forwardOut('127.0.0.1', localPort, config.host, config.port, (err, stream) => {
if (err) {
reject(err)
return
}
tunnelOpt.sock = stream
tunnelClient.on('ready', () => {
tunnelClient.sftp((err, sftp) => {
if (err) {
reject(err)
}
this.sftp = sftp
resolve('')
})
}).connect(tunnelOpt)
})
})
.on('error', (err) => {
client.end()
reject(err)
})
.connect(cpParam)
}
if (
config.proxy &&
config.proxy.proxyIp &&
config.proxy.proxyPort
) {
proxySock({
...config,
...cpParam
}).then(run)
} else {
run()
}
} else {
const run = (info) => {
if (info && info.socket) {
delete confs.host
delete confs.port
delete confs.proxy
confs.sock = info.socket
}
client
.on('keyboard-interactive', (
name,
instructions,
instructionsLang,
prompts,
finish
) => {
finish([confs.password])
})
.on('ready', () => {
client.sftp((err, sftp) => {
if (err) {
reject(err)
}
this.sftp = sftp
resolve('')
})
})
.on('error', (err) => {
client.end()
reject(err)
})
.connect(confs)
}
if (
config.proxy &&
config.proxy.proxyIp &&
config.proxy.proxyPort
) {
proxySock({
...config,
...confs
}).then(run)
} else {
run()
}
}
})
}
// /**
// * connect to socks5 proxy
// */
// proxy(config) {
// const socks = socksv5.connect({
// host: options.host,
// port: options.port,
// proxyHost: options.proxyHost,
// proxyPort: options.proxyPort,
// auths: options.auths || [ socksv5.auth.None() ]
// }, (socket) => {
// let sshOptions = Object.assign({
// sock: socket
// }, options)
// delete sshOptions.host
// delete sshOptions.port
// delete sshOptions.proxyHost
// delete sshOptions.proxyPort
// return super.connect(sshOptions)
// });
// socks.on('error', (error) => {
// error.level = 'socks'
// this.emit('error', error)
// })
// }
/**
* list remote directory
*
* @param {String} remotePath
* @return {Promise} list
*/
list (remotePath) {
return new Promise((resolve, reject) => {
const { sftp } = this
const reg = /-/g
sftp.readdir(remotePath, (err, list) => {
if (err) {
return reject(err)
}
resolve(list.map(item => {
const {
filename,
longname,
attrs: {
size, mtime, atime, uid, gid, mode
}
} = item
// from https://github.com/jyu213/ssh2-sftp-client/blob/master/src/index.js
return {
type: longname.substr(0, 1),
name: filename,
size,
modifyTime: mtime * 1000,
accessTime: atime * 1000,
mode,
rights: {
user: longname.substr(1, 3).replace(reg, ''),
group: longname.substr(4, 3).replace(reg, ''),
other: longname.substr(7, 3).replace(reg, '')
},
owner: uid,
group: gid
}
}))
})
})
}
/**
* mkdir
*
* @param {String} remotePath
* @param {Object} attributes
* An object with the following valid properties:
mode - integer - Mode/permissions for the resource.
uid - integer - User ID of the resource.
gid - integer - Group ID of the resource.
size - integer - Resource size in bytes.
atime - integer - UNIX timestamp of the access time of the resource.
mtime - integer - UNIX timestamp of the modified time of the resource.
When supplying an ATTRS object to one of the SFTP methods:
atime and mtime can be either a Date instance or a UNIX timestamp.
mode can either be an integer or a string containing an octal number.
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
mkdir (remotePath, options = {}) {
return new Promise((resolve, reject) => {
const { sftp } = this
sftp.mkdir(remotePath, options, err => {
if (err) reject(err)
else resolve()
})
})
}
/**
* getHomeDir
*
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* only support linux / mac
* @return {Promise}
*/
getHomeDir () {
return new Promise((resolve, reject) => {
const { client } = this
const cmd = 'eval echo "~$different_user"'
client.exec(cmd, (err, stream) => {
if (err) reject(err)
stream.on('data', function (data) {
resolve(data.toString())
})
})
})
}
/**
* rmdir
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* only support rm -rf
* @return {Promise}
*/
rmdir (remotePath) {
return new Promise((resolve, reject) => {
const { client } = this
const cmd = `rm -rf "${remotePath}"`
client.exec(cmd, err => {
if (err) reject(err)
else resolve()
})
})
}
/**
* stat
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise} stat
* stats.isDirectory()
stats.isFile()
stats.isBlockDevice()
stats.isCharacterDevice()
stats.isSymbolicLink()
stats.isFIFO()
stats.isSocket()
*/
stat (remotePath) {
return new Promise((resolve, reject) => {
const { sftp } = this
sftp.stat(remotePath, (err, stat) => {
if (err) reject(err)
else {
resolve(
Object.assign(stat, {
isDirectory: stat.isDirectory()
})
)
}
})
})
}
/**
* readlink
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise} target
*/
readlink (remotePath) {
return new Promise((resolve, reject) => {
const { sftp } = this
sftp.readlink(remotePath, (err, target) => {
if (err) reject(err)
else resolve(target)
})
})
}
/**
* realpath
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise} target
*/
realpath (remotePath) {
return new Promise((resolve, reject) => {
const { sftp } = this
sftp.realpath(remotePath, (err, target) => {
if (err) reject(err)
else resolve(target)
})
})
}
/**
* lstat
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise} stat
* stats.isDirectory()
stats.isFile()
stats.isBlockDevice()
stats.isCharacterDevice()
stats.isSymbolicLink()
stats.isFIFO()
stats.isSocket()
*/
lstat (remotePath) {
return new Promise((resolve, reject) => {
const { sftp } = this
sftp.lstat(remotePath, (err, stat) => {
if (err) reject(err)
else resolve(stat)
})
})
}
/**
* chmod
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
chmod (remotePath, mode) {
return new Promise((resolve, reject) => {
const { sftp } = this
sftp.chmod(remotePath, mode, (err) => {
if (err) reject(err)
else resolve()
})
})
}
/**
* rename
*
* @param {String} remotePath
* @param {String} remotePathNew
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
rename (remotePath, remotePathNew) {
return new Promise((resolve, reject) => {
const { sftp } = this
sftp.rename(remotePath, remotePathNew, (err) => {
if (err) reject(err)
else resolve()
})
})
}
/**
* rm delete single file
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
rm (remotePath) {
return new Promise((resolve, reject) => {
const { sftp } = this
sftp.unlink(remotePath, (err) => {
if (err) reject(err)
else resolve()
})
})
}
/**
* touch a file
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
touch (remotePath) {
return new Promise((resolve, reject) => {
const { client } = this
const cmd = `touch "${remotePath}"`
client.exec(cmd, err => {
if (err) reject(err)
else resolve()
})
})
}
/**
* mv
*
* @param {String} from
* @param {String} to
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
mv (from, to) {
return new Promise((resolve, reject) => {
const { client } = this
const cmd = `mv "${from}" "${to}"`
client.exec(cmd, (err) => {
if (err) reject(err)
else resolve()
})
})
}
/**
* cp
*
* @param {String} from
* @param {String} to
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
cp (from, to) {
return new Promise((resolve, reject) => {
const { client } = this
const cmd = `cp -r "${from}" "${to}"`
client.exec(cmd, (err) => {
if (err) reject(err)
else resolve()
})
})
}
/**
* readFile single file
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
readFile (remotePath) {
return readRemoteFile(this.sftp, remotePath)
}
/**
* writeFile single file
*
* @param {String} remotePath
* https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
* @return {Promise}
*/
writeFile (remotePath, str, mode) {
return writeRemoteFile(this.sftp, remotePath, str, mode)
}
// end
}
module.exports = {
Sftp,
instSftpKeys: [
'connect',
'list',
'download',
'upload',
'mkdir',
'getHomeDir',
'rmdir',
'stat',
'lstat',
'chmod',
'rename',
'rm',
'touch',
'readlink',
'realpath',
'mv',
'cp',
'readFile',
'writeFile'
]
}
| Sftp |
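// Usage sketch (illustrative; host and credentials below are placeholders):
// const { Sftp } = require('./sftp')
// const sftp = new Sftp()
// sftp.connect({ host: 'example.com', port: 22, username: 'user', password: 'secret' })
//   .then(() => sftp.list('/home/user'))
//   .then(items => console.log(items))
//   .catch(console.error)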
d17_test.go | package y2021m02
import (
"fmt"
"testing"
"github.com/yourtion/LeetCode/golang/utils"
)
func Test_20210217(t *testing.T) {
type params struct {
para1 [][]int
para2 int
para3 int
ans [][]int
}
qs := []params{
{
para1: [][]int{{1, 2}, {3, 4}},
para2: 1,
para3: 4,
ans: [][]int{{1, 2, 3, 4}}, | {
para1: [][]int{{1, 2}, {3, 4}},
para2: 2,
para3: 4,
ans: [][]int{{1, 2}, {3, 4}},
},
}
utils.Segmentation("20210217")
for _, q := range qs {
ret, p, r, c := q.ans, q.para1, q.para2, q.para3
res := matrixReshape(p, r, c)
fmt.Printf("【input】: %v %d %d \t【output】: %v\n", p, r, c, res)
if !utils.DeepEqual(ret, res) {
t.Errorf(`"%v" not equal to "%v"`, res, ret)
}
}
} | }, |
index.js | /* * * * * * * * * * * * * * * * * * * * * * * * *\
*
* FileName : index.js
* Author : Association of Southeast Asian Nations Build The Earth
* CreateTime : 15-5-2021
* Organization : https://github.com/ASEAN-Build-The-Earth
* Description : Merlion discord bot core file
* FileType : JS File
*
\* * * * * * * * * * * * * * * * * * * * * * * * */
const fs = require('fs');
const { Client, Intents, Collection, MessageEmbed } = require('discord.js');
const { prefix } = require('./data/config.json');
const { token } = require("./data/auth.json")
const client = new Client({
intents: [
Intents.FLAGS.DIRECT_MESSAGES,
Intents.FLAGS.GUILDS,
Intents.FLAGS.GUILD_BANS,
Intents.FLAGS.GUILD_MEMBERS,
Intents.FLAGS.GUILD_MESSAGES,
Intents.FLAGS.GUILD_WEBHOOKS,
Intents.FLAGS.GUILD_VOICE_STATES
] //,
//partials: ["CHANNEL"]
});
client.commands = new Collection();
const commandFolders = fs.readdirSync('./commands');
for (const folder of commandFolders) {
const commandFiles = fs.readdirSync(`./commands/${folder}`).filter(file => file.endsWith('.js'));
for (const file of commandFiles) {
const command = require(`./commands/${folder}/${file}`);
client.commands.set(command.name, command);
}
	// ===== TEMPORARY SPECIAL FILE SEARCHING =====
	// - it searches for commands in ./commands/Fun/Images since the code above doesn't support a second folder level.
const funImageDirectory = "./commands/Fun/Images";
try
{
if (fs.existsSync(funImageDirectory))
{
console.log("Image directory exists")
const funImagesFile = fs.readdirSync(funImageDirectory).filter(file => file.endsWith('.js'));
for (const file of funImagesFile)
{
const specialCommand = require(`${funImageDirectory}/${file}`);
client.commands.set(specialCommand.name, specialCommand);
}
}
}
catch(error)
{
console.log("Cannot find Images directory:\n" + error);
}
}
client.once('ready', () => {
console.log('Ready!');
client.user.setActivity(`YOU`, {
type: "WATCHING",
});
});
client.on('messageCreate', message => {
// === All the autoresponders ===
if (message.content === "<@850730172630302720>" || message.content === "<@!850730172630302720>") {
message.reply(`Hi ${message.author}, My Prefix is \`${prefix}\`!`)
}
if (message.content.toLowerCase() === 'hi') {
return message.reply('Hi :)'); | if (message.content.toLowerCase() === 'xbox') {
return message.reply('Is Sus');
}
if (message.content.toLowerCase() === 'phats') {
return message.reply('https://media.discordapp.net/attachments/832603438285062164/869838594859237418/render_2021-07-21_15.32.09.gif');
}
if (message.content.toLowerCase() === 'how to join') {
return message.reply('Please read <#789012857798000690>');
}
if (message.content.toLowerCase() === 'how do i join') {
return message.reply('Please read <#789012857798000690>');
}
if (!message.content.startsWith(prefix)) return;
// === Message and command handler ====
const args = message.content.slice(prefix.length).trim().split(/ +/g);
const commandName = args.shift().toLowerCase();
const command = client.commands.get(commandName) ||
client.commands.find(cmd => cmd.aliases && cmd.aliases.includes(commandName));
if (!command) return;
try {
return command.execute(message, args, client);
} catch (error) {
console.error(error);
		const ErrorEmbed = new MessageEmbed()
.setColor("#ffb7c5")
.setAuthor(`${message.author.tag}`, `${message.author.displayAvatarURL({ dynamic: true })}`)
.addFields({ name: "ERR!", value: "Oops! I can't execute this command!" }, );
return message.reply({ embeds: [ErrorEmbed] });
}
});
client.login(token) | } |
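// Shape of a command module the loader above expects (illustrative example;
// the handler reads command.name, command.aliases and calls command.execute):
// module.exports = {
//   name: 'ping',
//   aliases: ['p'],
//   execute(message, args, client) {
//     return message.reply('Pong!');
//   }
// };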
index.js | const unbox = require("./unbox");
const fs = require("fs");
const config = require("../config");
const tmp = require("tmp");
const cwd = require("process").cwd();
const path = require("path");
const ora = require("ora");
module.exports = {
downloadBox: async (url, destination) => {
const downloadSpinner = ora("Downloading").start();
try {
await unbox.verifyURL(url);
await unbox.fetchRepository(url, destination);
downloadSpinner.succeed();
} catch (error) {
downloadSpinner.fail();
throw new Error(error);
}
},
readBoxConfig: async destination => {
const possibleConfigs = [
path.join(destination, "susyknot-box.json"),
path.join(destination, "susyknot-init.json")
];
const configPath = possibleConfigs.reduce((path, alt) => {
return path || (fs.existsSync(alt) && alt);
}, undefined);
try {
const boxConfig = await config.read(configPath);
return boxConfig;
} catch (error) {
throw new Error(error);
}
},
setUpTempDirectory: () => {
const prepareSpinner = ora("Preparing to download").start();
return new Promise((resolve, reject) => {
const options = {
dir: cwd,
unsafeCleanup: true
};
tmp.dir(options, (error, dir, cleanupCallback) => {
if (error) {
prepareSpinner.fail();
return reject(error);
}
prepareSpinner.succeed();
resolve({
path: path.join(dir, "box"),
cleanupCallback
});
});
});
},
unpackBox: async (tempDir, destination, boxConfig, unpackBoxOptions) => { | await unbox.prepareToCopyFiles(tempDir, boxConfig);
await unbox.copyTempIntoDestination(tempDir, destination, unpackBoxOptions);
},
setUpBox: async (boxConfig, destination) => {
const setUpSpinner = ora("Setting up box").start();
try {
await unbox.installBoxDependencies(boxConfig, destination);
setUpSpinner.succeed();
} catch (error) {
setUpSpinner.fail();
throw new Error(error);
}
}
}; | |
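// Typical flow composing these helpers (illustrative):
// const box = require('./index');
// const { path: tempDirPath, cleanupCallback } = await box.setUpTempDirectory();
// await box.downloadBox(url, tempDirPath);
// const boxConfig = await box.readBoxConfig(tempDirPath);
// await box.unpackBox(tempDirPath, destination, boxConfig, unpackBoxOptions);
// cleanupCallback();
// await box.setUpBox(boxConfig, destination);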
cli.go | // Copyright 2015 Osipov Konstantin <[email protected]>. All rights reserved.
// Use of this source code is governed by a license that can be found in the LICENSE file.
// This file is part of the application source code leveldb-cli
// This software provides a console interface to leveldb.
// ATTENTION! This version of the software is an experimental!
// There is no guarantee that the application will work correctly
// This code will be refactored, so do not rely on its structure
package leveldbcli
import (
"fmt"
"os"
"path"
"runtime"
"strconv"
"bitbucket.org/creachadair/shell"
"github.com/chzyer/readline"
"github.com/cometwk/leveldb-cli/commands"
)
// Software version number
const VERSION = "0.3.0"
var completer = readline.NewPrefixCompleter(
readline.PcItem("show",
readline.PcItem("prefix"),
readline.PcItem("range"),
),
readline.PcItem("exit"),
readline.PcItem("quit"),
readline.PcItem("help"),
readline.PcItem("get"),
readline.PcItem("export"),
readline.PcItem("put"),
readline.PcItem("set"),
readline.PcItem("delete"),
readline.PcItem("version"),
)
// Main function
func Shell() {
l, err := readline.NewEx(&readline.Config{
Prompt: "\033[31m»\033[0m ",
HistoryFile: "/tmp/leveldb-cli.tmp",
AutoComplete: completer,
})
if err != nil {
panic(err)
}
defer l.Close()
fmt.Println("*********************************************************")
fmt.Println("** LevelDB CLI **")
fmt.Println("*********************************************************")
fmt.Println("")
fmt.Println("Run command 'help' for help.")
for {
line, err := l.Readline()
if err != nil {
break
}
args, _ := shell.Split(line)
if args == nil {
continue
}
switch {
// Command: version
case line == "version":
fmt.Printf("Version %s. %s %s %s\n", VERSION, runtime.Compiler, runtime.GOARCH, runtime.GOOS)
break
// Command: help
case line == "help":
fmt.Println("Enter one of the commands to get help: show, set, get, export, open, close, put, delete, version")
break
// Command: quit and exit
case line == "quit":
case line == "exit":
goto exit
// Command: show
case args[0] == "show":
if len(args) == 1 {
fmt.Println("Bad format. Please use 'show prefix|range'")
break
}
switch args[1] {
// Sub-command: range
case "range":
if len(args) < 4 || len(args) > 5 {
fmt.Println("Bad format. Please use 'show range START LIMIT [FORMAT]'")
break
}
format := ""
if len(args) == 5 {
format = args[4]
}
fmt.Println(commands.ShowByRange(args[2], args[3], format))
break
// Sub-command: prefix
case "prefix":
if len(args) < 3 || len(args) > 4 {
fmt.Println("Bad format. Please use 'show prefix PREFIX [FORMAT]'")
break
}
format := ""
if len(args) == 4 {
format = args[3]
}
fmt.Println(commands.ShowByPrefix(args[2], format))
break
}
break
// Command: put and set
case args[0] == "put":
case args[0] == "set":
if len(args) != 3 {
fmt.Printf("Bad format. Please use '%s KEY VALUE'\n", args[0])
break
}
fmt.Println(commands.Set(args[1], args[2]))
break
// Command: get
case args[0] == "get":
if len(args) < 2 || len(args) > 3 {
fmt.Println("Bad format. Please use 'get KEY FORMAT'")
break
}
format := ""
if len(args) == 3 {
format = args[2]
}
fmt.Println(commands.Get(args[1], format))
break
//Command export
case args[0] == "export":
if len(args) != 3 {
fmt.Println("Bad format. Please use 'export KEY FILENAME'")
break
}
fmt.Println(commands.Export(args[1], args[2]))
break
// Command: delete | if len(args) != 2 {
fmt.Print("Bad format. Please use 'delete KEY'")
break
}
fmt.Println(commands.Delete(args[1]))
break
// Command: close
case args[0] == "close":
if len(args) != 1 {
fmt.Print("Bad format. Please use 'close'")
break
}
l.SetPrompt("\033[31m»\033[0m ")
fmt.Println(commands.Close())
break
// Command: open
case args[0] == "open":
			if len(args) != 2 {
				fmt.Println("Bad format. Please use 'open DATABASE_NAME'")
				break
			}
			// Check the argument only after validating its presence.
			if _, err := os.Stat(args[1]); err != nil {
				fmt.Println("Database does not exist! Creating a new database.")
			}
l.SetPrompt(fmt.Sprintf("\033[31m%s»\033[0m ", path.Base(args[1])))
fmt.Println(commands.Open(args[1]))
break
default:
fmt.Println("Unknown command: ", strconv.Quote(line))
}
}
exit:
	return
} | case args[0] == "delete": |
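// Example session (illustrative):
//
//	» open /tmp/mydb
//	mydb» put key1 value1
//	mydb» get key1
//	mydb» show prefix key
//	mydb» delete key1
//	mydb» close
//	» exit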
test_exceptions.py | # Python test set -- part 5, built-in exceptions
import os
import sys
import unittest
import pickle, cPickle
import warnings
from test.test_support import TESTFN, unlink, run_unittest, captured_output
from test.test_pep352 import ignore_message_warning
# XXX This is not really enough, each *operation* should be tested!
class ExceptionTests(unittest.TestCase):
|
def test_main():
run_unittest(ExceptionTests)
if __name__ == '__main__':
test_main()
| def testReload(self):
# Reloading the built-in exceptions module failed prior to Py2.2, while it
# should act the same as reloading built-in sys.
try:
import exceptions
reload(exceptions)
except ImportError, e:
self.fail("reloading exceptions: %s" % e)
def raise_catch(self, exc, excname):
try:
raise exc, "spam"
except exc, err:
buf1 = str(err)
try:
raise exc("spam")
except exc, err:
buf2 = str(err)
self.assertEquals(buf1, buf2)
self.assertEquals(exc.__name__, excname)
def testRaising(self):
self.raise_catch(AttributeError, "AttributeError")
self.assertRaises(AttributeError, getattr, sys, "undefined_attribute")
self.raise_catch(EOFError, "EOFError")
fp = open(TESTFN, 'w')
fp.close()
fp = open(TESTFN, 'r')
savestdin = sys.stdin
try:
try:
sys.stdin = fp
x = raw_input()
except EOFError:
pass
finally:
sys.stdin = savestdin
fp.close()
unlink(TESTFN)
self.raise_catch(IOError, "IOError")
self.assertRaises(IOError, open, 'this file does not exist', 'r')
self.raise_catch(ImportError, "ImportError")
self.assertRaises(ImportError, __import__, "undefined_module")
self.raise_catch(IndexError, "IndexError")
x = []
self.assertRaises(IndexError, x.__getitem__, 10)
self.raise_catch(KeyError, "KeyError")
x = {}
self.assertRaises(KeyError, x.__getitem__, 'key')
self.raise_catch(KeyboardInterrupt, "KeyboardInterrupt")
self.raise_catch(MemoryError, "MemoryError")
self.raise_catch(NameError, "NameError")
try: x = undefined_variable
except NameError: pass
self.raise_catch(OverflowError, "OverflowError")
x = 1
for dummy in range(128):
x += x # this simply shouldn't blow up
self.raise_catch(RuntimeError, "RuntimeError")
self.raise_catch(SyntaxError, "SyntaxError")
try: exec '/\n'
except SyntaxError: pass
self.raise_catch(IndentationError, "IndentationError")
self.raise_catch(TabError, "TabError")
# can only be tested under -tt, and is the only test for -tt
#try: compile("try:\n\t1/0\n \t1/0\nfinally:\n pass\n", '<string>', 'exec')
#except TabError: pass
#else: self.fail("TabError not raised")
self.raise_catch(SystemError, "SystemError")
self.raise_catch(SystemExit, "SystemExit")
self.assertRaises(SystemExit, sys.exit, 0)
self.raise_catch(TypeError, "TypeError")
try: [] + ()
except TypeError: pass
self.raise_catch(ValueError, "ValueError")
self.assertRaises(ValueError, chr, 10000)
self.raise_catch(ZeroDivisionError, "ZeroDivisionError")
try: x = 1/0
except ZeroDivisionError: pass
self.raise_catch(Exception, "Exception")
try: x = 1/0
except Exception, e: pass
def testSyntaxErrorMessage(self):
# make sure the right exception message is raised for each of
# these code fragments
def ckmsg(src, msg):
try:
compile(src, '<fragment>', 'exec')
except SyntaxError, e:
if e.msg != msg:
self.fail("expected %s, got %s" % (msg, e.msg))
else:
self.fail("failed to get expected SyntaxError")
s = '''while 1:
try:
pass
finally:
continue'''
if not sys.platform.startswith('java'):
ckmsg(s, "'continue' not supported inside 'finally' clause")
s = '''if 1:
try:
continue
except:
pass'''
ckmsg(s, "'continue' not properly in loop")
ckmsg("continue\n", "'continue' not properly in loop")
def testSettingException(self):
# test that setting an exception at the C level works even if the
# exception object can't be constructed.
class BadException:
def __init__(self_):
raise RuntimeError, "can't instantiate BadException"
def test_capi1():
import _testcapi
try:
_testcapi.raise_exception(BadException, 1)
except TypeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
self.assertEquals(co.co_name, "test_capi1")
self.assert_(co.co_filename.endswith('test_exceptions'+os.extsep+'py'))
else:
self.fail("Expected exception")
def test_capi2():
import _testcapi
try:
_testcapi.raise_exception(BadException, 0)
except RuntimeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
self.assertEquals(co.co_name, "__init__")
self.assert_(co.co_filename.endswith('test_exceptions'+os.extsep+'py'))
co2 = tb.tb_frame.f_back.f_code
self.assertEquals(co2.co_name, "test_capi2")
else:
self.fail("Expected exception")
if not sys.platform.startswith('java'):
test_capi1()
test_capi2()
def test_WindowsError(self):
try:
WindowsError
except NameError:
pass
else:
self.failUnlessEqual(str(WindowsError(1001)),
"1001")
self.failUnlessEqual(str(WindowsError(1001, "message")),
"[Error 1001] message")
self.failUnlessEqual(WindowsError(1001, "message").errno, 22)
self.failUnlessEqual(WindowsError(1001, "message").winerror, 1001)
def testAttributes(self):
# test that exception attributes are happy
exceptionList = [
(BaseException, (), {'message' : '', 'args' : ()}),
(BaseException, (1, ), {'message' : 1, 'args' : (1,)}),
(BaseException, ('foo',),
{'message' : 'foo', 'args' : ('foo',)}),
(BaseException, ('foo', 1),
{'message' : '', 'args' : ('foo', 1)}),
(SystemExit, ('foo',),
{'message' : 'foo', 'args' : ('foo',), 'code' : 'foo'}),
(IOError, ('foo',),
{'message' : 'foo', 'args' : ('foo',), 'filename' : None,
'errno' : None, 'strerror' : None}),
(IOError, ('foo', 'bar'),
{'message' : '', 'args' : ('foo', 'bar'), 'filename' : None,
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz'),
{'message' : '', 'args' : ('foo', 'bar'), 'filename' : 'baz',
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz', 'quux'),
{'message' : '', 'args' : ('foo', 'bar', 'baz', 'quux')}),
(EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : ('errnoStr', 'strErrorStr'),
'strerror' : 'strErrorStr', 'errno' : 'errnoStr',
'filename' : 'filenameStr'}),
(EnvironmentError, (1, 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : (1, 'strErrorStr'), 'errno' : 1,
'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}),
(SyntaxError, (), {'message' : '', 'msg' : None, 'text' : None,
'filename' : None, 'lineno' : None, 'offset' : None,
'print_file_and_line' : None}),
(SyntaxError, ('msgStr',),
{'message' : 'msgStr', 'args' : ('msgStr',), 'text' : None,
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : None, 'lineno' : None, 'offset' : None}),
(SyntaxError, ('msgStr', ('filenameStr', 'linenoStr', 'offsetStr',
'textStr')),
{'message' : '', 'offset' : 'offsetStr', 'text' : 'textStr',
'args' : ('msgStr', ('filenameStr', 'linenoStr',
'offsetStr', 'textStr')),
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : 'filenameStr', 'lineno' : 'linenoStr'}),
(SyntaxError, ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr',
'textStr', 'print_file_and_lineStr'),
{'message' : '', 'text' : None,
'args' : ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr',
'textStr', 'print_file_and_lineStr'),
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : None, 'lineno' : None, 'offset' : None}),
(UnicodeError, (), {'message' : '', 'args' : (),}),
(UnicodeEncodeError, ('ascii', u'a', 0, 1, 'ordinal not in range'),
{'message' : '', 'args' : ('ascii', u'a', 0, 1,
'ordinal not in range'),
'encoding' : 'ascii', 'object' : u'a',
'start' : 0, 'reason' : 'ordinal not in range'}),
(UnicodeDecodeError, ('ascii', '\xff', 0, 1, 'ordinal not in range'),
{'message' : '', 'args' : ('ascii', '\xff', 0, 1,
'ordinal not in range'),
'encoding' : 'ascii', 'object' : '\xff',
'start' : 0, 'reason' : 'ordinal not in range'}),
(UnicodeTranslateError, (u"\u3042", 0, 1, "ouch"),
{'message' : '', 'args' : (u'\u3042', 0, 1, 'ouch'),
'object' : u'\u3042', 'reason' : 'ouch',
'start' : 0, 'end' : 1}),
]
try:
exceptionList.append(
(WindowsError, (1, 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : (1, 'strErrorStr'),
'strerror' : 'strErrorStr', 'winerror' : 1,
'errno' : 22, 'filename' : 'filenameStr'})
)
except NameError:
pass
with warnings.catch_warnings():
ignore_message_warning()
for exc, args, expected in exceptionList:
try:
raise exc(*args)
except BaseException, e:
if type(e) is not exc:
raise
# Verify module name
self.assertEquals(type(e).__module__, 'exceptions')
# Verify no ref leaks in Exc_str()
s = str(e)
for checkArgName in expected:
self.assertEquals(repr(getattr(e, checkArgName)),
repr(expected[checkArgName]),
'exception "%s", attribute "%s"' %
(repr(e), checkArgName))
# test for pickling support
for p in pickle, cPickle:
for protocol in range(p.HIGHEST_PROTOCOL + 1):
new = p.loads(p.dumps(e, protocol))
for checkArgName in expected:
got = repr(getattr(new, checkArgName))
want = repr(expected[checkArgName])
self.assertEquals(got, want,
'pickled "%r", attribute "%s"' %
(e, checkArgName))
def testSlicing(self):
# Test that you can slice an exception directly instead of requiring
# going through the 'args' attribute.
args = (1, 2, 3)
exc = BaseException(*args)
self.failUnlessEqual(exc[:], args)
def testKeywordArgs(self):
# test that builtin exception don't take keyword args,
# but user-defined subclasses can if they want
self.assertRaises(TypeError, BaseException, a=1)
class DerivedException(BaseException):
def __init__(self, fancy_arg):
BaseException.__init__(self)
self.fancy_arg = fancy_arg
x = DerivedException(fancy_arg=42)
self.assertEquals(x.fancy_arg, 42)
def testInfiniteRecursion(self):
def f():
return f()
self.assertRaises(RuntimeError, f)
def g():
try:
return g()
except ValueError:
return -1
# The test prints an unraisable recursion error when
# doing "except ValueError", this is because subclass
# checking has recursion checking too.
with captured_output("stderr"):
try:
g()
except RuntimeError:
pass
except:
self.fail("Should have raised KeyError")
else:
self.fail("Should have raised KeyError")
def testUnicodeStrUsage(self):
# Make sure both instances and classes have a str and unicode
# representation.
self.failUnless(str(Exception))
self.failUnless(unicode(Exception))
self.failUnless(str(Exception('a')))
self.failUnless(unicode(Exception(u'a')))
self.failUnless(unicode(Exception(u'\xe1')))
def test_badisinstance(self):
# Bug #2542: if issubclass(e, MyException) raises an exception,
# it should be ignored
class Meta(type):
def __subclasscheck__(cls, subclass):
raise ValueError()
class MyException(Exception):
__metaclass__ = Meta
pass
with captured_output("stderr") as stderr:
try:
raise KeyError()
except MyException, e:
self.fail("exception should not be a MyException")
except KeyError:
pass
except:
self.fail("Should have raised KeyError")
else:
self.fail("Should have raised KeyError")
with captured_output("stderr") as stderr:
def g():
try:
return g()
except RuntimeError:
return sys.exc_info()
e, v, tb = g()
self.assert_(e is RuntimeError, e)
self.assert_("maximum recursion depth exceeded" in str(v), v) |
handler.py | import json
import boto3
autoscaling = boto3.client('autoscaling')
processes_to_suspend = ["AZRebalance", "AlarmNotification", "ScheduledActions", "ReplaceUnhealthy"]
def update_autoscaling_group(autoscaling_group, asg_min_size):
print("Trying to reset %s to minimal size of %i instances" % (autoscaling_group, asg_min_size))
client = boto3.client('autoscaling')
response = client.update_auto_scaling_group(
AutoScalingGroupName=autoscaling_group,
MinSize=asg_min_size
)
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
print("DEBUG: Updating Autoscaling Group minimal size successfull")
return True
else:
print("ERROR: Unable to reset minimal size of '" + autoscaling_group_name + "'")
return False
def get_asg_min_size(asg):
client = boto3.client('autoscaling')
response = client.describe_auto_scaling_groups(
AutoScalingGroupNames=
[ asg ]
)
print(response)
    for i in response['AutoScalingGroups'][0]['Tags']:
        print(i)
        if "ASGMinSize" in i['Key']:
            return int(i['Value'])
    # Only report failure after every tag has been checked.
    return False
def get_autoscaling_group(deployment_group):
asg_list = []
client = boto3.client('autoscaling')
filter = list(
[
{ 'Name': "key", 'Values': [ 'AutomatedASGScript '] },
{ 'Name': "value", 'Values': [ 'true' ] },
{ 'Name': "key", 'Values': [ 'DeploymentGroup' ] },
{ 'Name': "value", 'Values': [ deployment_group ] }
]
    )
response = client.describe_tags(Filters=filter)
print(response)
if not response['Tags']:
print('Found no Autoscaling Group for Deployment Group %s - exiting' % deployment_group)
        exit(1)
else:
print('Found Autoscaling Groups for Deployment Group %s' % deployment_group)
for i in response['Tags']:
asg_list.append(i['ResourceId'])
return(asg_list)
def suspend_processes( autoscaling_group_name, processes_to_suspend ):
response = autoscaling.suspend_processes(
AutoScalingGroupName=autoscaling_group_name,
ScalingProcesses=processes_to_suspend
)
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
print("DEBUG: Autoscaling Processes suspended")
return True
else:
print("ERROR: Unable to suspend_processes on '" + autoscaling_group_name + "'")
return False
def resume_processes( autoscaling_group_name, processes_to_suspend ):
response = autoscaling.resume_processes(
AutoScalingGroupName=autoscaling_group_name,
ScalingProcesses=processes_to_suspend
)
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
print("DEBUG: Autoscaling Processes resumed")
return True
else:
print("ERROR: Unable to resume_processes on '" + autoscaling_group_name + "'")
return False
def autoscale(event, context):
message_json = json.loads(event['Records'][0]['Sns']['Message'])
print(message_json)
deployment_group = message_json['deploymentGroupName']
autoscaling_group_name = get_autoscaling_group(deployment_group)
for i in autoscaling_group_name:
print(i)
asg_min_size = get_asg_min_size(i)
if not asg_min_size:
print("Found no ASGMinSize Tag for %s" % i)
else:
|
topic_arn = event['Records'][0]['Sns']['TopicArn']
print('Got Message from %s' % topic_arn)
if "suspendAutoscaling" in topic_arn:
item = suspend_processes(i, processes_to_suspend)
body = {
"message": "Suspending Autoscaling Processes",
"successful": item
}
response = {
"statusCode": 200,
"body": json.dumps(body)
}
elif "resumeAutoscaling" in topic_arn:
if asg_min_size:
update_autoscaling_group(i, asg_min_size)
item = resume_processes(i, processes_to_suspend)
body = {
"message": "Resuming Autoscaling Processes",
"succesful": item
}
response = {
"statusCode": 200,
"body": json.dumps(body)
}
else:
            print('Received Message from unknown SNS Topic %s - Exiting ' % topic_arn)
return False
print("DEBUG:", response)
return response
| print("Found ASG %s with min. size of %s instances" % (i, asg_min_size)) |
config.go | // Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Package eap implements an abstract base for EAP security classes which need certificate/key installation.
package eap
import (
"context"
"fmt"
"io/ioutil"
"math/rand"
"os"
"path/filepath"
"chromiumos/tast/common/crypto/certificate"
"chromiumos/tast/common/pkcs11/netcertstore"
"chromiumos/tast/common/shillconst"
"chromiumos/tast/common/wifi/security"
"chromiumos/tast/errors"
"chromiumos/tast/ssh"
"chromiumos/tast/ssh/linuxssh"
)
// Config implements security.Config interface for EAP protected network.
type Config struct {
// fileSuffix is the file name suffix of the cert/key files which are installed onto the router.
fileSuffix string
// identity is the client identity used by shill for setting up service of type "802.1x".
identity string
serverCACert string
serverCred certificate.Credential
serverEAPUsers string
clientCACert string
clientCred certificate.Credential
// Fields that would be set in InstallRouterCredentials().
ServerCACertFile string
ServerCertFile string
ServerKeyFile string
ServerEAPUsersFile string
// Fields that would be set in InstallClientCredentials().
ClientSlotID int
Pin string
NetCertID string
}
// Class returns the security class of EAP network.
func (c *Config) Class() string {
return shillconst.Security8021x
}
// HostapdConfig returns hostapd config of EAP network.
func (c *Config) HostapdConfig() (map[string]string, error) {
if c.ServerCACertFile == "" || c.ServerCertFile == "" || c.ServerKeyFile == "" || c.ServerEAPUsersFile == "" {
return nil, errors.New("router credentials are not installed")
}
return map[string]string{
"ieee8021x": "1",
"eap_server": "1", // Do EAP inside hostapd to avoid RADIUS.
"ca_cert": c.ServerCACertFile,
"server_cert": c.ServerCertFile,
"private_key": c.ServerKeyFile,
"eap_user_file": c.ServerEAPUsersFile,
}, nil
}
// ShillServiceProperties returns shill properties of EAP network.
func (c *Config) ShillServiceProperties() (map[string]interface{}, error) {
// For c.ClientSlotID, 0 is a system slot but not a user slot,
// which means that InstallClientCredentials has not been called.
if c.NeedsNetCertStore() && (c.Pin == "" || c.ClientSlotID == 0) {
return nil, errors.New("client credentials are not installed")
}
ret := map[string]interface{}{shillconst.ServicePropertyEAPIdentity: c.identity}
if c.Pin != "" {
ret[shillconst.ServicePropertyEAPPin] = c.Pin
}
if c.clientCACert != "" {
// Technically, we could accept a list of certificates here, but we have no such tests.
ret[shillconst.ServicePropertyEAPCACertPEM] = []string{c.clientCACert}
}
if c.clientCred.Cert != "" {
ret[shillconst.ServicePropertyEAPCertID] = fmt.Sprintf("%d:%s", c.ClientSlotID, c.NetCertID)
}
if c.clientCred.PrivateKey != "" {
ret[shillconst.ServicePropertyEAPKeyID] = fmt.Sprintf("%d:%s", c.ClientSlotID, c.NetCertID)
}
return ret, nil
}
// InstallRouterCredentials installs the necessary credentials onto router.
func (c *Config) InstallRouterCredentials(ctx context.Context, host *ssh.Conn, workDir string) error {
pathMap := make(map[string]string)
c.ServerCACertFile = filepath.Join(workDir, "hostapd_ca_cert_file."+c.fileSuffix)
c.ServerCertFile = filepath.Join(workDir, "hostapd_cert_file."+c.fileSuffix)
c.ServerKeyFile = filepath.Join(workDir, "hostapd_key_file."+c.fileSuffix)
c.ServerEAPUsersFile = filepath.Join(workDir, "hostapd_eap_user_cert_file."+c.fileSuffix)
for _, f := range []struct {
content string
path string
}{
{c.serverCACert, c.ServerCACertFile},
{c.serverCred.Cert, c.ServerCertFile},
{c.serverCred.PrivateKey, c.ServerKeyFile},
{c.serverEAPUsers, c.ServerEAPUsersFile},
} {
tmpfile, err := ioutil.TempFile("", "upload_tmp_")
if err != nil {
return errors.Wrap(err, "unable to create temp file")
}
defer os.Remove(tmpfile.Name())
_, err = tmpfile.Write([]byte(f.content))
tmpfile.Close()
if err != nil {
return errors.Wrap(err, "unable to write to temp file")
}
pathMap[tmpfile.Name()] = f.path
}
if _, err := linuxssh.PutFiles(ctx, host, pathMap, linuxssh.DereferenceSymlinks); err != nil {
return errors.Wrap(err, "unable to upload file to host")
}
return nil
}
// NeedsNetCertStore tells that netcert store is necessary for this test.
func (c *Config) NeedsNetCertStore() bool {
return c.hasClientCred()
}
// InstallClientCredentials installs credentials on the DUT.
func (c *Config) InstallClientCredentials(ctx context.Context, store *netcertstore.Store) error {
if !c.hasClientCred() {
return nil
}
c.Pin = store.Pin()
c.ClientSlotID = store.Slot()
netCertID, err := store.InstallCertKeyPair(ctx, c.clientCred.PrivateKey, c.clientCred.Cert)
if err != nil {
return err
}
c.NetCertID = netCertID
return nil
}
func (c *Config) hasClientCred() bool {
return c.clientCred.Cert != "" && c.clientCred.PrivateKey != ""
}
// validate validates the Config.
func (c *Config) validate() error {
if c.identity == "" {
return errors.New("no EAP identity is set")
}
if c.serverCACert == "" {
return errors.New("no CA certificate is set on server")
}
if c.serverCred.Cert == "" {
return errors.New("no certificate is set on server")
}
if c.serverCred.PrivateKey == "" {
return errors.New("no private key is set on server")
}
if c.serverEAPUsers == "" {
return errors.New("no EAP users is set on server")
}
if (c.clientCred.Cert != "") != (c.clientCred.PrivateKey != "") {
return errors.New("client Cret and PrivateKey should be either both set or both not set")
}
return nil
}
// ConfigFactory holds some Option and provides Gen method to build a new Config.
type ConfigFactory struct {
serverCACert string
serverCred certificate.Credential
ops []Option
}
// NewConfigFactory builds a ConfigFactory with the given Option.
func NewConfigFactory(serverCACert string, serverCred certificate.Credential, ops ...Option) *ConfigFactory {
return &ConfigFactory{
serverCACert: serverCACert,
serverCred: serverCred,
ops: ops,
}
}
// Gen builds a Config with the given Option stored in ConfigFactory.
func (f *ConfigFactory) Gen() (security.Config, error) {
// Default config.
conf := &Config{
identity: "chromeos",
serverCACert: f.serverCACert,
serverCred: f.serverCred,
serverEAPUsers: "* TLS",
}
for _, op := range f.ops {
op(conf)
}
if conf.fileSuffix == "" {
conf.fileSuffix = randomSuffix()
}
if err := conf.validate(); err != nil {
return nil, err
}
return conf, nil
}
// randomSuffix returns a random suffix of length 10.
func | () string {
const letters = "abcdefghijklmnopqrstuvwxyz0123456789"
ret := make([]byte, 10)
for i := range ret {
ret[i] = letters[rand.Intn(len(letters))]
}
return string(ret)
}
// Static check: ConfigFactory implements security.ConfigFactory interface.
var _ security.ConfigFactory = (*ConfigFactory)(nil)
| randomSuffix |
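// Usage sketch (illustrative; the PEM variables are placeholders and no
// extra Options are passed):
//
//	cred := certificate.Credential{Cert: serverCertPEM, PrivateKey: serverKeyPEM}
//	factory := NewConfigFactory(caCertPEM, cred)
//	conf, err := factory.Gen()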
whatsapp_alex.py | #ontwikkeld door S1118551
#We sturen in deze file een whatsapp bericht naar gebruiker wanner die wakker is geworden met de taken die hem te wachten staan op deze dag.
#mocht de gebruiker geen taak hebben op die dag, dan krijgt die een melding dat die goed bezig was de afgelopen dag
#Dit is een koppeling met subsysteem van Tycho!
import os
from twilio.rest import Client
import mysql.connector
mydb = mysql.connector.connect(
host="vserver385.axc.nl",
user="tychogp385_ipmedt5",
passwd="ipmedt5",
database="tychogp385_ipmedt5"
)
# fetch tasks
taken = []
mycursor = mydb.cursor()
sql_select_Query = "select title, omschrijving from taken where uitvoerdatum = curdate() and status = 'niet voltooid'" # send the user a message with the tasks to perform today
mycursor.execute(sql_select_Query)
records = mycursor.fetchall()
for row in records:
taken.append(row[1])
takenOpen = ',\n'.join(taken) # turn the list into a string of the day's task descriptions
# fetch user data
gegevens = []
sql_select_Query1 = "select name, telefoonnummer from users where userId = 1" # fetch user details
mycursor.execute(sql_select_Query1)
records = mycursor.fetchall()
for user in records:
gegevens.append(user[0])
gegevens.append(user[1])
naam = gegevens[0]
nummer = '+31' + str(gegevens[1])
print(taken)
print('naam is ' + naam)
print('nummer is ' + nummer)
account_sid = '' #We must keep this private on GitHub because of security issues
auth_token = '' #We must keep this private on GitHub because of security issues
client = Client(account_sid, auth_token)
##WARNING
#the user must first send the WhatsApp message 'join above-noon' to +14155238886 before they can receive messages, because we are using a sandbox:
##
from_whatsapp_number='whatsapp:+14155238886'
to_whatsapp_number = 'whatsapp:' + nummer
if len(taken) > 0: #if the user has tasks scheduled for that day
|
else: #otherwise we send the user a nice message saying they have been doing well over the past days
client.messages.create(body='Beste ' + naam + ',\nvandaag heb je geen taak/taken open staan. Je bent goed bezig geweest de afgelopen dagen, ga zo door!', from_=from_whatsapp_number,to=to_whatsapp_number)
| client.messages.create(body='Beste ' + naam + ',\nvandaag heb je op planning de volgende taak/taken staan:\n' + takenOpen + '.\nVeel succes en zet hem op!', from_=from_whatsapp_number,to=to_whatsapp_number) |
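# Hedged sketch: the same user lookup with a parameterized query instead of a
# hardcoded literal, using mysql.connector's %s placeholder style (user_id is
# illustrative).
user_id = 1
mycursor.execute("select name, telefoonnummer from users where userId = %s", (user_id,))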
testFixtures.test.ts | // * this file is to make sure our test fixtures return expected data, so we don't jeopardize other test suites that depend on these fixtures
import {
platformCoordsXYTestData,
platformCoordsBasisTestData,
platformCoordsTestData,
homeHeightTestData,
b_leg2xTestData,
b_leg3xTestData,
b_leg23yTestData,
l2aTestData,
l3aTestData,
baseCoordsTestData,
platformCoordsHomeTestData,
baseAnglesTestData,
platformAnglesTestData,
newAxesTestData,
previousInputTestData,
} from './testFixtures';
import roundTo from 'round-to';
describe('#testFixtures', () => {
describe('platformAngles', () => {
test('should return expected tensor', () => {
expect(platformAnglesTestData).toIncludeSameMembers([
0,
Math.PI / 3,
(2 * Math.PI) / 3,
Math.PI,
(4 * Math.PI) / 3,
(5 * Math.PI) / 3,
]);
});
});
describe('newAxes', () => {
test('should return expected tensor', () => {
expect(newAxesTestData).toEqual({
x: 0,
y: 0,
z: 10,
roll: 0,
pitch: 0,
yaw: 0,
});
}); | expect(previousInputTestData).toIncludeSameMembers([0, 0, 0, 0, 0, 0]);
});
});
describe('platformCoordsXY', () => {
test('should return expected tensor', () => {
expect(platformCoordsXYTestData).toIncludeSameMembers([
[75, 37.5, -37.5, -75, -37.5, 37.5],
[
0, 64.951905283833, 64.951905283833, 0, -64.951905283833,
-64.951905283833,
],
]);
});
});
describe('platformCoordsBasis', () => {
test('should return expected tensor', () => {
expect(platformCoordsBasisTestData).toIncludeSameMembers([
[75, 37.5, -37.5, -75, -37.5, 37.5],
[
0, 64.951905283833, 64.951905283833, 0, -64.951905283833,
-64.951905283833,
],
[0, 0, 0, 0, 0, 0],
]);
});
});
// todo: add more tests for the exported stuff here
describe('baseAngles', () => {
describe('intermediate values', () => {
test('b_leg2x', () => {
expect(b_leg2xTestData).toEqual(37.5);
});
test('b_leg3x', () => {
expect(b_leg3xTestData).toEqual(-37.5);
});
test('b_leg23y', () => {
expect(b_leg23yTestData).toEqual(119.2424001771182);
});
test('l2a', () => {
expect(l2aTestData).toEqual(1.266103672779499);
});
test('l3a', () => {
expect(l3aTestData).toEqual(1.8754889808102944);
});
});
test('should return expected tensor', () => {
expect(baseAnglesTestData).toIncludeSameMembers([
6.064279185596685, 1.266103672779499, 1.8754889808102944,
3.3604987751726942, 3.96988408320349, 5.4548938775658895,
]);
});
});
describe('baseCoords', () => {
test('should return expected tensor', () => {
//todo
expect(
baseCoordsTestData.map((arr) => arr.map((ele) => roundTo(ele, 2)))
).toIncludeSameMembers([
[122.02, 37.5, -37.5, -122.02, -84.52, 84.52],
[-27.15, 119.24, 119.24, -27.15, -92.1, -92.1],
]);
});
});
describe('homeHeight', () => {
test('should return expected tensor', () => {
expect(homeHeightTestData).toEqual(352.8608936297042);
});
});
describe('platformCoordsHome', () => {
test('should return expected tensor', () => {
expect(
platformCoordsHomeTestData.map((arr) =>
arr.map((ele) => roundTo(ele, 2))
)
).toIncludeSameMembers([
[75, 37.5, -37.5, -75, -37.5, 37.5],
[0, 64.95, 64.95, 0, -64.95, -64.95],
[352.86, 352.86, 352.86, 352.86, 352.86, 352.86],
]);
});
});
describe('platformCoords', () => {
test('should return expected tensor', () => {
expect(platformCoordsTestData).toIncludeSameMembers([
[75, 37.5, -37.5, -75, -37.5, 37.5],
[
0, 64.951905283833, 64.951905283833, 0, -64.951905283833,
-64.951905283833,
],
[
362.8608936297042, 362.8608936297042, 362.8608936297042,
362.8608936297042, 362.8608936297042, 362.8608936297042,
],
]);
});
});
}); | });
describe('previousInput', () => {
test('should return expected tensor', () => { |
lib.rs | //! The point of this crate is to be able to have enough different "kinds" of
//! documentation generated so we can test each different features.
#![doc(html_playground_url="https://play.rust-lang.org/")]
#![crate_name = "test_docs"]
#![feature(rustdoc_internals)]
#![feature(doc_cfg)]
use std::convert::AsRef;
use std::fmt;
/// Basic function with some code examples:
///
/// ```
/// println!("nothing fancy");
/// println!("but with two lines!");
/// ```
///
/// A failing to compile one:
///
/// ```compile_fail
/// println!("where did my argument {} go? :'(");
/// ```
///
/// An ignored one:
///
/// ```ignore (it's a test)
/// Let's say I'm just some text will ya?
/// ```
///
/// An inlined `code`!
pub fn foo() {}
/// Just a normal struct.
pub struct Foo;
impl Foo {
#[must_use]
pub fn must_use(&self) -> bool {
true
}
}
impl AsRef<str> for Foo {
fn as_ref(&self) -> &str {
"hello"
}
}
/// Just a normal enum.
///
/// # title!
#[doc(alias = "ThisIsAnAlias")]
pub enum WhoLetTheDogOut {
/// Woof!
Woof,
/// Meoooooooow...
Meow,
}
/// Who doesn't love to wrap a `format!` call?
pub fn some_more_function<T: fmt::Debug>(t: &T) -> String {
format!("{:?}", t)
}
/// Woohoo! A trait!
pub trait AnotherOne {
/// Some func 3.
fn func3();
/// Some func 1.
fn func1();
fn another();
fn why_not();
/// Some func 2.
fn func2();
fn hello();
}
/// ```compile_fail
/// whatever
/// ```
///
/// Check for "i" signs in lists!
///
/// 1. elem 1
/// 2. test 1
/// ```compile_fail
/// fn foo() {}
/// ```
/// 3. elem 3
/// 4. ```ignore (it's a test)
/// fn foo() {}
/// ```
/// 5. elem 5
///
/// Final one:
///
/// ```ignore (still a test)
/// let x = 12;
/// ```
pub fn check_list_code_block() {}
/// a thing with a label
#[deprecated(since = "1.0.0", note = "text why this deprecated")]
#[doc(cfg(unix))]
pub fn replaced_function() {}
/// Some doc with `code`!
pub enum AnEnum {
WithVariants { and: usize, sub: usize, variants: usize },
}
#[doc(keyword = "CookieMonster")]
/// Some keyword.
pub mod keyword {}
/// Just some type alias.
pub type SomeType = u32;
pub mod huge_amount_of_consts {
include!(concat!(env!("OUT_DIR"), "/huge_amount_of_consts.rs"));
}
/// Very long code text `hereIgoWithLongTextBecauseWhyNotAndWhyWouldntI`.
pub mod long_code_block {}
#[macro_export]
macro_rules! repro {
() => {};
}
pub use crate::repro as repro2;
/// # Top-doc Prose title
///
/// Text below title.
///
/// ## Top-doc Prose sub-heading
///
/// Text below sub-heading.
///
/// ### Top-doc Prose sub-sub-heading
///
/// Text below sub-sub-heading
pub struct HeavilyDocumentedStruct {
/// # Title for field
/// ## Sub-heading for field
pub nothing: (),
}
/// # Title for struct impl doc
///
/// Text below heading.
///
/// ## Sub-heading for struct impl doc
///
/// Text below sub-heading.
///
/// ### Sub-sub-heading for struct impl doc
///
/// Text below sub-sub-heading.
///
impl HeavilyDocumentedStruct {
/// # Title for struct impl-item doc
/// Text below title.
/// ## Sub-heading for struct impl-item doc
/// Text below sub-heading.
/// ### Sub-sub-heading for struct impl-item doc
/// Text below sub-sub-heading.
pub fn do_nothing() {}
}
/// # Top-doc Prose title
///
/// Text below title.
///
/// ## Top-doc Prose sub-heading
///
/// Text below sub-heading.
///
/// ### Top-doc Prose sub-sub-heading
///
/// Text below sub-sub-heading
pub enum HeavilyDocumentedEnum {
/// # None prose title
/// ## None prose sub-heading
None,
/// # Wrapped prose title
/// ## Wrapped prose sub-heading
Wrapped(
/// # Wrapped.0 prose title
/// ## Wrapped.0 prose sub-heading
String,
String,
),
Structy {
/// # Structy prose title
/// ## Structy prose sub-heading
alpha: String,
beta: String,
},
}
/// # Title for enum impl doc
///
/// Text below heading.
///
/// ## Sub-heading for enum impl doc
///
/// Text below sub-heading.
///
/// ### Sub-sub-heading for enum impl doc
///
/// Text below sub-sub-heading.
///
impl HeavilyDocumentedEnum {
/// # Title for enum impl-item doc
/// Text below title.
/// ## Sub-heading for enum impl-item doc
/// Text below sub-heading.
/// ### Sub-sub-heading for enum impl-item doc
/// Text below sub-sub-heading.
pub fn | () {}
}
/// # Top-doc prose title
///
/// Text below heading.
///
/// ## Top-doc prose sub-heading
///
/// Text below heading.
pub union HeavilyDocumentedUnion {
/// # Title for union variant
/// ## Sub-heading for union variant
pub nothing: (),
pub something: f32,
}
/// # Title for union impl doc
/// ## Sub-heading for union impl doc
impl HeavilyDocumentedUnion {
/// # Title for union impl-item doc
/// ## Sub-heading for union impl-item doc
pub fn do_nothing() {}
}
/// # Top-doc prose title
///
/// Text below heading.
///
/// ## Top-doc prose sub-heading
///
/// Text below heading.
#[macro_export]
macro_rules! heavily_documented_macro {
() => {};
}
| do_nothing |
fix.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from ... import opcodes as OperandDef
from ..utils import infer_dtype
from .core import TensorUnaryOp
from .utils import arithmetic_operand
@arithmetic_operand(sparse_mode="unary")
class TensorFix(TensorUnaryOp):
_op_type_ = OperandDef.FIX
_func_name = "fix"
@infer_dtype(np.fix)
def | (x, out=None, **kwargs):
"""
Round to nearest integer towards zero.
Round a tensor of floats element-wise to nearest integer towards zero.
The rounded values are returned as floats.
Parameters
----------
x : array_like
An tensor of floats to be rounded
out : Tensor, optional
Output tensor
Returns
-------
out : Tensor of floats
The tensor of rounded numbers
See Also
--------
trunc, floor, ceil
around : Round to given number of decimals
Examples
--------
>>> import mars.tensor as mt
>>> mt.fix(3.14).execute()
3.0
>>> mt.fix(3).execute()
3.0
>>> mt.fix([2.1, 2.9, -2.1, -2.9]).execute()
array([ 2., 2., -2., -2.])
"""
op = TensorFix(**kwargs)
return op(x, out=out)
| fix |
commandhandler.go | // Copyright (c) 2014 - The Event Horizon authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bus
import (
"context"
"errors"
"sync"
eh "github.com/looplab/eventhorizon"
)
var (
// ErrHandlerAlreadySet is when a handler is already registered for a command.
ErrHandlerAlreadySet = errors.New("handler is already set")
// ErrHandlerNotFound is when no handler can be found.
ErrHandlerNotFound = errors.New("no handlers for command")
)
// CommandHandler is a command handler that handles commands by routing to the
// registered CommandHandlers.
type CommandHandler struct {
handlers map[eh.CommandType]eh.CommandHandler
handlersMu sync.RWMutex
}
// NewCommandHandler creates a CommandHandler.
func NewCommandHandler() *CommandHandler {
return &CommandHandler{
handlers: make(map[eh.CommandType]eh.CommandHandler),
}
}
// HandleCommand handles a command with a handler capable of handling it.
func (h *CommandHandler) HandleCommand(ctx context.Context, cmd eh.Command) error {
select {
case <-ctx.Done():
return ctx.Err()
default:
}
if err := eh.CheckCommand(cmd); err != nil |
h.handlersMu.RLock()
defer h.handlersMu.RUnlock()
if handler, ok := h.handlers[cmd.CommandType()]; ok {
return handler.HandleCommand(ctx, cmd)
}
return ErrHandlerNotFound
}
// SetHandler adds a handler for a specific command.
func (h *CommandHandler) SetHandler(handler eh.CommandHandler, cmdType eh.CommandType) error {
h.handlersMu.Lock()
defer h.handlersMu.Unlock()
if _, ok := h.handlers[cmdType]; ok {
return ErrHandlerAlreadySet
}
h.handlers[cmdType] = handler
return nil
}
| {
return err
} |
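// Hedged usage sketch (greetCmd and greetHandler are hypothetical values
// implementing eh.Command and eh.CommandHandler; shown as comments because
// those interfaces are not defined in this file):
//
//	h := NewCommandHandler()
//	if err := h.SetHandler(greetHandler, greetCmd.CommandType()); err != nil {
//		// err == ErrHandlerAlreadySet if this type was registered before
//	}
//	err := h.HandleCommand(context.Background(), greetCmd)
//	// err == ErrHandlerNotFound when no handler matches greetCmd.CommandType()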
listcommand.d.ts | import { Command, Editor } from '@ckeditor/ckeditor5-core';
export default class | extends Command {
readonly type: 'numbered' | 'bulleted';
value: boolean;
constructor(editor: Editor, type: 'numbered' | 'bulleted');
refresh(): void;
execute(options?: { forceValue?: boolean | undefined }): void;
}
| ListCommand |
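// Hedged usage sketch (standard CKEditor 5 wiring; the command name
// 'bulletedList' is illustrative, not mandated by this declaration file):
// editor.commands.add('bulletedList', new ListCommand(editor, 'bulleted'));
// editor.execute('bulletedList', { forceValue: true });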
3d_scene.rs | use bevy::prelude::*;
use bevy_mod_picking::*;
fn main() {
App::build()
.add_resource(Msaa { samples: 4 })
.add_plugins(DefaultPlugins)
.add_plugin(PickingPlugin)
.add_plugin(DebugPickingPlugin)
.add_plugin(InteractablePickingPlugin)
.add_startup_system(setup)
.add_startup_system(set_highlight_params)
.run();
}
/// set up a simple 3D scene
fn setup(
commands: &mut Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// add entities to the world
// camera
commands
.spawn(Camera3dBundle {
transform: Transform::from_matrix(Mat4::face_toward(
Vec3::new(-3.0, 5.0, 8.0),
Vec3::new(0.0, 0.0, 0.0),
Vec3::new(0.0, 1.0, 0.0),
)),
..Default::default()
})
.with(PickSource::default())
//plane
.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Plane { size: 10.0 })),
material: materials.add(Color::rgb(1.0, 1.0, 1.0).into()),
..Default::default()
})
.with(PickableMesh::default())
.with(InteractableMesh::default())
.with(HighlightablePickMesh::default())
.with(SelectablePickMesh::default())
// cube
.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
material: materials.add(Color::rgb(1.0, 1.0, 1.0).into()),
transform: Transform::from_translation(Vec3::new(0.0, 1.0, 0.0)),
..Default::default()
})
.with(PickableMesh::default())
.with(InteractableMesh::default())
.with(HighlightablePickMesh::default())
.with(SelectablePickMesh::default())
// sphere
.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Icosphere {
subdivisions: 4,
radius: 0.5,
})),
material: materials.add(Color::rgb(1.0, 1.0, 1.0).into()),
transform: Transform::from_translation(Vec3::new(1.5, 1.5, 1.5)),
..Default::default()
})
.with(PickableMesh::default())
.with(InteractableMesh::default())
.with(HighlightablePickMesh::default())
.with(SelectablePickMesh::default())
// light
.spawn(LightBundle {
transform: Transform::from_translation(Vec3::new(4.0, 8.0, 4.0)),
..Default::default()
});
}
fn set_highlight_params(mut highlight_params: ResMut<PickHighlightParams>) | {
highlight_params.set_hover_color(Color::rgb(1.0, 0.0, 0.0));
highlight_params.set_selection_color(Color::rgb(1.0, 0.0, 1.0));
} |
|
server.go | /**
* Author: JeffreyBool
* Date: 2019/5/15
* Time: 01:42
* Software: GoLand
*/
package main
import (
"github.com/JeffreyBool/gozinx/src/znet/server"
"github.com/JeffreyBool/gozinx/src/znet/router"
"fmt" | /*
A server-side application built on top of the `GoZinx` framework.
**/
//ping test: custom router
type PingRouter struct {
router.BaseRouter
}
func (router *PingRouter) Handle(request ziface.IRequest) {
fmt.Println("Call Router Handle...")
fmt.Printf("recv from client MsgId: %d, MsgData: %s\n", request.GetMsgId(), request.GetData())
//first read the data sent by the client
err := request.GetConnection().SendMsg(request.GetMsgId(), []byte("call server router ping...\n"))
if err != nil {
fmt.Println(err)
}
}
//HelloRouter test: custom router
type HelloRouter struct {
router.BaseRouter
}
func (router *HelloRouter) Handle(request ziface.IRequest) {
fmt.Println("Call Router Handle...")
fmt.Printf("recv from client MsgId: %d, MsgData: %s\n", request.GetMsgId(), request.GetData())
//first read the data sent by the client
err := request.GetConnection().SendMsg(request.GetMsgId(), []byte("Hello Welcome To GoZinx\n"))
if err != nil {
fmt.Println(err)
}
}
func main() {
//create a new server instance
s := server.NewServer()
//register custom routers with the server
s.AddRouter(1, &PingRouter{})
s.AddRouter(2, &HelloRouter{})
//start the server
s.Serve()
} | "github.com/JeffreyBool/gozinx/src/ziface"
)
|
grass_cutter.py | from constants import *
from data import IMAGE_ID
from random import randint
from actor.skills.base_skill import BaseSkill
from util import dice
class | (BaseSkill):
def __init__(self, x=0, y=0, name="grass_cutter"):
super().__init__(
name=name,
image=IMAGE_ID[name],
x=x,
y=y,
)
#attributes passed to attack
self._damage = 5
self.hit_rate = 95
self.attr = "physical"
self.effect = None
self.owner = None
self._level = 1
self.tag = [Tag.item, Tag.equip, Tag.weapon, Tag.skill, Tag.passive]
self.item_weight = 1.1
self.explanatory_text = f"damage: {self.level}D{self.damage}\nhit rate: {self.hit_rate}"
self.icon = IMAGE_ID["grass_cutter_icon"]
@property
def damage(self):
if self.owner:
return dice((self.level / 3 + 1), ((self.owner.fighter.STR+self._damage))/2, (self.level/2))
def update_animation(self, delta_time):
super().update_animation(delta_time)
try:
if self.master.state == state.ATTACK and Tag.weapon in self.tag:
self.item_margin_x = (self.item_position_x + 5) * SPRITE_SCALE
self.angle += 90
else:
self.angle = 0
except AttributeError:
    # master may not be assigned yet (e.g. the item is lying on the ground)
    pass
| GrassCutter |
e_hcspportroute.rs | #[doc = "Register `E_HCSPPORTROUTE` reader"]
pub struct R(crate::R<E_HCSPPORTROUTE_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<E_HCSPPORTROUTE_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<E_HCSPPORTROUTE_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<E_HCSPPORTROUTE_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `E_HCSPPORTROUTE` writer"]
pub struct W(crate::W<E_HCSPPORTROUTE_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<E_HCSPPORTROUTE_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
} | #[inline(always)]
fn from(writer: crate::W<E_HCSPPORTROUTE_SPEC>) -> Self {
W(writer)
}
}
impl W {
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "EHCI Companion Port Route Description\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [e_hcspportroute](index.html) module"]
pub struct E_HCSPPORTROUTE_SPEC;
impl crate::RegisterSpec for E_HCSPPORTROUTE_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [e_hcspportroute::R](R) reader structure"]
impl crate::Readable for E_HCSPPORTROUTE_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [e_hcspportroute::W](W) writer structure"]
impl crate::Writable for E_HCSPPORTROUTE_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets E_HCSPPORTROUTE to value 0"]
impl crate::Resettable for E_HCSPPORTROUTE_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
} | }
impl From<crate::W<E_HCSPPORTROUTE_SPEC>> for W { |
part1.py |
import sys
import numpy as np
rawalgo, rawimg = sys.stdin.read().strip().split('\n\n')
algo = np.array([1 if c == '#' else 0 for c in rawalgo], dtype=np.int8)
img = np.array([[1 if c == '#' else 0 for c in line] for line in rawimg.split('\n')], dtype=np.int8)
def enhance(img, algo):
|
img = np.pad(img, 1)
for _ in range(2):
img = enhance(img, algo)
print("Result:", img.sum())
| img = np.pad(img, 2, 'edge')
new = np.copy(img)
for i in range(1, img.shape[0] - 1):
for j in range(1, img.shape[1] - 1):
values = img[i-1:i+2,j-1:j+2].flatten()
index = (values * 2**np.arange(9)[::-1]).sum()
new[i,j] = algo[index]
return new[1:-1,1:-1] |
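# Worked check of the index computation in enhance(): the 3x3 window flattens
# row-major, so the top-left pixel becomes the most significant of 9 bits.
window = np.array([[0, 1, 0], [1, 0, 0], [0, 1, 1]], dtype=np.int8)
values = window.flatten()
index = (values * 2 ** np.arange(9)[::-1]).sum()
assert index == 0b010100011 == 163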
base_datamodule.py | from abc import ABC, abstractmethod
from pathlib import Path
import torch
from torch import Tensor
from torch.utils.data import Dataset, DataLoader
class BaseDataModule(ABC):
def __init__(
self,
data_path: Path,
batch_size: int,
num_workers: int,
):
super().__init__()
self.data_path = data_path
self.batch_size = batch_size
self.num_workers = num_workers
@staticmethod
def prepare_data(
data_path: Path,
):
pass
@abstractmethod
def setup(
self,
val_ratio: float,
) -> None:
pass
def train_dataloader(self) -> DataLoader:
train_dataloader = DataLoader(
dataset=self.train_dataset,
batch_size=self.batch_size,
num_workers=self.num_workers,
)
return train_dataloader
def val_dataloader(self) -> DataLoader:
val_dataloader = DataLoader(
dataset=self.val_dataset,
batch_size=self.batch_size,
num_workers=self.num_workers,
)
return val_dataloader
def test_dataloader(self):
| pass |
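# Hedged sketch of a concrete subclass: setup() performs the train/val split
# on random tensors (RandomDataModule and its data are illustrative).
from torch.utils.data import TensorDataset

class RandomDataModule(BaseDataModule):
    def setup(self, val_ratio: float) -> None:
        data = torch.randn(100, 8)
        n_val = int(len(data) * val_ratio)
        self.val_dataset = TensorDataset(data[:n_val])
        self.train_dataset = TensorDataset(data[n_val:])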
|
Navbar.js | import React, { useState } from 'react'
import styled from 'styled-components'
import SearchIcon from '@material-ui/icons/Search';
import {
Home, OndemandVideo, Store, SupervisedUserCircleOutlined,
ViewQuiltOutlined, Apps, Telegram, Notifications, ArrowDropDown,
Menu
} from '@material-ui/icons'
import { Avatar, makeStyles } from '@material-ui/core';
function Navbar() {
const [active, setActive] = useState("Home");
const useStyles = makeStyles((theme) => ({
medium: {
width: theme.spacing(4),
height: theme.spacing(4),
},
large: {
width: theme.spacing(7),
height: theme.spacing(7),
},
}));
const classes = useStyles();
return (
<Header>
<LeftButton>
<FbLogo src="https://upload.wikimedia.org/wikipedia/commons/thumb/0/05/Facebook_Logo_(2019).png/1200px-Facebook_Logo_(2019).png"/>
<SearchArea>
<SearchIcon/>
<input placeholder="Search Facebook" type="text" />
</SearchArea>
</LeftButton>
<MiddleButton>
<Icon className={active === "Home"?("active"):('default')}>
<Home/>
</Icon>
<Icon className={active === "Video"?("active"):('default')}>
<OndemandVideo />
</Icon>
<Icon className={active === "MarketPlace"?("active"):('default')}>
<Store />
</Icon>
<Icon className={active === "Groups"?("active"):('default')}>
<SupervisedUserCircleOutlined />
</Icon>
<Icon className={active === "Gaming"?("active"):('default')}>
<ViewQuiltOutlined />
</Icon>
<MenuIcon>
<Icon className="default">
<Menu />
</Icon>
</MenuIcon>
</MiddleButton>
<RightButton>
<AccountDetail>
<Avatar src="https://media-exp1.licdn.com/dms/image/C5603AQF-IuuNt18UrA/profile-displayphoto-shrink_400_400/0/1621611802448?e=1633564800&v=beta&t=mniGlAVfwO-fDkPtp56OHkRCqnanCQOKbh4YNoVxru4" className={classes.medium}/>
<p>Gaurav</p>
</AccountDetail>
<OtherIcons>
<Apps />
<Telegram />
<Notifications />
<ArrowDropDown />
</OtherIcons>
</RightButton>
</Header>
)
}
export default Navbar
const Header = styled.div`
position: absolute;
left: 0;
| top: 0;
display: flex;
flex-direction: row;
justify-content: space-between;
background-color: #242526;
color: white;
padding: 0px 16px;
align-items: center;
border-bottom: 1px solid gray;
`
const LeftButton = styled.div`
display: flex;
align-items: center;
`
const FbLogo = styled.img`
width: 40px;
// height: 40px;
`
const SearchArea = styled.div`
margin-left: 6px;
display: flex;
flex: 1;
background-color: #3a3b3c;
padding: 8px;
border-radius: 20px;
.MuiSvgIcon-root {
margin: 0px 3px;
color: gray;
}
input {
font-size: 16px;
color: white;
// margin-right:10px;
background-color: inherit;
outline-width: 0;
border: none;
}
@media(max-width: 1200px) {
input{
display: none;
}
.MuiSvgIcon-root {
margin: 0px 0px;
}
}
`
const MiddleButton = styled.div`
display: flex;
flex: 1;
justify-content: center;
.MuiSvgIcon-root {
padding: 10px 38px;
font-size: 30px;
margin: 0px 3px;
margin-top: 4px;
cursor: pointer;
border-radius: 10px;
}
.active {
color: #2d88ff;
border-bottom: 3px solid #2d88ff;
}
.default .MuiSvgIcon-root:hover {
background-color: #68686880;
}
@media(max-width: 1200px) {
.MuiSvgIcon-root {
padding: 10px 2vw;
margin: 4px;
font-size: 26px;
}
}
@media(max-width: 650px) {
padding: 4px 0px;
justify-content: flex-start;
// margin: 28px 0px;
// // margin-top: 4px;
.MuiSvgIcon-root {
display: none;
}
}
`
const MenuIcon = styled.div`
display: none;
@media(max-width: 800px){
display: flex;
}
@media(max-width: 650px){
.MuiSvgIcon-root{
display: flex;
}
}
`
const Icon = styled.div`
`
const RightButton = styled.div`
display: flex;
align-items: center;
`
const AccountDetail = styled.div`
display: flex;
flex-direction: row;
align-items: center;
padding: 4px 6px;
border-radius: 20px;
cursor: pointer;
.MuiAvatar-root {
}
p {
margin-left: 8px;
margin-right: 8px;
font-weight: bold;
}
:hover {
background-color: #68686880;
}
@media(max-width: 1200px) {
display: none;
}
`
const OtherIcons = styled.div`
display: flex;
flex-direction: row;
.MuiSvgIcon-root {
cursor: pointer;
font-size: 22px;
padding: 10px;
margin: 4px;
background-color: #68686880;
border-radius: 40px;
}
@media(max-width: 1200px){
.MuiSvgIcon-root {
font-size: 18px;
}
}
` | right: 0;
|
getwork.go | package rpc
import (
"bytes"
"encoding/binary"
"encoding/hex"
"fmt"
"math/big"
"math/rand"
"time"
"github.com/conformal/fastsha256"
blockchain "github.com/p9c/pod/pkg/chain"
"github.com/p9c/pod/pkg/chain/fork"
chainhash "github.com/p9c/pod/pkg/chain/hash"
"github.com/p9c/pod/pkg/chain/wire"
"github.com/p9c/pod/pkg/log"
"github.com/p9c/pod/pkg/rpc/btcjson"
"github.com/p9c/pod/pkg/util"
)
// Uint256Size is the number of bytes needed to represent an unsigned 256-bit
// integer.
const Uint256Size = 32
// GetworkDataLen is the length of the data field of the getwork RPC.
// It consists of the serialized block header plus the internal sha256
// padding. The internal sha256 padding consists of a single 1 bit followed
// by enough zeros to pad the message out to 56 bytes followed by length of
// the message in bits encoded as a big-endian uint64 (8 bytes). Thus,
// the resulting length is a multiple of the sha256 block size (64 bytes).
var GetworkDataLen = (1 + ((wire.MaxBlockHeaderPayload + 8) / fastsha256.
BlockSize)) * fastsha256.BlockSize
// Hash1Len is the length of the hash1 field of the getwork RPC.
// It consists of a zero hash plus the internal sha256 padding.
// See the GetworkDataLen comment for details about the internal sha256
// padding format.
var Hash1Len = (1 + ((chainhash.HashSize + 8) / fastsha256.
BlockSize)) * fastsha256.BlockSize
// BigToLEUint256 returns the passed big integer as an unsigned 256-bit
// integer encoded as little-endian bytes.
// Numbers which are larger than the max unsigned 256-bit integer are truncated.
func BigToLEUint256(n *big.Int) [Uint256Size]byte {
// Pad or truncate the big-endian big int to correct number of bytes.
nBytes := n.Bytes()
nlen := len(nBytes)
pad := 0
start := 0
if nlen <= Uint256Size {
pad = Uint256Size - nlen
} else {
start = nlen - Uint256Size
}
var buf [Uint256Size]byte
copy(buf[pad:], nBytes[start:])
// Reverse the bytes to little endian and return them.
for i := 0; i < Uint256Size/2; i++ {
buf[i], buf[Uint256Size-1-i] = buf[Uint256Size-1-i], buf[i]
}
return buf
}
// HandleGetWork handles the getwork call
func HandleGetWork(s *Server, cmd interface{}, closeChan <-chan struct{}) (interface{}, error) {
c := cmd.(*btcjson.GetWorkCmd)
if len(s.StateCfg.ActiveMiningAddrs) == 0 {
return nil, &btcjson.RPCError{
Code: btcjson.ErrRPCInternal.Code,
Message: "No payment addresses specified via --miningaddr",
}
}
netwk := (*s.Config.Network)[0]
if !((netwk == 'r') || (netwk == 's')) &&
s.Cfg.ConnMgr.ConnectedCount() == 0 {
return nil, &btcjson.RPCError{
Code: btcjson.ErrRPCClientNotConnected,
Message: "Pod is not connected to network",
}
}
// No point in generating or accepting work before the chain is synced.
latestHeight := s.Cfg.Chain.BestSnapshot().Height
if latestHeight != 0 && !s.Cfg.SyncMgr.IsCurrent() {
return nil, &btcjson.RPCError{
Code: btcjson.ErrRPCClientInInitialDownload,
Message: "Pod is not yet synchronised...",
}
}
state := s.GBTWorkState
state.Lock()
defer state.Unlock()
if c.Data != nil {
return HandleGetWorkSubmission(s, *c.Data)
}
// Choose a payment address at random.
rand.Seed(time.Now().UnixNano())
payToAddr := s.StateCfg.ActiveMiningAddrs[rand.Intn(len(s.StateCfg.ActiveMiningAddrs))]
lastTxUpdate := s.GBTWorkState.LastTxUpdate
latestHash := &s.Cfg.Chain.BestSnapshot().Hash
generator := s.Cfg.Generator
if state.Template == nil {
var err error
state.Template, err = generator.NewBlockTemplate(0, payToAddr,
s.Cfg.Algo)
if err != nil {
log.ERROR(err)
return nil, err
}
}
msgBlock := state.Template.Block
if msgBlock == nil || state.prevHash == nil ||
!state.prevHash.IsEqual(latestHash) ||
(state.LastTxUpdate != lastTxUpdate &&
time.Now().After(state.LastGenerated.Add(time.Minute))) {
// Reset the extra nonce and clear all cached template variations if
// the best block changed.
if state.prevHash != nil && !state.prevHash.IsEqual(latestHash) {
e := state.UpdateBlockTemplate(s, false)
if e != nil {
log.WARN("failed to update block template", e)
}
}
// Reset the previous best hash the block template was generated
// against so any errors below cause the next invocation to try
// again.
state.prevHash = nil
var err error
state.Template, err = generator.NewBlockTemplate(0, payToAddr,
s.Cfg.Algo)
if err != nil {
errStr := fmt.Sprintf("Failed to create new block template: %v", err)
log.ERROR(errStr)
return nil, &btcjson.RPCError{
Code: btcjson.ErrRPCInternal.Code,
Message: errStr,
}
}
msgBlock = state.Template.Block
// Update work state to ensure another block template isn't generated
// until needed.
state.Template.Block = msgBlock
state.LastGenerated = time.Now()
state.LastTxUpdate = lastTxUpdate
state.prevHash = latestHash
log.DEBUGC(func() string {
return fmt.Sprintf(
"generated block template (timestamp %v, target %064x, "+
"merkle root %s, signature script %x)",
msgBlock.Header.Timestamp, fork.CompactToBig(msgBlock.Header.
Bits), msgBlock.Header.MerkleRoot,
msgBlock.Transactions[0].TxIn[0].SignatureScript,
)
})
} else {
// At this point, there is a saved block template and a new request for
// work was made but either the available transactions haven't changed
// or it hasn't been long enough to trigger a new block template to be
// generated.
// So, update the existing block template and track the variations so
// each variation can be regenerated if a caller finds an answer and
// makes a submission against it. Update the time of the block template
// to the current time while accounting for the median time of the past
// several blocks per the chain consensus rules.
e := generator.UpdateBlockTime(0, msgBlock)
if e != nil {
log.WARN("failed to update block time", e)
}
// Increment the extra nonce and update the block template with the new
// value by regenerating the coinbase script and setting the merkle root
// to the new value.
log.DEBUGF(
"updated block template (timestamp %v, target %064x, "+
"merkle root %s, signature script %x)",
msgBlock.Header.Timestamp, fork.CompactToBig(msgBlock.Header.Bits),
msgBlock.Header.MerkleRoot, msgBlock.Transactions[0].TxIn[0].
SignatureScript)
}
// In order to efficiently store the variations of block templates that
// have been provided to callers save a pointer to the block as well as the
// modified signature script keyed by the merkle root. This information,
// along with the data that is included in a work submission, is used to
// rebuild the block before checking the submitted solution.
/*
coinbaseTx := msgBlock.Transactions[0]
state.blockInfo[msgBlock.Header.MerkleRoot] = &workStateBlockInfo{
msgBlock: msgBlock,
signatureScript: coinbaseTx.TxIn[0].SignatureScript,
}
*/
// Serialize the block header into a buffer large enough to hold the
// block header and the internal sha256 padding that is added and returned
// as part of the data below.
data := make([]byte, 0, GetworkDataLen)
buf := bytes.NewBuffer(data)
err := msgBlock.Header.Serialize(buf)
if err != nil {
log.ERROR(err)
errStr := fmt.Sprintf("Failed to serialize data: %v", err)
log.WARN(errStr)
return nil, &btcjson.RPCError{
Code: btcjson.ErrRPCInternal.Code,
Message: errStr,
}
}
// Calculate the midstate for the block header. The midstate here is the
// internal state of the sha256 algorithm for the first chunk of the block
// header (sha256 operates on 64-byte chunks) which is before the nonce.
// This allows sophisticated callers to avoid hashing the first chunk over
// and over while iterating the nonce range.
data = data[:buf.Len()]
midstate := fastsha256.MidState256(data)
// Expand the data slice to include the full data buffer and apply the
// internal sha256 padding which consists of a single 1 bit followed by
// enough zeros to pad the message out to 56 bytes followed by the length of
// the message in bits encoded as a big-endian uint64 (8 bytes). Thus, the
// resulting length is a multiple of the sha256 block size (64 bytes). This
// makes the data ready for sophisticated caller to make use of only the
// second chunk along with the midstate for the first chunk.
data = data[:GetworkDataLen]
data[wire.MaxBlockHeaderPayload] = 0x80
binary.BigEndian.PutUint64(data[len(data)-8:],
wire.MaxBlockHeaderPayload*8)
// Create the hash1 field which is a zero hash along with the internal
// sha256 padding as described above. This field is really quite useless,
// but it is required for compatibility with the reference implementation.
var hash1 = make([]byte, Hash1Len)
hash1[chainhash.HashSize] = 0x80
binary.BigEndian.PutUint64(hash1[len(hash1)-8:], chainhash.HashSize*8)
// The final result reverses the each of the fields to little endian. In
// particular, the data, hash1, and midstate fields are treated as arrays of
// uint32s (per the internal sha256 hashing state) which are in big endian,
// and thus each 4 bytes is byte swapped. The target is also in big endian,
// but it is treated as a uint256 and byte swapped to little endian
// accordingly. The fact the fields are reversed in this way is rather odd
// and likely an artifact of some legacy internal state in the reference
// implementation, but it is required for compatibility.
ReverseUint32Array(data)
ReverseUint32Array(hash1)
ReverseUint32Array(midstate[:])
target := BigToLEUint256(fork.CompactToBig(msgBlock.Header.Bits))
reply := &btcjson.GetWorkResult{
Data: hex.EncodeToString(data),
Hash1: hex.EncodeToString(hash1),
Midstate: hex.EncodeToString(midstate[:]),
Target: hex.EncodeToString(target[:]),
}
return reply, nil
}
// HandleGetWorkSubmission is a helper for handleGetWork which deals with the
// calling submitting work to be verified and processed. This function MUST be
// called with the RPC workstate locked.
func HandleGetWorkSubmission(s *Server, hexData string) (interface{}, error) {
// Ensure the provided data is sane.
if len(hexData)%2 != 0 {
hexData = "0" + hexData
}
data, err := hex.DecodeString(hexData)
if err != nil {
log.ERROR(err)
return nil, &btcjson.RPCError{
Code: btcjson.ErrRPCInvalidParameter,
Message: fmt.Sprintf("argument must be "+
"hexadecimal string (not %q)", hexData),
}
}
if len(data) != GetworkDataLen {
return false, &btcjson.RPCError{
Code: btcjson.ErrRPCInvalidParameter,
Message: fmt.Sprintf("argument must be "+
"%d bytes (not %d)", GetworkDataLen,
len(data)),
}
}
// Reverse the data as if it were an array of 32-bit unsigned integers. The
// fact the getwork request and submission data is reversed in this way is
// rather odd and likely an artifact of some legacy internal state in the
// reference implementation, but it is required for compatibility.
ReverseUint32Array(data)
// Deserialize the block header from the data.
var submittedHeader wire.BlockHeader
bhBuf := bytes.NewBuffer(data[0:wire.MaxBlockHeaderPayload])
err = submittedHeader.Deserialize(bhBuf)
if err != nil {
log.ERROR(err)
return false, &btcjson.RPCError{
Code: btcjson.ErrRPCInvalidParameter,
Message: fmt.Sprintf("argument does not "+
"contain a valid block header: %v", err),
}
}
// Look up the full block for the provided data based on the merkle root.
// Return false to indicate the solve failed if it's not available.
state := s.GBTWorkState
if state.Template.Block.Header.MerkleRoot.String() == "" {
log.DEBUG(
"Block submitted via getwork has no matching template for merkle root",
submittedHeader.MerkleRoot)
return false, nil
}
// Reconstruct the block using the submitted header stored block info.
msgBlock := state.Template.Block
block := util.NewBlock(msgBlock)
msgBlock.Header.Timestamp = submittedHeader.Timestamp
msgBlock.Header.Nonce = submittedHeader.Nonce
msgBlock.Transactions[0].TxIn[0].SignatureScript = state.Template.Block.
Transactions[0].TxIn[0].SignatureScript
merkles := blockchain.BuildMerkleTreeStore(block.Transactions(), false)
msgBlock.Header.MerkleRoot = *merkles[len(merkles)-1]
// Ensure the submitted block hash is less than the target difficulty.
pl := fork.GetMinDiff(s.Cfg.Algo, s.Cfg.Chain.BestSnapshot().Height)
err = blockchain.CheckProofOfWork(block, pl, s.Cfg.Chain.BestSnapshot().Height)
if err != nil {
log.ERROR(err)
// Anything other than a rule violation is an unexpected error, so return
// that error as an internal error.
if _, ok := err.(blockchain.RuleError); !ok {
return nil, &btcjson.RPCError{
Code: btcjson.ErrRPCInternal.Code,
Message: fmt.Sprintf("Unexpected error while checking proof"+
" of work: %v", err),
}
}
log.DEBUG("block submitted via getwork does not meet the required proof of"+
" work:", err)
return false, nil
}
latestHash := &s.Cfg.Chain.BestSnapshot().Hash
if !msgBlock.Header.PrevBlock.IsEqual(latestHash) {
log.DEBUGF(
"block submitted via getwork with previous block %s is stale",
msgBlock.Header.PrevBlock)
return false, nil
}
// Process this block using the same rules as blocks coming from other
// nodes. This will in turn relay it to the network like normal.
_, isOrphan, err := s.Cfg.Chain.ProcessBlock(0, block, 0,
s.Cfg.Chain.BestSnapshot().Height)
if err != nil || isOrphan {
log.ERROR(err)
// Anything other than a rule violation is an unexpected error, so return
// that error as an internal error.
if _, ok := err.(blockchain.RuleError); !ok {
return nil, &btcjson.RPCError{
Code: btcjson.ErrRPCInternal.Code,
Message: fmt.Sprintf("Unexpected error while processing block"+
": %v", err),
}
}
log.INFO("block submitted via getwork rejected:", err)
return false, nil
}
// The block was accepted.
blockSha := block.Hash()
log.INFO("block submitted via getwork accepted:", blockSha)
return true, nil
}
// ReverseUint32Array treats the passed bytes as a series of uint32s and
// reverses the byte order of each uint32. The passed byte slice must be a
// multiple of 4 for a correct result. The passed bytes slice is modified.
func | (b []byte) {
blen := len(b)
for i := 0; i < blen; i += 4 {
b[i], b[i+3] = b[i+3], b[i]
b[i+1], b[i+2] = b[i+2], b[i+1]
}
}
| ReverseUint32Array |
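// Hedged sketch: a quick check of the byte-swap described above (would live
// in a _test.go file of this package; fmt is already imported here).
func ExampleReverseUint32Array() {
	b := []byte{0x01, 0x02, 0x03, 0x04, 0x11, 0x22, 0x33, 0x44}
	ReverseUint32Array(b)
	fmt.Printf("% x\n", b)
	// Output: 04 03 02 01 44 33 22 11
}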
lib.rs | //! Provides a seamless wrapper around OpenGL and WebGL, so that the rest of
//! the code doesn't need to know which of the two it's running on.
mod buffer;
mod context;
mod framebuffer;
#[cfg(not(target_arch = "wasm32"))]
pub mod opengl;
pub mod shaders;
mod texture;
mod vertexbuffer;
#[cfg(target_arch = "wasm32")]
pub mod webgl;
#[cfg(not(target_arch = "wasm32"))]
pub use crate::opengl::GLContext as Context;
#[cfg(target_arch = "wasm32")]
pub use crate::webgl::WebGLContext as Context;
pub use crate::buffer::Buffer;
pub use crate::buffer::BufferType;
pub use crate::context::Buffer as NativeBuffer; | pub use crate::context::{AbstractContext, GlPrimitive, Program, Shader, UniformLocation};
pub use crate::framebuffer::FrameBuffer;
pub use crate::texture::Texture;
pub use crate::texture::TextureFormat;
pub use crate::vertexbuffer::VertexBuffer; | pub use crate::context::FrameBuffer as NativeFrameBuffer;
pub use crate::context::Texture as NativeTexture;
pub use crate::context::VertexArray as NativeVertexBuffer; |
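// Hedged sketch: because both backends are re-exported under the same name,
// downstream code compiles unchanged on either target (crate name is
// illustrative):
// use gl_wrapper::Context;
// fn render(ctx: &Context) { /* same call sites natively and on wasm32 */ }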
analysis.py | import plotly
class Plotter:
|
def plot_metrics(self):
pass | def __init__(self):
pass |
applicationgateways.go | package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// ApplicationGatewaysClient is the network Client
type ApplicationGatewaysClient struct {
BaseClient
}
// NewApplicationGatewaysClient creates an instance of the ApplicationGatewaysClient client.
func NewApplicationGatewaysClient(subscriptionID string) ApplicationGatewaysClient {
return NewApplicationGatewaysClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewApplicationGatewaysClientWithBaseURI creates an instance of the ApplicationGatewaysClient client using a custom
// endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure
// stack).
func NewApplicationGatewaysClientWithBaseURI(baseURI string, subscriptionID string) ApplicationGatewaysClient {
return ApplicationGatewaysClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// BackendHealth gets the backend health of the specified application gateway in a resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
// applicationGatewayName - the name of the application gateway.
// expand - expands BackendAddressPool and BackendHttpSettings referenced in backend health.
func (client ApplicationGatewaysClient) BackendHealth(ctx context.Context, resourceGroupName string, applicationGatewayName string, expand string) (result ApplicationGatewaysBackendHealthFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.BackendHealth")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.BackendHealthPreparer(ctx, resourceGroupName, applicationGatewayName, expand)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "BackendHealth", nil, "Failure preparing request")
return
}
result, err = client.BackendHealthSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "BackendHealth", result.Response(), "Failure sending request")
return
}
return
}
// BackendHealthPreparer prepares the BackendHealth request.
func (client ApplicationGatewaysClient) BackendHealthPreparer(ctx context.Context, resourceGroupName string, applicationGatewayName string, expand string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationGatewayName": autorest.Encode("path", applicationGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(expand) > 0 {
queryParameters["$expand"] = autorest.Encode("query", expand)
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// BackendHealthSender sends the BackendHealth request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) BackendHealthSender(req *http.Request) (future ApplicationGatewaysBackendHealthFuture, err error) {
var resp *http.Response
resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// BackendHealthResponder handles the response to the BackendHealth request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) BackendHealthResponder(resp *http.Response) (result ApplicationGatewayBackendHealth, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
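// Hedged usage sketch (an autorest authorizer must be configured on the
// client before calling; resource names are illustrative):
//
//	client := NewApplicationGatewaysClient("<subscription-id>")
//	future, err := client.BackendHealth(ctx, "my-rg", "my-appgw", "")
//	// the returned future completes when the long-running operation finishes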
// BackendHealthOnDemand gets the backend health for given combination of backend pool and http setting of the
// specified application gateway in a resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
// applicationGatewayName - the name of the application gateway.
// probeRequest - request body for on-demand test probe operation.
// expand - expands BackendAddressPool and BackendHttpSettings referenced in backend health.
func (client ApplicationGatewaysClient) BackendHealthOnDemand(ctx context.Context, resourceGroupName string, applicationGatewayName string, probeRequest ApplicationGatewayOnDemandProbe, expand string) (result ApplicationGatewaysBackendHealthOnDemandFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.BackendHealthOnDemand")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.BackendHealthOnDemandPreparer(ctx, resourceGroupName, applicationGatewayName, probeRequest, expand)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "BackendHealthOnDemand", nil, "Failure preparing request")
return
}
result, err = client.BackendHealthOnDemandSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "BackendHealthOnDemand", result.Response(), "Failure sending request")
return
}
return
}
// BackendHealthOnDemandPreparer prepares the BackendHealthOnDemand request.
func (client ApplicationGatewaysClient) BackendHealthOnDemandPreparer(ctx context.Context, resourceGroupName string, applicationGatewayName string, probeRequest ApplicationGatewayOnDemandProbe, expand string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationGatewayName": autorest.Encode("path", applicationGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(expand) > 0 {
queryParameters["$expand"] = autorest.Encode("query", expand)
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand", pathParameters),
autorest.WithJSON(probeRequest),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// BackendHealthOnDemandSender sends the BackendHealthOnDemand request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) BackendHealthOnDemandSender(req *http.Request) (future ApplicationGatewaysBackendHealthOnDemandFuture, err error) {
var resp *http.Response
resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// BackendHealthOnDemandResponder handles the response to the BackendHealthOnDemand request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) BackendHealthOnDemandResponder(resp *http.Response) (result ApplicationGatewayBackendHealthOnDemand, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// CreateOrUpdate creates or updates the specified application gateway.
// Parameters:
// resourceGroupName - the name of the resource group.
// applicationGatewayName - the name of the application gateway.
// parameters - parameters supplied to the create or update application gateway operation.
func (client ApplicationGatewaysClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, applicationGatewayName string, parameters ApplicationGateway) (result ApplicationGatewaysCreateOrUpdateFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.Enabled", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.RuleSetType", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.RuleSetVersion", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.MaxRequestBodySize", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.MaxRequestBodySize", Name: validation.InclusiveMaximum, Rule: int64(128), Chain: nil},
{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.MaxRequestBodySize", Name: validation.InclusiveMinimum, Rule: int64(8), Chain: nil},
}},
{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.MaxRequestBodySizeInKb", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.MaxRequestBodySizeInKb", Name: validation.InclusiveMaximum, Rule: int64(128), Chain: nil},
{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.MaxRequestBodySizeInKb", Name: validation.InclusiveMinimum, Rule: int64(8), Chain: nil},
}},
{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.FileUploadLimitInMb", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration.FileUploadLimitInMb", Name: validation.InclusiveMinimum, Rule: int64(0), Chain: nil}}},
}},
{Target: "parameters.ApplicationGatewayPropertiesFormat.AutoscaleConfiguration", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat.AutoscaleConfiguration.MinCapacity", Name: validation.Null, Rule: true,
Chain: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat.AutoscaleConfiguration.MinCapacity", Name: validation.InclusiveMinimum, Rule: int64(0), Chain: nil}}},
{Target: "parameters.ApplicationGatewayPropertiesFormat.AutoscaleConfiguration.MaxCapacity", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.ApplicationGatewayPropertiesFormat.AutoscaleConfiguration.MaxCapacity", Name: validation.InclusiveMinimum, Rule: int64(2), Chain: nil}}},
}},
}}}}}); err != nil {
return result, validation.NewError("network.ApplicationGatewaysClient", "CreateOrUpdate", err.Error())
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, applicationGatewayName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "CreateOrUpdate", result.Response(), "Failure sending request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client ApplicationGatewaysClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, applicationGatewayName string, parameters ApplicationGateway) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationGatewayName": autorest.Encode("path", applicationGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
parameters.Etag = nil
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) CreateOrUpdateSender(req *http.Request) (future ApplicationGatewaysCreateOrUpdateFuture, err error) {
var resp *http.Response
resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) CreateOrUpdateResponder(resp *http.Response) (result ApplicationGateway, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified application gateway.
// Parameters:
// resourceGroupName - the name of the resource group.
// applicationGatewayName - the name of the application gateway.
func (client ApplicationGatewaysClient) Delete(ctx context.Context, resourceGroupName string, applicationGatewayName string) (result ApplicationGatewaysDeleteFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.Delete")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.DeletePreparer(ctx, resourceGroupName, applicationGatewayName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Delete", result.Response(), "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client ApplicationGatewaysClient) DeletePreparer(ctx context.Context, resourceGroupName string, applicationGatewayName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationGatewayName": autorest.Encode("path", applicationGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) DeleteSender(req *http.Request) (future ApplicationGatewaysDeleteFuture, err error) {
var resp *http.Response
resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets the specified application gateway.
// Parameters:
// resourceGroupName - the name of the resource group.
// applicationGatewayName - the name of the application gateway.
func (client ApplicationGatewaysClient) Get(ctx context.Context, resourceGroupName string, applicationGatewayName string) (result ApplicationGateway, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, resourceGroupName, applicationGatewayName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client ApplicationGatewaysClient) GetPreparer(ctx context.Context, resourceGroupName string, applicationGatewayName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationGatewayName": autorest.Encode("path", applicationGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) GetSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) GetResponder(resp *http.Response) (result ApplicationGateway, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
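// Usage sketch (illustrative; names are placeholders): Get is synchronous, so
// the typed gateway is available as soon as the call returns:
//
//	gw, err := client.Get(ctx, "my-rg", "my-gateway")
//	if err == nil && gw.Name != nil {
//		fmt.Println(*gw.Name)
//	}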
// GetSslPredefinedPolicy gets Ssl predefined policy with the specified policy name.
// Parameters:
// predefinedPolicyName - name of Ssl predefined policy.
func (client ApplicationGatewaysClient) GetSslPredefinedPolicy(ctx context.Context, predefinedPolicyName string) (result ApplicationGatewaySslPredefinedPolicy, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.GetSslPredefinedPolicy")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetSslPredefinedPolicyPreparer(ctx, predefinedPolicyName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "GetSslPredefinedPolicy", nil, "Failure preparing request")
return
}
resp, err := client.GetSslPredefinedPolicySender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "GetSslPredefinedPolicy", resp, "Failure sending request")
return
}
result, err = client.GetSslPredefinedPolicyResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "GetSslPredefinedPolicy", resp, "Failure responding to request")
}
return
}
// GetSslPredefinedPolicyPreparer prepares the GetSslPredefinedPolicy request.
func (client ApplicationGatewaysClient) GetSslPredefinedPolicyPreparer(ctx context.Context, predefinedPolicyName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"predefinedPolicyName": autorest.Encode("path", predefinedPolicyName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies/{predefinedPolicyName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSslPredefinedPolicySender sends the GetSslPredefinedPolicy request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) GetSslPredefinedPolicySender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// GetSslPredefinedPolicyResponder handles the response to the GetSslPredefinedPolicy request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) GetSslPredefinedPolicyResponder(resp *http.Response) (result ApplicationGatewaySslPredefinedPolicy, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// List lists all application gateways in a resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
func (client ApplicationGatewaysClient) List(ctx context.Context, resourceGroupName string) (result ApplicationGatewayListResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.List")
defer func() {
sc := -1
if result.aglr.Response.Response != nil {
sc = result.aglr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx, resourceGroupName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.aglr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "List", resp, "Failure sending request")
return
}
result.aglr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "List", resp, "Failure responding to request")
}
if result.aglr.hasNextLink() && result.aglr.IsEmpty() {
err = result.NextWithContext(ctx)
}
return
}
// ListPreparer prepares the List request.
func (client ApplicationGatewaysClient) ListPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) ListSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) ListResponder(resp *http.Response) (result ApplicationGatewayListResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client ApplicationGatewaysClient) listNextResults(ctx context.Context, lastResults ApplicationGatewayListResult) (result ApplicationGatewayListResult, err error) {
req, err := lastResults.applicationGatewayListResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client ApplicationGatewaysClient) ListComplete(ctx context.Context, resourceGroupName string) (result ApplicationGatewayListResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.List")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.List(ctx, resourceGroupName)
return
}
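// Usage sketch (illustrative): ListComplete flattens paging into a single
// iterator, so callers can walk every result without handling next links:
//
//	it, err := client.ListComplete(ctx, "my-rg")
//	for ; err == nil && it.NotDone(); err = it.NextWithContext(ctx) {
//		gw := it.Value()
//		// use gw
//	}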
// ListAll gets all the application gateways in a subscription.
func (client ApplicationGatewaysClient) ListAll(ctx context.Context) (result ApplicationGatewayListResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAll")
defer func() {
sc := -1
if result.aglr.Response.Response != nil {
sc = result.aglr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listAllNextResults
req, err := client.ListAllPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAll", nil, "Failure preparing request")
return
}
resp, err := client.ListAllSender(req)
if err != nil {
result.aglr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAll", resp, "Failure sending request")
return
}
result.aglr, err = client.ListAllResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAll", resp, "Failure responding to request")
}
if result.aglr.hasNextLink() && result.aglr.IsEmpty() {
err = result.NextWithContext(ctx)
}
return
}
// ListAllPreparer prepares the ListAll request.
func (client ApplicationGatewaysClient) ListAllPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGateways", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAllSender sends the ListAll request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) ListAllSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListAllResponder handles the response to the ListAll request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) ListAllResponder(resp *http.Response) (result ApplicationGatewayListResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listAllNextResults retrieves the next set of results, if any.
func (client ApplicationGatewaysClient) listAllNextResults(ctx context.Context, lastResults ApplicationGatewayListResult) (result ApplicationGatewayListResult, err error) {
req, err := lastResults.applicationGatewayListResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listAllNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListAllSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listAllNextResults", resp, "Failure sending next results request")
}
result, err = client.ListAllResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listAllNextResults", resp, "Failure responding to next results request")
}
return
}
// ListAllComplete enumerates all values, automatically crossing page boundaries as required.
func (client ApplicationGatewaysClient) ListAllComplete(ctx context.Context) (result ApplicationGatewayListResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAll")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.ListAll(ctx)
return
}
// ListAvailableRequestHeaders lists all available request headers.
func (client ApplicationGatewaysClient) ListAvailableRequestHeaders(ctx context.Context) (result ListString, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAvailableRequestHeaders")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.ListAvailableRequestHeadersPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableRequestHeaders", nil, "Failure preparing request")
return
}
resp, err := client.ListAvailableRequestHeadersSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableRequestHeaders", resp, "Failure sending request")
return
}
result, err = client.ListAvailableRequestHeadersResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableRequestHeaders", resp, "Failure responding to request")
}
return
}
// ListAvailableRequestHeadersPreparer prepares the ListAvailableRequestHeaders request.
func (client ApplicationGatewaysClient) ListAvailableRequestHeadersPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableRequestHeaders", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAvailableRequestHeadersSender sends the ListAvailableRequestHeaders request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) ListAvailableRequestHeadersSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListAvailableRequestHeadersResponder handles the response to the ListAvailableRequestHeaders request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) ListAvailableRequestHeadersResponder(resp *http.Response) (result ListString, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
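// Note (an inference from the unmarshal target, not stated in the code): the
// service appears to return a bare JSON array of header names, which is why
// the responder decodes into result.Value rather than the ListString wrapper.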
// ListAvailableResponseHeaders lists all available response headers.
func (client ApplicationGatewaysClient) ListAvailableResponseHeaders(ctx context.Context) (result ListString, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAvailableResponseHeaders")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.ListAvailableResponseHeadersPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableResponseHeaders", nil, "Failure preparing request")
return
}
resp, err := client.ListAvailableResponseHeadersSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableResponseHeaders", resp, "Failure sending request")
return
}
result, err = client.ListAvailableResponseHeadersResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableResponseHeaders", resp, "Failure responding to request")
}
return
}
// ListAvailableResponseHeadersPreparer prepares the ListAvailableResponseHeaders request.
func (client ApplicationGatewaysClient) ListAvailableResponseHeadersPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableResponseHeaders", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAvailableResponseHeadersSender sends the ListAvailableResponseHeaders request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) ListAvailableResponseHeadersSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListAvailableResponseHeadersResponder handles the response to the ListAvailableResponseHeaders request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) ListAvailableResponseHeadersResponder(resp *http.Response) (result ListString, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListAvailableServerVariables lists all available server variables.
func (client ApplicationGatewaysClient) ListAvailableServerVariables(ctx context.Context) (result ListString, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAvailableServerVariables")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.ListAvailableServerVariablesPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableServerVariables", nil, "Failure preparing request")
return
}
resp, err := client.ListAvailableServerVariablesSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableServerVariables", resp, "Failure sending request")
return
}
result, err = client.ListAvailableServerVariablesResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableServerVariables", resp, "Failure responding to request")
}
return
}
// ListAvailableServerVariablesPreparer prepares the ListAvailableServerVariables request.
func (client ApplicationGatewaysClient) ListAvailableServerVariablesPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableServerVariables", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAvailableServerVariablesSender sends the ListAvailableServerVariables request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) ListAvailableServerVariablesSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListAvailableServerVariablesResponder handles the response to the ListAvailableServerVariables request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) ListAvailableServerVariablesResponder(resp *http.Response) (result ListString, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListAvailableSslOptions lists available SSL options for configuring SSL policy.
func (client ApplicationGatewaysClient) ListAvailableSslOptions(ctx context.Context) (result ApplicationGatewayAvailableSslOptions, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAvailableSslOptions")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.ListAvailableSslOptionsPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableSslOptions", nil, "Failure preparing request")
return
}
resp, err := client.ListAvailableSslOptionsSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableSslOptions", resp, "Failure sending request")
return
}
result, err = client.ListAvailableSslOptionsResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableSslOptions", resp, "Failure responding to request")
}
return
}
// ListAvailableSslOptionsPreparer prepares the ListAvailableSslOptions request.
func (client ApplicationGatewaysClient) ListAvailableSslOptionsPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAvailableSslOptionsSender sends the ListAvailableSslOptions request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) ListAvailableSslOptionsSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListAvailableSslOptionsResponder handles the response to the ListAvailableSslOptions request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) ListAvailableSslOptionsResponder(resp *http.Response) (result ApplicationGatewayAvailableSslOptions, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListAvailableSslPredefinedPolicies lists all SSL predefined policies for configuring SSL policy.
func (client ApplicationGatewaysClient) ListAvailableSslPredefinedPolicies(ctx context.Context) (result ApplicationGatewayAvailableSslPredefinedPoliciesPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAvailableSslPredefinedPolicies")
defer func() {
sc := -1
if result.agaspp.Response.Response != nil {
sc = result.agaspp.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listAvailableSslPredefinedPoliciesNextResults
req, err := client.ListAvailableSslPredefinedPoliciesPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableSslPredefinedPolicies", nil, "Failure preparing request")
return
}
resp, err := client.ListAvailableSslPredefinedPoliciesSender(req)
if err != nil {
result.agaspp.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableSslPredefinedPolicies", resp, "Failure sending request")
return
}
result.agaspp, err = client.ListAvailableSslPredefinedPoliciesResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableSslPredefinedPolicies", resp, "Failure responding to request")
}
if result.agaspp.hasNextLink() && result.agaspp.IsEmpty() {
err = result.NextWithContext(ctx)
}
return
}
// ListAvailableSslPredefinedPoliciesPreparer prepares the ListAvailableSslPredefinedPolicies request.
func (client ApplicationGatewaysClient) ListAvailableSslPredefinedPoliciesPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAvailableSslPredefinedPoliciesSender sends the ListAvailableSslPredefinedPolicies request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) ListAvailableSslPredefinedPoliciesSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListAvailableSslPredefinedPoliciesResponder handles the response to the ListAvailableSslPredefinedPolicies request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) ListAvailableSslPredefinedPoliciesResponder(resp *http.Response) (result ApplicationGatewayAvailableSslPredefinedPolicies, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listAvailableSslPredefinedPoliciesNextResults retrieves the next set of results, if any.
func (client ApplicationGatewaysClient) listAvailableSslPredefinedPoliciesNextResults(ctx context.Context, lastResults ApplicationGatewayAvailableSslPredefinedPolicies) (result ApplicationGatewayAvailableSslPredefinedPolicies, err error) {
req, err := lastResults.applicationGatewayAvailableSslPredefinedPoliciesPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listAvailableSslPredefinedPoliciesNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListAvailableSslPredefinedPoliciesSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listAvailableSslPredefinedPoliciesNextResults", resp, "Failure sending next results request")
}
result, err = client.ListAvailableSslPredefinedPoliciesResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "listAvailableSslPredefinedPoliciesNextResults", resp, "Failure responding to next results request")
}
return
}
// ListAvailableSslPredefinedPoliciesComplete enumerates all values, automatically crossing page boundaries as required.
func (client ApplicationGatewaysClient) ListAvailableSslPredefinedPoliciesComplete(ctx context.Context) (result ApplicationGatewayAvailableSslPredefinedPoliciesIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAvailableSslPredefinedPolicies")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.ListAvailableSslPredefinedPolicies(ctx)
return
}
// ListAvailableWafRuleSets lists all available web application firewall rule sets.
func (client ApplicationGatewaysClient) ListAvailableWafRuleSets(ctx context.Context) (result ApplicationGatewayAvailableWafRuleSetsResult, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.ListAvailableWafRuleSets")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.ListAvailableWafRuleSetsPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableWafRuleSets", nil, "Failure preparing request")
return
}
resp, err := client.ListAvailableWafRuleSetsSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableWafRuleSets", resp, "Failure sending request")
return
}
result, err = client.ListAvailableWafRuleSetsResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "ListAvailableWafRuleSets", resp, "Failure responding to request")
}
return
}
// ListAvailableWafRuleSetsPreparer prepares the ListAvailableWafRuleSets request.
func (client ApplicationGatewaysClient) ListAvailableWafRuleSetsPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableWafRuleSets", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAvailableWafRuleSetsSender sends the ListAvailableWafRuleSets request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) ListAvailableWafRuleSetsSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListAvailableWafRuleSetsResponder handles the response to the ListAvailableWafRuleSets request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) ListAvailableWafRuleSetsResponder(resp *http.Response) (result ApplicationGatewayAvailableWafRuleSetsResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Start starts the specified application gateway.
// Parameters:
// resourceGroupName - the name of the resource group.
// applicationGatewayName - the name of the application gateway.
func (client ApplicationGatewaysClient) Start(ctx context.Context, resourceGroupName string, applicationGatewayName string) (result ApplicationGatewaysStartFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.Start")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.StartPreparer(ctx, resourceGroupName, applicationGatewayName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Start", nil, "Failure preparing request")
return
}
result, err = client.StartSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Start", result.Response(), "Failure sending request")
return
}
return
}
// StartPreparer prepares the Start request.
func (client ApplicationGatewaysClient) StartPreparer(ctx context.Context, resourceGroupName string, applicationGatewayName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationGatewayName": autorest.Encode("path", applicationGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// StartSender sends the Start request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) StartSender(req *http.Request) (future ApplicationGatewaysStartFuture, err error) {
var resp *http.Response
resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// StartResponder handles the response to the Start request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) StartResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByClosing())
result.Response = resp
return
}
// Stop stops the specified application gateway in a resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
// applicationGatewayName - the name of the application gateway.
func (client ApplicationGatewaysClient) Stop(ctx context.Context, resourceGroupName string, applicationGatewayName string) (result ApplicationGatewaysStopFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.Stop")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.StopPreparer(ctx, resourceGroupName, applicationGatewayName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Stop", nil, "Failure preparing request")
return
}
result, err = client.StopSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "Stop", result.Response(), "Failure sending request")
return
}
return
}
// StopPreparer prepares the Stop request.
func (client ApplicationGatewaysClient) StopPreparer(ctx context.Context, resourceGroupName string, applicationGatewayName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationGatewayName": autorest.Encode("path", applicationGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// StopSender sends the Stop request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) StopSender(req *http.Request) (future ApplicationGatewaysStopFuture, err error) {
var resp *http.Response
resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// StopResponder handles the response to the Stop request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) StopResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByClosing())
result.Response = resp
return
}
// UpdateTags updates the specified application gateway tags.
// Parameters:
// resourceGroupName - the name of the resource group.
// applicationGatewayName - the name of the application gateway.
// parameters - parameters supplied to update application gateway tags.
func (client ApplicationGatewaysClient) UpdateTags(ctx context.Context, resourceGroupName string, applicationGatewayName string, parameters TagsObject) (result ApplicationGateway, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ApplicationGatewaysClient.UpdateTags")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.UpdateTagsPreparer(ctx, resourceGroupName, applicationGatewayName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "UpdateTags", nil, "Failure preparing request")
return
}
resp, err := client.UpdateTagsSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "UpdateTags", resp, "Failure sending request")
return
}
result, err = client.UpdateTagsResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ApplicationGatewaysClient", "UpdateTags", resp, "Failure responding to request")
}
return
}
// UpdateTagsPreparer prepares the UpdateTags request.
func (client ApplicationGatewaysClient) UpdateTagsPreparer(ctx context.Context, resourceGroupName string, applicationGatewayName string, parameters TagsObject) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationGatewayName": autorest.Encode("path", applicationGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2019-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPatch(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// UpdateTagsSender sends the UpdateTags request. The method will close the
// http.Response Body if it receives an error.
func (client ApplicationGatewaysClient) UpdateTagsSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// UpdateTagsResponder handles the response to the UpdateTags request. The method always
// closes the http.Response Body.
func (client ApplicationGatewaysClient) UpdateTagsResponder(resp *http.Response) (result ApplicationGateway, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
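// Usage sketch (illustrative; assumes the autorest "to" helper package and
// placeholder names): UpdateTags is a synchronous PATCH that replaces only the
// resource tags:
//
//	tags := TagsObject{Tags: map[string]*string{"env": to.StringPtr("prod")}}
//	gw, err := client.UpdateTags(ctx, "my-rg", "my-gateway", tags)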
close_service_approval_parameters.go
// Code generated by go-swagger; DO NOT EDIT.
package service_approval
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"net/http"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
"github.com/go-openapi/runtime/middleware"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
strfmt "github.com/go-openapi/strfmt"
)
// NewCloseServiceApprovalParams creates a new CloseServiceApprovalParams object
// with the default values initialized.
func NewCloseServiceApprovalParams() CloseServiceApprovalParams {
var (
// initialize parameters with default values
pageSizeDefault = int64(20)
)
return CloseServiceApprovalParams{
PageSize: &pageSizeDefault,
}
}
// CloseServiceApprovalParams contains all the bound params for the close service approval operation
// typically these are obtained from a http.Request
//
// swagger:parameters closeServiceApproval
type CloseServiceApprovalParams struct {
// HTTP Request Object
HTTPRequest *http.Request `json:"-"`
/*Approval Event ID
Required: true
In: path
*/
ApprovalID string
/*Pointer to the next set of items
In: query
*/
NextPageKey *string
/*The number of items to return
Maximum: 50
Minimum: 1
In: query
Default: 20
*/
PageSize *int64
/*Name of the project
Required: true
In: path
*/
ProjectName string
/*Name of the service
Required: true
In: path
*/
ServiceName string
/*Name of the stage
Required: true
In: path
*/
StageName string
}
// BindRequest both binds and validates a request, it assumes that complex things implement a Validatable(strfmt.Registry) error interface
// for simple values it will use straight method calls.
//
// To ensure default values, the struct must have been initialized with NewCloseServiceApprovalParams() beforehand.
func (o *CloseServiceApprovalParams) BindRequest(r *http.Request, route *middleware.MatchedRoute) error {
var res []error
o.HTTPRequest = r
qs := runtime.Values(r.URL.Query())
rApprovalID, rhkApprovalID, _ := route.Params.GetOK("approvalID")
if err := o.bindApprovalID(rApprovalID, rhkApprovalID, route.Formats); err != nil {
res = append(res, err)
}
qNextPageKey, qhkNextPageKey, _ := qs.GetOK("nextPageKey")
if err := o.bindNextPageKey(qNextPageKey, qhkNextPageKey, route.Formats); err != nil {
res = append(res, err)
}
qPageSize, qhkPageSize, _ := qs.GetOK("pageSize")
if err := o.bindPageSize(qPageSize, qhkPageSize, route.Formats); err != nil {
res = append(res, err)
}
rProjectName, rhkProjectName, _ := route.Params.GetOK("projectName")
if err := o.bindProjectName(rProjectName, rhkProjectName, route.Formats); err != nil {
res = append(res, err)
}
rServiceName, rhkServiceName, _ := route.Params.GetOK("serviceName")
if err := o.bindServiceName(rServiceName, rhkServiceName, route.Formats); err != nil {
res = append(res, err)
}
rStageName, rhkStageName, _ := route.Params.GetOK("stageName")
if err := o.bindStageName(rStageName, rhkStageName, route.Formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
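// Usage sketch (illustrative; generated go-swagger handlers normally perform
// this wiring): binding assumes the struct came from NewCloseServiceApprovalParams
// so PageSize already holds its default of 20:
//
//	params := NewCloseServiceApprovalParams()
//	if err := params.BindRequest(r, matchedRoute); err != nil {
//		// err is an errors.CompositeValidationError listing every failed field
//	}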
// bindApprovalID binds and validates parameter ApprovalID from path.
func (o *CloseServiceApprovalParams) bindApprovalID(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
}
// Required: true
// Parameter is provided by construction from the route
o.ApprovalID = raw
return nil
}
// bindNextPageKey binds and validates parameter NextPageKey from query.
func (o *CloseServiceApprovalParams) bindNextPageKey(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
}
// Required: false
// AllowEmptyValue: false
if raw == "" { // empty values pass all other validations
return nil
}
o.NextPageKey = &raw
return nil
}
// bindPageSize binds and validates parameter PageSize from query.
func (o *CloseServiceApprovalParams) bindPageSize(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
}
// Required: false
// AllowEmptyValue: false
if raw == "" { // empty values pass all other validations
// Default values have been previously initialized by NewCloseServiceApprovalParams()
return nil
}
value, err := swag.ConvertInt64(raw)
if err != nil {
return errors.InvalidType("pageSize", "query", "int64", raw)
}
o.PageSize = &value
if err := o.validatePageSize(formats); err != nil {
return err
}
return nil
}
// validatePageSize carries on validations for parameter PageSize
func (o *CloseServiceApprovalParams) validatePageSize(formats strfmt.Registry) error {
if err := validate.MinimumInt("pageSize", "query", int64(*o.PageSize), 1, false); err != nil {
return err
}
if err := validate.MaximumInt("pageSize", "query", int64(*o.PageSize), 50, false); err != nil {
return err
}
return nil
}
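// Note: both bound checks pass exclusive=false, so the limits are inclusive;
// pageSize=1 and pageSize=50 are accepted, while 0 or 51 produce an error that
// BindRequest folds into its composite validation result.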
// bindProjectName binds and validates parameter ProjectName from path.
func (o *CloseServiceApprovalParams) bindProjectName(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
}
// Required: true
// Parameter is provided by construction from the route
o.ProjectName = raw
return nil
}
// bindServiceName binds and validates parameter ServiceName from path.
func (o *CloseServiceApprovalParams) bindServiceName(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
}
// Required: true
// Parameter is provided by construction from the route
o.ServiceName = raw
return nil
}
// bindStageName binds and validates parameter StageName from path.
func (o *CloseServiceApprovalParams) bindStageName(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
}
// Required: true
// Parameter is provided by construction from the route
o.StageName = raw
return nil
}
box_test.go
package box_test
import (
"github.com/boxgo/box"
"github.com/boxgo/box/pkg/logger"
"github.com/boxgo/box/pkg/server/ginserver"
"github.com/gin-gonic/gin"
)
// Example demonstrates a ping-pong HTTP server.
func Example() {
ginserver.Default.GET("/ping", func(ctx *gin.Context) {
ctx.JSON(200, "pong")
})
app := box.New(
box.WithBoxes(
ginserver.Default,
),
)
if err := app.Run(); err != nil {
logger.Errorw("server run error: ", "err", err)
}
}
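// Note (assumption about tooling behavior): because this Example has no
// "Output:" comment, `go test` compiles it but does not execute it; it mainly
// serves as godoc documentation.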
asgi.py | """
ASGI config for online_quiz project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'online_quiz.settings')
application = get_asgi_application()
test_quiz.py
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
QUIZZES_URL = reverse('questionary:quiz-list')
class PublicQuizzesApiTests(TestCase):
"""Test the publicly available tags API"""
def setUp(self):
self.client = APIClient()
def test_login_required(self):
"""Test that login required for retrieving quizzes"""
res = self.client.get(QUIZZES_URL)
self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
usage_api.py | """
OpenAPI Extension with dynamic servers
This specification shows how to use dynamic servers. # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from dynamic_servers.api_client import ApiClient, Endpoint as _Endpoint
from dynamic_servers.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
class UsageApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.custom_server_endpoint = _Endpoint(
settings={
'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
'auth': [],
'endpoint_path': '/custom',
'operation_id': 'custom_server',
'http_method': 'GET',
'servers': [
{
'url': "https://{server}.swagger.io:{port}/v2",
'description': "No description provided",
'variables': {
'server': {
'description': "No description provided",
'default_value': "custom-petstore",
'enum_values': [
"custom-petstore",
"custom-qa-petstore",
"custom-dev-petstore"
]
},
'port': {
'description': "No description provided",
'default_value': "8080",
'enum_values': [
"80",
"8080"
]
}
}
},
{
'url': "https://localhost:8081/{version}",
'description': "The local custom server",
'variables': {
'version': {
'description': "No description provided",
'default_value': "v2",
'enum_values': [
"v1",
"v2",
"v3"
]
}
}
},
{
'url': "https://third.example.com/{prefix}",
'description': "The local custom server",
'variables': {
'prefix': {
'description': "No description provided",
'default_value': "custom",
}
}
},
]
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.default_server_endpoint = _Endpoint(
settings={
'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
'auth': [],
'endpoint_path': '/default',
'operation_id': 'default_server',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
def custom_server(
self,
**kwargs
):
"""Use custom server # noqa: E501
Use custom server # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_server(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
{str: (bool, date, datetime, dict, float, int, list, str, none_type)}
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.custom_server_endpoint.call_with_http_info(**kwargs)
def default_server(
self,
**kwargs
):
"""Use default server # noqa: E501
Use default server # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.default_server(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): return the response data without the
status code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
{str: (bool, date, datetime, dict, float, int, list, str, none_type)}
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.default_server_endpoint.call_with_http_info(**kwargs)
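# Usage sketch (illustrative; assumes a `ServersApi`-style generated class name
# and the package's `ApiClient`/`Configuration` -- the class name is an
# assumption, not confirmed by this file):
#
#   with ApiClient(Configuration()) as client:
#       api = ServersApi(client)
#       result = api.custom_server()            # synchronous call
#       thread = api.default_server(async_req=True)
#       result = thread.get()                   # resolve the async result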
| custom_server |
garbagecollector_test.go | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package garbagecollector
import (
"net/http"
"net/http/httptest"
"reflect"
"strings"
"sync"
"testing"
"github.com/stretchr/testify/assert"
_ "k8s.io/kubernetes/pkg/api/install"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/runtime/serializer"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/json"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/strategicpatch"
"k8s.io/client-go/dynamic"
restclient "k8s.io/client-go/rest"
"k8s.io/client-go/util/workqueue"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/v1"
"k8s.io/kubernetes/pkg/client/clientset_generated/clientset/fake"
informers "k8s.io/kubernetes/pkg/client/informers/informers_generated/externalversions"
"k8s.io/kubernetes/pkg/controller/garbagecollector/metaonly"
)
func TestNewGarbageCollector(t *testing.T) {
config := &restclient.Config{}
config.ContentConfig.NegotiatedSerializer = serializer.DirectCodecFactory{CodecFactory: metaonly.NewMetadataCodecFactory()}
metaOnlyClientPool := dynamic.NewClientPool(config, api.Registry.RESTMapper(), dynamic.LegacyAPIPathResolverFunc)
config.ContentConfig.NegotiatedSerializer = nil
clientPool := dynamic.NewClientPool(config, api.Registry.RESTMapper(), dynamic.LegacyAPIPathResolverFunc)
podResource := map[schema.GroupVersionResource]struct{}{
{Version: "v1", Resource: "pods"}: {},
// no monitor will be constructed for the non-core resource; GC construction should not fail.
{Group: "tpr.io", Version: "v1", Resource: "unknown"}: {},
}
client := fake.NewSimpleClientset()
sharedInformers := informers.NewSharedInformerFactory(client, 0)
alwaysStarted := make(chan struct{})
close(alwaysStarted)
gc, err := NewGarbageCollector(metaOnlyClientPool, clientPool, api.Registry.RESTMapper(), podResource, ignoredResources, sharedInformers, alwaysStarted)
if err != nil {
t.Fatal(err)
}
assert.Equal(t, 1, len(gc.dependencyGraphBuilder.monitors))
}
// fakeAction records information about requests to aid in testing.
type fakeAction struct {
method string
path string
query string
}
// String returns method=path to aid in testing
func (f *fakeAction) String() string {
return strings.Join([]string{f.method, f.path}, "=")
}
type FakeResponse struct {
statusCode int
content []byte
}
// fakeActionHandler holds a list of fakeActions received
type fakeActionHandler struct {
// statusCode and content returned by this handler for different method + path.
response map[string]FakeResponse
lock sync.Mutex
actions []fakeAction
}
// ServeHTTP logs the action that occurred and always returns the associated status code
func (f *fakeActionHandler) ServeHTTP(response http.ResponseWriter, request *http.Request) {
f.lock.Lock()
defer f.lock.Unlock()
f.actions = append(f.actions, fakeAction{method: request.Method, path: request.URL.Path, query: request.URL.RawQuery})
fakeResponse, ok := f.response[request.Method+request.URL.Path]
if !ok {
fakeResponse.statusCode = 200
fakeResponse.content = []byte("{\"kind\": \"List\"}")
}
response.Header().Set("Content-Type", "application/json")
response.WriteHeader(fakeResponse.statusCode)
response.Write(fakeResponse.content)
}
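// Illustrative setup for the handler (the path and status mirror the tests
// below and are examples, not fixtures defined here):
//
//	handler := &fakeActionHandler{
//		response: map[string]FakeResponse{
//			"GET" + "/api/v1/namespaces/ns1/replicationcontrollers/owner1": {statusCode: 404, content: []byte{}},
//		},
//	}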
// testServerAndClientConfig returns a server that listens and a config that can reference it
func testServerAndClientConfig(handler func(http.ResponseWriter, *http.Request)) (*httptest.Server, *restclient.Config) {
srv := httptest.NewServer(http.HandlerFunc(handler))
config := &restclient.Config{
Host: srv.URL,
}
return srv, config
}
type garbageCollector struct {
*GarbageCollector
stop chan struct{}
}
func setupGC(t *testing.T, config *restclient.Config) garbageCollector {
config.ContentConfig.NegotiatedSerializer = serializer.DirectCodecFactory{CodecFactory: metaonly.NewMetadataCodecFactory()}
metaOnlyClientPool := dynamic.NewClientPool(config, api.Registry.RESTMapper(), dynamic.LegacyAPIPathResolverFunc)
config.ContentConfig.NegotiatedSerializer = nil
clientPool := dynamic.NewClientPool(config, api.Registry.RESTMapper(), dynamic.LegacyAPIPathResolverFunc)
podResource := map[schema.GroupVersionResource]struct{}{{Version: "v1", Resource: "pods"}: {}}
client := fake.NewSimpleClientset()
sharedInformers := informers.NewSharedInformerFactory(client, 0)
alwaysStarted := make(chan struct{})
close(alwaysStarted)
gc, err := NewGarbageCollector(metaOnlyClientPool, clientPool, api.Registry.RESTMapper(), podResource, ignoredResources, sharedInformers, alwaysStarted)
if err != nil {
t.Fatal(err)
}
stop := make(chan struct{})
go sharedInformers.Start(stop)
return garbageCollector{gc, stop}
}
func getPod(podName string, ownerReferences []metav1.OwnerReference) *v1.Pod |
func serilizeOrDie(t *testing.T, object interface{}) []byte {
data, err := json.Marshal(object)
if err != nil {
t.Fatal(err)
}
return data
}
// Test that the attemptToDeleteItem function performs the expected actions.
func TestAttemptToDeleteItem(t *testing.T) {
pod := getPod("ToBeDeletedPod", []metav1.OwnerReference{
{
Kind: "ReplicationController",
Name: "owner1",
UID: "123",
APIVersion: "v1",
},
})
testHandler := &fakeActionHandler{
response: map[string]FakeResponse{
"GET" + "/api/v1/namespaces/ns1/replicationcontrollers/owner1": {
404,
[]byte{},
},
"GET" + "/api/v1/namespaces/ns1/pods/ToBeDeletedPod": {
200,
serilizeOrDie(t, pod),
},
},
}
srv, clientConfig := testServerAndClientConfig(testHandler.ServeHTTP)
defer srv.Close()
gc := setupGC(t, clientConfig)
defer close(gc.stop)
item := &node{
identity: objectReference{
OwnerReference: metav1.OwnerReference{
Kind: pod.Kind,
APIVersion: pod.APIVersion,
Name: pod.Name,
UID: pod.UID,
},
Namespace: pod.Namespace,
},
// owners are intentionally left empty. The attemptToDeleteItem routine should get the latest item from the server.
owners: nil,
}
err := gc.attemptToDeleteItem(item)
if err != nil {
t.Errorf("Unexpected Error: %v", err)
}
expectedActionSet := sets.NewString()
expectedActionSet.Insert("GET=/api/v1/namespaces/ns1/replicationcontrollers/owner1")
expectedActionSet.Insert("DELETE=/api/v1/namespaces/ns1/pods/ToBeDeletedPod")
expectedActionSet.Insert("GET=/api/v1/namespaces/ns1/pods/ToBeDeletedPod")
actualActionSet := sets.NewString()
for _, action := range testHandler.actions {
actualActionSet.Insert(action.String())
}
if !expectedActionSet.Equal(actualActionSet) {
t.Errorf("expected actions:\n%v\n but got:\n%v\nDifference:\n%v", expectedActionSet,
actualActionSet, expectedActionSet.Difference(actualActionSet))
}
}
// verifyGraphInvariants verifies that all of a node's owners list the node as a
// dependent and vice versa. uidToNode has all the nodes in the graph.
func verifyGraphInvariants(scenario string, uidToNode map[types.UID]*node, t *testing.T) {
for myUID, node := range uidToNode {
for dependentNode := range node.dependents {
found := false
for _, owner := range dependentNode.owners {
if owner.UID == myUID {
found = true
break
}
}
if !found {
t.Errorf("scenario: %s: node %s has node %s as a dependent, but it's not present in the latter node's owners list", scenario, node.identity, dependentNode.identity)
}
}
for _, owner := range node.owners {
ownerNode, ok := uidToNode[owner.UID]
if !ok {
// It's possible that the owner node doesn't exist
continue
}
if _, ok := ownerNode.dependents[node]; !ok {
t.Errorf("node %s has node %s as an owner, but it's not present in the latter node's dependents list", node.identity, ownerNode.identity)
}
}
}
}
func createEvent(eventType eventType, selfUID string, owners []string) event {
var ownerReferences []metav1.OwnerReference
for i := 0; i < len(owners); i++ {
ownerReferences = append(ownerReferences, metav1.OwnerReference{UID: types.UID(owners[i])})
}
return event{
eventType: eventType,
obj: &v1.Pod{
ObjectMeta: metav1.ObjectMeta{
UID: types.UID(selfUID),
OwnerReferences: ownerReferences,
},
},
}
}
func TestProcessEvent(t *testing.T) {
var testScenarios = []struct {
name string
// a series of events that will be supplied to the
// GraphBuilder.eventQueue.
events []event
}{
{
name: "test1",
events: []event{
createEvent(addEvent, "1", []string{}),
createEvent(addEvent, "2", []string{"1"}),
createEvent(addEvent, "3", []string{"1", "2"}),
},
},
{
name: "test2",
events: []event{
createEvent(addEvent, "1", []string{}),
createEvent(addEvent, "2", []string{"1"}),
createEvent(addEvent, "3", []string{"1", "2"}),
createEvent(addEvent, "4", []string{"2"}),
createEvent(deleteEvent, "2", []string{"doesn't matter"}),
},
},
{
name: "test3",
events: []event{
createEvent(addEvent, "1", []string{}),
createEvent(addEvent, "2", []string{"1"}),
createEvent(addEvent, "3", []string{"1", "2"}),
createEvent(addEvent, "4", []string{"3"}),
createEvent(updateEvent, "2", []string{"4"}),
},
},
{
name: "reverse test2",
events: []event{
createEvent(addEvent, "4", []string{"2"}),
createEvent(addEvent, "3", []string{"1", "2"}),
createEvent(addEvent, "2", []string{"1"}),
createEvent(addEvent, "1", []string{}),
createEvent(deleteEvent, "2", []string{"doesn't matter"}),
},
},
}
alwaysStarted := make(chan struct{})
close(alwaysStarted)
for _, scenario := range testScenarios {
dependencyGraphBuilder := &GraphBuilder{
informersStarted: alwaysStarted,
graphChanges: workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter()),
uidToNode: &concurrentUIDToNode{
uidToNodeLock: sync.RWMutex{},
uidToNode: make(map[types.UID]*node),
},
attemptToDelete: workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter()),
absentOwnerCache: NewUIDCache(2),
}
for i := 0; i < len(scenario.events); i++ {
dependencyGraphBuilder.graphChanges.Add(&scenario.events[i])
dependencyGraphBuilder.processGraphChanges()
verifyGraphInvariants(scenario.name, dependencyGraphBuilder.uidToNode.uidToNode, t)
}
}
}
// TestDependentsRace relies on Go's data race detector to check whether there is
// a data race on the dependents field.
func TestDependentsRace(t *testing.T) {
gc := setupGC(t, &restclient.Config{})
defer close(gc.stop)
const updates = 100
owner := &node{dependents: make(map[*node]struct{})}
ownerUID := types.UID("owner")
gc.dependencyGraphBuilder.uidToNode.Write(owner)
go func() {
for i := 0; i < updates; i++ {
dependent := &node{}
gc.dependencyGraphBuilder.addDependentToOwners(dependent, []metav1.OwnerReference{{UID: ownerUID}})
gc.dependencyGraphBuilder.removeDependentFromOwners(dependent, []metav1.OwnerReference{{UID: ownerUID}})
}
}()
go func() {
gc.attemptToOrphan.Add(owner)
for i := 0; i < updates; i++ {
gc.attemptToOrphanWorker()
}
}()
}
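// Note: TestDependentsRace above is only meaningful when the race detector is
// enabled, e.g. by running `go test -race` on this package.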
// Test that the list and watch functions correctly convert the ListOptions.
func TestGCListWatcher(t *testing.T) {
testHandler := &fakeActionHandler{}
srv, clientConfig := testServerAndClientConfig(testHandler.ServeHTTP)
defer srv.Close()
clientPool := dynamic.NewClientPool(clientConfig, api.Registry.RESTMapper(), dynamic.LegacyAPIPathResolverFunc)
podResource := schema.GroupVersionResource{Version: "v1", Resource: "pods"}
client, err := clientPool.ClientForGroupVersionResource(podResource)
if err != nil {
t.Fatal(err)
}
lw := listWatcher(client, podResource)
if _, err := lw.Watch(metav1.ListOptions{ResourceVersion: "1"}); err != nil {
t.Fatal(err)
}
if _, err := lw.List(metav1.ListOptions{ResourceVersion: "1"}); err != nil {
t.Fatal(err)
}
if e, a := 2, len(testHandler.actions); e != a {
t.Errorf("expect %d requests, got %d", e, a)
}
if e, a := "resourceVersion=1&watch=true", testHandler.actions[0].query; e != a {
t.Errorf("expect %s, got %s", e, a)
}
if e, a := "resourceVersion=1", testHandler.actions[1].query; e != a {
t.Errorf("expect %s, got %s", e, a)
}
}
func podToGCNode(pod *v1.Pod) *node {
return &node{
identity: objectReference{
OwnerReference: metav1.OwnerReference{
Kind: pod.Kind,
APIVersion: pod.APIVersion,
Name: pod.Name,
UID: pod.UID,
},
Namespace: pod.Namespace,
},
// owners are intentionally left empty. The attemptToDeleteItem routine should get the latest item from the server.
owners: nil,
}
}
func TestAbsentUIDCache(t *testing.T) {
rc1Pod1 := getPod("rc1Pod1", []metav1.OwnerReference{
{
Kind: "ReplicationController",
Name: "rc1",
UID: "1",
APIVersion: "v1",
},
})
rc1Pod2 := getPod("rc1Pod2", []metav1.OwnerReference{
{
Kind: "ReplicationController",
Name: "rc1",
UID: "1",
APIVersion: "v1",
},
})
rc2Pod1 := getPod("rc2Pod1", []metav1.OwnerReference{
{
Kind: "ReplicationController",
Name: "rc2",
UID: "2",
APIVersion: "v1",
},
})
rc3Pod1 := getPod("rc3Pod1", []metav1.OwnerReference{
{
Kind: "ReplicationController",
Name: "rc3",
UID: "3",
APIVersion: "v1",
},
})
testHandler := &fakeActionHandler{
response: map[string]FakeResponse{
"GET" + "/api/v1/namespaces/ns1/pods/rc1Pod1": {
200,
serilizeOrDie(t, rc1Pod1),
},
"GET" + "/api/v1/namespaces/ns1/pods/rc1Pod2": {
200,
serilizeOrDie(t, rc1Pod2),
},
"GET" + "/api/v1/namespaces/ns1/pods/rc2Pod1": {
200,
serilizeOrDie(t, rc2Pod1),
},
"GET" + "/api/v1/namespaces/ns1/pods/rc3Pod1": {
200,
serilizeOrDie(t, rc3Pod1),
},
"GET" + "/api/v1/namespaces/ns1/replicationcontrollers/rc1": {
404,
[]byte{},
},
"GET" + "/api/v1/namespaces/ns1/replicationcontrollers/rc2": {
404,
[]byte{},
},
"GET" + "/api/v1/namespaces/ns1/replicationcontrollers/rc3": {
404,
[]byte{},
},
},
}
srv, clientConfig := testServerAndClientConfig(testHandler.ServeHTTP)
defer srv.Close()
gc := setupGC(t, clientConfig)
defer close(gc.stop)
gc.absentOwnerCache = NewUIDCache(2)
gc.attemptToDeleteItem(podToGCNode(rc1Pod1))
gc.attemptToDeleteItem(podToGCNode(rc2Pod1))
// rc1 should already be in the cache, no request should be sent. rc1 should be promoted in the UIDCache
gc.attemptToDeleteItem(podToGCNode(rc1Pod2))
// after this call, rc2 should be evicted from the UIDCache
gc.attemptToDeleteItem(podToGCNode(rc3Pod1))
// check cache
if !gc.absentOwnerCache.Has(types.UID("1")) {
t.Errorf("expected rc1 to be in the cache")
}
if gc.absentOwnerCache.Has(types.UID("2")) {
t.Errorf("expected rc2 to not exist in the cache")
}
if !gc.absentOwnerCache.Has(types.UID("3")) {
t.Errorf("expected rc3 to be in the cache")
}
// check the request sent to the server
count := 0
for _, action := range testHandler.actions {
if action.String() == "GET=/api/v1/namespaces/ns1/replicationcontrollers/rc1" {
count++
}
}
if count != 1 {
t.Errorf("expected only 1 GET rc1 request, got %d", count)
}
}
func TestDeleteOwnerRefPatch(t *testing.T) {
original := v1.Pod{
ObjectMeta: metav1.ObjectMeta{
UID: "100",
OwnerReferences: []metav1.OwnerReference{
{UID: "1"},
{UID: "2"},
{UID: "3"},
},
},
}
originalData := serilizeOrDie(t, original)
expected := v1.Pod{
ObjectMeta: metav1.ObjectMeta{
UID: "100",
OwnerReferences: []metav1.OwnerReference{
{UID: "1"},
},
},
}
patch := deleteOwnerRefPatch("100", "2", "3")
patched, err := strategicpatch.StrategicMergePatch(originalData, patch, v1.Pod{})
if err != nil {
t.Fatal(err)
}
var got v1.Pod
if err := json.Unmarshal(patched, &got); err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(expected, got) {
t.Errorf("expected: %#v,\ngot: %#v", expected, got)
}
}
func TestUnblockOwnerReference(t *testing.T) {
trueVar := true
falseVar := false
original := v1.Pod{
ObjectMeta: metav1.ObjectMeta{
UID: "100",
OwnerReferences: []metav1.OwnerReference{
{UID: "1", BlockOwnerDeletion: &trueVar},
{UID: "2", BlockOwnerDeletion: &falseVar},
{UID: "3"},
},
},
}
originalData := serilizeOrDie(t, original)
expected := v1.Pod{
ObjectMeta: metav1.ObjectMeta{
UID: "100",
OwnerReferences: []metav1.OwnerReference{
{UID: "1", BlockOwnerDeletion: &falseVar},
{UID: "2", BlockOwnerDeletion: &falseVar},
{UID: "3"},
},
},
}
accessor, err := meta.Accessor(&original)
if err != nil {
t.Fatal(err)
}
n := node{
owners: accessor.GetOwnerReferences(),
}
patch, err := n.patchToUnblockOwnerReferences()
if err != nil {
t.Fatal(err)
}
patched, err := strategicpatch.StrategicMergePatch(originalData, patch, v1.Pod{})
if err != nil {
t.Fatal(err)
}
var got v1.Pod
if err := json.Unmarshal(patched, &got); err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(expected, got) {
t.Errorf("expected: %#v,\ngot: %#v", expected, got)
t.Errorf("expected: %#v,\ngot: %#v", expected.OwnerReferences, got.OwnerReferences)
for _, ref := range got.OwnerReferences {
t.Errorf("ref.UID=%s, ref.BlockOwnerDeletion=%v", ref.UID, *ref.BlockOwnerDeletion)
}
}
}
func TestOrphanDependentsFailure(t *testing.T) {
testHandler := &fakeActionHandler{
response: map[string]FakeResponse{
"PATCH" + "/api/v1/namespaces/ns1/pods/pod": {
409,
[]byte{},
},
},
}
srv, clientConfig := testServerAndClientConfig(testHandler.ServeHTTP)
defer srv.Close()
gc := setupGC(t, clientConfig)
defer close(gc.stop)
dependents := []*node{
{
identity: objectReference{
OwnerReference: metav1.OwnerReference{
Kind: "Pod",
APIVersion: "v1",
Name: "pod",
},
Namespace: "ns1",
},
},
}
err := gc.orphanDependents(objectReference{}, dependents)
expected := `the server reported a conflict (patch pods pod)`
if err == nil || !strings.Contains(err.Error(), expected) {
t.Errorf("expected error contains text %s, got %v", expected, err)
}
}
| {
return &v1.Pod{
TypeMeta: metav1.TypeMeta{
Kind: "Pod",
APIVersion: "v1",
},
ObjectMeta: metav1.ObjectMeta{
Name: podName,
Namespace: "ns1",
OwnerReferences: ownerReferences,
},
}
} |
math.go | package endpoints
import (
"net/http"
"strconv"
| "github.com/gin-gonic/gin"
)
// GetMath responds with about/help page
func GetMath(c *gin.Context) {
c.IndentedJSON(http.StatusOK, gin.H{
"found": "Welcome, you found this hidden easter egg endpoint! Congrats, smarty pants ;)",
"about": "This endpoint receives two integer values, and returns the addition of such integers.",
"help": "PUT /math/:x/:y -> responds with x+y",
})
}
// PutMath receives two integers and responds with their addition
func PutMath(c *gin.Context) {
x := c.Param("x")
y := c.Param("y")
intX, err := strconv.Atoi(x)
intY, err1 := strconv.Atoi(y)
if err != nil || err1 != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"error": "user gave non-integer values that cannot be added"})
} else {
result := intX + intY
c.IndentedJSON(http.StatusCreated, gin.H{"addition": result})
}
} | |
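// Wiring sketch (assumption: route registration happens elsewhere in the app;
// the router variable is illustrative):
//
//	r := gin.Default()
//	r.GET("/math", GetMath)
//	r.PUT("/math/:x/:y", PutMath)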
interface.go | /*
Copyright (c) 2021 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by informer-gen. DO NOT EDIT.
package v1beta1
import (
internalinterfaces "github.com/gardener/gardener/pkg/client/core/informers/externalversions/internalinterfaces"
)
// Interface provides access to all the informers in this group version.
type Interface interface {
// BackupBuckets returns a BackupBucketInformer.
BackupBuckets() BackupBucketInformer
// BackupEntries returns a BackupEntryInformer.
BackupEntries() BackupEntryInformer
// CloudProfiles returns a CloudProfileInformer.
CloudProfiles() CloudProfileInformer
// ControllerInstallations returns a ControllerInstallationInformer.
ControllerInstallations() ControllerInstallationInformer
// ControllerRegistrations returns a ControllerRegistrationInformer.
ControllerRegistrations() ControllerRegistrationInformer
// Plants returns a PlantInformer.
Plants() PlantInformer
// Projects returns a ProjectInformer.
Projects() ProjectInformer
// Quotas returns a QuotaInformer.
Quotas() QuotaInformer
// SecretBindings returns a SecretBindingInformer.
SecretBindings() SecretBindingInformer
// Seeds returns a SeedInformer.
Seeds() SeedInformer
// Shoots returns a ShootInformer.
Shoots() ShootInformer
}
type version struct {
factory internalinterfaces.SharedInformerFactory
namespace string | // New returns a new Interface.
func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) Interface {
return &version{factory: f, namespace: namespace, tweakListOptions: tweakListOptions}
}
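// Usage sketch (the factory accessor chain is an assumption based on the
// standard informer-gen layout, not confirmed by this file):
//
//	factory := externalversions.NewSharedInformerFactory(client, 0)
//	shoots := factory.Core().V1beta1().Shoots().Lister()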
// BackupBuckets returns a BackupBucketInformer.
func (v *version) BackupBuckets() BackupBucketInformer {
return &backupBucketInformer{factory: v.factory, tweakListOptions: v.tweakListOptions}
}
// BackupEntries returns a BackupEntryInformer.
func (v *version) BackupEntries() BackupEntryInformer {
return &backupEntryInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions}
}
// CloudProfiles returns a CloudProfileInformer.
func (v *version) CloudProfiles() CloudProfileInformer {
return &cloudProfileInformer{factory: v.factory, tweakListOptions: v.tweakListOptions}
}
// ControllerInstallations returns a ControllerInstallationInformer.
func (v *version) ControllerInstallations() ControllerInstallationInformer {
return &controllerInstallationInformer{factory: v.factory, tweakListOptions: v.tweakListOptions}
}
// ControllerRegistrations returns a ControllerRegistrationInformer.
func (v *version) ControllerRegistrations() ControllerRegistrationInformer {
return &controllerRegistrationInformer{factory: v.factory, tweakListOptions: v.tweakListOptions}
}
// Plants returns a PlantInformer.
func (v *version) Plants() PlantInformer {
return &plantInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions}
}
// Projects returns a ProjectInformer.
func (v *version) Projects() ProjectInformer {
return &projectInformer{factory: v.factory, tweakListOptions: v.tweakListOptions}
}
// Quotas returns a QuotaInformer.
func (v *version) Quotas() QuotaInformer {
return "aInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions}
}
// SecretBindings returns a SecretBindingInformer.
func (v *version) SecretBindings() SecretBindingInformer {
return &secretBindingInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions}
}
// Seeds returns a SeedInformer.
func (v *version) Seeds() SeedInformer {
return &seedInformer{factory: v.factory, tweakListOptions: v.tweakListOptions}
}
// Shoots returns a ShootInformer.
func (v *version) Shoots() ShootInformer {
return &shootInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions}
} | tweakListOptions internalinterfaces.TweakListOptionsFunc
}
|
lib.rs | extern crate proc_macro;
extern crate syn;
extern crate textwrap;
use proc_macro::TokenStream;
use quote::ToTokens;
use syn::parse::Parse;
use syn::parse::ParseStream;
use syn::parse::Result as ParseResult;
use syn::parse_macro_input;
// ---------------------------------------------------------------------------
struct DedentInput {
lit: syn::LitStr,
}
impl Parse for DedentInput {
fn parse(input: ParseStream) -> ParseResult<Self> {
Ok(Self {
lit: input.parse()?,
})
}
}
#[proc_macro_hack::proc_macro_hack]
pub fn dedent(tokens: TokenStream) -> TokenStream {
let input = parse_macro_input!(tokens as DedentInput);
let newstr = textwrap::dedent(&input.lit.value());
syn::LitStr::new(&newstr, input.lit.span())
.into_token_stream()
.into()
}
// ---------------------------------------------------------------------------
struct FillInput {
lit: syn::LitStr,
width: syn::LitInt,
}
impl Parse for FillInput {
fn parse(input: ParseStream) -> ParseResult<Self> {
let lit = input.parse()?;
input.parse::<syn::Token![,]>()?;
let width = input.parse()?;
Ok(Self { lit, width })
}
}
#[proc_macro_hack::proc_macro_hack]
pub fn fill(tokens: TokenStream) -> TokenStream {
let input = parse_macro_input!(tokens as FillInput);
let width: usize = input.width.base10_parse().expect("could not parse number");
let newstr = textwrap::fill(&input.lit.value(), width);
syn::LitStr::new(&newstr, input.lit.span())
.into_token_stream()
.into()
}
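// Usage sketch (assumes a companion crate re-exports these via
// `proc_macro_hack`, so call sites look like ordinary function-like macros):
//
//     let paragraph = fill!("textwrap: a small library for wrapping text.", 18);
//     let body = dedent!("
//         first line
//         second line
//     ");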
// ---------------------------------------------------------------------------
struct IndentInput {
lit: syn::LitStr,
prefix: syn::LitStr,
} |
impl Parse for IndentInput {
fn parse(input: ParseStream) -> ParseResult<Self> {
let lit = input.parse()?;
input.parse::<syn::Token![,]>()?;
let prefix = input.parse()?;
Ok(Self { lit, prefix })
}
}
#[proc_macro_hack::proc_macro_hack]
pub fn indent(tokens: TokenStream) -> TokenStream {
let input = parse_macro_input!(tokens as IndentInput);
let newstr = textwrap::indent(&input.lit.value(), &input.prefix.value());
syn::LitStr::new(&newstr, input.lit.span())
.into_token_stream()
.into()
}
// ---------------------------------------------------------------------------
struct WrapInput {
lit: syn::LitStr,
width: syn::LitInt,
}
impl Parse for WrapInput {
fn parse(input: ParseStream) -> ParseResult<Self> {
let lit = input.parse()?;
input.parse::<syn::Token![,]>()?;
let width = input.parse()?;
Ok(Self { lit, width })
}
}
#[proc_macro_hack::proc_macro_hack]
pub fn wrap(tokens: TokenStream) -> TokenStream {
let input = parse_macro_input!(tokens as WrapInput);
let width: usize = input.width.base10_parse().expect("could not parse number");
let elems = textwrap::wrap(&input.lit.value(), width)
.iter()
.map(|s| syn::Lit::from(syn::LitStr::new(&s, input.lit.span())))
.map(|lit| {
syn::Expr::Lit(syn::ExprLit {
lit,
attrs: Vec::new(),
})
})
.collect();
let array = syn::ExprArray {
elems,
attrs: Vec::new(),
bracket_token: Default::default(),
};
let expr = syn::ExprReference {
attrs: Vec::new(),
and_token: Default::default(),
raw: Default::default(),
mutability: None,
expr: Box::new(syn::Expr::Array(array)),
};
expr.into_token_stream().into()
} | |
conflux.ts | import { Config } from '../config'
const sendFulfillment = async (
provider: any,
account: any,
to: string,
dataPrefix: string,
functionSelector: string,
value: number,
) => {
const dataPrefixBz = ethers.utils.arrayify(dataPrefix)
const functionSelectorBz = ethers.utils.arrayify(functionSelector)
const valueBz = ethers.utils.zeroPad(ethers.utils.arrayify(Number(value)), 32)
const data = ethers.utils.concat([functionSelectorBz, dataPrefixBz, valueBz])
const tx = {
from: account.address,
to: to,
data: ethers.utils.hexlify(data),
gas: 500000,
gasPrice: 1,
}
return await provider.sendTransaction(tx).executed()
}
// const customError = (data: any) => data.Response === 'Error'
const customParams = {
// Use two sets of possible keys in case the node operator
// is using a non-EI initiator where the primary keys are reserved.
address: ['address', 'cfxAddress'],
dataPrefix: ['dataPrefix', 'cfxDataPrefix'],
functionSelector: ['functionSelector', 'cfxFunctionSelector'],
value: ['result', 'value'],
}
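// Illustrative request payload matching customParams above (all values are
// made up for the example):
//
//   {
//     id: '1',
//     data: {
//       address: 'cfx:example...',
//       dataPrefix: '0x...',
//       functionSelector: '0x4ab0d190',
//       result: 42,
//     },
//   }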
export const NAME = 'conflux'
export const execute: ExecuteWithConfig<Config> = async (
request,
_,
config,
): Promise<AdapterResponse> => {
const validator = new Validator(request, customParams)
const provider = new Conflux({
url: config.rpcUrl,
networkId: Number(config.networkId),
defaultGasRatio: 1.3,
defaultStorageRatio: 1.3,
})
const account = provider.wallet.addPrivateKey(config.privateKey)
const jobRunID = validator.validated.id
const address = validator.validated.data.address
const dataPrefix = validator.validated.data.dataPrefix
const functionSelector = validator.validated.data.functionSelector
const value = validator.validated.data.value
// handling the multiplying
// if (request.data.times !== undefined) {
// value = String(Math.round(Number(value)*Number(request.data.times)))
// }
const { transactionHash: txHash } = await sendFulfillment(
provider,
account,
address,
dataPrefix,
functionSelector,
value,
)
return Requester.success(jobRunID, {
data: { result: txHash },
status: 200,
})
} | import { Requester, Validator } from '@chainlink/ea-bootstrap'
import { AdapterResponse, ExecuteWithConfig } from '@chainlink/types'
import { Conflux } from 'js-conflux-sdk'
import { ethers } from 'ethers' |
|
aria2_download.py | from bot import aria2, download_dict_lock, STOP_DUPLICATE_MIRROR
from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
from bot.helper.ext_utils.bot_utils import *
from .download_helper import DownloadHelper
from bot.helper.mirror_utils.status_utils.aria_download_status import AriaDownloadStatus
from bot.helper.telegram_helper.message_utils import *
import threading
from aria2p import API
from time import sleep
class AriaDownloadHelper(DownloadHelper):
def __init__(self):
| def __onDownloadStarted(self, api, gid):
sleep(1)
LOGGER.info(f"onDownloadStart: {gid}")
dl = getDownloadByGid(gid)
download = api.get_download(gid)
self.name = download.name
sname = download.name
gdrive = GoogleDriveHelper(None)
smsg, button = gdrive.drive_list(sname)
if STOP_DUPLICATE_MIRROR:
if smsg:
dl.getListener().onDownloadError(f'😡 𝑭𝒊𝒍𝒆 𝒊𝒔 𝒂𝒍𝒓𝒆𝒂𝒅𝒚 𝒂𝒗𝒂𝒊𝒍𝒂𝒃𝒍𝒆 𝒊𝒏 𝑫𝒓𝒊𝒗𝒆\n𝑭𝒊𝒔𝒓𝒕 𝒔𝒆𝒂𝒓𝒄𝒉 𝑩𝒆𝒇𝒐𝒓𝒆 𝑴𝒊𝒓𝒓𝒐𝒓𝒊𝒏𝒈 𝒂𝒏𝒚𝒕𝒉𝒊𝒏𝒈 😡\n𝑰𝒇 𝒚𝒐𝒖 𝒅𝒐 𝒕𝒉𝒊𝒔 𝒂𝒈𝒂𝒊𝒏❗ 𝒀𝒐𝒖 𝒘𝒊𝒍𝒍 𝒃𝒆 𝑩𝒂𝒏 😐.\n\n')
print(dl.getListener())
sendMarkup(" 𝐇𝐞𝐫𝐞 𝐚𝐫𝐞 𝐭𝐡𝐞 𝐒𝐞𝐚𝐫𝐜𝐡 🔍 𝐑𝐞𝐬𝐮𝐥𝐭𝐬:👇👇", dl.getListener().bot, dl.getListener().update, button)
aria2.remove([download])
return
update_all_messages()
def __onDownloadComplete(self, api: API, gid):
LOGGER.info(f"onDownloadComplete: {gid}")
dl = getDownloadByGid(gid)
download = api.get_download(gid)
if download.followed_by_ids:
new_gid = download.followed_by_ids[0]
new_download = api.get_download(new_gid)
with download_dict_lock:
download_dict[dl.uid()] = AriaDownloadStatus(new_gid, dl.getListener())
if new_download.is_torrent:
download_dict[dl.uid()].is_torrent = True
update_all_messages()
LOGGER.info(f'Changed gid from {gid} to {new_gid}')
else:
if dl: threading.Thread(target=dl.getListener().onDownloadComplete).start()
@new_thread
def __onDownloadPause(self, api, gid):
LOGGER.info(f"onDownloadPause: {gid}")
dl = getDownloadByGid(gid)
dl.getListener().onDownloadError('Download stopped by user!🌜🌛')
@new_thread
def __onDownloadStopped(self, api, gid):
LOGGER.info(f"onDownloadStop: {gid}")
dl = getDownloadByGid(gid)
if dl: dl.getListener().onDownloadError('𝐘𝐨𝐮𝐫 𝐋𝐢𝐧𝐤 𝐢𝐬 𝐃𝐄𝐀𝐃 ❗ 😒 𝐃𝐨𝐧❜𝐭 𝐮𝐬𝐞 𝐋𝐨𝐰 𝐒𝐞𝐞𝐝𝐬 𝐓𝐨𝐫𝐫𝐞𝐧𝐭')
@new_thread
def __onDownloadError(self, api, gid):
sleep(0.5)  # sleep for a split second to ensure the download gid is properly updated by onDownloadComplete
LOGGER.info(f"onDownloadError: {gid}")
dl = getDownloadByGid(gid)
download = api.get_download(gid)
error = download.error_message
LOGGER.info(f"Download Error: {error}")
if dl: dl.getListener().onDownloadError(error)
def start_listener(self):
aria2.listen_to_notifications(threaded=True, on_download_start=self.__onDownloadStarted,
on_download_error=self.__onDownloadError,
on_download_pause=self.__onDownloadPause,
on_download_stop=self.__onDownloadStopped,
on_download_complete=self.__onDownloadComplete)
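# Usage sketch (listener construction happens elsewhere in the bot; names here
# are illustrative):
#   helper = AriaDownloadHelper()
#   helper.start_listener()
#   helper.add_download(link, download_path, listener)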
def add_download(self, link: str, path,listener):
if is_magnet(link):
download = aria2.add_magnet(link, {'dir': path})
else:
download = aria2.add_uris([link], {'dir': path})
if download.error_message: #no need to proceed further at this point
listener.onDownloadError(download.error_message)
return
with download_dict_lock:
download_dict[listener.uid] = AriaDownloadStatus(download.gid,listener)
LOGGER.info(f"Started: {download.gid} DIR:{download.dir} ") | super().__init__()
@new_thread
|
mod.rs | use std::io::Write;
use git_features::hash;
use git_hash::ObjectId;
use crate::{data, data::output, find};
///
pub mod iter_from_counts;
pub use iter_from_counts::iter_from_counts;
/// The kind of pack entry to be written
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
pub enum Kind {
/// A complete base object, including its kind
Base(git_object::Kind),
/// A delta against the object with the given index. It's always an index that was already encountered, referring only
/// to objects we have written already.
DeltaRef {
/// The absolute index to the object to serve as base. It's up to the writer to maintain enough state to allow producing
/// a packed delta object from it.
object_index: usize,
},
/// A delta against the given object as identified by its `ObjectId`.
/// This is the case for thin packs only, i.e. those that are sent over the wire.
/// Note that there is the option of the `ObjectId` being used to refer to an object within
/// the same pack, but it's a discontinued practice which won't be encountered here.
DeltaOid {
/// The object serving as base for this delta
id: ObjectId,
},
}
/// The error returned by [`output::Entry::from_data()`].
#[allow(missing_docs)]
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("{0}")]
ZlibDeflate(#[from] std::io::Error),
#[error("Entry expected to have hash {expected}, but it had {actual}")]
PackToPackCopyCrc32Mismatch { actual: u32, expected: u32 },
}
impl output::Entry {
/// An object which can easily be identified as invalid, which happens when objects that were referred to didn't exist.
pub fn invalid() -> output::Entry {
output::Entry {
id: ObjectId::null_sha1(),
kind: Kind::Base(git_object::Kind::Blob),
decompressed_size: 0,
compressed_data: vec![],
}
}
/// Returns true if this object doesn't really exist but still has to be handled responsibly
pub fn is_invalid(&self) -> bool {
self.id.is_null()
}
/// Create an Entry from a previously counted object which is located in a pack. Its `entry` is provided here.
/// `target_version` specifies what kind of target `Entry` version the caller desires.
pub fn from_pack_entry(
entry: find::Entry<'_>,
count: &output::Count,
potential_bases: &[output::Count],
bases_index_offset: usize,
pack_offset_to_oid: Option<impl FnMut(u32, u64) -> Option<ObjectId>>,
target_version: crate::data::Version,
) -> Option<Result<Self, Error>> |
/// Create a new instance from the given `oid` and its corresponding git `obj`ect data.
pub fn from_data(count: &output::Count, obj: &data::Object<'_>) -> Result<Self, Error> {
Ok(output::Entry {
id: count.id.to_owned(),
kind: Kind::Base(obj.kind),
decompressed_size: obj.data.len(),
compressed_data: {
let mut out = git_features::zlib::stream::deflate::Write::new(Vec::new());
if let Err(err) = std::io::copy(&mut &*obj.data, &mut out) {
match err.kind() {
std::io::ErrorKind::Other => return Err(Error::ZlibDeflate(err)),
err => unreachable!("Should never see other errors than zlib, but got {:?}", err,),
}
};
out.flush()?;
out.into_inner()
},
})
}
/// Transform ourselves into pack entry header of `version` which can be written into a pack.
///
/// `index_to_pack(object_index) -> pack_offset` is a function to convert the base object's index into
/// the input object array (if each object is numbered) to an offset into the pack.
/// This information is known to the one calling the method.
pub fn to_entry_header(
&self,
version: crate::data::Version,
index_to_base_distance: impl FnOnce(usize) -> u64,
) -> crate::data::entry::Header {
assert!(
matches!(version, data::Version::V2),
"we can only write V2 pack entries for now"
);
use Kind::*;
match self.kind {
Base(kind) => {
use git_object::Kind::*;
match kind {
Tree => data::entry::Header::Tree,
Blob => data::entry::Header::Blob,
Commit => data::entry::Header::Commit,
Tag => data::entry::Header::Tag,
}
}
DeltaOid { id } => data::entry::Header::RefDelta { base_id: id.to_owned() },
DeltaRef { object_index } => data::entry::Header::OfsDelta {
base_distance: index_to_base_distance(object_index),
},
}
}
}
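// Usage sketch for `to_entry_header` (offset bookkeeping is the caller's job;
// `offsets` and `i` are illustrative):
//
//     let header = entry.to_entry_header(data::Version::V2, |base_index| {
//         offsets[i] - offsets[base_index]
//     });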
| {
if entry.version != target_version {
return None;
};
let pack_offset_must_be_zero = 0;
let pack_entry = crate::data::Entry::from_bytes(entry.data, pack_offset_must_be_zero);
if let Some(expected) = entry.crc32 {
let actual = hash::crc32(entry.data);
if actual != expected {
return Some(Err(Error::PackToPackCopyCrc32Mismatch { actual, expected }));
}
}
use crate::data::entry::Header::*;
match pack_entry.header {
Commit => Some(output::entry::Kind::Base(git_object::Kind::Commit)),
Tree => Some(output::entry::Kind::Base(git_object::Kind::Tree)),
Blob => Some(output::entry::Kind::Base(git_object::Kind::Blob)),
Tag => Some(output::entry::Kind::Base(git_object::Kind::Tag)),
OfsDelta { base_distance } => {
let pack_location = count.entry_pack_location.as_ref().expect("packed");
let base_offset = pack_location
.pack_offset
.checked_sub(base_distance)
.expect("pack-offset - distance is firmly within the pack");
potential_bases
.binary_search_by(|e| {
e.entry_pack_location
.as_ref()
.expect("packed")
.pack_offset
.cmp(&base_offset)
})
.ok()
.map(|idx| output::entry::Kind::DeltaRef {
object_index: idx + bases_index_offset,
})
.or_else(|| {
pack_offset_to_oid
.and_then(|mut f| f(pack_location.pack_id, base_offset))
.map(|id| output::entry::Kind::DeltaOid { id })
})
}
RefDelta { base_id: _ } => None, // ref deltas are for thin packs or legacy, repack them as base objects
}
.map(|kind| {
Ok(output::Entry {
id: count.id.to_owned(),
kind,
decompressed_size: pack_entry.decompressed_size as usize,
compressed_data: entry.data[pack_entry.data_offset as usize..].to_owned(),
})
})
} |
index.js | const appServer = require('./bin/app/server');
const AppServer = new appServer();
AppServer.server.listen(1000, (err) => {
if (err) {
console.log(err)
} else {
console.log('Server is running') | }
}) |
|
rnn_model.py | #pylint: disable=invalid-name
import numpy as np
import torch
from torch import nn
from aw_nas import ops
from aw_nas.utils.exception import expect, ConfigException
from aw_nas.weights_manager.rnn_shared import RNNSharedNet, INIT_RANGE
class RNNGenotypeModel(RNNSharedNet):
REGISTRY = "final_model"
NAME = "rnn_model"
def __init__(self, search_space, device, genotypes,
num_tokens, num_emb=300, num_hid=300,
tie_weight=True, decoder_bias=True,
share_primitive_weights=False, share_from_weights=False,
batchnorm_step=False,
batchnorm_edge=False, batchnorm_out=True,
# training
max_grad_norm=5.0,
# dropout probs
dropout_emb=0., dropout_inp0=0., dropout_inp=0., dropout_hid=0., dropout_out=0.):
self.genotypes = genotypes
if isinstance(genotypes, str):
self.genotypes = eval("search_space.genotype_type({})".format(self.genotypes)) # pylint: disable=eval-used
self.genotypes = list(self.genotypes._asdict().values())
# check that the `to_node` indices are already sorted
_tos = [conn[2] for conn in self.genotypes[0]]
expect((np.argsort(_tos) == np.arange(len(_tos))).all(),
"genotype must be ordered in the way that `to_node` monotonously increase",
ConfigException)
super(RNNGenotypeModel, self).__init__(
search_space, device, | tie_weight=tie_weight, decoder_bias=decoder_bias,
share_primitive_weights=share_primitive_weights, share_from_weights=share_from_weights,
batchnorm_step=batchnorm_step,
batchnorm_edge=batchnorm_edge, batchnorm_out=batchnorm_out,
max_grad_norm=max_grad_norm,
dropout_emb=dropout_emb, dropout_inp0=dropout_inp0, dropout_inp=dropout_inp,
dropout_hid=dropout_hid, dropout_out=dropout_out,
genotypes=self.genotypes) # this genotypes will be used for construction/forward
self.logger.info("Genotype: %s", self.genotypes)
def forward(self, inputs, hiddens): #pylint: disable=arguments-differ
# this genotypes will not be used
return RNNSharedNet.forward(self, inputs, self.genotypes, hiddens)
@classmethod
def supported_rollout_types(cls):
# this should not be called
# assert 0, "should not be called"
return []
def assemble_candidate(self, *args, **kwargs): #pylint: disable=arguments-differ
# this will not be called
assert 0, "should not be called"
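# Illustrative genotype fragment (structure only; op names are assumptions):
#   [("tanh", 0, 1), ("relu", 1, 2), ("sigmoid", 1, 2)]
# Each connection is (op_type, from_node, to_node), and `to_node` must be
# monotonically non-decreasing, as checked in RNNGenotypeModel.__init__ above.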
class RNNGenotypeCell(nn.Module):
def __init__(self, search_space, device, op_cls, num_emb, num_hid,
share_from_weights, batchnorm_step,
batchnorm_edge, batchnorm_out, genotypes, **kwargs):
super(RNNGenotypeCell, self).__init__()
self.genotypes = genotypes
self.search_space = search_space
self.num_emb = num_emb
self.num_hid = num_hid
self.batchnorm_step = batchnorm_step
self.batchnorm_edge = batchnorm_edge
self.batchnorm_out = batchnorm_out
self.share_from_w = share_from_weights
self._steps = search_space.num_steps
self._num_init = search_space.num_init_nodes
# the first step, convert input x and previous hidden
self.w_prev = nn.Linear(num_emb + num_hid, 2 * num_hid, bias=False)
self.w_prev.weight.data.uniform_(-INIT_RANGE, INIT_RANGE)
if self.batchnorm_edge:
# batchnorm on each edge/connection
# when `num_node_inputs==1`, there is `step + 1` edges
# the first bn
self.bn_prev = nn.BatchNorm1d(num_emb + num_hid, affine=True)
# other bn
self.bn_edges = nn.ModuleList([nn.BatchNorm1d(num_emb + num_hid, affine=True)
for _ in range(len(self.genotypes[0]))])
if self.batchnorm_step:
# batchnorm after every step (as in darts's implementation)
self.bn_steps = nn.ModuleList([nn.BatchNorm1d(num_hid, affine=False)
for _ in range(self._steps+1)])
if self.batchnorm_out:
# the out bn
self.bn_out = nn.BatchNorm1d(num_hid, affine=True)
if self.share_from_w:
# actually, as `num_node_inputs==1`, only one from-node is used at each step,
# so `share_from_w==True/False` are equivalent in final training
self.step_weights = nn.ModuleList([
nn.Linear(num_hid, 2*num_hid, bias=False)
for _ in range(self._steps)])
[mod.weight.data.uniform_(-INIT_RANGE, INIT_RANGE) for mod in self.step_weights]
# initiatiate op on edges
self.Ws = nn.ModuleList()
self.ops = nn.ModuleList()
genotype_, _ = self.genotypes
for op_type, _, _ in genotype_:
# edge weights
op = ops.get_op(op_type)()
self.ops.append(op)
if not self.share_from_w:
W = nn.Linear(self.num_hid, 2 * self.num_hid, bias=False)
W.weight.data.uniform_(-INIT_RANGE, INIT_RANGE)
self.Ws.append(W)
def forward(self, inputs, hidden, x_mask, h_mask, genotypes): #pylint: disable=arguments-differ
"""
Cell forward, forward for one timestep.
"""
genotype, concat_ = self.genotypes # self.genotypes == genotypes
s0 = self._compute_init_state(inputs, hidden, x_mask, h_mask)
if self.batchnorm_step:
s0 = self.bn_steps[0](s0)
states = {0: s0}
for i, (_, from_, to_) in enumerate(genotype):
s_prev = states[from_]
s_inputs = s_prev
if self.training:
s_inputs = s_prev * h_mask
w = self.step_weights[to_-1] if self.share_from_w else self.Ws[i]
ch = w(s_inputs)
if self.batchnorm_edge:
ch = self.bn_edges[i](ch)
c, h = torch.split(ch, self.num_hid, dim=-1)
c = c.sigmoid()
h = self.ops[i](h)
out = s_prev + c * (h - s_prev)
if to_ in states:
states[to_] = states[to_] + out
else:
states[to_] = out
to_finish = i == len(genotype)-1 or genotype[i+1][2] != to_
if self.batchnorm_step and to_finish:
# if the calculation of the `to_` step finished, batch norm it
states[to_] = self.bn_steps[to_](states[to_])
# average the ends
output = torch.mean(torch.stack([states[i] for i in concat_]), 0)
if self.batchnorm_out:
# batchnorm
output = self.bn_out(output)
return output
def _compute_init_state(self, x, h, x_mask, h_mask):
if self.training:
xh_prev = torch.cat([x * x_mask, h * h_mask], dim=-1)
else:
xh_prev = torch.cat([x, h], dim=-1)
xh_prev = self.w_prev(xh_prev)
if self.batchnorm_edge:
xh_prev = self.bn_prev(xh_prev)
c0, h0 = torch.split(xh_prev, self.num_hid, dim=-1)
c0 = c0.sigmoid()
h0 = h0.tanh()
s0 = h + c0 * (h0 - h)
return s0 | cell_cls=RNNGenotypeCell, op_cls=None,
num_tokens=num_tokens, num_emb=num_emb, num_hid=num_hid, |
Style.ts | import {
checkExpr,
checkPredicate,
checkVar,
disambiguateSubNode,
} from "compiler/Substance";
import consola, { LogLevel } from "consola";
import { constrDict, objDict } from "contrib/Constraints";
// Dicts (runtime data)
import { compDict } from "contrib/Functions";
import { constOf, numOf, varOf } from "engine/Autodiff";
import {
addWarn,
defaultLbfgsParams,
dummyASTNode,
dummyIdentifier,
findExpr,
findExprSafe,
initConstraintWeight,
insertExpr,
insertExprs,
insertGPI,
isPath,
isTagExpr,
valueNumberToAutodiffConst,
} from "engine/EngineUtils";
import { alg, Graph } from "graphlib";
import _ from "lodash";
import nearley from "nearley";
import { lastLocation } from "parser/ParserUtil";
import styleGrammar from "parser/StyleParser";
import {
Canvas,
findDef,
PropType,
Sampler,
ShapeDef,
shapedefs,
} from "renderer/ShapeDef";
import rfdc from "rfdc";
import { VarAD } from "types/ad";
import { Identifier } from "types/ast";
import { Either, Just, Left, MaybeVal, Right } from "types/common";
import { ConstructorDecl, Env, TypeConstructor } from "types/domain";
import {
ParseError,
PenroseError,
StyleError,
StyleErrors,
StyleResults,
StyleWarnings,
SubstanceError,
} from "types/errors";
import { Fn, OptType, Params, State } from "types/state";
import {
BindingForm,
Block,
DeclPattern,
Expr,
GPIDecl,
Header,
HeaderBlock,
IAccessPath,
ICompApp,
IConstrFn,
ILayering,
IObjFn,
Path,
PredArg,
PropertyDecl,
RelationPattern,
RelBind,
RelPred,
Selector,
SelExpr,
Stmt,
StyProg,
StyT,
StyVar,
} from "types/style";
import { LocalVarSubst, ProgType, SelEnv, Subst } from "types/styleSemantics";
import {
ApplyConstructor,
ApplyPredicate,
LabelMap,
SubExpr,
SubPredArg,
SubProg,
SubstanceEnv,
SubStmt,
TypeConsApp,
} from "types/substance";
import {
FExpr,
Field,
FieldDict,
FieldExpr,
GPIMap,
GPIProps,
IFGPI,
IOptEval,
Property,
PropID,
ShapeTypeStr,
StyleOptFn,
TagExpr,
Translation,
Value,
} from "types/value";
import { err, isErr, ok, parseError, Result, toStyleErrors } from "utils/Error";
import { prettyPrintPath } from "utils/OtherUtils";
import { randFloat } from "utils/Util";
import { checkTypeConstructor, isDeclaredSubtype } from "./Domain";
const log = consola
.create({ level: LogLevel.Warn })
.withScope("Style Compiler");
const clone = rfdc({ proto: false, circles: false });
//#region consts
const ANON_KEYWORD = "ANON";
const LOCAL_KEYWORD = "$LOCAL";
const LABEL_FIELD = "label";
const UnknownTagError = new Error("unknown tag");
const VARYING_INIT_FN_NAME = "VARYING_INIT";
// For statically checking existence
const FN_DICT = {
CompApp: compDict,
ObjFn: objDict,
ConstrFn: constrDict,
};
const FN_ERR_TYPE = {
CompApp: "InvalidFunctionNameError" as const,
ObjFn: "InvalidObjectiveNameError" as const,
ConstrFn: "InvalidConstraintNameError" as const,
};
//#endregion
//#region utils
const dummyId = (name: string): Identifier =>
dummyIdentifier(name, "SyntheticStyle");
// numbers from 0 to r-1 w/ increment of 1
const numbers = (r: number): number[] => {
const l = 0;
if (l > r) {
throw Error("invalid range");
}
const arr = [];
for (let i = l; i < r; i++) {
arr.push(i);
}
return arr;
};
export function numbered<A>(xs: A[]): [A, number][] {
if (!xs) throw Error("fail");
return _.zip(xs, numbers(xs.length)) as [A, number][]; // COMBAK: Don't know why typescript has problem with this
}
// TODO move to util
export function isLeft<A>(val: any): val is Left<A> {
if ((val as Left<A>).tag === "Left") return true;
return false;
}
export function isRight<B>(val: any): val is Right<B> {
if ((val as Right<B>).tag === "Right") return true;
return false;
}
export function toLeft<A>(val: A): Left<A> {
return { contents: val, tag: "Left" };
}
export function | <B>(val: B): Right<B> {
return { contents: val, tag: "Right" };
}
export function ToLeft<A, B>(val: A): Either<A, B> {
return { contents: val, tag: "Left" };
}
export function ToRight<A, B>(val: B): Either<A, B> {
return { contents: val, tag: "Right" };
}
export function foldM<A, B, C>(
xs: A[],
f: (acc: B, curr: A, i: number) => Either<C, B>,
init: B
): Either<C, B> {
let res = init;
let resW: Either<C, B> = toRight(init); // wrapped
for (let i = 0; i < xs.length; i++) {
resW = f(res, xs[i], i);
if (resW.tag === "Left") {
return resW;
} // Stop fold early on first error and return it
res = resW.contents;
}
return resW;
}
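// e.g. (illustrative) foldM([1, 2, 3], (acc, x) => (x < 3 ? ToRight(acc + x) : ToLeft("too big")), 0)
// short-circuits on the third element and returns { tag: "Left", contents: "too big" }.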
function justs<T>(xs: MaybeVal<T>[]): T[] {
return xs
.filter((x) => x.tag === "Just")
.map((x) => {
if (x.tag === "Just") {
return x.contents;
}
throw Error("unexpected"); // Shouldn't happen
});
}
const safeContentsList = (x: any) => (x ? x.contents : []);
const toString = (x: BindingForm): string => x.contents.value;
// https://stackoverflow.com/questions/12303989/cartesian-product-of-multiple-arrays-in-javascript
const cartesianProduct = (...a: any[]) =>
a.reduce((a, b) => a.flatMap((d: any) => b.map((e: any) => [d, e].flat())));
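// e.g. cartesianProduct([1, 2], ["a", "b"]) yields [[1, "a"], [1, "b"], [2, "a"], [2, "b"]]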
const pathString = (p: Path): string => {
// COMBAK: This should be replaced by prettyPrintPath
if (p.tag === "FieldPath") {
return `${p.name.contents.value}.${p.field.value}`;
} else if (p.tag === "PropertyPath") {
return `${p.name.contents.value}.${p.field.value}.${p.property.value}`;
} else throw Error("pathStr not implemented");
};
const getShapeName = (s: string, f: Field): string => {
return `${s}.${f}`;
};
//#endregion
//#region Some code for prettyprinting
const ppExpr = (e: SelExpr): string => {
if (e.tag === "SEBind") {
return e.contents.contents.value;
} else if (["SEFunc", "SEValCons", "SEFuncOrValCons"].includes(e.tag)) {
const args = e.args.map(ppExpr);
return `${e.name.value}(${args})`;
} else if (((e as any) as StyVar).tag === "StyVar") {
return ((e as any) as StyVar).contents.value;
} else {
console.log("res", e);
throw Error("unknown tag");
}
};
const ppRelArg = (r: PredArg): string => {
if (r.tag === "RelPred") {
return ppRelPred(r);
} else {
return ppExpr(r);
}
};
const ppRelBind = (r: RelBind): string => {
const expr = ppExpr(r.expr);
return `${r.id.contents.value} := ${expr}`;
};
const ppRelPred = (r: RelPred): string => {
const args = r.args.map(ppRelArg).join(", ");
const name = r.name.value;
return `${name}(${args})`;
};
export const ppRel = (r: RelationPattern): string => {
if (r.tag === "RelBind") {
return ppRelBind(r);
} else if (r.tag === "RelPred") {
return ppRelPred(r);
} else throw Error("unknown tag");
};
//#endregion
//#region Types and code for selector checking and environment construction
const initSelEnv = (): SelEnv => {
// Note that JS objects are by reference, so you have to make a new one each time
return {
sTypeVarMap: {},
varProgTypeMap: {},
skipBlock: false,
header: { tag: "Nothing" },
warnings: [],
errors: [],
};
};
// Add a mapping from Sub or Sty var to the selector's environment
// g, (x : |T)
// NOTE: Mutates the map in `m`
const addMapping = (
k: BindingForm,
v: StyT,
m: SelEnv,
p: ProgType
): SelEnv => {
m.sTypeVarMap[toString(k)] = v;
m.varProgTypeMap[toString(k)] = [p, k];
return m;
};
// add warning/error to end of existing errors in selector env
const addErrSel = (selEnv: SelEnv, err: StyleError): SelEnv => {
return {
...selEnv,
errors: selEnv.errors.concat([err]),
};
};
// TODO: Test this
// Judgment 3. G; g |- |S_o ok ~> g'
// `checkDeclPattern`
const checkDeclPatternAndMakeEnv = (
varEnv: Env,
selEnv: SelEnv,
stmt: DeclPattern
): SelEnv => {
const [styType, bVar] = [stmt.type, stmt.id];
const typeErr = checkTypeConstructor(toSubstanceType(styType), varEnv);
if (isErr(typeErr)) {
// TODO(errors)
return addErrSel(selEnv, {
tag: "TaggedSubstanceError",
error: typeErr.error,
});
}
const varName: string = bVar.contents.value;
// TODO(errors)
if (Object.keys(selEnv.sTypeVarMap).includes(varName)) {
return addErrSel(selEnv, { tag: "SelectorVarMultipleDecl", varName: bVar });
}
if (bVar.tag === "StyVar") {
// rule Decl-Sty-Context
// NOTE: this does not aggregate *all* possible errors. May just return first error.
// y \not\in dom(g)
return addMapping(bVar, styType, selEnv, { tag: "StyProgT" });
} else if (bVar.tag === "SubVar") {
// rule Decl-Sub-Context
// x \not\in dom(g)
const substanceType = varEnv.vars.get(varName);
// If any Substance variable doesn't exist in env, ignore it,
// but flag it so we know to not translate the lines in the block later.
if (!substanceType) {
return { ...selEnv, skipBlock: true };
}
// check "T <: |T", assuming type constructors are nullary
// Specifically, the Style type for a Substance var needs to be more general. Otherwise, if it's more specific, that's a coercion
// e.g. this is correct: Substance: "SpecialVector `v`"; Style: "Vector `v`"
const declType = toSubstanceType(styType);
if (!isDeclaredSubtype(substanceType, declType, varEnv)) {
// COMBAK: Order?
// TODO(errors)
return addErrSel(selEnv, {
tag: "SelectorDeclTypeMismatch",
subType: declType,
styType: substanceType,
});
}
return addMapping(bVar, styType, selEnv, { tag: "SubProgT" });
} else throw Error("unknown tag");
};
// Judgment 6. G; g |- [|S_o] ~> g'
// `checkDeclPatterns` w/o error-checking, just addMapping for StyVars and SubVars
const checkDeclPatternsAndMakeEnv = (
varEnv: Env,
selEnv: SelEnv,
decls: DeclPattern[]
): SelEnv => {
return decls.reduce(
(s, p) => checkDeclPatternAndMakeEnv(varEnv, s, p),
selEnv
);
};
// TODO: Test this function
// Judgment 4. G |- |S_r ok
const checkRelPattern = (varEnv: Env, rel: RelationPattern): StyleErrors => {
// rule Bind-Context
if (rel.tag === "RelBind") {
// TODO: use checkSubStmt here (and in paper)?
// TODO: make sure the ill-typed bind selectors fail here (after Sub statics is fixed)
// G |- B : T1
const res1 = checkVar(rel.id.contents, varEnv);
// TODO(error)
if (isErr(res1)) {
const subErr1: SubstanceError = res1.error;
// TODO(error): Do we need to wrap this error further, or is returning SubstanceError with no additional Style info ok?
// return ["substance typecheck error in B"];
return [{ tag: "TaggedSubstanceError", error: subErr1 }];
}
const [vtype, env1] = res1.value;
// G |- E : T2
const res2 = checkExpr(toSubExpr(varEnv, rel.expr), varEnv);
// TODO(error)
if (isErr(res2)) {
const subErr2: SubstanceError = res2.error;
return [{ tag: "TaggedSubstanceError", error: subErr2 }];
// return ["substance typecheck error in E"];
}
const [etype, env2] = res2.value;
// T1 = T2
const typesEq = isDeclaredSubtype(vtype, etype, varEnv);
// TODO(error) -- improve message
if (!typesEq) {
return [
{ tag: "SelectorRelTypeMismatch", varType: vtype, exprType: etype },
];
// return ["types not equal"];
}
return [];
} else if (rel.tag === "RelPred") {
// rule Pred-Context
// G |- Q : Prop
const res = checkPredicate(toSubPred(rel), varEnv);
if (isErr(res)) {
const subErr3: SubstanceError = res.error;
return [{ tag: "TaggedSubstanceError", error: subErr3 }];
// return ["substance typecheck error in Pred"];
}
return [];
} else {
throw Error("unknown tag");
}
};
// Judgment 5. G |- [|S_r] ok
const checkRelPatterns = (
varEnv: Env,
rels: RelationPattern[]
): StyleErrors => {
return _.flatMap(
rels,
(rel: RelationPattern): StyleErrors => checkRelPattern(varEnv, rel)
);
};
const toSubstanceType = (styT: StyT): TypeConsApp => {
// TODO: Extend for non-nullary types (when they are implemented in Style)
return {
tag: "TypeConstructor",
name: styT,
args: [],
};
};
// TODO: Test this
// NOTE: `vars` is an immutable Map, so `set` returns a new Map; we return a new `Env` with the updated `vars` rather than mutating the existing one
const mergeMapping = (
varProgTypeMap: { [k: string]: [ProgType, BindingForm] },
varEnv: Env,
[varName, styType]: [string, StyT]
): Env => {
const res = varProgTypeMap[varName];
if (!res) {
throw Error("var has no binding form?");
}
const [progType, bindingForm] = res;
if (bindingForm.tag === "SubVar") {
// G || (x : |T) |-> G
return varEnv;
} else if (bindingForm.tag === "StyVar") {
// G || (y : |T) |-> G[y : T] (shadowing any existing Sub vars)
return {
...varEnv,
vars: varEnv.vars.set(
bindingForm.contents.value,
toSubstanceType(styType)
),
};
} else {
throw Error("unknown tag");
}
};
// TODO: don't merge the varmaps! just put g as the varMap (otherwise there will be extraneous bindings for the relational statements)
// Judgment 1. G || g |-> ...
const mergeEnv = (varEnv: Env, selEnv: SelEnv): Env => {
return Object.entries(selEnv.sTypeVarMap).reduce(
(acc, curr) => mergeMapping(selEnv.varProgTypeMap, acc, curr),
varEnv
);
};
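// Example (illustrative): for a selector `forall Vector y`, mergeEnv adds y : Vector
// to varEnv's vars (shadowing any existing Substance var named y); Substance vars
// bound in the selector leave varEnv unchanged.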
// ported from `checkPair`, `checkSel`, and `checkNamespace`
const checkHeader = (varEnv: Env, header: Header): SelEnv => {
if (header.tag === "Selector") {
// Judgment 7. G |- Sel ok ~> g
const sel: Selector = header;
const selEnv_afterHead = checkDeclPatternsAndMakeEnv(
varEnv,
initSelEnv(),
sel.head.contents
);
// Check `with` statements
// TODO: Did we get rid of `with` statements?
const selEnv_decls = checkDeclPatternsAndMakeEnv(
varEnv,
selEnv_afterHead,
safeContentsList(sel.with)
);
const relErrs = checkRelPatterns(
mergeEnv(varEnv, selEnv_decls),
safeContentsList(sel.where)
);
// TODO(error): Report the errors returned by the three checks above
return {
...selEnv_decls,
errors: selEnv_decls.errors.concat(relErrs), // COMBAK: Reverse the error order?
};
} else if (header.tag === "Namespace") {
// TODO(error)
return initSelEnv();
} else throw Error("unknown Style header tag");
};
// Returns a sel env for each selector in the Style program, in the same order
// previously named `checkSels`
export const checkSelsAndMakeEnv = (
varEnv: Env,
prog: HeaderBlock[]
): SelEnv[] => {
// Note that even if there is an error in one selector, it does not stop checking of the other selectors
const selEnvs: SelEnv[] = prog.map((e) => {
const res = checkHeader(varEnv, e.header);
// Put selector AST in just for debugging
res.header = { tag: "Just", contents: e.header };
return res;
});
return selEnvs;
};
//#endregion
//#region Types and code for finding substitutions
// Judgment 20. A substitution for a selector is only correct if it gives exactly one
// mapping for each Style variable in the selector. (Has test)
export const fullSubst = (selEnv: SelEnv, subst: Subst): boolean => {
// Check if a variable is a style variable, not a substance one
const isStyVar = (e: string): boolean =>
selEnv.varProgTypeMap[e][0].tag === "StyProgT";
const selStyVars = Object.keys(selEnv.sTypeVarMap).filter(isStyVar);
const substStyVars = Object.keys(subst);
// Equal up to permutation (keys of an object in js ensures that there are no dups)
return _.isEqual(selStyVars.sort(), substStyVars.sort());
};
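// Example (illustrative): for a selector `forall Set x; Set y`, the subst
// { x: "A", y: "B" } is full; { x: "A" } is partial (no mapping for y) and
// { x: "A", y: "B", z: "C" } has an extraneous key, so both fail.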
// Check that there are no duplicate keys or vals in the substitution
export const uniqueKeysAndVals = (subst: Subst): boolean => {
// All keys already need to be unique in js, so only checking values
const vals = Object.values(subst);
// All entries were unique iff deduplicating doesn't change the length (i.e. the nub is the same)
return new Set(vals).size === vals.length;
};
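// Example: uniqueKeysAndVals({ x: "A", y: "B" }) === true;
// uniqueKeysAndVals({ x: "A", y: "A" }) === false (two Style vars mapped to the
// same Substance var).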
// Optimization to filter out Substance statements that have no hope of matching any of the substituted relation patterns, so we don't do redundant work for every substitution (of which there could be millions). This function is only called once per selector.
const couldMatchRels = (
typeEnv: Env,
rels: RelationPattern[],
stmt: SubStmt
): boolean => {
// TODO < (this is an optimization; will only implement if needed)
return true;
};
//#region (subregion? TODO fix) Applying a substitution
// Apply a substitution to various parts of Style (relational statements, exprs, blocks)
// Recursively walk the tree, looking up and replacing each Style variable encountered with a Substance variable
// If a Sty var doesn't have a substitution (i.e. substitution map is bad), keep the Sty var and move on
// COMBAK: return "maybe" if a substitution fails?
// COMBAK: Add a type for `lv`? It's not used here
const substituteBform = (
lv: any,
subst: Subst,
bform: BindingForm
): BindingForm => {
// theta(B) = ...
if (bform.tag === "SubVar") {
// Variable in backticks in block or selector (e.g. `X`), so nothing to substitute
return bform;
} else if (bform.tag === "StyVar") {
// Look up the substitution for the Style variable and return a Substance variable
// Returns result of mapping if it exists (y -> x)
const res = subst[bform.contents.value];
if (res) {
return {
...bform, // Copy the start/end loc of the original Style variable, since we don't have Substance parse info (COMBAK)
tag: "SubVar",
contents: {
...bform.contents, // Copy the start/end loc of the original Style variable, since we don't have Substance parse info
type: "value",
value: res, // COMBAK: double check please
},
};
} else {
// Nothing to substitute
return bform;
}
} else throw Error("unknown tag");
};
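// Example (illustrative): with subst = { x: "A" }, a StyVar `x` is rewritten to a
// SubVar with value "A" (keeping the original source location info), while a
// backticked SubVar such as `B` is returned unchanged.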
const substituteExpr = (subst: Subst, expr: SelExpr): SelExpr => {
// theta(B) = ...
if (expr.tag === "SEBind") {
return {
...expr,
contents: substituteBform({ tag: "Nothing" }, subst, expr.contents),
};
} else if (["SEFunc", "SEValCons", "SEFuncOrValCons"].includes(expr.tag)) {
// COMBAK: Remove SEFuncOrValCons?
// theta(f[E]) = f([theta(E)]
return {
...expr,
args: expr.args.map((arg) => substituteExpr(subst, arg)),
};
} else {
throw Error("unsupported tag");
}
};
const substitutePredArg = (subst: Subst, predArg: PredArg): PredArg => {
if (predArg.tag === "RelPred") {
return {
...predArg,
args: predArg.args.map((arg) => substitutePredArg(subst, arg)),
};
} else if (predArg.tag === "SEBind") {
return {
...predArg,
contents: substituteBform({ tag: "Nothing" }, subst, predArg.contents), // COMBAK: Why is bform here...
};
} else {
console.log("unknown tag", subst, predArg);
throw Error("unknown tag");
}
};
// theta(|S_r) = ...
export const substituteRel = (
subst: Subst,
rel: RelationPattern
): RelationPattern => {
if (rel.tag === "RelBind") {
// theta(B := E) |-> theta(B) := theta(E)
return {
...rel,
id: substituteBform({ tag: "Nothing" }, subst, rel.id),
expr: substituteExpr(subst, rel.expr),
};
} else if (rel.tag === "RelPred") {
// theta(Q([a]) = Q([theta(a)])
return {
...rel,
args: rel.args.map((arg) => substitutePredArg(subst, arg)),
};
} else throw Error("unknown tag");
};
// Applies a substitution to a list of relational statement theta([|S_r])
// TODO: assumes a full substitution
const substituteRels = (
subst: Subst,
rels: RelationPattern[]
): RelationPattern[] => {
const res = rels.map((rel) => substituteRel(subst, rel));
return res;
};
//#endregion (subregion? TODO fix)
//#region Applying a substitution to a block
// // Substs for the translation semantics (more tree-walking on blocks, just changing binding forms)
const mkLocalVarName = (lv: LocalVarSubst): string => {
if (lv.tag === "LocalVarId") {
const [blockNum, substNum] = lv.contents;
return `${LOCAL_KEYWORD}_block${blockNum}_subst${substNum}`;
} else if (lv.tag === "NamespaceId") {
return lv.contents;
} else throw Error("unknown error");
};
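// Example (assuming LOCAL_KEYWORD is "$LOCAL"): mkLocalVarName({ tag: "LocalVarId", contents: [2, 3] })
// returns "$LOCAL_block2_subst3"; a NamespaceId simply returns its stored name.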
const substitutePath = (lv: LocalVarSubst, subst: Subst, path: Path): Path => {
if (path.tag === "FieldPath") {
return {
...path,
name: substituteBform({ tag: "Just", contents: lv }, subst, path.name),
};
} else if (path.tag === "PropertyPath") {
return {
...path,
name: substituteBform({ tag: "Just", contents: lv }, subst, path.name),
};
} else if (path.tag === "LocalVar") {
return {
nodeType: "SyntheticStyle",
children: [],
tag: "FieldPath",
name: {
children: [],
nodeType: "SyntheticStyle",
tag: "SubVar",
contents: {
...dummyId(mkLocalVarName(lv)),
},
},
field: path.contents,
};
} else if (path.tag === "InternalLocalVar") {
// Note that the local var becomes a path
// Use of local var 'v' (on right-hand side of '=' sign in Style) gets transformed into field path reference '$LOCAL_<ids>.v'
// where <ids> is a string generated to be unique to this selector match for this block
// COMBAK / HACK: Is there some way to get rid of all these dummy values?
return {
nodeType: "SyntheticStyle",
children: [],
tag: "FieldPath",
name: {
nodeType: "SyntheticStyle",
children: [],
tag: "SubVar",
contents: {
...dummyId(mkLocalVarName(lv)),
},
},
field: dummyId(path.contents),
};
} else if (path.tag === "AccessPath") {
// COMBAK: Check if this works / is needed (wasn't present in original code)
return {
...path,
path: substitutePath(lv, subst, path.path),
};
} else {
throw Error("unknown tag");
}
};
const substituteField = (
lv: LocalVarSubst,
subst: Subst,
field: PropertyDecl
): PropertyDecl => {
return {
...field,
value: substituteBlockExpr(lv, subst, field.value),
};
};
const substituteBlockExpr = (
lv: LocalVarSubst,
subst: Subst,
expr: Expr
): Expr => {
if (isPath(expr)) {
return substitutePath(lv, subst, expr);
} else if (
expr.tag === "CompApp" ||
expr.tag === "ObjFn" ||
expr.tag === "ConstrFn"
) {
// Substitute out occurrences of `VARYING_INIT(i)` (the computation) for `VaryInit(i)` (the `AnnoFloat`), as there is currently no special syntax for this.
// Note that this is a hack; instead of shoehorning it into `substituteBlockExpr`, it should be done more cleanly as a compiler pass on the Style block AST. It doesn't matter when, as long as it runs before the varying float initialization in `genState`.
if (expr.tag === "CompApp") {
if (expr.name.value === VARYING_INIT_FN_NAME) {
// TODO(err): Typecheck VARYING_INIT properly and return an error. This will be unnecessary if parsed with special syntax.
if (expr.args.length !== 1) {
throw Error("expected one argument to VARYING_INIT");
}
if (expr.args[0].tag !== "Fix") {
throw Error("expected float argument to VARYING_INIT");
}
return {
...dummyASTNode({}, "SyntheticStyle"),
tag: "VaryInit",
contents: expr.args[0].contents,
};
}
}
return {
...expr,
args: expr.args.map((arg: Expr) => substituteBlockExpr(lv, subst, arg)),
};
} else if (expr.tag === "BinOp") {
return {
...expr,
left: substituteBlockExpr(lv, subst, expr.left),
right: substituteBlockExpr(lv, subst, expr.right),
};
} else if (expr.tag === "UOp") {
return {
...expr,
arg: substituteBlockExpr(lv, subst, expr.arg),
};
} else if (
expr.tag === "List" ||
expr.tag === "Vector" ||
expr.tag === "Matrix"
) {
return {
...expr,
contents: expr.contents.map((e: Expr) =>
substituteBlockExpr(lv, subst, e)
),
};
} else if (expr.tag === "ListAccess") {
return {
...expr,
contents: [substitutePath(lv, subst, expr.contents[0]), expr.contents[1]],
};
} else if (expr.tag === "GPIDecl") {
return {
...expr,
properties: expr.properties.map((p: PropertyDecl) =>
substituteField(lv, subst, p)
),
};
} else if (expr.tag === "Layering") {
return {
...expr,
below: substitutePath(lv, subst, expr.below),
above: substitutePath(lv, subst, expr.above),
};
} else if (expr.tag === "PluginAccess") {
return {
...expr,
contents: [
expr.contents[0],
substituteBlockExpr(lv, subst, expr.contents[1]),
substituteBlockExpr(lv, subst, expr.contents[2]),
],
};
} else if (expr.tag === "Tuple") {
return {
...expr,
contents: [
substituteBlockExpr(lv, subst, expr.contents[0]),
substituteBlockExpr(lv, subst, expr.contents[1]),
],
};
} else if (expr.tag === "VectorAccess") {
return {
...expr,
contents: [
substitutePath(lv, subst, expr.contents[0]),
substituteBlockExpr(lv, subst, expr.contents[1]),
],
};
} else if (expr.tag === "MatrixAccess") {
return {
...expr,
contents: [
substitutePath(lv, subst, expr.contents[0]),
expr.contents[1].map((e) => substituteBlockExpr(lv, subst, e)),
],
};
} else if (
expr.tag === "Fix" ||
expr.tag === "Vary" ||
expr.tag === "VaryAD" || // technically is not present at this stage
expr.tag === "VaryInit" ||
expr.tag === "StringLit" ||
expr.tag === "BoolLit"
) {
// No substitution for literals
return expr;
} else {
console.error("expr", expr);
throw Error("unknown tag");
}
};
const substituteLine = (lv: LocalVarSubst, subst: Subst, line: Stmt): Stmt => {
if (line.tag === "PathAssign") {
return {
...line,
path: substitutePath(lv, subst, line.path),
value: substituteBlockExpr(lv, subst, line.value),
};
} else if (line.tag === "Override") {
return {
...line,
path: substitutePath(lv, subst, line.path),
value: substituteBlockExpr(lv, subst, line.value),
};
} else if (line.tag === "Delete") {
return {
...line,
contents: substitutePath(lv, subst, line.contents),
};
} else {
throw Error(
"Case should not be reached (anonymous statement should be substituted for a local one in `nameAnonStatements`)"
);
}
};
// Assumes a full substitution
const substituteBlock = (
[subst, si]: [Subst, number],
[block, bi]: [Block, number],
name: MaybeVal<string>
): Block => {
const lvSubst: LocalVarSubst =
name.tag === "Nothing"
? { tag: "LocalVarId", contents: [bi, si] }
: { tag: "NamespaceId", contents: name.contents };
return {
...block,
statements: block.statements.map((line) =>
substituteLine(lvSubst, subst, line)
),
};
};
//#endregion Applying a substitution to a block
// Convert Style expression to Substance expression (for ease of comparison in matching)
// Note: the env is needed to disambiguate SEFuncOrValCons
const toSubExpr = (env: Env, e: SelExpr): SubExpr => {
if (e.tag === "SEBind") {
return e.contents.contents;
} else if (e.tag === "SEFunc") {
return {
...e, // Puts the remnants of e's ASTNode info here -- is that ok?
tag: "ApplyFunction",
name: e.name,
args: e.args.map((e) => toSubExpr(env, e)),
};
} else if (e.tag === "SEValCons") {
return {
...e,
tag: "ApplyConstructor",
name: e.name,
args: e.args.map((e) => toSubExpr(env, e)),
};
} else if (e.tag === "SEFuncOrValCons") {
const res = {
...e,
tag: "Func", // Use the generic Substance parse type so on conversion, it can be disambiguated by `disambiguateFunctions`
name: e.name,
args: e.args.map((e) => toSubExpr(env, e)),
};
disambiguateSubNode(env, res); // mutates res
return res as SubExpr;
} else throw Error("unknown tag");
};
const toSubPredArg = (a: PredArg): SubPredArg => {
if (a.tag === "SEBind") {
return a.contents.contents;
} else if (a.tag === "RelPred") {
return toSubPred(a);
} else throw Error("unknown tag");
};
// Convert Style predicate to Substance predicate (for ease of comparison in matching)
const toSubPred = (p: RelPred): ApplyPredicate => {
return {
...p,
tag: "ApplyPredicate",
name: p.name,
args: p.args.map(toSubPredArg),
};
};
const varsEq = (v1: Identifier, v2: Identifier): boolean => {
return v1.value === v2.value;
};
const subVarsEq = (v1: Identifier, v2: Identifier): boolean => {
return v1.value === v2.value;
};
const argsEq = (a1: SubPredArg, a2: SubPredArg): boolean => {
if (a1.tag === "ApplyPredicate" && a2.tag === "ApplyPredicate") {
return subFnsEq(a1, a2);
} else if (a1.tag === a2.tag) {
// both are SubExpr, which are not explicitly tagged
return subExprsEq(a1 as SubExpr, a2 as SubExpr);
} else return false; // they are different types
};
const subFnsEq = (p1: any, p2: any): boolean => {
if (
!p1.hasOwnProperty("name") ||
!p1.hasOwnProperty("args") ||
!p2.hasOwnProperty("name") ||
!p2.hasOwnProperty("args")
) {
throw Error("expected substance type with name and args properties");
}
if (p1.args.length !== p2.args.length) {
return false;
}
// Can use `as` because now we know their lengths are equal
const allArgsEq = _.zip(p1.args, p2.args).every(([a1, a2]) =>
argsEq(a1 as SubPredArg, a2 as SubPredArg)
);
return p1.name.value === p2.name.value && allArgsEq;
};
const subExprsEq = (e1: SubExpr, e2: SubExpr): boolean => {
// ts doesn't seem to work well with the more generic way of checking this
if (e1.tag === "Identifier" && e2.tag === "Identifier") {
return e1.value === e2.value;
} else if (
(e1.tag === "ApplyFunction" && e2.tag === "ApplyFunction") ||
(e1.tag === "ApplyConstructor" && e2.tag === "ApplyConstructor") ||
(e1.tag === "Func" && e2.tag === "Func")
) {
return subFnsEq(e1, e2);
} else if (e1.tag === "Deconstructor" && e2.tag === "Deconstructor") {
return (
e1.variable.value === e2.variable.value &&
e1.field.value === e2.field.value
);
} else if (e1.tag === "StringLit" && e2.tag === "StringLit") {
return e1.contents === e2.contents;
}
return false;
};
const exprToVar = (e: SubExpr): Identifier => {
if (e.tag === "Identifier") {
return e;
} else {
// TODO(errors)
throw Error(
"internal error: Style expression matching doesn't yet handle nested exprssions"
);
}
};
const toTypeList = (c: ConstructorDecl): TypeConstructor[] => {
return c.args.map((p) => {
if (p.type.tag === "TypeConstructor") {
return p.type;
}
throw Error(
"internal error: expected TypeConstructor in type (expected nullary type)"
);
});
};
// TODO: Test this
// For existing judgment G |- T1 <: T2,
// this rule (SUBTYPE-ARROW) checks if the first arrow type (i.e. function or value constructor type) is a subtype of the second
// The arrow types are contravariant in their arguments and covariant in their return type
// e.g. if Cat <: Animal, then Cat -> Cat <: Cat -> Animal, and Animal -> Cat <: Cat -> Cat
const isSubtypeArrow = (
types1: TypeConstructor[],
types2: TypeConstructor[],
e: Env
): boolean => {
if (types1.length !== types2.length) {
return false;
}
if (types1.length === 0 && types2.length === 0) {
return true;
}
return (
isDeclaredSubtype(types2[0], types1[0], e) && // Note swap -- contravariant in arguments
isSubtypeArrow(types1.slice(1), types2.slice(1), e)
); // Covariant in return type
};
const exprsMatchArr = (
varEnv: Env,
subE: ApplyConstructor,
styE: ApplyConstructor
): boolean => {
const subArrType = varEnv.constructors.get(subE.name.value);
if (!subArrType) {
// TODO(errors)
throw Error("internal error: sub arr type doesn't exist");
}
const styArrType = varEnv.constructors.get(styE.name.value);
if (!styArrType) {
// TODO(errors)
throw Error("internal error: sty arr type doesn't exist");
}
if (subE.args.length !== styE.args.length) {
return false;
}
const subArrTypes = toTypeList(subArrType);
const styArrTypes = toTypeList(styArrType);
const subVarArgs = subE.args.map(exprToVar);
const styVarArgs = styE.args.map(exprToVar);
return (
isSubtypeArrow(subArrTypes, styArrTypes, varEnv) &&
_.zip(subVarArgs, styVarArgs).every(([a1, a2]) =>
varsEq(a1 as Identifier, a2 as Identifier)
)
);
// `as` is fine bc of preceding length check
};
// New judgment (number?): expression matching that accounts for subtyping. G, B, . |- E0 <| E1
// We assume the latter expression has already had a substitution applied
const exprsMatch = (typeEnv: Env, subE: SubExpr, selE: SubExpr): boolean => {
// We match value constructor applications if one val ctor is a subtype of another
// whereas for function applications, we match only if the exprs are equal (for now)
// This is because a val ctor doesn't "do" anything besides wrap its values
// whereas functions with the same type could do very different things, so we don't
// necessarily want to match them by subtyping
// (e.g. think of the infinite functions from Vector -> Vector)
// rule Match-Expr-Var
if (subE.tag === "Identifier" && selE.tag === "Identifier") {
return subVarsEq(subE, selE);
} else if (subE.tag === "ApplyFunction" && selE.tag === "ApplyFunction") {
// rule Match-Expr-Fnapp
return subExprsEq(subE, selE);
} else if (
subE.tag === "ApplyConstructor" &&
selE.tag === "ApplyConstructor"
) {
// rule Match-Expr-Vconsapp
return exprsMatchArr(typeEnv, subE, selE);
} else {
return false;
}
};
// Judgment 11. b; theta |- S <| |S_r
// After all Style variables in a relational pattern are substituted with Substance variables, check if the pattern matches the given Substance statement
const relMatchesLine = (
typeEnv: Env,
subEnv: SubstanceEnv,
s1: SubStmt,
s2: RelationPattern
): boolean => {
if (s1.tag === "Bind" && s2.tag === "RelBind") {
// rule Bind-Match
const bvar = s2.id;
if (s2.id.tag === "StyVar") {
// internal error
throw Error(
`Style variable ${
s2.id.contents.value
} found in relational statement ${ppRel(s2)}. Should not be present!`
);
} else if (s2.id.tag === "SubVar") {
// B |- E = |E
const [subVar, sVar] = [s1.variable, s2.id.contents.value];
const selExpr = toSubExpr(typeEnv, s2.expr);
const subExpr = s1.expr;
return (
subVarsEq(subVar, dummyId(sVar)) &&
exprsMatch(typeEnv, subExpr, selExpr)
);
// COMBAK: Add this condition when this is implemented in the Substance typechecker
// || exprsDeclaredEqual(subEnv, expr, selExpr); // B |- E = |E
} else throw Error("unknown tag");
} else if (s1.tag === "ApplyPredicate" && s2.tag === "RelPred") {
// rule Pred-Match
const [pred, sPred] = [s1, s2];
const selPred = toSubPred(sPred);
return subFnsEq(pred, selPred);
// COMBAK: Add this condition when the Substance typechecker is implemented -- where is the equivalent function to `predsDeclaredEqual` in the new code?
// || C.predsDeclaredEqual subEnv pred selPred // B |- Q <-> |Q
} else {
return false; // Only match two bind lines or two predicate lines
}
};
// Judgment 13. b |- [S] <| |S_r
const relMatchesProg = (
typeEnv: Env,
subEnv: SubstanceEnv,
subProg: SubProg,
rel: RelationPattern
): boolean => {
return subProg.statements.some((line) =>
relMatchesLine(typeEnv, subEnv, line, rel)
);
};
// Judgment 15. b |- [S] <| [|S_r]
const allRelsMatch = (
typeEnv: Env,
subEnv: SubstanceEnv,
subProg: SubProg,
rels: RelationPattern[]
): boolean => {
return rels.every((rel) => relMatchesProg(typeEnv, subEnv, subProg, rel));
};
// Judgment 17. b; [theta] |- [S] <| [|S_r] ~> [theta']
// Folds over [theta]
const filterRels = (
typeEnv: Env,
subEnv: SubstanceEnv,
subProg: SubProg,
rels: RelationPattern[],
substs: Subst[]
): Subst[] => {
const subProgFiltered: SubProg = {
...subProg,
statements: subProg.statements.filter((line) =>
couldMatchRels(typeEnv, rels, line)
),
};
return substs.filter((subst) =>
allRelsMatch(typeEnv, subEnv, subProgFiltered, substituteRels(subst, rels))
);
};
// Match declaration statements
// Substitution helper functions
// (+) operator combines two substitutions: subst -> subst -> subst
const combine = (s1: Subst, s2: Subst): Subst => {
return { ...s1, ...s2 };
};
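// Example: combine({ x: "A" }, { y: "B" }) === { x: "A", y: "B" }; on a key
// collision, the mapping in s2 wins (object spread semantics).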
// TODO check for duplicate keys (and vals)
// (x) operator combines two lists of substitutions: [subst] -> [subst] -> [subst]
// the way merge is used, I think each subst in the second argument only contains one mapping
const merge = (s1: Subst[], s2: Subst[]): Subst[] => {
if (s2.length === 0) {
return s1;
}
if (s1.length === 0) {
return s2;
}
return cartesianProduct(s1, s2).map(([a, b]: Subst[]) => combine(a, b));
};
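// Example: merge([{ x: "A" }, { x: "B" }], [{ y: "C" }]) ===
// [{ x: "A", y: "C" }, { x: "B", y: "C" }] -- a cartesian product, combining
// each pair of substitutions.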
// Judgment 9. G; theta |- T <| |T
// Assumes types are nullary, so doesn't return a subst, only a bool indicating whether the types matched
// Ported from `matchType`
const typesMatched = (
varEnv: Env,
substanceType: TypeConsApp,
styleType: StyT
): boolean => {
if (
substanceType.tag === "TypeConstructor" &&
substanceType.args.length === 0
) {
// The Style type needs to be more general than the Substance type
return isDeclaredSubtype(substanceType, toSubstanceType(styleType), varEnv);
}
// TODO(errors)
console.log(substanceType, styleType);
throw Error(
"internal error: expected two nullary types (parametrized types to be implemented)"
);
};
// Judgment 10. theta |- x <| B
const matchBvar = (subVar: Identifier, bf: BindingForm): MaybeVal<Subst> => {
if (bf.tag === "StyVar") {
const newSubst = {};
newSubst[toString(bf)] = subVar.value; // StyVar matched SubVar
return {
tag: "Just",
contents: newSubst,
};
} else if (bf.tag === "SubVar") {
if (subVar.value === bf.contents.value) {
// Substance variables matched; comparing string equality
return {
tag: "Just",
contents: {},
};
} else {
return { tag: "Nothing" }; // TODO: Note, here we distinguish between an empty substitution and no substitution... but why?
}
} else throw Error("unknown tag");
};
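// Example (illustrative): matching Substance `Decl Set A` against StyVar `x`
// yields Just { x: "A" }; against SubVar `A` it yields Just {} (a match adding no
// mappings); against SubVar `B` it yields Nothing.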
// Judgment 12. G; theta |- S <| |S_o
// TODO: Not sure why Maybe<Subst> doesn't work in the type signature?
const matchDeclLine = (
varEnv: Env,
line: SubStmt,
decl: DeclPattern
): MaybeVal<Subst> => {
if (line.tag === "Decl") {
const [subT, subVar] = [line.type, line.name];
const [styT, bvar] = [decl.type, decl.id];
// substitution is only valid if types matched first
if (typesMatched(varEnv, subT, styT)) {
return matchBvar(subVar, bvar);
}
}
// Sty decls only match Sub decls
return { tag: "Nothing" };
};
// Judgment 16. G; [theta] |- [S] <| [|S_o] ~> [theta']
const matchDecl = (
varEnv: Env,
subProg: SubProg,
initSubsts: Subst[],
decl: DeclPattern
): Subst[] => {
// Judgment 14. G; [theta] |- [S] <| |S_o
const newSubsts = subProg.statements.map((line) =>
matchDeclLine(varEnv, line, decl)
);
const res = merge(initSubsts, justs(newSubsts)); // COMBAK: Inline this
return res;
};
// Judgment 18. G; [theta] |- [S] <| [|S_o] ~> [theta']
// Folds over [|S_o]
const matchDecls = (
varEnv: Env,
subProg: SubProg,
decls: DeclPattern[],
initSubsts: Subst[]
): Subst[] => {
return decls.reduce(
(substs, decl) => matchDecl(varEnv, subProg, substs, decl),
initSubsts
);
};
// Judgment 19. g; G; b; [theta] |- [S] <| Sel
// NOTE: this uses little gamma (not in paper) to check substitution validity
// ported from `find_substs_sel`
const findSubstsSel = (
varEnv: Env,
subEnv: SubstanceEnv,
subProg: SubProg,
[header, selEnv]: [Header, SelEnv]
): Subst[] => {
if (header.tag === "Selector") {
const sel = header;
const decls = sel.head.contents.concat(safeContentsList(sel.with));
const rels = safeContentsList(sel.where);
const initSubsts: Subst[] = [];
const rawSubsts = matchDecls(varEnv, subProg, decls, initSubsts);
const substCandidates = rawSubsts.filter((subst) =>
fullSubst(selEnv, subst)
);
const filteredSubsts = filterRels(
varEnv,
subEnv,
subProg,
rels,
substCandidates
);
const correctSubsts = filteredSubsts.filter(uniqueKeysAndVals);
return correctSubsts;
} else if (header.tag === "Namespace") {
// No substitutions for a namespace (not in paper)
return [];
} else throw Error("unknown tag");
};
// Find a list of substitutions for each selector in the Sty program. (ported from `find_substs_prog`)
export const findSubstsProg = (
varEnv: Env,
subEnv: SubstanceEnv,
subProg: SubProg,
styProg: HeaderBlock[],
selEnvs: SelEnv[]
): Subst[][] => {
if (selEnvs.length !== styProg.length) {
throw Error("expected same # selEnvs as selectors");
}
const selsWithEnvs = _.zip(
styProg.map((e: HeaderBlock) => e.header),
selEnvs
); // TODO: Why can't I type it [Header, SelEnv][]? It shouldn't be undefined after the length check
return selsWithEnvs.map((selAndEnv) =>
findSubstsSel(varEnv, subEnv, subProg, selAndEnv as [Header, SelEnv])
);
};
//#endregion
//#region Naming anon statements
// Style AST preprocessing:
// For any anonymous statement only (e.g. `encourage near(x.shape, y.shape)`),
// replace it with a named statement (`local.<UNIQUE_ID> = encourage near(x.shape, y.shape)`)
// Note the UNIQUE_ID only needs to be unique within a block (since local will assign another ID that's globally-unique)
// Leave all other statements unchanged
const nameAnonStatement = (
[i, b]: [number, Stmt[]],
s: Stmt
): [number, Stmt[]] => {
// Transform stmt into local variable assignment "ANON_$counter = e" and increment counter
if (s.tag === "AnonAssign") {
const stmt: Stmt = {
...s,
tag: "PathAssign",
type: { tag: "TypeOf", contents: "Nothing" }, // TODO: Why is it parsed like this?
path: {
tag: "InternalLocalVar",
contents: `\$${ANON_KEYWORD}_${i}`,
nodeType: "SyntheticStyle",
children: [], // Unused bc compiler internal
},
value: s.contents,
};
return [i + 1, b.concat([stmt])];
} else {
return [i, b.concat([s])];
}
};
const nameAnonBlock = (b: Block): Block => {
return {
...b,
statements: b.statements.reduce(
(acc, curr) => nameAnonStatement(acc, curr), // Not sure why this can't be point-free
[0, []] as [number, Stmt[]]
)[1],
};
};
export const nameAnonStatements = (prog: StyProg): StyProg => {
const p = prog.blocks;
return {
...prog,
blocks: p.map((hb) => ({ ...hb, block: nameAnonBlock(hb.block) })),
};
};
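// Example (assuming ANON_KEYWORD is "ANON"): the anonymous statement
// `encourage near(x.shape, y.shape)` at index 0 in a block becomes
// `$ANON_0 = encourage near(x.shape, y.shape)`, where the left-hand side is an
// InternalLocalVar path; named statements pass through unchanged.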
//#endregion
//#region Translating Style program
const initTrans = (): Translation => {
return { trMap: {}, warnings: [] };
};
// /////// Translation judgments
/* Note: All of the folds below use foldM.
foldM stops accumulating when the first fatal error is reached, using "Either [Error]" as a monad
(Non-fatal errors are stored as warnings in the translation)
foldM :: Monad m => (a -> b -> m a) -> a -> [b] -> m a
example:
f acc elem = if elem < 0 then Left ["wrong " ++ show elem] else Right $ elem : acc
foldM f [] [1, 9, -1, 2, -2] = Left ["wrong -1"]
foldM f [] [1, 9] = Right [9,1] */
// Judgment 26. D |- phi ~> D'
// This is where interesting things actually happen (each line is interpreted and added to the translation)
// Related functions in `Evaluator`: findExprSafe, insertExpr
// Note this mutates the translation, and we return the translation reference just as a courtesy
const deleteProperty = (
trans: Translation,
path: Path, // used for ASTNode info
name: BindingForm,
field: Identifier,
property: Identifier
): Translation => {
const trn = trans.trMap;
const nm = name.contents.value;
const fld = field.value;
const prp = property.value;
const fieldDict = trn[nm];
if (!fieldDict) {
// TODO(errors / warnings): Should this be fatal?
return addWarn(trans, {
tag: "DeletedPropWithNoSubObjError",
subObj: name,
path,
});
}
const prop: FieldExpr<VarAD> = fieldDict[fld];
if (!prop) {
// TODO(errors / warnings): Should this be fatal?
return addWarn(trans, {
tag: "DeletedPropWithNoFieldError",
subObj: name,
field,
path,
});
}
if (prop.tag === "FExpr") {
// Deal with GPI aliasing: this only happens if a GPI is aliased to another; an operation performed on the aliased GPI's property applies to the original
// COMBAK: should path aliasing have destructive effects on the translation (e.g. add or delete)? maybe it should only happen in lookup? Deleting an aliased path should just delete the alias, not its referent?
// TODO: Test this
if (prop.contents.tag === "OptEval") {
if (prop.contents.contents.tag === "FieldPath") {
const p = prop.contents.contents;
if (varsEq(p.name.contents, name.contents) && varsEq(p.field, field)) {
// TODO(error)
return addWarn(trans, {
tag: "CircularPathAlias",
path: { tag: "FieldPath", name, field } as Path,
});
}
return deleteProperty(trans, p, p.name, p.field, property);
}
}
// TODO(error)
return addWarn(trans, {
tag: "DeletedPropWithNoGPIError",
subObj: name,
field,
property,
path,
});
} else if (prop.tag === "FGPI") {
// TODO(error, warning): check if the property is member of properties of GPI
const gpiDict = prop.contents[1];
delete gpiDict[prp];
return trans;
} else throw Error("unknown tag");
};
// Note this mutates the translation, and we return the translation reference just as a courtesy
const deleteField = (
trans: Translation,
path: Path,
name: BindingForm,
field: Identifier
): Translation => {
// TODO(errors): Pass in the original path for error reporting
const trn = trans.trMap;
const fieldDict = trn[name.contents.value];
if (!fieldDict) {
// TODO(errors / warnings)
return addWarn(trans, {
tag: "DeletedNonexistentFieldError",
subObj: name,
field,
path,
});
}
if (!(field.value in fieldDict)) {
// TODO(errors / warnings)
return addWarn(trans, {
tag: "DeletedNonexistentFieldError",
subObj: name,
field,
path,
});
}
delete fieldDict[field.value];
return trans;
};
// NOTE: This function mutates the translation
// rule Line-delete
const deletePath = (
trans: Translation,
path: Path
): Either<StyleErrors, Translation> => {
if (path.tag === "FieldPath") {
const transWithWarnings = deleteField(trans, path, path.name, path.field);
return toRight(transWithWarnings);
} else if (path.tag === "PropertyPath") {
const transWithWarnings = deleteProperty(
trans,
path,
path.name,
path.field,
path.property
);
return toRight(transWithWarnings);
} else if (path.tag === "AccessPath") {
// TODO(error)
const err: StyleError = { tag: "DeletedVectorElemError", path };
return toLeft([err]);
} else if (path.tag === "InternalLocalVar") {
throw Error(
"Compiler should not be deleting a local variable; this should have been removed in a earlier compiler pass"
);
} else throw Error("unknown tag");
};
// NOTE: This function mutates the translation
const addPath = (
override: boolean,
trans: Translation,
path: Path,
expr: TagExpr<VarAD>
): Either<StyleErrors, Translation> => {
// Extended `insertExpr` with an optional flag to deal with errors and warnings
// `insertExpr` replaces the old .hs functions `addField` and `addProperty`
// Check insertExpr's errors and warnings first
const tr2 = insertExpr(path, expr, trans, true, override);
if (tr2.warnings.length > 0) {
return toLeft(tr2.warnings);
}
return toRight(tr2);
};
const translateLine = (
trans: Translation,
stmt: Stmt
): Either<StyleErrors, Translation> => {
if (stmt.tag === "PathAssign") {
return addPath(false, trans, stmt.path, {
tag: "OptEval",
contents: stmt.value,
});
} else if (stmt.tag === "Override") {
return addPath(true, trans, stmt.path, {
tag: "OptEval",
contents: stmt.value,
});
} else if (stmt.tag === "Delete") {
return deletePath(trans, stmt.contents);
} else throw Error("unknown tag");
};
// Judgment 25. D |- |B ~> D' (modified to be: theta; D |- |B ~> D')
const translateBlock = (
name: MaybeVal<string>,
blockWithNum: [Block, number],
trans: Translation,
substWithNum: [Subst, number]
): Either<StyleErrors, Translation> => {
const blockSubsted: Block = substituteBlock(substWithNum, blockWithNum, name);
return foldM(blockSubsted.statements, translateLine, trans);
};
// Judgment 24. [theta]; D |- |B ~> D'
// This is a selector, not a namespace, so we substitute local vars with the subst/block IDs
const translateSubstsBlock = (
trans: Translation,
substsNum: [Subst, number][],
blockWithNum: [Block, number]
): Either<StyleErrors, Translation> => {
return foldM(
substsNum,
(trans, substNum, i) =>
translateBlock({ tag: "Nothing" }, blockWithNum, trans, substNum),
trans
);
};
//#region Block statics
const emptyErrs = () => {
return { errors: [], warnings: [] };
};
const oneErr = (err: StyleError): StyleResults => {
return { errors: [err], warnings: [] };
};
const combineErrs = (e1: StyleResults, e2: StyleResults): StyleResults => {
return {
errors: e1.errors.concat(e2.errors),
warnings: e1.warnings.concat(e2.warnings),
};
};
const flatErrs = (es: StyleResults[]): StyleResults => {
return {
errors: _.flatMap(es, (e) => e.errors),
warnings: _.flatMap(es, (e) => e.warnings),
};
};
// Check that every shape name and shape property name in a shape constructor exists
const checkGPIInfo = (selEnv: SelEnv, expr: GPIDecl): StyleResults => {
const styName: string = expr.shapeName.value;
const errors: StyleErrors = [];
const warnings: StyleWarnings = [];
const shapeNames: string[] = shapedefs.map((e: ShapeDef) => e.shapeType);
if (!shapeNames.includes(styName)) {
// Fatal error -- we cannot check the shape properties (unless you want to guess the shape)
return oneErr({ tag: "InvalidGPITypeError", givenType: expr.shapeName });
}
// `findDef` throws an error, so we find the shape name first (done above) to make sure the error can be caught
const shapeDef: ShapeDef = findDef(styName);
const givenProperties: Identifier[] = expr.properties.map((e) => e.name);
const expectedProperties: string[] = Object.entries(shapeDef.properties).map(
(e) => e[0]
);
for (let gp of givenProperties) {
// Check multiple properties, as each one is not fatal if wrong
if (!expectedProperties.includes(gp.value)) {
errors.push({
tag: "InvalidGPIPropertyError",
givenProperty: gp,
expectedProperties,
});
}
}
return { errors, warnings };
};
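// Example (illustrative, assuming Circle is a registered shape): `x.icon = Circl { r: 5.0 }`
// produces an InvalidGPITypeError for "Circl"; `x.icon = Circle { radiu: 5.0 }`
// produces an InvalidGPIPropertyError listing the expected Circle properties.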
// Check that every function, objective, and constraint exists (below) -- parametrically over the kind of function
const checkFunctionName = (
selEnv: SelEnv,
expr: ICompApp | IObjFn | IConstrFn
): StyleResults => {
const fnDict = FN_DICT[expr.tag];
const fnNames: string[] = _.keys(fnDict); // Names of built-in functions of that kind
const givenFnName: Identifier = expr.name;
if (
!fnNames.includes(givenFnName.value) &&
givenFnName.value !== VARYING_INIT_FN_NAME
) {
const fnErrorType = FN_ERR_TYPE[expr.tag];
return oneErr({ tag: fnErrorType, givenName: givenFnName });
}
return emptyErrs();
};
// Written recursively on exprs, just accumulating possible expr errors
const checkBlockExpr = (selEnv: SelEnv, expr: Expr): StyleResults => {
// Closure for brevity
const check = (e: Expr): StyleResults => checkBlockExpr(selEnv, e);
if (isPath(expr)) {
return checkBlockPath(selEnv, expr);
} else if (
expr.tag === "CompApp" ||
expr.tag === "ObjFn" ||
expr.tag === "ConstrFn"
) {
const e1 = checkFunctionName(selEnv, expr);
const e2 = expr.args.map(check);
return flatErrs([e1].concat(e2));
} else if (expr.tag === "BinOp") {
return flatErrs([check(expr.left), check(expr.right)]);
} else if (expr.tag === "UOp") {
return check(expr.arg);
} else if (
expr.tag === "List" ||
expr.tag === "Vector" ||
expr.tag === "Matrix"
) {
return flatErrs(expr.contents.map(check));
} else if (expr.tag === "ListAccess") {
return emptyErrs();
} else if (expr.tag === "GPIDecl") {
const e1: StyleResults = checkGPIInfo(selEnv, expr);
const e2: StyleResults[] = expr.properties.map((p) => check(p.value));
return flatErrs([e1].concat(e2));
} else if (expr.tag === "Layering") {
return flatErrs([check(expr.below), check(expr.above)]);
} else if (expr.tag === "PluginAccess") {
return flatErrs([check(expr.contents[1]), check(expr.contents[2])]);
} else if (expr.tag === "Tuple") {
return flatErrs([check(expr.contents[0]), check(expr.contents[1])]);
} else if (expr.tag === "VectorAccess") {
return check(expr.contents[1]);
} else if (expr.tag === "MatrixAccess") {
return flatErrs(expr.contents[1].map(check));
} else if (
expr.tag === "Fix" ||
expr.tag === "Vary" ||
expr.tag === "VaryInit" ||
expr.tag === "StringLit" ||
expr.tag === "BoolLit"
) {
return emptyErrs();
} else {
console.error("expr", expr);
throw Error("unknown tag");
}
};
const checkBlockPath = (selEnv: SelEnv, path: Path): StyleResults => {
// TODO(errors) / Block statics
// Currently there is nothing to check for paths
return emptyErrs();
};
const checkLine = (
selEnv: SelEnv,
line: Stmt,
acc: StyleResults
): StyleResults => {
if (line.tag === "PathAssign") {
const pErrs = checkBlockPath(selEnv, line.path);
const eErrs = checkBlockExpr(selEnv, line.value);
return combineErrs(combineErrs(acc, pErrs), eErrs);
} else if (line.tag === "Override") {
const pErrs = checkBlockPath(selEnv, line.path);
const eErrs = checkBlockExpr(selEnv, line.value);
return combineErrs(combineErrs(acc, pErrs), eErrs);
} else if (line.tag === "Delete") {
const pErrs = checkBlockPath(selEnv, line.contents);
return combineErrs(acc, pErrs);
} else {
throw Error(
"Case should not be reached (anonymous statement should be substituted for a local one in `nameAnonStatements`)"
);
}
};
const checkBlock = (selEnv: SelEnv, block: Block): StyleErrors => {
// Block checking; static semantics
// The below properties are checked in one pass (a fold) over the Style AST:
// Check that every shape name and shape property name in a shape constructor exists
// Check that every function, objective, and constraint exists
// NOT CHECKED as this requires more advanced env-building work: At path construction time, check that every Substance object exists in the environment of the block + selector, or that it's defined as a local variable
const res: StyleResults = block.statements.reduce(
(acc: StyleResults, stmt: Stmt): StyleResults =>
checkLine(selEnv, stmt, acc),
emptyErrs()
);
// TODO(errors): Return warnings (non-fatally); currently there are no warnings though
if (res.warnings.length > 0) {
console.error("warnings", res.warnings);
throw Error("Internal error: report these warnings");
}
return res.errors;
};
//#endregion Block statics
// Judgment 23, contd.
const translatePair = (
varEnv: Env,
subEnv: SubstanceEnv,
subProg: SubProg,
trans: Translation,
hb: HeaderBlock,
blockNum: number
): Either<StyleErrors, Translation> => {
if (hb.header.tag === "Namespace") {
const selEnv = initSelEnv();
const bErrs = checkBlock(selEnv, hb.block); // TODO: block statics
if (selEnv.errors.length > 0 || bErrs.length > 0) {
// This is a namespace, not selector, so we substitute local vars with the namespace's name
// skip transSubstsBlock; only one subst
return {
tag: "Left",
contents: selEnv.errors.concat(bErrs),
};
}
const subst = {};
// COMBAK / errors: Keep the AST node from `hb.header` for error reporting?
return translateBlock(
{
tag: "Just",
contents: (hb.header.contents.contents.value as any) as string,
},
[hb.block, blockNum],
trans,
[subst, 0]
);
} else if (hb.header.tag === "Selector") {
const selEnv = checkHeader(varEnv, hb.header);
const bErrs = checkBlock(selEnv, hb.block); // TODO: block statics
// If any Substance variable in the selector environment doesn't exist in the Substance program (e.g. Set `A`),
// skip this block (because the Substance variable won't exist in the translation)
if (selEnv.skipBlock) {
return toRight(trans);
}
if (selEnv.errors.length > 0 || bErrs.length > 0) {
return {
tag: "Left",
contents: selEnv.errors.concat(bErrs),
};
}
// For creating unique local var names
const substs = findSubstsSel(varEnv, subEnv, subProg, [hb.header, selEnv]);
return translateSubstsBlock(trans, numbered(substs), [hb.block, blockNum]);
} else throw Error("unknown tag");
};
// Map a function over the translation
const mapTrans = (
trans: Translation,
f: (name: string, fieldDict: FieldDict) => [string, FieldDict]
): Translation => {
return {
...trans,
trMap: Object.fromEntries(
Object.entries(trans.trMap).map(([n, fd]) => f(n, fd))
),
};
};
// Note, this mutates the translation
const insertNames = (trans: Translation): Translation => {
const insertName = (
name: string,
fieldDict: FieldDict
): [string, FieldDict] => {
fieldDict.name = {
tag: "FExpr",
contents: {
tag: "Done",
contents: { tag: "StrV", contents: name },
},
};
return [name, fieldDict];
};
return mapTrans(trans, insertName);
};
/**
* Add label strings to the translation, regardless if the Substance object is selected in the Style program
* NOTE: this function mutates `trans`.
*
* @param trans `Translation` without labels
* @param labels the label map from the Substance compiler
*/
const insertLabels = (trans: Translation, labels: LabelMap): void => {
for (const labelData of labels) {
const [name, label] = labelData;
if (label.isJust()) {
const labelString = label.value;
const labelValue: TagExpr<VarAD> = {
tag: "Done",
contents: {
tag: "StrV",
contents: labelString,
},
};
const labelExpr: FieldExpr<VarAD> = {
tag: "FExpr",
contents: labelValue,
};
const fieldDict = trans.trMap[name];
if (fieldDict !== undefined) {
fieldDict[LABEL_FIELD] = labelExpr;
} else {
trans.trMap[name] = {
[LABEL_FIELD]: labelExpr,
};
}
}
}
};
const translateStyProg = (
varEnv: Env,
subEnv: SubstanceEnv,
subProg: SubProg,
styProg: StyProg,
labelMap: LabelMap,
styVals: number[]
): Either<StyleErrors, Translation> => {
// COMBAK: Deal with styVals
const res = foldM(
styProg.blocks,
(trans, hb, i) => translatePair(varEnv, subEnv, subProg, trans, hb, i),
initTrans()
);
if (isLeft(res)) {
return res;
} // Return errors
const trans = res.contents;
const transWithNames = insertNames(trans);
insertLabels(transWithNames, labelMap); // NOTE: mutates `transWithNames`
// COMBAK: Do this with plugins
// const styValMap = styJsonToMap(styVals);
// const transWithPlugins = evalPluginAccess(styValMap, transWithNamesAndLabels);
// return Right(transWithPlugins);
return toRight(transWithNames);
};
//#endregion
// BEGIN GENOPTPROBLEM.HS PORT
//#region Translation utilities -- TODO move to EngineUtils
function foldFields<T>(
f: (s: string, field: Field, fexpr: FieldExpr<VarAD>, acc: T[]) => T[],
[name, fieldDict]: [string, { [k: string]: FieldExpr<VarAD> }],
acc: T[]
): T[] {
const res: T[] = Object.entries(fieldDict).reduce(
(acc: T[], [field, expr]) => f(name, field, expr, acc),
[]
);
return res.concat(acc);
}
function foldSubObjs<T>(
f: (s: string, f: Field, fexpr: FieldExpr<VarAD>, acc: T[]) => T[],
tr: Translation
): T[] {
return Object.entries(tr.trMap).reduce(
(acc: T[], curr) => foldFields(f, curr, acc),
[]
);
}
//#endregion
//#region Gen opt problem
// Find varying (float) paths
// For now, don't optimize these float-valued properties of a GPI
// (use whatever they are initialized to in Shapes or set to in Style)
const unoptimizedFloatProperties: string[] = [
"rotation",
"strokeWidth",
"thickness",
"transform",
"transformation",
"opacity",
"finalW",
"finalH",
"arrowheadSize",
];
const optimizedVectorProperties: string[] = ["start", "end", "center"];
const declaredVarying = (t: TagExpr<VarAD>): boolean => {
if (t.tag === "OptEval") {
return isVarying(t.contents);
}
return false;
};
const mkPath = (strs: string[]): Path => {
if (strs.length === 2) {
const [name, field] = strs;
return {
tag: "FieldPath",
nodeType: "SyntheticStyle",
children: [],
name: {
nodeType: "SyntheticStyle",
children: [],
tag: "SubVar",
contents: {
...dummyId(name),
},
},
field: dummyId(field),
};
} else if (strs.length === 3) {
const [name, field, prop] = strs;
return {
tag: "PropertyPath",
nodeType: "SyntheticStyle",
children: [],
name: {
nodeType: "SyntheticStyle",
children: [],
tag: "SubVar",
contents: {
...dummyId(name),
},
},
field: dummyId(field),
property: dummyId(prop),
};
} else throw Error("bad # inputs");
};
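// Example: mkPath(["A", "shape"]) builds the FieldPath `A`.shape, and
// mkPath(["A", "shape", "center"]) builds the PropertyPath `A`.shape.center,
// both with synthetic AST node info (dummyId).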
const pendingProperties = (s: ShapeTypeStr): PropID[] => {
if (s === "Text") return ["w", "h"];
if (s === "TextTransform") return ["w", "h"];
if (s === "ImageTransform") return ["initWidth", "initHeight"];
return [];
};
const isVarying = (e: Expr): boolean => {
return e.tag === "Vary" || e.tag === "VaryInit";
};
const isPending = (s: ShapeTypeStr, p: PropID): boolean => {
return pendingProperties(s).includes(p);
};
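// Example: isPending("Text", "w") === true, since a Text's width is only known
// after rendering; isPending("Text", "center") === false.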
// ---- FINDING VARIOUS THINGS IN THE TRANSLATION
const findPropertyVarying = (
name: string,
field: Field,
properties: { [k: string]: TagExpr<VarAD> },
floatProperty: string,
acc: Path[]
): Path[] => {
const expr = properties[floatProperty];
const path = mkPath([name, field, floatProperty]);
if (!expr) {
if (unoptimizedFloatProperties.includes(floatProperty)) {
return acc;
}
if (optimizedVectorProperties.includes(floatProperty)) {
const defaultVec2: TagExpr<VarAD> = {
tag: "OptEval",
contents: {
nodeType: "SyntheticStyle",
children: [],
tag: "Vector",
contents: [
dummyASTNode({ tag: "Vary" }, "SyntheticStyle") as Expr,
dummyASTNode({ tag: "Vary" }, "SyntheticStyle") as Expr,
],
},
};
// Return paths for both elements, COMBAK: This hardcodes that unset vectors have 2 elements, need to generalize
const paths = findNestedVarying(defaultVec2, path);
return paths.concat(acc);
}
return [path].concat(acc);
} else {
if (declaredVarying(expr)) {
return [path].concat(acc);
}
}
const paths = findNestedVarying(expr, path);
return paths.concat(acc);
};
// Look for nested varying variables, given the path to its parent var (e.g. `x.r` => (-1.2, ?)) => `x.r`[1] is varying
const findNestedVarying = (e: TagExpr<VarAD>, p: Path): Path[] => {
if (e.tag === "OptEval") {
const res = e.contents;
if (res.tag === "Vector") {
const elems: Expr[] = res.contents;
const indices: Path[] = elems
.map((e: Expr, i): [Expr, number] => [e, i])
.filter((e: [Expr, number]): boolean => isVarying(e[0]))
.map(
([e, i]: [Expr, number]): IAccessPath =>
({
nodeType: "SyntheticStyle",
children: [],
tag: "AccessPath",
path: p,
indices: [
dummyASTNode({ tag: "Fix", contents: i }, "SyntheticStyle"),
],
} as IAccessPath)
);
return indices;
} else if (
res.tag === "Matrix" ||
res.tag === "List" ||
res.tag === "Tuple"
) {
// COMBAK: This should search, but for now we just don't handle nested varying vars in these
return [];
}
}
return [];
};
// Given 'propType' and 'shapeType', return all props of that ValueType
// COMBAK: Model "FloatT", "FloatV", etc as types for ValueType
const propertiesOf = (propType: string, shapeType: ShapeTypeStr): PropID[] => {
const shapeInfo: [string, [PropType, Sampler]][] = Object.entries(
findDef(shapeType).properties
);
return shapeInfo
.filter(([pName, [pType, s]]) => pType === propType)
.map((e) => e[0]);
};
// Given 'propType' and 'shapeType', return all props NOT of that ValueType
const propertiesNotOf = (
propType: string,
shapeType: ShapeTypeStr
): PropID[] => {
const shapeInfo: [string, [PropType, Sampler]][] = Object.entries(
findDef(shapeType).properties
);
return shapeInfo
.filter(([pName, [pType, s]]) => pType !== propType)
.map((e) => e[0]);
};
// Find varying fields
const findFieldVarying = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: Path[]
): Path[] => {
if (fexpr.tag === "FExpr") {
if (declaredVarying(fexpr.contents)) {
return [mkPath([name, field])].concat(acc);
}
const paths = findNestedVarying(fexpr.contents, mkPath([name, field]));
return paths.concat(acc);
} else if (fexpr.tag === "FGPI") {
const [typ, properties] = fexpr.contents;
const ctorFloats = propertiesOf("FloatV", typ).concat(
propertiesOf("VectorV", typ)
);
const varyingFloats = ctorFloats.filter((e) => !isPending(typ, e));
// This splits up vector-typed properties into one path for each element
const vs: Path[] = varyingFloats.reduce(
(acc: Path[], curr) =>
findPropertyVarying(name, field, properties, curr, acc),
[]
);
return vs.concat(acc);
} else throw Error("unknown tag");
};
// Find all varying paths
const findVarying = (tr: Translation): Path[] => {
return foldSubObjs(findFieldVarying, tr);
};
// Find uninitialized (non-float) property paths
const findPropertyUninitialized = (
name: string,
field: Field,
properties: GPIMap,
nonfloatProperty: string,
acc: Path[]
): Path[] => {
// nonfloatProperty is a non-float property that is NOT set by the user and thus we can sample it
const res = properties[nonfloatProperty];
if (!res) {
return [mkPath([name, field, nonfloatProperty])].concat(acc);
}
return acc;
};
// Find uninitialized fields
const findFieldUninitialized = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: Path[]
): Path[] => {
// NOTE: we don't find uninitialized fields because you can't leave them uninitialized; plus, we don't know what types they are
if (fexpr.tag === "FExpr") {
return acc;
}
if (fexpr.tag === "FGPI") {
const [typ, properties] = fexpr.contents;
const ctorNonfloats = propertiesNotOf("FloatV", typ).filter(
(e) => e !== "name"
);
const uninitializedProps = ctorNonfloats;
const vs = uninitializedProps.reduce(
(acc: Path[], curr) =>
findPropertyUninitialized(name, field, properties, curr, acc),
[]
);
return vs.concat(acc);
}
throw Error("unknown tag");
};
const findUninitialized = (tr: Translation): Path[] => {
return foldSubObjs(findFieldUninitialized, tr);
};
// Fold function to return the names of GPIs
const findGPIName = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: [string, Field][]
): [string, Field][] => {
if (fexpr.tag === "FGPI") {
return ([[name, field]] as [string, Field][]).concat(acc);
} else if (fexpr.tag === "FExpr") {
return acc;
} else throw Error("unknown tag");
};
// Find shapes and their properties
const findShapeNames = (tr: Translation): [string, string][] => {
return foldSubObjs(findGPIName, tr);
};
// Find paths that are the properties of shapes
const findShapeProperties = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: [string, Field, Property][]
): [string, Field, Property][] => {
if (fexpr.tag === "FGPI") {
const properties = fexpr.contents[1];
const paths = Object.keys(properties).map(
(property) => [name, field, property] as [string, Field, Property]
);
return paths.concat(acc);
} else if (fexpr.tag === "FExpr") {
return acc;
} else throw Error("unknown tag");
};
// Find paths that are the properties of shapes
const findShapesProperties = (tr: Translation): [string, string, string][] => {
return foldSubObjs(findShapeProperties, tr);
};
// Find various kinds of functions
const findFieldFns = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: Either<StyleOptFn, StyleOptFn>[]
): Either<StyleOptFn, StyleOptFn>[] => {
if (fexpr.tag === "FExpr") {
if (fexpr.contents.tag === "OptEval") {
const e = fexpr.contents.contents;
// COMBAK: This throws away the function's Identifier for future debugging
// (Also, why doesn't typescript report an error when `e.name` is an Identifier but a StyleOptFn expects a string, using the `as` keyword?)
if (e.tag === "ObjFn") {
const res: Either<StyleOptFn, StyleOptFn> = ToLeft([
e.name.value,
e.args,
]);
return [res].concat(acc);
} else if (e.tag === "ConstrFn") {
const res: Either<StyleOptFn, StyleOptFn> = ToRight([
e.name.value,
e.args,
]);
return [res].concat(acc);
} else {
return acc;
}
}
}
return acc;
};
// Ported from `findObjfnsConstrs`
const findUserAppliedFns = (tr: Translation): [Fn[], Fn[]] => {
return convertFns(foldSubObjs(findFieldFns, tr));
};
const findFieldDefaultFns = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: Either<StyleOptFn, StyleOptFn>[]
): Either<StyleOptFn, StyleOptFn>[] => {
// TODO < Currently we have no default objectives/constraints, so it's not implemented
return [];
};
const findDefaultFns = (tr: Translation): [Fn[], Fn[]] => {
return convertFns(foldSubObjs(findFieldDefaultFns, tr));
};
const toFn = (t: OptType, [name, args]: StyleOptFn): Fn => {
return {
fname: name,
fargs: args,
optType: t,
};
};
const toFns = ([objfns, constrfns]: [StyleOptFn[], StyleOptFn[]]): [
Fn[],
Fn[]
] => {
return [
objfns.map((fn) => toFn("ObjFn", fn)),
constrfns.map((fn) => toFn("ConstrFn", fn)),
];
};
// COMBAK: Move this to utils
function partitionEithers<A, B>(es: Either<A, B>[]): [A[], B[]] {
return [
es.filter((e) => e.tag === "Left").map((e) => e.contents as A),
es.filter((e) => e.tag === "Right").map((e) => e.contents as B),
];
}
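// Example (illustrative): partitionEithers([{ tag: "Left", contents: 1 },
// { tag: "Right", contents: "a" }, { tag: "Left", contents: 2 }]) === [[1, 2], ["a"]].
// `convertFns` uses this to split objectives (Lefts) from constraints (Rights).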
const convertFns = (fns: Either<StyleOptFn, StyleOptFn>[]): [Fn[], Fn[]] => {
return toFns(partitionEithers(fns));
};
// Extract number from a more complicated type
// also ported from `lookupPaths`
const getNum = (e: TagExpr<VarAD> | IFGPI<VarAD>): number => {
if (e.tag === "OptEval") {
if (e.contents.tag === "Fix") {
return e.contents.contents;
}
if (e.contents.tag === "VaryAD") {
return e.contents.contents.val;
} else {
throw Error("internal error: invalid varying path");
}
} else if (e.tag === "Done") {
if (e.contents.tag === "FloatV") {
return numOf(e.contents.contents);
} else {
throw Error("internal error: invalid varying path");
}
} else if (e.tag === "Pending") {
throw Error("internal error: invalid varying path");
} else if (e.tag === "FGPI") {
throw Error("internal error: invalid varying path");
} else {
throw Error("internal error: unknown tag");
}
};
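// Example: getNum({ tag: "OptEval", contents: { tag: "Fix", contents: 3.14 } })
// === 3.14 (AST node fields elided); a "Done" FloatV is unwrapped via numOf.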
// ported from `lookupPaths`
// lookup paths with the expectation that each one is a float
export const lookupNumericPaths = (ps: Path[], tr: Translation): number[] => {
return ps.map((path) => findExprSafe(tr, path)).map(getNum);
};
const findFieldPending = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: Path[]
): Path[] => {
if (fexpr.tag === "FExpr") {
return acc;
} else if (fexpr.tag === "FGPI") {
const properties = fexpr.contents[1];
const pendingProps = Object.entries(properties)
.filter(([k, v]) => v.tag === "Pending")
.map((e: [string, TagExpr<VarAD>]) => e[0]);
// TODO: Pending properties currently don't support AccessPaths
return pendingProps
.map((property) => mkPath([name, field, property]))
.concat(acc);
} else throw Error("unknown tag");
};
// Find pending paths
// Find the paths to all pending, non-float, non-name properties
const findPending = (tr: Translation): Path[] => {
return foldSubObjs(findFieldPending, tr);
};
// ---- INITIALIZATION
const isFieldOrAccessPath = (p: Path): boolean => {
if (p.tag === "FieldPath") {
return true;
} else if (p.tag === "AccessPath") {
if (p.path.tag === "FieldPath" || p.path.tag === "PropertyPath") {
return true;
} else throw Error("unexpected sub-accesspath type");
}
return false;
};
// sample varying fields only (from the range defined by canvas dims) and store them in the translation
// example: A.val = OPTIMIZED
// This also samples varying access paths, e.g.
// Circle { center : (1.1, ?) ... } -- the second element is an access path that gets initialized here
// varying init paths are separated out and initialized with the value specified by the style writer
// NOTE: Mutates translation
const initFieldsAndAccessPaths = (
varyingPaths: Path[],
tr: Translation
): Translation => {
const varyingFieldsAndAccessPaths = varyingPaths.filter(isFieldOrAccessPath);
const canvas = getCanvas(tr);
const initVals = varyingFieldsAndAccessPaths.map(
(p: Path): TagExpr<VarAD> => {
// by default, sample randomly in canvas X range
let initVal = randFloat(...canvas.xRange);
// unless it's a VaryInit, in which case, don't sample, set to the init value
// TODO: This could technically use `varyingInitPathsAndVals`?
const res = findExpr(tr, p); // Some varying paths may not be in the translation. That's OK.
if (res.tag === "OptEval") {
if (res.contents.tag === "VaryInit") {
initVal = res.contents.contents;
}
}
return {
tag: "Done",
contents: {
tag: "FloatV",
contents: constOf(initVal),
},
};
}
);
const tr2 = insertExprs(
varyingFieldsAndAccessPaths,
initVals,
tr,
false,
true
);
return tr2;
};
// //////////// Generating an initial state (concrete values for all fields/properties needed to draw the GPIs)
// 1. Initialize all varying fields
// 2. Initialize all properties of all GPIs
// NOTE: since we store all varying paths separately, it is okay to mark the default values as Done -- they will still be optimized, if needed.
// TODO: document the logic here (e.g. only sampling varying floats) and think about whether to use translation here or [Shape a] since we will expose the sampler to users later
// TODO: Doesn't sample partial shape properties, like start: (?, 1.) <- this is actually sampled by initFieldsAndAccessPaths
// NOTE: Shape properties are mutated; they are returned as a courtesy
const initProperty = (
shapeType: ShapeTypeStr,
properties: GPIProps<VarAD>,
[propName, [propType, propSampler]]: [string, [PropType, Sampler]],
canvas: Canvas
): GPIProps<VarAD> => {
const propVal: Value<number> = propSampler(canvas);
const propValAD: Value<VarAD> = valueNumberToAutodiffConst(propVal);
const propValDone: TagExpr<VarAD> = { tag: "Done", contents: propValAD };
const styleSetting: TagExpr<VarAD> = properties[propName];
// Property not set in Style
if (!styleSetting) {
if (isPending(shapeType, propName)) {
properties[propName] = {
tag: "Pending",
contents: propValAD,
} as TagExpr<VarAD>;
return properties;
} else {
properties[propName] = propValDone; // Use the sampled one
return properties;
}
}
// Property set in Style
if (styleSetting.tag === "OptEval") {
if (styleSetting.contents.tag === "Vary") {
properties[propName] = propValDone; // X.prop = ?
return properties;
} else if (styleSetting.contents.tag === "VaryInit") {
// Initialize the varying variable to the property specified in Style
properties[propName] = {
tag: "Done",
contents: {
tag: "FloatV",
contents: varOf(styleSetting.contents.contents),
},
};
return properties;
} else if (styleSetting.contents.tag === "Vector") {
const v: Expr[] = styleSetting.contents.contents;
if (v.length === 2) {
// Sample a whole 2D vector, e.g. `Circle { center : [?, ?] }`
// (if only one element is set to ?, then presumably it's set by initializing an access path...? TODO: Check this)
// TODO: This hardcodes an uninitialized 2D vector to be initialized/inserted
if (v[0].tag === "Vary" && v[1].tag === "Vary") {
properties[propName] = propValDone;
return properties;
}
}
return properties;
} else {
return properties;
}
} else if (styleSetting.tag === "Done") {
// TODO: pending properties are only marked if the Style source does not set them explicitly
// Check if this is the right decision. We still give pending values a default such that the initial list of shapes can be generated without errors.
return properties;
}
throw Error("internal error: unknown tag or invalid value for property");
};
const mkShapeName = (s: string, f: Field): string => {
return `${s}.${f}`;
};
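// e.g. mkShapeName("A", "icon") === "A.icon"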
// COMBAK: This will require `getNames` to work
const initShape = (
tr: Translation,
[n, field]: [string, Field]
): Translation => {
const path = mkPath([n, field]);
const res = findExprSafe(tr, path); // This is safe (as used in GenOptProblem) since we only initialize shapes with paths from the translation
if (res.tag === "FGPI") {
const [stype, props] = res.contents as [string, GPIProps<VarAD>];
const def: ShapeDef = findDef(stype);
const gpiTemplate: [string, [PropType, Sampler]][] = Object.entries(
def.properties
);
const instantiatedGPIProps: GPIProps<VarAD> = gpiTemplate.reduce(
(
newGPI: GPIProps<VarAD>,
propTemplate: [string, [PropType, Sampler]]
): GPIProps<VarAD> =>
initProperty(stype, newGPI, propTemplate, getCanvas(tr)),
clone(props)
); // NOTE: `initProperty` mutates its input, so the `props` from the translation is cloned here, so the one in the translation itself isn't mutated
// Insert the name of the shape into its prop dict
// NOTE: getShapes resolves the names + we don't use the names of the shapes in the translation
// The name-adding logic can be removed but is left in for debugging
const shapeName = mkShapeName(n, field);
instantiatedGPIProps.name = {
tag: "Done",
contents: {
tag: "StrV",
contents: shapeName,
},
};
const gpi: IFGPI<VarAD> = {
tag: "FGPI",
contents: [stype, instantiatedGPIProps],
};
return insertGPI(path, gpi, tr);
} else throw Error("expected GPI but got field");
};
const initShapes = (tr: Translation, pths: [string, string][]): Translation => {
return pths.reduce(initShape, tr);
};
//#region layering
const findLayeringExpr = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: Expr[]
): Expr[] => {
if (fexpr.tag === "FExpr") {
if (fexpr.contents.tag === "OptEval") {
if (fexpr.contents.contents.tag === "Layering") {
const layering: ILayering = fexpr.contents.contents;
return [layering as Expr].concat(acc);
}
}
}
return acc;
};
const findLayeringExprs = (tr: Translation): Expr[] => {
return foldSubObjs(findLayeringExpr, tr);
};
const lookupGPIName = (p: Path, tr: Translation): string => {
if (p.tag === "FieldPath") {
// COMBAK: Deal with path synonyms / aliases by looking them up?
return getShapeName(p.name.contents.value, p.field.value);
} else {
throw Error("expected path to GPI");
}
};
const findNames = (e: Expr, tr: Translation): [string, string] => {
if (e.tag === "Layering") {
return [lookupGPIName(e.below, tr), lookupGPIName(e.above, tr)];
} else {
throw Error("unknown tag");
}
};
const topSortLayering = (
allGPINames: string[],
partialOrderings: [string, string][]
): MaybeVal<string[]> => {
const layerGraph: Graph = new Graph();
allGPINames.map((name: string) => layerGraph.setNode(name));
// topsort will return the most upstream node first. Since `shapeOrdering` is consistent with the SVG drawing order, we assign edges as "below => above".
partialOrderings.map(([below, above]: [string, string]) =>
layerGraph.setEdge(below, above)
);
try {
const globalOrdering: string[] = alg.topsort(layerGraph);
return { tag: "Just", contents: globalOrdering };
} catch (e) {
return { tag: "Nothing" };
}
};
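// Example (shape names are illustrative): topSortLayering(["A.icon", "B.icon"],
// [["A.icon", "B.icon"]]) returns { tag: "Just", contents: ["A.icon", "B.icon"] },
// so the `below` shape is drawn first; a cyclic input such as
// [["A.icon", "B.icon"], ["B.icon", "A.icon"]] returns { tag: "Nothing" }.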
const computeShapeOrdering = (tr: Translation): string[] => {
const layeringExprs = findLayeringExprs(tr);
// Returns list of layering specifications [below, above]
const partialOrderings: [string, string][] = layeringExprs.map((e: Expr): [
string,
string
] => findNames(e, tr));
const allGPINames: string[] = findShapeNames(
tr
).map((e: [string, Field]): string => getShapeName(e[0], e[1]));
const shapeOrdering = topSortLayering(allGPINames, partialOrderings);
// TODO: Errors for labeling
if (shapeOrdering.tag === "Nothing") {
throw Error("no shape ordering possible from layering");
}
return shapeOrdering.contents;
};
//#endregion
const isVaryingInitPath = (
p: Path,
tr: Translation
): [Path, MaybeVal<number>] => {
const res = findExpr(tr, p); // Some varying paths may not be in the translation. That's OK.
if (res.tag === "OptEval") {
if (res.contents.tag === "VaryInit") {
return [p, { tag: "Just", contents: res.contents.contents }];
}
}
return [p, { tag: "Nothing" }];
};
// ---- MAIN FUNCTION
// COMBAK: Add optConfig as param?
const genState = (trans: Translation): Result<State, StyleErrors> => {
const varyingPaths = findVarying(trans);
// NOTE: the properties in uninitializedPaths are NOT floats. Floats are included in varyingPaths already
const varyingInitPathsAndVals: [Path, number][] = (varyingPaths
.map((p) => isVaryingInitPath(p, trans))
.filter(
(tup: [Path, MaybeVal<number>]): boolean => tup[1].tag === "Just"
) as [Path, Just<number>][]) // TODO: Not sure how to get typescript to understand `filter`...
.map((tup: [Path, Just<number>]) => [tup[0], tup[1].contents]);
const varyingInitInfo: { [pathStr: string]: number } = Object.fromEntries(
varyingInitPathsAndVals.map((e) => [prettyPrintPath(e[0]), e[1]])
);
const uninitializedPaths = findUninitialized(trans);
const shapePathList: [string, string][] = findShapeNames(trans);
const shapePaths = shapePathList.map(mkPath);
const canvasErrs = checkCanvas(trans);
if (canvasErrs.length > 0) {
return err(canvasErrs);
}
// sample varying vals and instantiate all the non-float base properties of every GPI in the translation
// this has to be done before `initFieldsAndAccessPaths` as AccessPaths may depend on shapes' properties already having been initialized
const transInitShapes = initShapes(trans, shapePathList);
// sample varying fields and access paths, and put them in the translation
const transInitAll = initFieldsAndAccessPaths(varyingPaths, transInitShapes);
// CHECK TRANSLATION
// Have to check it after the shapes are initialized, otherwise it will complain about uninitialized shape paths
const transErrs = checkTranslation(transInitAll);
if (transErrs.length > 0) {
return err(transErrs);
}
const shapeProperties = findShapesProperties(transInitAll);
const [objfnsDecl, constrfnsDecl] = findUserAppliedFns(transInitAll);
const [objfnsDefault, constrfnsDefault] = findDefaultFns(transInitAll);
const [objFns, constrFns] = [
objfnsDecl.concat(objfnsDefault),
constrfnsDecl.concat(constrfnsDefault),
];
const [initialGPIs, transEvaled] = [[], transInitAll];
const initVaryingState: number[] = lookupNumericPaths(
varyingPaths,
transEvaled
);
const pendingPaths = findPending(transInitAll);
const shapeOrdering = computeShapeOrdering(transInitAll); // deal with layering
const initState = {
shapes: initialGPIs, // These start out empty because they are initialized in the frontend via `evalShapes` in the Evaluator
shapePaths,
shapeProperties,
shapeOrdering,
translation: transInitAll, // This is the result of the data processing
originalTranslation: clone(trans),
varyingPaths,
varyingValues: initVaryingState,
varyingInitInfo,
uninitializedPaths,
pendingPaths,
objFns,
constrFns,
// `params` are initialized properly by optimization; the only thing it needs is the weight (for the objective function synthesis)
params: ({
optStatus: "NewIter" as const,
weight: initConstraintWeight,
lbfgsInfo: defaultLbfgsParams,
UOround: -1,
EPround: -1,
} as unknown) as Params,
labelCache: [],
rng: undefined as any,
policyParams: undefined as any,
oConfig: undefined as any,
selectorMatches: undefined as any,
varyingMap: {} as any, // TODO: Should this be empty?
canvas: getCanvas(trans),
};
return ok(initState);
};
//#endregion
export const parseStyle = (p: string): Result<StyProg, ParseError> => {
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(styleGrammar));
try {
const { results } = parser.feed(p).feed("\n");
if (results.length > 0) {
const ast: StyProg = results[0] as StyProg;
return ok(ast);
} else {
return err(parseError(`Unexpected end of input`, lastLocation(parser)));
}
} catch (e) {
return err(parseError(e, lastLocation(parser)));
}
};
//#region Checking translation
const isStyErr = (res: TagExpr<VarAD> | IFGPI<VarAD> | StyleError): boolean =>
res.tag !== "FGPI" && !isTagExpr(res);
const findPathsExpr = (expr: Expr): Path[] => {
// TODO: Factor the expression-folding pattern out from here and `checkBlockExpr`
if (isPath(expr)) {
return [expr];
} else if (
expr.tag === "CompApp" ||
expr.tag === "ObjFn" ||
expr.tag === "ConstrFn"
) {
return _.flatMap(expr.args, findPathsExpr);
} else if (expr.tag === "BinOp") {
return _.flatMap([expr.left, expr.right], findPathsExpr);
} else if (expr.tag === "UOp") {
return findPathsExpr(expr.arg);
} else if (
expr.tag === "List" ||
expr.tag === "Vector" ||
expr.tag === "Matrix"
) {
return _.flatMap(expr.contents, findPathsExpr);
} else if (expr.tag === "ListAccess") {
return [expr.contents[0]];
} else if (expr.tag === "GPIDecl") {
return _.flatMap(
expr.properties.map((p) => p.value),
findPathsExpr
);
} else if (expr.tag === "Layering") {
return [expr.below, expr.above];
} else if (expr.tag === "PluginAccess") {
return _.flatMap([expr.contents[1], expr.contents[2]], findPathsExpr);
} else if (expr.tag === "Tuple") {
return _.flatMap([expr.contents[0], expr.contents[1]], findPathsExpr);
} else if (expr.tag === "VectorAccess") {
return [expr.contents[0]].concat(findPathsExpr(expr.contents[1]));
} else if (expr.tag === "MatrixAccess") {
return [expr.contents[0]].concat(
_.flatMap(expr.contents[1], findPathsExpr)
);
} else if (
expr.tag === "Fix" ||
expr.tag === "Vary" ||
expr.tag === "VaryInit" ||
expr.tag === "VaryAD" ||
expr.tag === "StringLit" ||
expr.tag === "BoolLit"
) {
return [];
} else {
console.error("expr", expr);
throw Error("unknown tag");
}
};
// Find all paths given explicitly anywhere in an expression in the translation.
// (e.g. `x.shape above y.shape` <-- return [`x.shape`, `y.shape`])
const findPathsField = (
name: string,
field: Field,
fexpr: FieldExpr<VarAD>,
acc: Path[]
): Path[] => {
if (fexpr.tag === "FExpr") {
// Only look deeper in expressions, because that's where paths might be
if (fexpr.contents.tag === "OptEval") {
const res: Path[] = findPathsExpr(fexpr.contents.contents);
return acc.concat(res);
} else {
return acc;
}
} else if (fexpr.tag === "FGPI") {
// Get any exprs that the properties are set to
const propExprs: Expr[] = Object.entries(fexpr.contents[1])
.map((e) => e[1])
.filter((e: TagExpr<VarAD>): boolean => e.tag === "OptEval")
.map((e) => e as IOptEval<VarAD>) // Have to cast because TypeScript doesn't know the type changed from the filter above
.map((e: IOptEval<VarAD>): Expr => e.contents);
const res: Path[] = _.flatMap(propExprs, findPathsExpr);
return acc.concat(res);
}
throw Error("unknown tag");
};
// Check that canvas dimensions exist and have the proper type.
const checkCanvas = (tr: Translation): StyleErrors => {
let errs: StyleErrors = [];
if (!("canvas" in tr.trMap)) {
errs.push({
tag: "CanvasNonexistentError",
});
return errs;
}
if (!("width" in tr.trMap.canvas)) {
errs.push({
tag: "CanvasNonexistentDimsError",
attr: "width",
kind: "missing",
});
} else if (!("contents" in tr.trMap.canvas.width.contents)) {
errs.push({
tag: "CanvasNonexistentDimsError",
attr: "width",
kind: "GPI",
});
} else if (!("contents" in tr.trMap.canvas.width.contents.contents)) {
errs.push({
tag: "CanvasNonexistentDimsError",
attr: "width",
kind: "uninitialized",
});
} else if (
typeof tr.trMap.canvas.width.contents.contents.contents !== "number"
) {
const val = tr.trMap.canvas.width.contents.contents;
let type;
if (typeof val === "object" && "tag" in val) {
type = val.tag;
} else {
type = typeof val;
}
errs.push({
tag: "CanvasNonexistentDimsError",
attr: "width",
kind: "wrong type",
type,
});
}
if (!("height" in tr.trMap.canvas)) {
errs.push({
tag: "CanvasNonexistentDimsError",
attr: "height",
kind: "missing",
});
} else if (!("contents" in tr.trMap.canvas.height.contents)) {
errs.push({
tag: "CanvasNonexistentDimsError",
attr: "height",
kind: "GPI",
});
} else if (!("contents" in tr.trMap.canvas.height.contents.contents)) {
errs.push({
tag: "CanvasNonexistentDimsError",
attr: "height",
kind: "uninitialized",
});
} else if (
typeof tr.trMap.canvas.height.contents.contents.contents !== "number"
) {
const val = tr.trMap.canvas.height.contents.contents;
let type;
if (typeof val === "object" && "tag" in val) {
type = val.tag;
} else {
type = typeof val;
}
errs.push({
tag: "CanvasNonexistentDimsError",
attr: "height",
kind: "wrong type",
type,
});
}
return errs;
};
// Check translation integrity
const checkTranslation = (trans: Translation): StyleErrors => {
// Look up all paths used anywhere in the translation's expressions and verify they exist in the translation
const allPaths: Path[] = foldSubObjs(findPathsField, trans);
const allPathsUniq: Path[] = _.uniqBy(allPaths, prettyPrintPath);
const exprs = allPathsUniq.map((p) => findExpr(trans, p));
const errs = exprs.filter(isStyErr);
return errs as StyleErrors; // Should be true due to the filter above, though you can't use booleans and the `res is StyleError` assertion together.
};
//#endregion Checking translation
/* Precondition: checkCanvas returns without error */
export const getCanvas = (tr: Translation): Canvas => {
let width = ((tr.trMap.canvas.width.contents as TagExpr<VarAD>)
.contents as Value<VarAD>).contents as number;
let height = ((tr.trMap.canvas.height.contents as TagExpr<VarAD>)
.contents as Value<VarAD>).contents as number;
return {
width,
height,
size: [width, height],
xRange: [-width / 2, width / 2],
yRange: [-height / 2, height / 2],
};
};
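// Example: a canvas with width 800 and height 600 yields
// { width: 800, height: 600, size: [800, 600], xRange: [-400, 400], yRange: [-300, 300] }.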
export const compileStyle = (
stySource: string,
subEnv: SubstanceEnv,
varEnv: Env
): Result<State, PenroseError> => {
const subProg = subEnv.ast;
const astOk = parseStyle(stySource);
let styProgInit;
if (astOk.isOk()) {
styProgInit = astOk.value;
} else {
return err({ ...astOk.error, errorType: "StyleError" });
}
const labelMap = subEnv.labels;
// Name anon statements
const styProg: StyProg = nameAnonStatements(styProgInit);
log.info("old prog", styProgInit);
log.info("new prog, with named anon statements", styProg);
// Check selectors; return list of selector environments (`checkSels`)
const selEnvs = checkSelsAndMakeEnv(varEnv, styProg.blocks);
// TODO(errors/warn): distinguish between errors and warnings
const selErrs: StyleErrors = _.flatMap(selEnvs, (e) =>
e.warnings.concat(e.errors)
);
if (selErrs.length > 0) {
// TODO(errors): Report all of them, not just the first?
return err(toStyleErrors(selErrs));
}
// Leaving these logs in because they are still useful for debugging, but TODO: remove them
log.info("selEnvs", selEnvs);
// Find substitutions (`find_substs_prog`)
const subss = findSubstsProg(
varEnv,
subEnv,
subProg,
styProg.blocks,
selEnvs
); // TODO: Use `eqEnv`
// TODO: I guess `subss` is not actually used? remove?
log.info("substitutions", subss);
// Translate style program
const styVals: number[] = []; // COMBAK: Deal with style values when we have plugins
const translateRes = translateStyProg(
varEnv,
subEnv,
subProg,
styProg,
labelMap,
styVals
);
log.info("translation (before genOptProblem)", translateRes);
// Translation failed somewhere
if (translateRes.tag === "Left") {
return err(toStyleErrors(translateRes.contents));
}
const trans = translateRes.contents;
if (trans.warnings.length > 0) {
// TODO(errors): these errors are currently returned as warnings -- maybe systematize it
log.info("Returning warnings as errors");
return err(toStyleErrors(trans.warnings));
}
// TODO(errors): `findExprsSafe` shouldn't fail (as used in `genOptProblemAndState`, since all the paths are generated from the translation) but could always be safer...
const initState: Result<State, StyleErrors> = genState(trans);
log.info("init state from GenOptProblem", initState);
if (initState.isErr()) {
return err(toStyleErrors(initState.error));
}
return ok(initState.value);
};
| toRight |
tcx.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Methods for the various MIR types. These are intended for use after
* building is complete.
*/
use mir::*;
use ty::subst::{Subst, Substs};
use ty::{self, AdtDef, Ty, TyCtxt};
use hir;
use ty::util::IntTypeExt;
#[derive(Copy, Clone, Debug)]
pub enum | <'tcx> {
/// Normal type.
Ty { ty: Ty<'tcx> },
/// Downcast to a particular variant of an enum.
Downcast { adt_def: &'tcx AdtDef,
substs: &'tcx Substs<'tcx>,
variant_index: usize },
}
impl<'a, 'gcx, 'tcx> PlaceTy<'tcx> {
pub fn from_ty(ty: Ty<'tcx>) -> PlaceTy<'tcx> {
PlaceTy::Ty { ty }
}
pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
match *self {
PlaceTy::Ty { ty } =>
ty,
PlaceTy::Downcast { adt_def, substs, variant_index: _ } =>
tcx.mk_adt(adt_def, substs),
}
}
/// `place_ty.field_ty(tcx, f)` computes the type at a given field
/// of a record or enum-variant. (Most clients of `PlaceTy` can
/// instead just extract the relevant type directly from their
/// `PlaceElem`, but some instances of `ProjectionElem<V, T>` do
/// not carry a `Ty` for `T`.)
///
/// Note that the resulting type has not been normalized.
pub fn field_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, f: &Field) -> Ty<'tcx>
{
// Pass `0` here so it can be used as a "default" variant_index in the first arm below
let answer = match (self, 0) {
(PlaceTy::Ty {
ty: &ty::TyS { sty: ty::TyKind::Adt(adt_def, substs), .. } }, variant_index) |
(PlaceTy::Downcast { adt_def, substs, variant_index }, _) => {
let variant_def = &adt_def.variants[variant_index];
let field_def = &variant_def.fields[f.index()];
field_def.ty(tcx, substs)
}
(PlaceTy::Ty { ty }, _) => {
match ty.sty {
ty::Tuple(ref tys) => tys[f.index()],
_ => bug!("extracting field of non-tuple non-adt: {:?}", self),
}
}
};
debug!("field_ty self: {:?} f: {:?} yields: {:?}", self, f, answer);
answer
}
/// Convenience wrapper around `projection_ty_core` for
/// `PlaceElem`, where we can just use the `Ty` that is already
/// stored inline on field projection elems.
pub fn projection_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
elem: &PlaceElem<'tcx>)
-> PlaceTy<'tcx>
{
self.projection_ty_core(tcx, elem, |_, _, ty| -> Result<Ty<'tcx>, ()> { Ok(ty) })
.unwrap()
}
/// `place_ty.projection_ty_core(tcx, elem, |...| { ... })`
/// projects `place_ty` onto `elem`, returning the appropriate
/// `Ty` or downcast variant corresponding to that projection.
/// The `handle_field` callback must map a `Field` to its `Ty`,
/// (which should be trivial when `T` = `Ty`).
pub fn projection_ty_core<V, T, E>(
self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
elem: &ProjectionElem<'tcx, V, T>,
mut handle_field: impl FnMut(&Self, &Field, &T) -> Result<Ty<'tcx>, E>)
-> Result<PlaceTy<'tcx>, E>
where
V: ::std::fmt::Debug, T: ::std::fmt::Debug
{
let answer = match *elem {
ProjectionElem::Deref => {
let ty = self.to_ty(tcx)
.builtin_deref(true)
.unwrap_or_else(|| {
bug!("deref projection of non-dereferencable ty {:?}", self)
})
.ty;
PlaceTy::Ty {
ty,
}
}
ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } =>
PlaceTy::Ty {
ty: self.to_ty(tcx).builtin_index().unwrap()
},
ProjectionElem::Subslice { from, to } => {
let ty = self.to_ty(tcx);
PlaceTy::Ty {
ty: match ty.sty {
ty::Array(inner, size) => {
let size = size.unwrap_usize(tcx);
let len = size - (from as u64) - (to as u64);
tcx.mk_array(inner, len)
}
ty::Slice(..) => ty,
_ => {
bug!("cannot subslice non-array type: `{:?}`", self)
}
}
}
}
ProjectionElem::Downcast(adt_def1, index) =>
match self.to_ty(tcx).sty {
ty::Adt(adt_def, substs) => {
assert!(adt_def.is_enum());
assert!(index < adt_def.variants.len());
assert_eq!(adt_def, adt_def1);
PlaceTy::Downcast { adt_def,
substs,
variant_index: index }
}
_ => {
bug!("cannot downcast non-ADT type: `{:?}`", self)
}
},
ProjectionElem::Field(ref f, ref fty) =>
PlaceTy::Ty { ty: handle_field(&self, f, fty)? },
};
debug!("projection_ty self: {:?} elem: {:?} yields: {:?}", self, elem, answer);
Ok(answer)
}
}
EnumTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for PlaceTy<'tcx> {
(PlaceTy::Ty) { ty },
(PlaceTy::Downcast) { adt_def, substs, variant_index },
}
}
impl<'tcx> Place<'tcx> {
pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PlaceTy<'tcx>
where D: HasLocalDecls<'tcx>
{
match *self {
Place::Local(index) =>
PlaceTy::Ty { ty: local_decls.local_decls()[index].ty },
Place::Promoted(ref data) => PlaceTy::Ty { ty: data.1 },
Place::Static(ref data) =>
PlaceTy::Ty { ty: data.ty },
Place::Projection(ref proj) =>
proj.base.ty(local_decls, tcx).projection_ty(tcx, &proj.elem),
}
}
/// If this is a field projection, and the field is being projected from a closure type,
/// then returns the index of the field being projected. Note that this closure will always
/// be `self` in the current MIR, because that is the only time we directly access the fields
/// of a closure type.
pub fn is_upvar_field_projection<'cx, 'gcx>(&self, mir: &'cx Mir<'tcx>,
tcx: &TyCtxt<'cx, 'gcx, 'tcx>) -> Option<Field> {
let (place, by_ref) = if let Place::Projection(ref proj) = self {
if let ProjectionElem::Deref = proj.elem {
(&proj.base, true)
} else {
(self, false)
}
} else {
(self, false)
};
match place {
Place::Projection(ref proj) => match proj.elem {
ProjectionElem::Field(field, _ty) => {
let base_ty = proj.base.ty(mir, *tcx).to_ty(*tcx);
if (base_ty.is_closure() || base_ty.is_generator()) &&
(!by_ref || mir.upvar_decls[field.index()].by_ref)
{
Some(field)
} else {
None
}
},
_ => None,
}
_ => None,
}
}
}
pub enum RvalueInitializationState {
Shallow,
Deep
}
impl<'tcx> Rvalue<'tcx> {
pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>
where D: HasLocalDecls<'tcx>
{
match *self {
Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),
Rvalue::Repeat(ref operand, count) => {
tcx.mk_array(operand.ty(local_decls, tcx), count)
}
Rvalue::Ref(reg, bk, ref place) => {
let place_ty = place.ty(local_decls, tcx).to_ty(tcx);
tcx.mk_ref(reg,
ty::TypeAndMut {
ty: place_ty,
mutbl: bk.to_mutbl_lossy()
}
)
}
Rvalue::Len(..) => tcx.types.usize,
Rvalue::Cast(.., ty) => ty,
Rvalue::BinaryOp(op, ref lhs, ref rhs) => {
let lhs_ty = lhs.ty(local_decls, tcx);
let rhs_ty = rhs.ty(local_decls, tcx);
op.ty(tcx, lhs_ty, rhs_ty)
}
Rvalue::CheckedBinaryOp(op, ref lhs, ref rhs) => {
let lhs_ty = lhs.ty(local_decls, tcx);
let rhs_ty = rhs.ty(local_decls, tcx);
let ty = op.ty(tcx, lhs_ty, rhs_ty);
tcx.intern_tup(&[ty, tcx.types.bool])
}
Rvalue::UnaryOp(UnOp::Not, ref operand) |
Rvalue::UnaryOp(UnOp::Neg, ref operand) => {
operand.ty(local_decls, tcx)
}
Rvalue::Discriminant(ref place) => {
let ty = place.ty(local_decls, tcx).to_ty(tcx);
if let ty::Adt(adt_def, _) = ty.sty {
adt_def.repr.discr_type().to_ty(tcx)
} else {
// This can only be `0`, for now, so `u8` will suffice.
tcx.types.u8
}
}
Rvalue::NullaryOp(NullOp::Box, t) => tcx.mk_box(t),
Rvalue::NullaryOp(NullOp::SizeOf, _) => tcx.types.usize,
Rvalue::Aggregate(ref ak, ref ops) => {
match **ak {
AggregateKind::Array(ty) => {
tcx.mk_array(ty, ops.len() as u64)
}
AggregateKind::Tuple => {
tcx.mk_tup(ops.iter().map(|op| op.ty(local_decls, tcx)))
}
AggregateKind::Adt(def, _, substs, _, _) => {
tcx.type_of(def.did).subst(tcx, substs)
}
AggregateKind::Closure(did, substs) => {
tcx.mk_closure(did, substs)
}
AggregateKind::Generator(did, substs, movability) => {
tcx.mk_generator(did, substs, movability)
}
}
}
}
}
#[inline]
/// Returns whether this rvalue is deeply initialized (most rvalues) or
/// whether it's only shallowly initialized (`Rvalue::Box`).
pub fn initialization_state(&self) -> RvalueInitializationState {
match *self {
Rvalue::NullaryOp(NullOp::Box, _) => RvalueInitializationState::Shallow,
_ => RvalueInitializationState::Deep
}
}
}
impl<'tcx> Operand<'tcx> {
pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>
where D: HasLocalDecls<'tcx>
{
match self {
&Operand::Copy(ref l) |
&Operand::Move(ref l) => l.ty(local_decls, tcx).to_ty(tcx),
&Operand::Constant(ref c) => c.ty,
}
}
}
impl<'tcx> BinOp {
pub fn ty<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
lhs_ty: Ty<'tcx>,
rhs_ty: Ty<'tcx>)
-> Ty<'tcx> {
// FIXME: handle SIMD correctly
match self {
&BinOp::Add | &BinOp::Sub | &BinOp::Mul | &BinOp::Div | &BinOp::Rem |
&BinOp::BitXor | &BinOp::BitAnd | &BinOp::BitOr => {
// these should be integers or floats of the same size.
assert_eq!(lhs_ty, rhs_ty);
lhs_ty
}
&BinOp::Shl | &BinOp::Shr | &BinOp::Offset => {
lhs_ty // lhs_ty can be != rhs_ty
}
&BinOp::Eq | &BinOp::Lt | &BinOp::Le |
&BinOp::Ne | &BinOp::Ge | &BinOp::Gt => {
tcx.types.bool
}
}
}
}
impl BorrowKind {
pub fn to_mutbl_lossy(self) -> hir::Mutability {
match self {
BorrowKind::Mut { .. } => hir::MutMutable,
BorrowKind::Shared => hir::MutImmutable,
// We have no type corresponding to a unique imm borrow, so
// use `&mut`. It gives all the capabilities of an `&uniq`
// and hence is a safe "over approximation".
BorrowKind::Unique => hir::MutMutable,
// We have no type corresponding to a shallow borrow, so use
// `&` as an approximation.
BorrowKind::Shallow => hir::MutImmutable,
}
}
}
impl BinOp {
pub fn to_hir_binop(self) -> hir::BinOpKind {
match self {
BinOp::Add => hir::BinOpKind::Add,
BinOp::Sub => hir::BinOpKind::Sub,
BinOp::Mul => hir::BinOpKind::Mul,
BinOp::Div => hir::BinOpKind::Div,
BinOp::Rem => hir::BinOpKind::Rem,
BinOp::BitXor => hir::BinOpKind::BitXor,
BinOp::BitAnd => hir::BinOpKind::BitAnd,
BinOp::BitOr => hir::BinOpKind::BitOr,
BinOp::Shl => hir::BinOpKind::Shl,
BinOp::Shr => hir::BinOpKind::Shr,
BinOp::Eq => hir::BinOpKind::Eq,
BinOp::Ne => hir::BinOpKind::Ne,
BinOp::Lt => hir::BinOpKind::Lt,
BinOp::Gt => hir::BinOpKind::Gt,
BinOp::Le => hir::BinOpKind::Le,
BinOp::Ge => hir::BinOpKind::Ge,
BinOp::Offset => unreachable!()
}
}
}
| PlaceTy |
specificimpedanceInput.py | from data.user_input.project.printMessageInput import PrintMessageInput
import os
from os.path import basename
import numpy as np
from PyQt5.QtWidgets import QToolButton, QPushButton, QLineEdit, QFileDialog, QDialog, QTabWidget, QWidget, QTreeWidgetItem, QTreeWidget, QSpinBox
from PyQt5.QtGui import QIcon
from PyQt5.QtGui import QColor, QBrush
from PyQt5.QtCore import Qt
from PyQt5 import uic
import configparser
from shutil import copyfile
from pulse.utils import remove_bc_from_file
class SpecificImpedanceInput(QDialog):
def __init__(self, project, opv, *args, **kwargs):
super().__init__(*args, **kwargs)
uic.loadUi('data/user_input/ui/Model/Setup/Acoustic/specificImpedanceInput.ui', self)
icons_path = 'data\\icons\\'
self.icon = QIcon(icons_path + 'pulse.png')
self.setWindowIcon(self.icon)
self.setWindowFlags(Qt.WindowStaysOnTopHint)
self.setWindowModality(Qt.WindowModal)
self.opv = opv
self.opv.setInputObject(self)
self.transform_points = self.opv.transformPoints
self.project = project
self.preprocessor = project.preprocessor
self.before_run = self.preprocessor.get_model_checks()
self.userPath = os.path.expanduser('~')
self.new_load_path_table = ""
self.project_folder_path = project.project_folder_path
self.acoustic_bc_info_path = project.file._node_acoustic_path
self.nodes = project.preprocessor.nodes
self.specific_impedance = None
self.nodes_typed = []
self.imported_table = False
self.remove_specific_impedance = False
self.lineEdit_nodeID = self.findChild(QLineEdit, 'lineEdit_nodeID')
self.lineEdit_specific_impedance_real = self.findChild(QLineEdit, 'lineEdit_specific_impedance_real')
self.lineEdit_specific_impedance_imag = self.findChild(QLineEdit, 'lineEdit_specific_impedance_imag')
self.lineEdit_load_table_path = self.findChild(QLineEdit, 'line_load_table_path')
self.tabWidget_specific_impedance = self.findChild(QTabWidget, "tabWidget_specific_impedance")
self.tabWidget_specific_impedance.currentChanged.connect(self.tabEvent_specific_impedance)
self.tab_single_values = self.tabWidget_specific_impedance.findChild(QWidget, "tab_single_values")
self.tab_table_values = self.tabWidget_specific_impedance.findChild(QWidget, "tab_table_values")
self.treeWidget_specific_impedance = self.findChild(QTreeWidget, 'treeWidget_specific_impedance')
self.treeWidget_specific_impedance.setColumnWidth(1, 20)
self.treeWidget_specific_impedance.setColumnWidth(2, 80)
self.treeWidget_specific_impedance.itemClicked.connect(self.on_click_item)
self.treeWidget_specific_impedance.itemDoubleClicked.connect(self.on_doubleclick_item)
self.toolButton_load_table = self.findChild(QToolButton, 'toolButton_load_table')
self.toolButton_load_table.clicked.connect(self.load_specific_impedance_table)
self.pushButton_single_values_confirm = self.findChild(QPushButton, 'pushButton_single_values_confirm')
self.pushButton_single_values_confirm.clicked.connect(self.check_single_values)
self.pushButton_table_values_confirm = self.findChild(QPushButton, 'pushButton_table_values_confirm')
self.pushButton_table_values_confirm.clicked.connect(self.check_table_values)
self.lineEdit_skiprows = self.findChild(QSpinBox, 'spinBox')
self.pushButton_remove_bc_confirm = self.findChild(QPushButton, 'pushButton_remove_bc_confirm')
self.pushButton_remove_bc_confirm.clicked.connect(self.check_remove_bc_from_node)
self.pushButton_remove_bc_confirm_2 = self.findChild(QPushButton, 'pushButton_remove_bc_confirm_2')
self.pushButton_remove_bc_confirm_2.clicked.connect(self.check_remove_bc_from_node)
self.writeNodes(self.opv.getListPickedPoints())
self.load_nodes_info()
self.exec_()
def keyPressEvent(self, event):
if event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return:
if self.tabWidget_specific_impedance.currentIndex()==0:
self.check_single_values()
if self.tabWidget_specific_impedance.currentIndex()==1:
self.check_table_values()
elif event.key() == Qt.Key_Delete:
if self.tabWidget_specific_impedance.currentIndex()==2:
self.check_remove_bc_from_node()
elif event.key() == Qt.Key_Escape:
self.close()
def tabEvent_specific_impedance(self):
self.current_tab = self.tabWidget_specific_impedance.currentIndex()
if self.current_tab == 2:
self.lineEdit_nodeID.setDisabled(True)
else:
self.lineEdit_nodeID.setDisabled(False)
def writeNodes(self, list_node_ids):
text = ""
for node in list_node_ids:
text += "{}, ".format(node)
self.lineEdit_nodeID.setText(text)
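# e.g. writeNodes([1, 2]) sets the line edit text to "1, 2, " -- note the
# trailing separator, which the downstream node-ID check presumably strips.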
def check_complex_entries(self, lineEdit_real, lineEdit_imag):
self.stop = False
if lineEdit_real.text() != "":
try:
real_F = float(lineEdit_real.text())
except Exception:
window_title ="ERROR"
title = "Invalid entry to the specific impedance"
message = "Wrong input for real part of specific impedance."
PrintMessageInput([title, message, window_title])
self.stop = True
return
else:
real_F = 0
if lineEdit_imag.text() != "":
try:
imag_F = float(lineEdit_imag.text())
except Exception:
window_title ="ERROR"
title = "Invalid entry to the specific impedance"
message = "Wrong input for imaginary part of specific impedance."
PrintMessageInput([title, message, window_title])
self.stop = True
return
else:
imag_F = 0
if real_F == 0 and imag_F == 0:
return None
else:
return real_F + 1j*imag_F
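# Example (illustrative values): real="80", imag="-15" yields (80-15j);
# both fields empty yields None; a non-numeric entry opens an error
# dialog and sets self.stop to True.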
def check_single_values(self):
lineEdit_nodeID = self.lineEdit_nodeID.text()
self.stop, self.nodes_typed = self.before_run.check_input_NodeID(lineEdit_nodeID)
if self.stop:
return
specific_impedance = self.check_complex_entries(self.lineEdit_specific_impedance_real, self.lineEdit_specific_impedance_imag)
if self.stop:
return
if specific_impedance is not None:
self.specific_impedance = specific_impedance
self.project.set_specific_impedance_bc_by_node(self.nodes_typed, self.specific_impedance, False)
self.transform_points(self.nodes_typed)
self.close()
else:
window_title ="ERROR"
title = "Additional inputs required"
message = "You must to inform at least one nodal load to confirm the input!"
PrintMessageInput([title, message, window_title])
def load_table(self, lineEdit, header):
self.basename = ""
window_label = 'Choose a table to import the specific impedance'
self.path_imported_table, _type = QFileDialog.getOpenFileName(None, window_label, self.userPath, 'Files (*.dat; *.csv)')
if self.path_imported_table == "":
return "", ""
self.basename = os.path.basename(self.path_imported_table)
lineEdit.setText(self.path_imported_table)
if self.basename != "":
self.imported_table_name = self.basename
if "\\" in self.project_folder_path:
self.new_load_path_table = "{}\\{}".format(self.project_folder_path, self.basename)
elif "/" in self.project_folder_path:
self.new_load_path_table = "{}/{}".format(self.project_folder_path, self.basename)
try:
skiprows = int(self.lineEdit_skiprows.text())
imported_file = np.loadtxt(self.path_imported_table, delimiter=",", skiprows=skiprows)
except Exception as error_log:
window_title ="ERROR"
title = "Error reached while loading table"
message = f" {str(error_log)} \n\nIt is recommended to skip the header rows."
PrintMessageInput([title, message, window_title])
return
if imported_file.shape[1] < 3:
window_title ="ERROR"
title = "Error reached while loading table"
message = "The imported table has insufficient number of columns. The spectrum \n"
message += "data must have frequencies, real and imaginary columns."
PrintMessageInput([title, message, window_title])
return
try:
self.imported_values = imported_file[:,1] + 1j*imported_file[:,2]
if imported_file.shape[1]>2:
self.frequencies = imported_file[:,0]
self.f_min = self.frequencies[0]
self.f_max = self.frequencies[-1]
self.f_step = self.frequencies[1] - self.frequencies[0]
self.imported_table = True
real_values = np.real(self.imported_values)
imag_values = np.imag(self.imported_values)
abs_values = np.abs(self.imported_values)
data = np.array([self.frequencies, real_values, imag_values, abs_values]).T
np.savetxt(self.new_load_path_table, data, delimiter=",", header=header)
except Exception as error_log:
window_title ="ERROR"
title = "Error reached while loading table"
message = f" {str(error_log)} \n\nIt is recommended to skip the header rows."
PrintMessageInput([title, message, window_title])
return self.imported_values, self.basename
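# The imported table is expected to be comma-separated with columns
# [frequency, real, imaginary]; a copy that appends an absolute-value
# column is written into the project folder under the same basename.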
def load_specific_impedance_table(self):
header = "specific impedance || Frequency [Hz], real[Pa], imaginary[Pa], absolute[Pa]"
self.specific_impedance, self.basename_specific_impedance = self.load_table(self.lineEdit_load_table_path, header)
def check_table_values(self):
lineEdit_nodeID = self.lineEdit_nodeID.text()
self.stop, self.nodes_typed = self.before_run.check_input_NodeID(lineEdit_nodeID)
if self.stop:
return
if self.lineEdit_load_table_path.text() != "":
if self.specific_impedance is not None:
self.project.set_specific_impedance_bc_by_node(self.nodes_typed, self.specific_impedance, True, table_name=self.basename_specific_impedance)
self.transform_points(self.nodes_typed)
self.close()
def text_label(self, value):
text = ""
if isinstance(value, complex):
value_label = str(value)
elif isinstance(value, np.ndarray):
value_label = 'Table'
text = "{}".format(value_label)
return text
def load_nodes_info(self):
for node in self.project.preprocessor.nodes_with_specific_impedance:
new = QTreeWidgetItem([str(node.external_index), str(self.text_label(node.specific_impedance))])
new.setTextAlignment(0, Qt.AlignCenter)
new.setTextAlignment(1, Qt.AlignCenter)
self.treeWidget_specific_impedance.addTopLevelItem(new)
def on_click_item(self, item):
self.lineEdit_nodeID.setText(item.text(0))
def on_doubleclick_item(self, item):
|
def check_remove_bc_from_node(self):
lineEdit_nodeID = self.lineEdit_nodeID.text()
self.stop, self.nodes_typed = self.before_run.check_input_NodeID(lineEdit_nodeID)
if self.stop:
return
key_strings = ["specific impedance"]
message = "The specific impedance attributed to the {} node(s) have been removed.".format(self.nodes_typed)
remove_bc_from_file(self.nodes_typed, self.acoustic_bc_info_path, key_strings, message)
self.project.preprocessor.set_specific_impedance_bc_by_node(self.nodes_typed, None)
self.transform_points(self.nodes_typed)
self.treeWidget_specific_impedance.clear()
self.load_nodes_info()
self.close()
def update(self):
self.writeNodes(self.opv.getListPickedPoints()) | self.lineEdit_nodeID.setText(item.text(0))
self.check_remove_bc_from_node() |
languageinstructioncommon_builder.go | package parsers
import "errors"
type languageInstructionCommonBuilder struct {
ins Instruction
match Match
}
func | () LanguageInstructionCommonBuilder {
out := languageInstructionCommonBuilder{
ins: nil,
match: nil,
}
return &out
}
// Create initializes the builder
func (app *languageInstructionCommonBuilder) Create() LanguageInstructionCommonBuilder {
return createLanguageInstructionCommonBuilder()
}
// WithInstruction adds an instruction to the builder
func (app *languageInstructionCommonBuilder) WithInstruction(ins Instruction) LanguageInstructionCommonBuilder {
app.ins = ins
return app
}
// WithMatch adds a match to the builder
func (app *languageInstructionCommonBuilder) WithMatch(match Match) LanguageInstructionCommonBuilder {
app.match = match
return app
}
// Now builds a new LanguageInstructionCommon instance
func (app *languageInstructionCommonBuilder) Now() (LanguageInstructionCommon, error) {
if app.ins != nil {
return createLanguageInstructionCommonWithInstruction(app.ins), nil
}
if app.match != nil {
return createLanguageInstructionCommonWithMatch(app.match), nil
}
return nil, errors.New("the LanguageInstructionCommon is invalid")
}
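// Usage sketch (illustrative; assumes an Instruction value `ins` is in scope):
//
//	common, err := createLanguageInstructionCommonBuilder().WithInstruction(ins).Now()
//	if err != nil {
//		// reached only when neither an instruction nor a match was provided
//	}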
| createLanguageInstructionCommonBuilder |
create_organization_admin_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
package admins
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
"github.com/cisco-sso/meraki-cli/models"
)
// NewCreateOrganizationAdminParams creates a new CreateOrganizationAdminParams object
// with the default values initialized.
func NewCreateOrganizationAdminParams() *CreateOrganizationAdminParams {
var ()
return &CreateOrganizationAdminParams{
timeout: cr.DefaultTimeout,
}
}
// NewCreateOrganizationAdminParamsWithTimeout creates a new CreateOrganizationAdminParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewCreateOrganizationAdminParamsWithTimeout(timeout time.Duration) *CreateOrganizationAdminParams {
var ()
return &CreateOrganizationAdminParams{
timeout: timeout,
}
}
// NewCreateOrganizationAdminParamsWithContext creates a new CreateOrganizationAdminParams object
// with the default values initialized, and the ability to set a context for a request
func NewCreateOrganizationAdminParamsWithContext(ctx context.Context) *CreateOrganizationAdminParams |
// NewCreateOrganizationAdminParamsWithHTTPClient creates a new CreateOrganizationAdminParams object
// with the default values initialized, and the ability to set a custom HTTPClient for a request
func NewCreateOrganizationAdminParamsWithHTTPClient(client *http.Client) *CreateOrganizationAdminParams {
var ()
return &CreateOrganizationAdminParams{
HTTPClient: client,
}
}
/*CreateOrganizationAdminParams contains all the parameters to send to the API endpoint
for the create organization admin operation typically these are written to a http.Request
*/
type CreateOrganizationAdminParams struct {
/*CreateOrganizationAdmin*/
CreateOrganizationAdmin *models.CreateOrganizationAdmin
/*OrganizationID*/
OrganizationID string
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithTimeout adds the timeout to the create organization admin params
func (o *CreateOrganizationAdminParams) WithTimeout(timeout time.Duration) *CreateOrganizationAdminParams {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the create organization admin params
func (o *CreateOrganizationAdminParams) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the create organization admin params
func (o *CreateOrganizationAdminParams) WithContext(ctx context.Context) *CreateOrganizationAdminParams {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the create organization admin params
func (o *CreateOrganizationAdminParams) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the create organization admin params
func (o *CreateOrganizationAdminParams) WithHTTPClient(client *http.Client) *CreateOrganizationAdminParams {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the create organization admin params
func (o *CreateOrganizationAdminParams) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithCreateOrganizationAdmin adds the createOrganizationAdmin to the create organization admin params
func (o *CreateOrganizationAdminParams) WithCreateOrganizationAdmin(createOrganizationAdmin *models.CreateOrganizationAdmin) *CreateOrganizationAdminParams {
o.SetCreateOrganizationAdmin(createOrganizationAdmin)
return o
}
// SetCreateOrganizationAdmin adds the createOrganizationAdmin to the create organization admin params
func (o *CreateOrganizationAdminParams) SetCreateOrganizationAdmin(createOrganizationAdmin *models.CreateOrganizationAdmin) {
o.CreateOrganizationAdmin = createOrganizationAdmin
}
// WithOrganizationID adds the organizationID to the create organization admin params
func (o *CreateOrganizationAdminParams) WithOrganizationID(organizationID string) *CreateOrganizationAdminParams {
o.SetOrganizationID(organizationID)
return o
}
// SetOrganizationID adds the organizationId to the create organization admin params
func (o *CreateOrganizationAdminParams) SetOrganizationID(organizationID string) {
o.OrganizationID = organizationID
}
// WriteToRequest writes these params to a swagger request
func (o *CreateOrganizationAdminParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
if o.CreateOrganizationAdmin != nil {
if err := r.SetBodyParam(o.CreateOrganizationAdmin); err != nil {
return err
}
}
// path param organizationId
if err := r.SetPathParam("organizationId", o.OrganizationID); err != nil {
return err
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
| {
var ()
return &CreateOrganizationAdminParams{
Context: ctx,
}
} |
clean_ppi_numeric_fields_using_parameters.py | """
Apply value ranges to ensure that values are reasonable and to minimize the likelihood
of sensitive information (like phone numbers) within the free text fields.
Original Issues: DC-1058, DC-1061, DC-827, DC-502, DC-487
The intent is to ensure that numeric free-text fields that are not manipulated by de-id
have value range restrictions applied to the value_as_number field across the entire dataset.
"""
# Python imports
import logging
# Project imports
import constants.cdr_cleaner.clean_cdr as cdr_consts
from cdr_cleaner.cleaning_rules.base_cleaning_rule import BaseCleaningRule
from constants.bq_utils import WRITE_TRUNCATE
from common import JINJA_ENV
LOGGER = logging.getLogger(__name__)
# Query to create tables in sandbox with the rows that will be removed per cleaning rule
INVALID_VALUES_SANDBOX_QUERY = JINJA_ENV.from_string("""
CREATE OR REPLACE TABLE
`{{project}}.{{sandbox_dataset}}.{{intermediary_table}}` AS (
SELECT *
FROM
`{{project}}.{{dataset}}.observation`
WHERE
(observation_concept_id = 1585795 AND (value_as_number < 0 OR value_as_number > 99))
OR
(observation_concept_id = 1585802 AND (value_as_number < 0 OR value_as_number > 99))
OR
(observation_concept_id = 1585820 AND (value_as_number < 0 OR value_as_number > 255))
OR
(observation_concept_id = 1585864 AND (value_as_number < 0 OR value_as_number > 99))
OR
(observation_concept_id = 1585870 AND (value_as_number < 0 OR value_as_number > 99))
OR
(observation_concept_id = 1585873 AND (value_as_number < 0 OR value_as_number > 99))
OR
(observation_concept_id = 1586159 AND (value_as_number < 0 OR value_as_number > 99))
OR
(observation_concept_id = 1586162 AND (value_as_number < 0 OR value_as_number > 99))
OR
-- from dc1058: sandbox any participant data who have a household size greater than 11 --
(observation_concept_id IN (1333015, 1585889) AND (value_as_number < 0 OR value_as_number > 10))
OR
-- from dc1061: sandbox any participant data who have 6 or more members under 18 in their household --
(observation_concept_id IN (1333023, 1585890) AND (value_as_number < 0 OR value_as_number > 5)))
""")
CLEAN_INVALID_VALUES_QUERY = JINJA_ENV.from_string("""
SELECT
observation_id,
person_id,
observation_concept_id,
observation_date,
observation_datetime,
observation_type_concept_id,
CASE
WHEN observation_concept_id IN (1585795, 1585802, 1585864, 1585870, 1585873, 1586159, 1586162) AND (value_as_number < 0 OR value_as_number > 99) THEN NULL
WHEN observation_concept_id = 1585820 AND (value_as_number < 0 OR value_as_number > 255) THEN NULL
-- from dc1058: will null invalid values for value_as_number if participant household size is greater than 11 --
WHEN observation_concept_id IN (1333015, 1585889) AND (value_as_number < 0 OR value_as_number > 10) THEN NULL
-- from dc1061: will null invalid values for value_as_number if participant household has 6 or more members under the age of 18 --
WHEN observation_concept_id IN (1333023, 1585890) AND (value_as_number < 0 OR value_as_number > 5) THEN NULL
ELSE value_as_number
END AS
value_as_number,
value_as_string,
CASE
WHEN observation_concept_id IN (1585890, 1333023, 1333015, 1585889) AND (value_as_number < 0 OR value_as_number >= 20) THEN 2000000010
WHEN observation_concept_id IN (1585795, 1585802, 1585864, 1585870, 1585873, 1586159, 1586162) AND (value_as_number < 0 OR value_as_number > 99) THEN 2000000010
WHEN observation_concept_id = 1585820 AND (value_as_number < 0 OR value_as_number > 255) THEN 2000000010
-- from dc1058: if the observation_concept_id is 1585889 or 1333015 and the household has between 11 and 19 members --
-- will set value_as_concept_id to the new custom concept --
WHEN observation_concept_id IN (1585889, 1333015) AND (value_as_number < 20 AND value_as_number > 10) THEN 2000000013
-- from dc1061: if the observation_concept_id is 1333023 or 1585890 and between 6 and 19 household members --
-- are under the age of 18, will set value_as_concept_id to the new custom concept --
WHEN observation_concept_id IN (1333023, 1585890) AND (value_as_number < 20 AND value_as_number > 5) THEN 2000000012
ELSE value_as_concept_id
END AS
value_as_concept_id,
qualifier_concept_id,
unit_concept_id,
provider_id,
visit_occurrence_id,
observation_source_value,
observation_source_concept_id,
unit_source_value,
qualifier_source_value,
value_source_concept_id,
value_source_value,
questionnaire_response_id
FROM
{{project}}.{{dataset}}.observation""")
class CleanPPINumericFieldsUsingParameters(BaseCleaningRule):
"""
Apply value ranges to ensure that values are reasonable and to minimize the likelihood
of sensitive information (like phone numbers) within the free text fields.
"""
def __init__(self, project_id, dataset_id, sandbox_dataset_id):
"""
Initialize the class with proper information.
Set the issue numbers, description and affected datasets. As other tickets may affect
this SQL, append them to the list of Jira Issues.
DO NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS!
"""
desc = (
'Sets value_as_number to NULL and value_as_concept_id '
'to new AOU custom concept 2000000010 for responses with invalid values. '
'Sets value_as_number to NULL and value_as_concept_id '
'to new AOU custom concept 2000000013 for households with a high number of individuals. '
'Sets value_as_number to NULL and value_as_concept_id '
'to new AOU custom concept 2000000012 for households with 6 or more individuals '
'under the age of 18.')
super().__init__(
issue_numbers=['DC1058', 'DC1061', 'DC827', 'DC502', 'DC487'],
description=desc,
affected_datasets=[cdr_consts.RDR],
affected_tables=['observation'],
project_id=project_id,
dataset_id=dataset_id,
sandbox_dataset_id=sandbox_dataset_id)
def get_query_specs(self):
"""
Return a list of dictionary query specifications.
:return: A list of dictionaries. Each dictionary contains a single query
and a specification for how to execute that query. The specifications
are optional but the query is required.
"""
invalid_values_sandbox_query = {
cdr_consts.QUERY:
INVALID_VALUES_SANDBOX_QUERY.render(
project=self.project_id,
dataset=self.dataset_id,
sandbox_dataset=self.sandbox_dataset_id,
intermediary_table=self.get_sandbox_tablenames()),
}
clean_invalid_values_query = {
cdr_consts.QUERY:
CLEAN_INVALID_VALUES_QUERY.render(project=self.project_id,
dataset=self.dataset_id),
cdr_consts.DESTINATION_TABLE:
'observation',
cdr_consts.DESTINATION_DATASET:
self.dataset_id,
cdr_consts.DISPOSITION:
WRITE_TRUNCATE
}
return [invalid_values_sandbox_query, clean_invalid_values_query]
def setup_rule(self, client):
"""
Function to run any data upload options before executing a query.
"""
pass
def | (self, client):
"""
Run required steps for validation setup
"""
raise NotImplementedError("Please fix me.")
def validate_rule(self, client):
"""
Validates the cleaning rule which deletes or updates the data from the tables
"""
raise NotImplementedError("Please fix me.")
def get_sandbox_tablenames(self):
return f'{self._issue_numbers[0].lower()}_{self._affected_tables[0]}'
if __name__ == '__main__':
import cdr_cleaner.args_parser as parser
import cdr_cleaner.clean_cdr_engine as clean_engine
ARGS = parser.parse_args()
if ARGS.list_queries:
clean_engine.add_console_logging()
query_list = clean_engine.get_query_list(
ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id,
[(CleanPPINumericFieldsUsingParameters,)])
for query in query_list:
LOGGER.info(query)
else:
clean_engine.add_console_logging(ARGS.console_log)
clean_engine.clean_dataset(ARGS.project_id, ARGS.dataset_id,
ARGS.sandbox_dataset_id,
[(CleanPPINumericFieldsUsingParameters,)])
| setup_validation |
0023_sushicredentials_last_updated_by.py | # Generated by Django 2.2.5 on 2019-10-21 07:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration): | dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('sushi', '0022_sushi_credentials_locking'),
]
operations = [
migrations.AddField(
model_name='sushicredentials',
name='last_updated_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
] | |
test_early_stopping.py | from unittest import TestCase
from mock import MagicMock
import torchbearer
from torchbearer.callbacks import EarlyStopping
class TestEarlyStopping(TestCase):
def test_step_on_batch(self):
stopper = EarlyStopping(monitor='test_metric', mode='min', step_on_batch=True)
stopper.step = MagicMock()
stopper.on_step_training('test')
self.assertTrue(stopper.step.call_count == 1)
stopper.on_end_epoch('test')
self.assertTrue(stopper.step.call_count == 1)
def test_min_should_stop(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', mode='min')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric'] = 0.01
stopper.on_end_epoch(state)
self.assertTrue(state[torchbearer.STOP_TRAINING])
def test_min_should_continue(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', mode='min')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric'] = 0.0001
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
def test_max_should_stop(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', mode='max')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric'] = 0.0001
stopper.on_end_epoch(state)
self.assertTrue(state[torchbearer.STOP_TRAINING])
def test_max_should_continue(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', mode='max')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric'] = 0.01
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
def test_max_equal_should_stop(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', mode='max')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
stopper.on_end_epoch(state)
self.assertTrue(state[torchbearer.STOP_TRAINING])
def test_in_equal_should_stop(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', mode='min')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
stopper.on_end_epoch(state)
self.assertTrue(state[torchbearer.STOP_TRAINING])
def test_patience_should_stop(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', patience=3)
stopper.on_start(state)
for i in range(3):
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
stopper.on_end_epoch(state)
self.assertTrue(state[torchbearer.STOP_TRAINING])
def test_patience_should_continue(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', patience=3)
stopper.on_start(state)
for i in range(3):
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric'] = 0.0001
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
def test_min_delta_should_continue(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', mode='max', min_delta=0.1)
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric'] = 0.102
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
def test_min_delta_should_stop(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric', mode='max', min_delta=0.1)
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric'] = 0.10
stopper.on_end_epoch(state)
self.assertTrue(state[torchbearer.STOP_TRAINING])
def test_auto_should_be_min(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric': 0.001}
}
stopper = EarlyStopping(monitor='test_metric')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertTrue(stopper.mode == 'min')
def test_auto_should_be_max(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'acc_metric': 0.001}
}
stopper = EarlyStopping(monitor='acc_metric')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertTrue(stopper.mode == 'max')
def test_monitor_should_continue(self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric_1': 0.001, 'test_metric_2': 0.001}
}
stopper = EarlyStopping(monitor='test_metric_2', mode='max')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric_1'] = 0.0001
state[torchbearer.METRICS]['test_metric_2'] = 0.01
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
def | (self):
state = {
torchbearer.EPOCH: 1,
torchbearer.STOP_TRAINING: False,
torchbearer.METRICS: {'test_metric_1': 0.001, 'test_metric_2': 0.001}
}
stopper = EarlyStopping(monitor='test_metric_2', mode='max')
stopper.on_start(state)
stopper.on_end_epoch(state)
self.assertFalse(state[torchbearer.STOP_TRAINING])
state[torchbearer.METRICS]['test_metric_1'] = 0.1
state[torchbearer.METRICS]['test_metric_2'] = 0.0001
stopper.on_end_epoch(state)
self.assertTrue(state[torchbearer.STOP_TRAINING])
def test_state_dict(self):
stopper = EarlyStopping(monitor='test_metric_1')
stopper.wait = 10
stopper.best = 20
state = stopper.state_dict()
stopper = EarlyStopping(monitor='test_metric_1')
self.assertNotEqual(stopper.wait, 10)
stopper.load_state_dict(state)
self.assertEqual(stopper.wait, 10)
self.assertEqual(stopper.best, 20)
| test_monitor_should_stop |
index.js | import 'assets/index.css';
import React from 'react';
import Tree, { TreeNode } from 'rc-tree';
import './tree.css';
import { getTree, changeParents } from 'api/treeServices';
class Demo extends React.Component {
componentDidMount() {
this.getTreeNodes();
}
constructor(props) {
super(props);
this.state = {
tree: [],
autoExpandParent: true,
expandedKeys: []
};
}
onDragStart = (info) => {
}
getTreeNodes = async () => {
try {
const tNodes = await getTree();
if (tNodes.length > 0) {
const uiNode = [];
uiNode.push(this.makeTreeStructure(tNodes, tNodes[0])); //root
this.setState({ tree: uiNode });
}
else {
console.log("setState for empty tree!")
}
} catch (error) {
console.log('in error ', error);
}
};
makeTreeStructure = (tNodes, node) => {
const nodeInfo = {
key: node._id,
title: node.name
};
const children = this.findImmediateChildren(tNodes, node._id);
if (children.length > 0) {
this.setState({ expandedKeys: this.state.expandedKeys.concat(node._id) })
return {
...nodeInfo,
children: children.map(child => this.makeTreeStructure(tNodes, child))
};
}
return nodeInfo;
}
findImmediateChildren = (nodes, parentId) => {
return nodes.filter(node => node.parentId === parentId)
}
onDragEnter = (info) => {
this.setState({
expandedKeys: info.expandedKeys,
});
}
onDrop = (info) => {
console.log('drop', info); |
changeParents(dragKey, dropKey);
const loop = (data, key, callback) => {
data.forEach((item, index, arr) => {
if (item.key === key) {
callback(item, index, arr);
return;
}
if (item.children) {
loop(item.children, key, callback);
}
});
};
const data = [...this.state.tree];
// Find dragObject
let dragObj;
loop(data, dragKey, (item, index, arr) => {
arr.splice(index, 1);
dragObj = item;
});
if (!info.dropToGap) {
// Drop on the content
loop(data, dropKey, (item) => {
item.children = item.children || [];
// where to insert: this example appends to the end, but any position works
item.children.push(dragObj);
});
} else if (
(info.node.props.children || []).length > 0 && // Has children
info.node.props.expanded && // Is expanded
dropPosition === 1 // On the bottom gap
) {
loop(data, dropKey, (item) => {
item.children = item.children || [];
// where to insert: this example appends to the end, but any position works
item.children.unshift(dragObj);
});
} else {
// Drop on the gap
let ar;
let i;
loop(data, dropKey, (item, index, arr) => {
ar = arr;
i = index;
});
if (dropPosition === -1) {
ar.splice(i, 0, dragObj);
} else {
ar.splice(i + 1, 0, dragObj);
}
}
this.setState({
tree: data,
});
}
onExpand = (expandedKeys) => {
this.setState({
expandedKeys,
autoExpandParent: false,
});
}
renderTitle = () => (
<>
<h2>draggable</h2>
<p>drag a node into another node</p>
</>
)
render() {
const loop = data => {
return data.map((item) => {
if (item.children && item.children.length) {
return <TreeNode key={item.key} title={item.title}>{loop(item.children)}</TreeNode>;
}
return <TreeNode key={item.key} title={item.title} />;
});
};
return (
<div className="draggable-demo">
{this.renderTitle()}
<div className="draggable-container">
<Tree
expandedKeys={this.state.expandedKeys}
onExpand={this.onExpand} autoExpandParent={this.state.autoExpandParent}
draggable
onDragStart={this.onDragStart}
onDragEnter={this.onDragEnter}
onDrop={this.onDrop}
>
{loop(this.state.tree)}
</Tree>
</div>
</div>);
}
}
export default Demo; | const dropKey = info.node.props.eventKey;
const dragKey = info.dragNode.props.eventKey;
const dropPos = info.node.props.pos.split('-');
const dropPosition = info.dropPosition - Number(dropPos[dropPos.length - 1]); |
util.go | // The MIT License
//
// Copyright (c) 2020 Temporal Technologies Inc. All rights reserved.
//
// Copyright (c) 2020 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package workflow
import (
commandpb "go.temporal.io/api/command/v1"
commonpb "go.temporal.io/api/common/v1"
enumspb "go.temporal.io/api/enums/v1"
"go.temporal.io/api/serviceerror"
workflowpb "go.temporal.io/api/workflow/v1"
"go.temporal.io/server/common"
"go.temporal.io/server/common/clock"
"go.temporal.io/server/common/primitives/timestamp"
"go.temporal.io/server/service/history/consts"
)
func failWorkflowTask(
mutableState MutableState,
workflowTask *WorkflowTaskInfo,
workflowTaskFailureCause enumspb.WorkflowTaskFailedCause,
) error {
if _, err := mutableState.AddWorkflowTaskFailedEvent(
workflowTask.ScheduleID,
workflowTask.StartedID,
workflowTaskFailureCause,
nil,
consts.IdentityHistoryService,
"",
"",
"",
0,
); err != nil {
return err
}
mutableState.FlushBufferedEvents()
return nil
}
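// ScheduleWorkflowTask schedules a new workflow task, unless one is already pending.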
func ScheduleWorkflowTask(
mutableState MutableState,
) error {
if mutableState.HasPendingWorkflowTask() {
return nil
}
_, err := mutableState.AddWorkflowTaskScheduledEvent(false)
if err != nil {
return serviceerror.NewInternal("Failed to add workflow task scheduled event.")
}
return nil
}
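// RetryWorkflow fails any in-flight workflow task, then continues the workflow
// as new using the provided continue-as-new attributes.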
func RetryWorkflow(
mutableState MutableState,
eventBatchFirstEventID int64,
parentNamespace string,
continueAsNewAttributes *commandpb.ContinueAsNewWorkflowExecutionCommandAttributes,
) (MutableState, error) {
if workflowTask, ok := mutableState.GetInFlightWorkflowTask(); ok {
if err := failWorkflowTask(
mutableState,
workflowTask,
enumspb.WORKFLOW_TASK_FAILED_CAUSE_FORCE_CLOSE_COMMAND,
); err != nil {
return nil, err
}
}
_, newMutableState, err := mutableState.AddContinueAsNewEvent(
eventBatchFirstEventID,
common.EmptyEventID,
parentNamespace,
continueAsNewAttributes,
)
if err != nil {
return nil, err
}
return newMutableState, nil
}
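// TimeoutWorkflow fails any in-flight workflow task, then records a workflow
// execution timed-out event.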
func TimeoutWorkflow(
mutableState MutableState,
eventBatchFirstEventID int64,
retryState enumspb.RetryState,
continuedRunID string,
) error {
if workflowTask, ok := mutableState.GetInFlightWorkflowTask(); ok {
if err := failWorkflowTask(
mutableState,
workflowTask,
enumspb.WORKFLOW_TASK_FAILED_CAUSE_FORCE_CLOSE_COMMAND,
); err != nil |
}
_, err := mutableState.AddTimeoutWorkflowEvent(
eventBatchFirstEventID,
retryState,
continuedRunID,
)
return err
}
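// TerminateWorkflow fails any in-flight workflow task, then records a workflow
// execution terminated event.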
func TerminateWorkflow(
mutableState MutableState,
eventBatchFirstEventID int64,
terminateReason string,
terminateDetails *commonpb.Payloads,
terminateIdentity string,
) error {
if workflowTask, ok := mutableState.GetInFlightWorkflowTask(); ok {
if err := failWorkflowTask(
mutableState,
workflowTask,
enumspb.WORKFLOW_TASK_FAILED_CAUSE_FORCE_CLOSE_COMMAND,
); err != nil {
return err
}
}
_, err := mutableState.AddWorkflowExecutionTerminatedEvent(
eventBatchFirstEventID,
terminateReason,
terminateDetails,
terminateIdentity,
)
return err
}
// FindAutoResetPoint returns the auto reset point
func FindAutoResetPoint(
timeSource clock.TimeSource,
verifyChecksum func(string) error,
autoResetPoints *workflowpb.ResetPoints,
) (string, *workflowpb.ResetPointInfo) {
if autoResetPoints == nil {
return "", nil
}
now := timeSource.Now()
for _, p := range autoResetPoints.Points {
if err := verifyChecksum(p.GetBinaryChecksum()); err != nil && p.GetResettable() {
expireTime := timestamp.TimeValue(p.GetExpireTime())
if !expireTime.IsZero() && now.After(expireTime) {
// reset point has expired and we may have already deleted the history
continue
}
return err.Error(), p
}
}
return "", nil
}
| {
return err
} |
cloudappliances.go | package storsimple
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"net/http"
)
// CloudAppliancesClient is the client for the CloudAppliances methods of the Storsimple service.
type CloudAppliancesClient struct {
BaseClient
}
// NewCloudAppliancesClient creates an instance of the CloudAppliancesClient client.
func NewCloudAppliancesClient(subscriptionID string) CloudAppliancesClient {
return NewCloudAppliancesClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewCloudAppliancesClientWithBaseURI creates an instance of the CloudAppliancesClient client.
func NewCloudAppliancesClientWithBaseURI(baseURI string, subscriptionID string) CloudAppliancesClient |
// ListSupportedConfigurations lists supported cloud appliance models and supported configurations.
//
// resourceGroupName is the resource group name; managerName is the manager name
func (client CloudAppliancesClient) ListSupportedConfigurations(ctx context.Context, resourceGroupName string, managerName string) (result CloudApplianceConfigurationList, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: managerName,
Constraints: []validation.Constraint{{Target: "managerName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "managerName", Name: validation.MinLength, Rule: 2, Chain: nil}}}}); err != nil {
return result, validation.NewErrorWithValidationError(err, "storsimple.CloudAppliancesClient", "ListSupportedConfigurations")
}
req, err := client.ListSupportedConfigurationsPreparer(ctx, resourceGroupName, managerName)
if err != nil {
err = autorest.NewErrorWithError(err, "storsimple.CloudAppliancesClient", "ListSupportedConfigurations", nil, "Failure preparing request")
return
}
resp, err := client.ListSupportedConfigurationsSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "storsimple.CloudAppliancesClient", "ListSupportedConfigurations", resp, "Failure sending request")
return
}
result, err = client.ListSupportedConfigurationsResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "storsimple.CloudAppliancesClient", "ListSupportedConfigurations", resp, "Failure responding to request")
}
return
}
// ListSupportedConfigurationsPreparer prepares the ListSupportedConfigurations request.
func (client CloudAppliancesClient) ListSupportedConfigurationsPreparer(ctx context.Context, resourceGroupName string, managerName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"managerName": managerName,
"resourceGroupName": resourceGroupName,
"subscriptionId": client.SubscriptionID,
}
const APIVersion = "2017-06-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/cloudApplianceConfigurations", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSupportedConfigurationsSender sends the ListSupportedConfigurations request. The method will close the
// http.Response Body if it receives an error.
func (client CloudAppliancesClient) ListSupportedConfigurationsSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListSupportedConfigurationsResponder handles the response to the ListSupportedConfigurations request. The method always
// closes the http.Response Body.
func (client CloudAppliancesClient) ListSupportedConfigurationsResponder(resp *http.Response) (result CloudApplianceConfigurationList, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Provision provisions cloud appliance.
//
// parameters is the cloud appliance; resourceGroupName is the resource group name; managerName is the manager name
func (client CloudAppliancesClient) Provision(ctx context.Context, parameters CloudAppliance, resourceGroupName string, managerName string) (result CloudAppliancesProvisionFuture, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.Name", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.VnetRegion", Name: validation.Null, Rule: true, Chain: nil}}},
{TargetValue: managerName,
Constraints: []validation.Constraint{{Target: "managerName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "managerName", Name: validation.MinLength, Rule: 2, Chain: nil}}}}); err != nil {
return result, validation.NewErrorWithValidationError(err, "storsimple.CloudAppliancesClient", "Provision")
}
req, err := client.ProvisionPreparer(ctx, parameters, resourceGroupName, managerName)
if err != nil {
err = autorest.NewErrorWithError(err, "storsimple.CloudAppliancesClient", "Provision", nil, "Failure preparing request")
return
}
result, err = client.ProvisionSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "storsimple.CloudAppliancesClient", "Provision", result.Response(), "Failure sending request")
return
}
return
}
// ProvisionPreparer prepares the Provision request.
func (client CloudAppliancesClient) ProvisionPreparer(ctx context.Context, parameters CloudAppliance, resourceGroupName string, managerName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"managerName": managerName,
"resourceGroupName": resourceGroupName,
"subscriptionId": client.SubscriptionID,
}
const APIVersion = "2017-06-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsJSON(),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/provisionCloudAppliance", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ProvisionSender sends the Provision request. The method will close the
// http.Response Body if it receives an error.
func (client CloudAppliancesClient) ProvisionSender(req *http.Request) (future CloudAppliancesProvisionFuture, err error) {
sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
future.Future = azure.NewFuture(req)
future.req = req
_, err = future.Done(sender)
if err != nil {
return
}
err = autorest.Respond(future.Response(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted))
return
}
// ProvisionResponder handles the response to the Provision request. The method always
// closes the http.Response Body.
func (client CloudAppliancesClient) ProvisionResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByClosing())
result.Response = resp
return
}
| {
return CloudAppliancesClient{NewWithBaseURI(baseURI, subscriptionID)}
} |
blru.ts |
declare module 'blru' {
type GetSizeFunction<TKey, TValue> = (value: TValue, key: TKey) => number
export class LRU<TKey, TValue> {
constructor(
capacity: number,
getSize?: GetSizeFunction<TKey, TValue> | null,
CustomMap?: typeof Map | unknown | null,
)
map: Map<TKey, TValue>
size: number
items: number
head: LRUItem<TKey, TValue> | null
tail: LRUItem<TKey, TValue> | null
pending: LRUBatch<TKey, TValue> | null
capacity: number
getSize: GetSizeFunction<TKey, TValue> | null | undefined
reset(): void
set(key: TKey, value: TValue): void
get(key: TKey): TValue | null
has(key: TKey): boolean
remove(key: TKey): boolean
keys(): Array<TKey>
values(): Array<TValue>
toArray(): Array<TValue>
batch(): LRUBatch<TKey, TValue>
start(): void
clear(): void
drop(): void
commit(): void
push(key: TKey, value: TValue): void
unpush(key: TKey): void
}
class LRUItem<TKey, TValue> {
constructor(key: TKey, value: TValue)
key: TKey
value: TValue
next: LRUItem<TKey, TValue> | null
prev: LRUItem<TKey, TValue> | null
}
export class LRUBatch<TKey, TValue> {
constructor(lru: LRU<TKey, TValue>)
lru: LRU<TKey, TValue>
ops: Array<LRUOp<TKey, TValue>>
set(key: TKey, value: TValue): void
remove(key: TKey): void
clear(): void
commit(): void
}
export class | <TKey, TValue> {
constructor(remove: boolean, key: TKey, value: TValue)
remove: boolean
key: TKey
value: TValue
}
export default LRU
}
| LRUOp |
transaction_stress_test.rs | use crate::{get_fee, one_eth, one_hundred_eth, utils::*, TOTAL_TIMEOUT};
use clarity::Address as EthAddress;
use cosmos_gravity::{
query::get_pending_send_to_eth,
send::{cancel_send_to_eth, send_request_batch, send_to_eth},
};
use deep_space::coin::Coin;
use deep_space::Contact;
use ethereum_gravity::{send_to_cosmos::send_to_cosmos, utils::get_tx_batch_nonce};
use futures::future::join_all;
use gravity_proto::gravity::query_client::QueryClient as GravityQueryClient;
use rand::seq::SliceRandom;
use std::{
collections::HashSet,
time::{Duration, Instant},
};
use tokio::time::sleep as delay_for;
use tonic::transport::Channel;
use web30::{client::Web3, types::SendTxOption};
const TIMEOUT: Duration = Duration::from_secs(120);
/// The number of users we will be simulating for this test, each user
/// will get one token from each token type in erc20_addresses and send it
/// across the bridge to Cosmos as a deposit and then send it back to a different
/// Ethereum address in a transaction batch
/// So the total number of
/// Ethereum sends = (2 * NUM_USERS)
/// ERC20 sends = (erc20_addresses.len() * NUM_USERS)
/// Gravity Deposits = (erc20_addresses.len() * NUM_USERS)
/// Batches executed = erc20_addresses.len() * (NUM_USERS / 100)
const NUM_USERS: usize = 100;
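// Worked example of the counts above, assuming erc20_addresses has 3 entries
// (the actual length is decided by the caller, not fixed in this file):
//   Ethereum sends   = 2 * 100         = 200
//   ERC20 sends      = 3 * 100         = 300
//   Gravity deposits = 3 * 100         = 300
//   Batches executed = 3 * (100 / 100) = 3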
/// Perform a stress test by sending thousands of
/// transactions and producing large batches
#[allow(clippy::too_many_arguments)]
pub async fn transaction_stress_test(
web30: &Web3,
contact: &Contact,
grpc_client: GravityQueryClient<Channel>,
keys: Vec<ValidatorKeys>,
gravity_address: EthAddress,
erc20_addresses: Vec<EthAddress>,
) | {
let mut grpc_client = grpc_client;
let no_relay_market_config = create_default_test_config();
start_orchestrators(keys.clone(), gravity_address, false, no_relay_market_config).await;
// Generate 100 user keys to send ETH and multiple types of tokens
let mut user_keys = Vec::new();
for _ in 0..NUM_USERS {
user_keys.push(get_user_key());
}
// the sending eth addresses need Ethereum to send ERC20 tokens to the bridge
let sending_eth_addresses: Vec<EthAddress> = user_keys.iter().map(|i| i.eth_address).collect();
// the destination eth addresses need Ethereum to perform a contract call and get their erc20 balances
let dest_eth_addresses: Vec<EthAddress> =
user_keys.iter().map(|i| i.eth_dest_address).collect();
let mut eth_destinations = Vec::new();
eth_destinations.extend(sending_eth_addresses.clone());
eth_destinations.extend(dest_eth_addresses);
send_eth_bulk(one_eth(), ð_destinations, web30).await;
info!("Sent {} addresses 1 ETH", NUM_USERS);
// now we need to send all the sending eth addresses erc20's to send
for token in erc20_addresses.iter() {
send_erc20_bulk(one_hundred_eth(), *token, &sending_eth_addresses, web30).await;
info!("Sent {} addresses 100 {}", NUM_USERS, token);
}
for token in erc20_addresses.iter() {
let mut sends = Vec::new();
for keys in user_keys.iter() {
let fut = send_to_cosmos(
*token,
gravity_address,
one_hundred_eth(),
keys.cosmos_address,
keys.eth_key,
Some(TIMEOUT),
web30,
vec![SendTxOption::GasPriceMultiplier(5.0)],
);
sends.push(fut);
}
let txids = join_all(sends).await;
let mut wait_for_txid = Vec::new();
for txid in txids {
let wait = web30.wait_for_transaction(txid.unwrap(), TIMEOUT, None);
wait_for_txid.push(wait);
}
let results = join_all(wait_for_txid).await;
for result in results {
let result = result.unwrap();
result.block_number.unwrap();
}
info!(
"Locked 100 {} from {} into the Gravity Ethereum Contract",
token, NUM_USERS
);
}
let start = Instant::now();
let mut good = true;
while Instant::now() - start < TOTAL_TIMEOUT {
good = true;
for keys in user_keys.iter() {
let c_addr = keys.cosmos_address;
let balances = contact.get_balances(c_addr).await.unwrap();
for token in erc20_addresses.iter() {
let mut found = false;
for balance in balances.iter() {
if balance.denom.contains(&token.to_string())
&& balance.amount == one_hundred_eth()
{
found = true;
}
}
if !found {
good = false;
}
}
}
if good {
info!(
"All {} deposits bridged to Cosmos successfully!",
user_keys.len() * erc20_addresses.len()
);
break;
}
delay_for(Duration::from_secs(5)).await;
}
if !good {
panic!(
"Failed to perform all {} deposits to Cosmos!",
user_keys.len() * erc20_addresses.len()
);
}
let send_amount = one_hundred_eth() - 500u16.into();
let mut denoms = HashSet::new();
for token in erc20_addresses.iter() {
let mut futs = Vec::new();
for keys in user_keys.iter() {
let c_addr = keys.cosmos_address;
let c_key = keys.cosmos_key;
let e_dest_addr = keys.eth_dest_address;
let balances = contact.get_balances(c_addr).await.unwrap();
// this way I don't have to hardcode a denom and we can change the way denoms are formed
// without changing this test.
let mut send_coin = None;
for balance in balances {
if balance.denom.contains(&token.to_string()) {
send_coin = Some(balance.clone());
denoms.insert(balance.denom);
}
}
let mut send_coin = send_coin.unwrap();
send_coin.amount = send_amount.clone();
let send_fee = Coin {
denom: send_coin.denom.clone(),
amount: 1u8.into(),
};
let res = send_to_eth(
c_key,
e_dest_addr,
send_coin,
send_fee.clone(),
send_fee,
contact,
);
futs.push(res);
}
let results = join_all(futs).await;
for result in results {
let result = result.unwrap();
trace!("SendToEth result {:?}", result);
}
info!(
"Successfully placed {} {} into the tx pool",
NUM_USERS, token
);
}
// randomly select a user to cancel their transaction, as part of this test
// we make sure that this user withdraws absolutely zero tokens
let mut rng = rand::thread_rng();
let user_who_cancels = user_keys.choose(&mut rng).unwrap();
let pending = get_pending_send_to_eth(&mut grpc_client, user_who_cancels.cosmos_address)
.await
.unwrap();
// if batch creation is made automatic this becomes a race condition we'll have to consider
assert!(pending.transfers_in_batches.is_empty());
assert!(!pending.unbatched_transfers.is_empty());
let denom = denoms.iter().next().unwrap().clone();
let bridge_fee = Coin {
denom,
amount: 1u8.into(),
};
// cancel all outgoing transactions for this user
for tx in pending.unbatched_transfers {
let res = cancel_send_to_eth(
user_who_cancels.cosmos_key,
bridge_fee.clone(),
contact,
tx.id,
)
.await
.unwrap();
info!("{:?}", res);
}
contact.wait_for_next_block(TIMEOUT).await.unwrap();
// check that the cancellation worked
let pending = get_pending_send_to_eth(&mut grpc_client, user_who_cancels.cosmos_address)
.await
.unwrap();
info!("{:?}", pending);
assert!(pending.transfers_in_batches.is_empty());
assert!(pending.unbatched_transfers.is_empty());
// this user will have someone else attempt to cancel their transaction
let mut victim = None;
for key in user_keys.iter() {
if key != user_who_cancels {
victim = Some(key);
break;
}
}
let pending = get_pending_send_to_eth(&mut grpc_client, victim.unwrap().cosmos_address)
.await
.unwrap();
// try to cancel the victim's transactions and ensure failure
for tx in pending.unbatched_transfers {
let res = cancel_send_to_eth(
user_who_cancels.cosmos_key,
bridge_fee.clone(),
contact,
tx.id,
)
.await;
info!("{:?}", res);
}
for denom in denoms {
info!("Requesting batch for {}", denom);
let res = send_request_batch(
keys[0].validator_key,
denom,
get_fee(),
contact,
Some(TIMEOUT),
)
.await
.unwrap();
info!("batch request response is {:?}", res);
}
let start = Instant::now();
let mut good = true;
let mut found_canceled = false;
while Instant::now() - start < TOTAL_TIMEOUT {
good = true;
found_canceled = false;
for keys in user_keys.iter() {
let e_dest_addr = keys.eth_dest_address;
for token in erc20_addresses.iter() {
let bal = web30.get_erc20_balance(*token, e_dest_addr).await.unwrap();
if bal != send_amount.clone() {
if e_dest_addr == user_who_cancels.eth_address && bal == 0u8.into() {
info!("We successfully found the user who canceled their sends!");
found_canceled = true;
} else {
good = false;
}
}
}
}
if good && found_canceled {
info!(
"All {} withdraws to Ethereum bridged successfully!",
NUM_USERS * erc20_addresses.len()
);
break;
}
delay_for(Duration::from_secs(5)).await;
}
if !(good && found_canceled) {
panic!(
"Failed to perform all {} withdraws to Ethereum!",
NUM_USERS * erc20_addresses.len()
);
}
// we should find a batch nonce greater than zero since all the batches
// executed
for token in erc20_addresses {
assert!(
get_tx_batch_nonce(
gravity_address,
token,
keys[0].eth_key.to_public_key().unwrap(),
web30
)
.await
.unwrap()
> 0
)
}
} |
|
dashboard.component.ts | import { HttpClient } from '@angular/common/http';
import { Component, Input, OnInit } from '@angular/core';
import {
IAlbumInfo,
IAlbumResponse,
IArtistInfo,
IArtistResponse,
} from './dashboard.models';
import { environment } from '../../environments/environment';
@Component({
selector: 'app-dashboard',
templateUrl: './dashboard.component.html',
styleUrls: ['./dashboard.component.scss'],
})
export class DashboardComponent implements OnInit { | albums: IAlbumInfo[];
selectedAlbums: object[];
selector: string = 'Album';
constructor(private http: HttpClient) {}
ngOnInit(): void {}
handleSearch() {
switch (this.selector) {
case 'Artist':
return this.handleArtistSearch();
case 'Album':
return this.handleAlbumSearch();
default:
return this.handleAlbumSearch();
}
}
handleAlbumSearch() {
this.http
.get(
`https://www.theaudiodb.com/api/v1/json/${environment.apiKey}/searchalbum.php?s=${this.input}`
)
.toPromise()
.then((response: IAlbumResponse) => {
this.albums = response.album;
console.log(this.albums);
return this.albums;
});
}
handleArtistSearch() {
this.http
.get(
        `https://www.theaudiodb.com/api/v1/json/${environment.apiKey}/search.php?s=${this.input}`
)
.toPromise()
.then((response: IArtistResponse) => {
this.artist = response.artists;
console.log(this.artist);
});
}
handleChildChange(filter) {
this.selector = filter;
}
} | @Input() cols: number;
input: string;
artist: IArtistInfo[]; |
kernel.py | import gc
import glob
import json
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import lightgbm as lgb
from collections import Counter
from functools import partial
from math import sqrt
from joblib import Parallel, delayed
from tqdm import tqdm
from PIL import Image
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import cohen_kappa_score, mean_squared_error
from sklearn.metrics import confusion_matrix as sk_cmatrix
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import SparsePCA, TruncatedSVD, LatentDirichletAllocation, NMF
# basic datasets
train = pd.read_csv('../input/petfinder-adoption-prediction/train/train.csv')
test = pd.read_csv('../input/petfinder-adoption-prediction/test/test.csv')
sample_submission = pd.read_csv('../input/petfinder-adoption-prediction/test/sample_submission.csv')
labels_breed = pd.read_csv('../input/petfinder-adoption-prediction/breed_labels.csv')
labels_state = pd.read_csv('../input/petfinder-adoption-prediction/state_labels.csv')
labels_color = pd.read_csv('../input/petfinder-adoption-prediction/color_labels.csv')
train_image_files = sorted(glob.glob('../input/petfinder-adoption-prediction/train_images/*.jpg'))
train_metadata_files = sorted(glob.glob('../input/petfinder-adoption-prediction/train_metadata/*.json'))
train_sentiment_files = sorted(glob.glob('../input/petfinder-adoption-prediction/train_sentiment/*.json'))
test_image_files = sorted(glob.glob('../input/petfinder-adoption-prediction/test_images/*.jpg'))
test_metadata_files = sorted(glob.glob('../input/petfinder-adoption-prediction/test_metadata/*.json'))
test_sentiment_files = sorted(glob.glob('../input/petfinder-adoption-prediction/test_sentiment/*.json'))
# extract datasets
# https://www.kaggle.com/christofhenkel/extract-image-features-from-pretrained-nn
train_img_features = pd.read_csv('../input/extract-image-features-from-pretrained-nn/train_img_features.csv')
test_img_features = pd.read_csv('../input/extract-image-features-from-pretrained-nn/test_img_features.csv')
# set img_features column names
col_names = ["PetID"] + ["{}_img_feature".format(_) for _ in range(256)]
train_img_features.columns = col_names
test_img_features.columns = col_names
# ref: https://www.kaggle.com/wrosinski/baselinemodeling
class PetFinderParser(object):
def __init__(self, debug=False):
self.debug = debug
self.sentence_sep = ' '
# Does not have to be extracted because main DF already contains description
self.extract_sentiment_text = False
def open_metadata_file(self, filename):
"""
Load metadata file.
"""
with open(filename, 'r') as f:
metadata_file = json.load(f)
return metadata_file
def open_sentiment_file(self, filename):
"""
Load sentiment file.
"""
with open(filename, 'r') as f:
sentiment_file = json.load(f)
return sentiment_file
def open_image_file(self, filename):
"""
Load image file.
"""
image = np.asarray(Image.open(filename))
return image
def parse_sentiment_file(self, file):
"""
Parse sentiment file. Output DF with sentiment features.
"""
file_sentiment = file['documentSentiment']
file_entities = [x['name'] for x in file['entities']]
file_entities = self.sentence_sep.join(file_entities)
if self.extract_sentiment_text:
file_sentences_text = [x['text']['content'] for x in file['sentences']]
file_sentences_text = self.sentence_sep.join(file_sentences_text)
file_sentences_sentiment = [x['sentiment'] for x in file['sentences']]
file_sentences_sentiment = pd.DataFrame.from_dict(
file_sentences_sentiment, orient='columns').sum()
file_sentences_sentiment = file_sentences_sentiment.add_prefix('document_').to_dict()
file_sentiment.update(file_sentences_sentiment)
df_sentiment = pd.DataFrame.from_dict(file_sentiment, orient='index').T
if self.extract_sentiment_text:
df_sentiment['text'] = file_sentences_text
df_sentiment['entities'] = file_entities
df_sentiment = df_sentiment.add_prefix('sentiment_')
return df_sentiment
def parse_metadata_file(self, file):
"""
Parse metadata file. Output DF with metadata features.
"""
file_keys = list(file.keys())
if 'labelAnnotations' in file_keys:
file_annots = file['labelAnnotations'][:int(len(file['labelAnnotations']) * 0.3)]
file_top_score = np.asarray([x['score'] for x in file_annots]).mean()
file_top_desc = [x['description'] for x in file_annots]
else:
file_top_score = np.nan
file_top_desc = ['']
file_colors = file['imagePropertiesAnnotation']['dominantColors']['colors']
file_crops = file['cropHintsAnnotation']['cropHints']
file_color_score = np.asarray([x['score'] for x in file_colors]).mean()
file_color_pixelfrac = np.asarray([x['pixelFraction'] for x in file_colors]).mean()
file_crop_conf = np.asarray([x['confidence'] for x in file_crops]).mean()
if 'importanceFraction' in file_crops[0].keys():
file_crop_importance = np.asarray([x['importanceFraction'] for x in file_crops]).mean()
else:
file_crop_importance = np.nan
df_metadata = {
'annots_score': file_top_score,
'color_score': file_color_score,
'color_pixelfrac': file_color_pixelfrac,
'crop_conf': file_crop_conf,
'crop_importance': file_crop_importance,
'annots_top_desc': self.sentence_sep.join(file_top_desc)
}
df_metadata = pd.DataFrame.from_dict(df_metadata, orient='index').T
df_metadata = df_metadata.add_prefix('metadata_')
return df_metadata
# Helper function for parallel data processing:
def extract_additional_features(pet_id, mode='train'):
sentiment_filename = '../input/petfinder-adoption-prediction/{}_sentiment/{}.json'.format(mode, pet_id)
try:
sentiment_file = pet_parser.open_sentiment_file(sentiment_filename)
df_sentiment = pet_parser.parse_sentiment_file(sentiment_file)
df_sentiment['PetID'] = pet_id
except FileNotFoundError:
df_sentiment = []
dfs_metadata = []
metadata_filenames = sorted(glob.glob('../input/petfinder-adoption-prediction/{}_metadata/{}*.json'.format(mode, pet_id)))
if len(metadata_filenames) > 0:
for f in metadata_filenames:
metadata_file = pet_parser.open_metadata_file(f)
df_metadata = pet_parser.parse_metadata_file(metadata_file)
df_metadata['PetID'] = pet_id
dfs_metadata.append(df_metadata)
dfs_metadata = pd.concat(dfs_metadata, ignore_index=True, sort=False)
dfs = [df_sentiment, dfs_metadata]
return dfs
def agg_features(df_metadata, df_sentiment):
# Extend aggregates and improve column naming
aggregates = ['mean', "median", 'sum', "var", "std", "min", "max", "nunique"]
metadata_desc = df_metadata.groupby(['PetID'])['metadata_annots_top_desc'].unique()
metadata_desc = metadata_desc.reset_index()
metadata_desc['metadata_annots_top_desc'] = metadata_desc['metadata_annots_top_desc'].apply(lambda x: ' '.join(x))
prefix = 'metadata'
metadata_gr = df_metadata.drop(['metadata_annots_top_desc'], axis=1)
for i in metadata_gr.columns:
if 'PetID' not in i:
metadata_gr[i] = metadata_gr[i].astype(float)
metadata_gr = metadata_gr.groupby(['PetID']).agg(aggregates)
metadata_gr.columns = pd.Index(['{}_{}_{}'.format(prefix, c[0], c[1].upper()) for c in metadata_gr.columns.tolist()])
metadata_gr = metadata_gr.reset_index()
sentiment_desc = df_sentiment.groupby(['PetID'])['sentiment_entities'].unique()
sentiment_desc = sentiment_desc.reset_index()
sentiment_desc['sentiment_entities'] = sentiment_desc['sentiment_entities'].apply(lambda x: ' '.join(x))
prefix = 'sentiment'
sentiment_gr = df_sentiment.drop(['sentiment_entities'], axis=1)
for i in sentiment_gr.columns:
if 'PetID' not in i:
sentiment_gr[i] = sentiment_gr[i].astype(float)
sentiment_gr = sentiment_gr.groupby(['PetID']).agg(aggregates)
sentiment_gr.columns = pd.Index(['{}_{}_{}'.format(
prefix, c[0], c[1].upper()) for c in sentiment_gr.columns.tolist()])
sentiment_gr = sentiment_gr.reset_index()
return sentiment_gr, metadata_gr, metadata_desc, sentiment_desc
def breed_features(df, _labels_breed):
breed_main = df[['Breed1']].merge(_labels_breed, how='left', left_on='Breed1', right_on='BreedID', suffixes=('', '_main_breed'))
breed_main = breed_main.iloc[:, 2:]
breed_main = breed_main.add_prefix('main_breed_')
breed_second = df[['Breed2']].merge(_labels_breed, how='left', left_on='Breed2', right_on='BreedID', suffixes=('', '_second_breed'))
breed_second = breed_second.iloc[:, 2:]
breed_second = breed_second.add_prefix('second_breed_')
return breed_main, breed_second
def impact_coding(data, feature, target='y'):
'''
    In this implementation we get the values and the dictionary in two different steps.
    This is just because initially we were ignoring the dictionary as a result variable.
    The KFolds use shuffling, so results are not reproducible between runs; if you want
    reproducibility, the cv could be moved to a parameter.
'''
n_folds = 20
n_inner_folds = 10
    impact_coded = pd.Series(dtype=float)
    oof_default_mean = data[target].mean() # Global mean to use by default (you could further tune this)
kf = KFold(n_splits=n_folds, shuffle=True)
oof_mean_cv = pd.DataFrame()
split = 0
for infold, oof in kf.split(data[feature]):
        impact_coded_cv = pd.Series(dtype=float)
kf_inner = KFold(n_splits=n_inner_folds, shuffle=True)
inner_split = 0
inner_oof_mean_cv = pd.DataFrame()
oof_default_inner_mean = data.iloc[infold][target].mean()
for infold_inner, oof_inner in kf_inner.split(data.iloc[infold]):
# The mean to apply to the inner oof split (a 1/n_folds % based on the rest)
oof_mean = data.iloc[infold_inner].groupby(by=feature)[target].mean()
impact_coded_cv = impact_coded_cv.append(data.iloc[infold].apply(
lambda x: oof_mean[x[feature]]
if x[feature] in oof_mean.index
else oof_default_inner_mean
, axis=1))
# Also populate mapping (this has all group -> mean for all inner CV folds)
inner_oof_mean_cv = inner_oof_mean_cv.join(pd.DataFrame(oof_mean), rsuffix=inner_split, how='outer')
inner_oof_mean_cv.fillna(value=oof_default_inner_mean, inplace=True)
inner_split += 1
# Also populate mapping
oof_mean_cv = oof_mean_cv.join(pd.DataFrame(inner_oof_mean_cv), rsuffix=split, how='outer')
oof_mean_cv.fillna(value=oof_default_mean, inplace=True)
split += 1
impact_coded = impact_coded.append(data.iloc[oof].apply(
lambda x: inner_oof_mean_cv.loc[x[feature]].mean()
if x[feature] in inner_oof_mean_cv.index
else oof_default_mean
, axis=1))
return impact_coded, oof_mean_cv.mean(axis=1), oof_default_mean
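# Usage sketch (mirrors the calls in main() below; assumes the frame has enough
# rows for the 20 outer / 10 inner folds):
# encoded, mapping, default_mean = impact_coding(X_train, 'Breed1', target='AdoptionSpeed')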
def frequency_encoding(df, col_name):
new_name = "{}_counts".format(col_name)
new_col_name = "{}_freq".format(col_name)
grouped = df.groupby(col_name).size().reset_index(name=new_name)
df = df.merge(grouped, how = "left", on = col_name)
df[new_col_name] = df[new_name]/df[new_name].count()
del df[new_name]
return df
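# e.g. frequency_encoding(df, 'Breed1') adds a 'Breed1_freq' column holding each
# row's 'Breed1' group size divided by the total number of rows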
# FROM: https://www.kaggle.com/myltykritik/simple-lgbm-image-features
# The following 3 functions have been taken from Ben Hamner's github repository
# https://github.com/benhamner/Metrics
def confusion_matrix(rater_a, rater_b, min_rating=None, max_rating=None):
"""
Returns the confusion matrix between rater's ratings
"""
assert(len(rater_a) == len(rater_b))
if min_rating is None:
min_rating = min(rater_a + rater_b)
if max_rating is None:
max_rating = max(rater_a + rater_b)
num_ratings = int(max_rating - min_rating + 1)
conf_mat = [[0 for i in range(num_ratings)]
for j in range(num_ratings)]
for a, b in zip(rater_a, rater_b):
conf_mat[a - min_rating][b - min_rating] += 1
return conf_mat
def histogram(ratings, min_rating=None, max_rating=None):
"""
Returns the counts of each type of rating that a rater made
"""
if min_rating is None:
min_rating = min(ratings)
if max_rating is None:
max_rating = max(ratings)
num_ratings = int(max_rating - min_rating + 1)
hist_ratings = [0 for x in range(num_ratings)]
for r in ratings:
hist_ratings[r - min_rating] += 1
return hist_ratings
def quadratic_weighted_kappa(y, y_pred):
|
class OptimizedRounder(object):
def __init__(self):
self.coef_ = 0
def _kappa_loss(self, coef, X, y):
X_p = np.copy(X)
for i, pred in enumerate(X_p):
if pred < coef[0]:
X_p[i] = 0
elif pred >= coef[0] and pred < coef[1]:
X_p[i] = 1
elif pred >= coef[1] and pred < coef[2]:
X_p[i] = 2
elif pred >= coef[2] and pred < coef[3]:
X_p[i] = 3
else:
X_p[i] = 4
ll = quadratic_weighted_kappa(y, X_p)
return -ll
def fit(self, X, y):
loss_partial = partial(self._kappa_loss, X=X, y=y)
initial_coef = [0.5, 1.5, 2.5, 3.5]
self.coef_ = sp.optimize.minimize(loss_partial, initial_coef, method='nelder-mead')
def predict(self, X, coef):
X_p = np.copy(X)
for i, pred in enumerate(X_p):
if pred < coef[0]:
X_p[i] = 0
elif pred >= coef[0] and pred < coef[1]:
X_p[i] = 1
elif pred >= coef[1] and pred < coef[2]:
X_p[i] = 2
elif pred >= coef[2] and pred < coef[3]:
X_p[i] = 3
else:
X_p[i] = 4
return X_p
def coefficients(self):
return self.coef_['x']
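# Usage sketch (mirrors the calls in main() below):
# optR = OptimizedRounder()
# optR.fit(oof_train, y_true)  # learn the four thresholds via Nelder-Mead
# preds = optR.predict(oof_train, optR.coefficients())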
def rmse(actual, predicted):
return sqrt(mean_squared_error(actual, predicted))
def train_lightgbm(X_train, X_test, params, n_splits, num_rounds, verbose_eval, early_stop):
    kfold = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=1337)  # shuffle=True so random_state takes effect
oof_train = np.zeros((X_train.shape[0]))
oof_test = np.zeros((X_test.shape[0], n_splits))
i = 0
for train_index, valid_index in kfold.split(X_train, X_train['AdoptionSpeed'].values):
X_tr = X_train.iloc[train_index, :]
X_val = X_train.iloc[valid_index, :]
y_tr = X_tr['AdoptionSpeed'].values
X_tr = X_tr.drop(['AdoptionSpeed'], axis=1)
y_val = X_val['AdoptionSpeed'].values
X_val = X_val.drop(['AdoptionSpeed'], axis=1)
print('\ny_tr distribution: {}'.format(Counter(y_tr)))
d_train = lgb.Dataset(X_tr, label=y_tr)
d_valid = lgb.Dataset(X_val, label=y_val)
watchlist = [d_train, d_valid]
print('training LGB:')
model = lgb.train(params,
train_set=d_train,
num_boost_round=num_rounds,
valid_sets=watchlist,
verbose_eval=verbose_eval,
early_stopping_rounds=early_stop)
val_pred = model.predict(X_val, num_iteration=model.best_iteration)
test_pred = model.predict(X_test, num_iteration=model.best_iteration)
oof_train[valid_index] = val_pred
oof_test[:, i] = test_pred
i += 1
return oof_train, oof_test
pet_parser = PetFinderParser()
def main():
train_pet_ids = train.PetID.unique()
test_pet_ids = test.PetID.unique()
dfs_train = Parallel(n_jobs=6, verbose=1)(
delayed(extract_additional_features)(i, mode='train') for i in train_pet_ids)
train_dfs_sentiment = [x[0] for x in dfs_train if isinstance(x[0], pd.DataFrame)]
train_dfs_metadata = [x[1] for x in dfs_train if isinstance(x[1], pd.DataFrame)]
train_dfs_sentiment = pd.concat(train_dfs_sentiment, ignore_index=True, sort=False)
train_dfs_metadata = pd.concat(train_dfs_metadata, ignore_index=True, sort=False)
dfs_test = Parallel(n_jobs=6, verbose=1)(
delayed(extract_additional_features)(i, mode='test') for i in test_pet_ids)
test_dfs_sentiment = [x[0] for x in dfs_test if isinstance(x[0], pd.DataFrame)]
test_dfs_metadata = [x[1] for x in dfs_test if isinstance(x[1], pd.DataFrame)]
test_dfs_sentiment = pd.concat(test_dfs_sentiment, ignore_index=True, sort=False)
test_dfs_metadata = pd.concat(test_dfs_metadata, ignore_index=True, sort=False)
train_sentiment_gr, train_metadata_gr, train_metadata_desc, train_sentiment_desc = agg_features(train_dfs_metadata, train_dfs_sentiment)
test_sentiment_gr, test_metadata_gr, test_metadata_desc, test_sentiment_desc = agg_features(test_dfs_metadata, test_dfs_sentiment)
train_proc = train.copy()
for tr in [train_sentiment_gr, train_metadata_gr, train_metadata_desc, train_sentiment_desc]:
train_proc = train_proc.merge(tr, how='left', on='PetID')
test_proc = test.copy()
for ts in [test_sentiment_gr, test_metadata_gr, test_metadata_desc, test_sentiment_desc]:
test_proc = test_proc.merge(
ts, how='left', on='PetID')
train_proc = pd.merge(train_proc, train_img_features, on="PetID")
test_proc = pd.merge(test_proc, test_img_features, on="PetID")
train_breed_main, train_breed_second = breed_features(train_proc, labels_breed)
train_proc = pd.concat([train_proc, train_breed_main, train_breed_second], axis=1)
test_breed_main, test_breed_second = breed_features(test_proc, labels_breed)
test_proc = pd.concat([test_proc, test_breed_main, test_breed_second], axis=1)
X = pd.concat([train_proc, test_proc], ignore_index=True, sort=False)
column_types = X.dtypes
int_cols = column_types[column_types == 'int']
float_cols = column_types[column_types == 'float']
cat_cols = column_types[column_types == 'object']
X_temp = X.copy()
text_columns = ['Description', 'metadata_annots_top_desc', 'sentiment_entities']
categorical_columns = ['main_breed_BreedName', 'second_breed_BreedName']
to_drop_columns = ['PetID', 'Name', 'RescuerID']
rescuer_count = X.groupby(['RescuerID'])['PetID'].count().reset_index()
rescuer_count.columns = ['RescuerID', 'RescuerID_COUNT']
X_temp = X_temp.merge(rescuer_count, how='left', on='RescuerID')
for i in categorical_columns:
X_temp.loc[:, i] = pd.factorize(X_temp.loc[:, i])[0]
    X_text = X_temp[text_columns].copy()
for i in X_text.columns:
X_text.loc[:, i] = X_text.loc[:, i].fillna('<MISSING>')
n_components = 5
text_features = []
# Generate text features:
for i in X_text.columns:
# Initialize decomposition methods:
print('generating features from: {}'.format(i))
svd_ = TruncatedSVD(
n_components=n_components, random_state=1337)
nmf_ = NMF(
n_components=n_components, random_state=1337)
tfidf_col = TfidfVectorizer().fit_transform(X_text.loc[:, i].values)
svd_col = svd_.fit_transform(tfidf_col)
svd_col = pd.DataFrame(svd_col)
svd_col = svd_col.add_prefix('SVD_{}_'.format(i))
nmf_col = nmf_.fit_transform(tfidf_col)
nmf_col = pd.DataFrame(nmf_col)
nmf_col = nmf_col.add_prefix('NMF_{}_'.format(i))
text_features.append(svd_col)
text_features.append(nmf_col)
# Combine all extracted features:
text_features = pd.concat(text_features, axis=1)
# Concatenate with main DF:
X_temp = pd.concat([X_temp, text_features], axis=1)
# Remove raw text columns:
for i in X_text.columns:
X_temp = X_temp.drop(i, axis=1)
X_temp["name_length"] = X_temp.Name[X_temp.Name.isnull()].map(lambda x: len(str(x)))
X_temp["name_length"] = X_temp.Name.map(lambda x: len(str(x)))
X_temp = X_temp.drop(to_drop_columns, axis=1)
# Split into train and test again:
X_train = X_temp.loc[np.isfinite(X_temp.AdoptionSpeed), :]
X_test = X_temp.loc[~np.isfinite(X_temp.AdoptionSpeed), :]
# Remove missing target column from test:
X_test = X_test.drop(['AdoptionSpeed'], axis=1)
print('X_train shape: {}'.format(X_train.shape))
print('X_test shape: {}'.format(X_test.shape))
assert X_train.shape[0] == train.shape[0]
assert X_test.shape[0] == test.shape[0]
# Check if columns between the two DFs are the same:
train_cols = X_train.columns.tolist()
train_cols.remove('AdoptionSpeed')
test_cols = X_test.columns.tolist()
np.random.seed(13)
categorical_features = ["Type", "Breed1", "Breed2", "Color1" ,"Color2", "Color3", "State"]
impact_coding_map = {}
for f in categorical_features:
print("Impact coding for {}".format(f))
X_train["impact_encoded_{}".format(f)], impact_coding_mapping, default_coding = impact_coding(X_train, f, target="AdoptionSpeed")
impact_coding_map[f] = (impact_coding_mapping, default_coding)
mapping, default_mean = impact_coding_map[f]
X_test["impact_encoded_{}".format(f)] = X_test.apply(lambda x: mapping[x[f]] if x[f] in mapping
else default_mean, axis=1)
for cat in categorical_features:
X_train = frequency_encoding(X_train, cat)
X_test = frequency_encoding(X_test, cat)
params = {'application': 'regression',
'boosting': 'gbdt',
'metric': 'rmse',
'num_leaves': 70,
'max_depth': 9,
'learning_rate': 0.01,
'bagging_fraction': 0.85,
'feature_fraction': 0.8,
'min_split_gain': 0.02,
'min_child_samples': 150,
'min_child_weight': 0.02,
'lambda_l2': 0.0475,
'verbosity': -1,
'data_random_seed': 17}
# Additional parameters:
early_stop = 500
verbose_eval = 100
num_rounds = 10000
n_splits = 5
oof_train, oof_test = train_lightgbm(X_train, X_test, params, n_splits, num_rounds, verbose_eval, early_stop)
optR = OptimizedRounder()
optR.fit(oof_train, X_train['AdoptionSpeed'].values)
coefficients = optR.coefficients()
pred_test_y_k = optR.predict(oof_train, coefficients)
print("\nValid Counts = ", Counter(X_train['AdoptionSpeed'].values))
print("Predicted Counts = ", Counter(pred_test_y_k))
print("Coefficients = ", coefficients)
qwk = quadratic_weighted_kappa(X_train['AdoptionSpeed'].values, pred_test_y_k)
print("QWK = ", qwk)
# Manually adjusted coefficients:
coefficients_ = coefficients.copy()
coefficients_[0] = 1.645
coefficients_[1] = 2.115
coefficients_[3] = 2.84
train_predictions = optR.predict(oof_train, coefficients_).astype(int)
print('train pred distribution: {}'.format(Counter(train_predictions)))
test_predictions = optR.predict(oof_test.mean(axis=1), coefficients_)
print('test pred distribution: {}'.format(Counter(test_predictions)))
# Generate submission:
submission = pd.DataFrame({'PetID': test['PetID'].values, 'AdoptionSpeed': test_predictions.astype(np.int32)})
submission.head()
submission.to_csv('submission.csv', index=False)
if __name__ == '__main__':
main() | """
Calculates the quadratic weighted kappa
    quadratic_weighted_kappa calculates the quadratic weighted kappa
value, which is a measure of inter-rater agreement between two raters
that provide discrete numeric ratings. Potential values range from -1
(representing complete disagreement) to 1 (representing complete
agreement). A kappa value of 0 is expected if all agreement is due to
chance.
quadratic_weighted_kappa(rater_a, rater_b), where rater_a and rater_b
each correspond to a list of integer ratings. These lists must have the
same length.
The ratings should be integers, and it is assumed that they contain
the complete range of possible ratings.
quadratic_weighted_kappa(X, min_rating, max_rating), where min_rating
is the minimum possible rating, and max_rating is the maximum possible
rating
"""
rater_a = y
rater_b = y_pred
    min_rating = None
    max_rating = None
rater_a = np.array(rater_a, dtype=int)
rater_b = np.array(rater_b, dtype=int)
assert(len(rater_a) == len(rater_b))
if min_rating is None:
min_rating = min(min(rater_a), min(rater_b))
if max_rating is None:
max_rating = max(max(rater_a), max(rater_b))
conf_mat = confusion_matrix(rater_a, rater_b,
min_rating, max_rating)
num_ratings = len(conf_mat)
num_scored_items = float(len(rater_a))
hist_rater_a = histogram(rater_a, min_rating, max_rating)
hist_rater_b = histogram(rater_b, min_rating, max_rating)
numerator = 0.0
denominator = 0.0
for i in range(num_ratings):
for j in range(num_ratings):
expected_count = (hist_rater_a[i] * hist_rater_b[j]
/ num_scored_items)
d = pow(i - j, 2.0) / pow(num_ratings - 1, 2.0)
numerator += d * conf_mat[i][j] / num_scored_items
denominator += d * expected_count / num_scored_items
return (1.0 - numerator / denominator) |
list.class.ts | import IListInterface from '../interfaces/list.interface'
import IBookInterface from '../interfaces/book.interface'
class | implements IListInterface {
public id: string;
public userId: string;
public books: IBookInterface;
    constructor(list: IListInterface) {
        Object.assign(this, list);
    }
}
export default List | List |
vec-macro-with-trailing-comma.rs | // run-pass
pub fn | () {
assert_eq!(vec![1], vec![1,]);
assert_eq!(vec![1, 2, 3], vec![1, 2, 3,]);
}
| main |
blapi.js | const { join } = require('path');
const bttps = require(join(__dirname, 'bttps.js'));
async function handleInternal(discordClient, apiKeys, repeatInterval) {
    // set the function to repeat
setTimeout(handleInternal.bind(null, discordClient, apiKeys, repeatInterval), (60000 * repeatInterval));
    // the actual code to post the stats
if (discordClient.user) {
        if (repeatInterval > 2) { // if the interval isn't below the BotBlock rate limit, use their API
apiKeys["server_count"] = discordClient.guilds.size;
apiKeys["bot_id"] = discordClient.user.id;
if (discordClient.shard) {
apiKeys["shard_id"] = discordClient.shard.id;
apiKeys["shard_count"] = discordClient.shard.count;
}
bttps.post('botblock.org', '/api/count', 'no key needed for this', apiKeys).catch((e) => console.error(`BLAPI: ${e}`));
} else {
postToAllLists(discordClient.guilds.size, discordClient.user.id, apiKeys);
}
} else {
console.error("BLAPI : Discord client seems to not be connected yet, so we're skipping the post");
}
}
module.exports = {
/**
* This function is for automated use with discord.js
     * @param {Client} discordClient Client via which your code is connected to Discord
* @param {object} apiKeys A JSON object formatted like: {"botlist name":"API Keys for that list", etc.}
     * @param {integer} repeatInterval Number of minutes between posts; leave it out to use 30
*/
handle: async (discordClient, apiKeys, repeatInterval) => {
//handle inputs
if (!repeatInterval || repeatInterval < 1)
repeatInterval = 30;
handleInternal(discordClient, apiKeys, repeatInterval);
},
/**
* For when you don't use discord.js or just want to post to manual times
     * @param {integer} guildCount Number of guilds your bot is serving
     * @param {string} botID Snowflake ID of the user your bot is using
* @param {object} apiKeys A JSON object formatted like: {"botlist name":"API Keys for that list", etc.}
     * @param {boolean} noBotBlockPlis If you don't want to use the BotBlock API, set this to true
*/
manualPost: async (guildCount, botID, apiKeys, noBotBlockPlis) => { //TODO add shard support
if (!noBotBlockPlis) {
apiKeys["server_count"] = guildCount;
apiKeys["bot_id"] = botID;
bttps.post('botblock.org', '/api/count', 'no key needed for this', apiKeys).catch((e) => console.error(`BLAPI: ${e}`));
} else {
postToAllLists(guildCount, botID, apiKeys);
}
}
};
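/* Usage sketch (not part of the original module; assumes a connected
discord.js client and placeholder tokens — substitute your own list
names and API keys):
const blapi = require('./blapi.js');
const apiKeys = { 'discordbots.org': 'TOKEN_A', 'botlist.space': 'TOKEN_B' };
blapi.handle(client, apiKeys, 30); // post via the BotBlock API every 30 minutes
blapi.manualPost(client.guilds.size, client.user.id, apiKeys, true); // post to each list directly
*/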
let listData;
async function postToAllLists(guildCount, botID, apiKeys) {
//make sure we have all lists we can post to and their apis
if (!listData) {
listData = await bttps.get('https://botblock.org/api/lists/count').catch((e) => console.error(`BLAPI: ${e}`));
if (!listData) {
console.error("BLAPI : Something went wrong when contacting BotBlock for the API of the lists, so we're using an older preset. Some lists might not be available because of this.");
listData = oldListData;
}
}
for (let listname in listData) {
if (apiKeys[listname]) {
let list = listData[listname];
let url = 'https://' + listname;
let apiPath = list['api_post'].replace(url, '').replace(':id', botID);
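//build the list-specific payload, e.g. { "server_count": 42 } when api_field is "server_count"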
let sendObj = JSON.parse(`{ "${list["api_field"]}": ${guildCount} }`);
bttps.post(listname, apiPath, apiKeys[listname], sendObj).catch((e) => console.error(`BLAPI: ${e}`));
}
}
}
const oldListData = {
"botsfordiscord.com": {
"api_docs": "https://botsfordiscord.com/docs/v1",
"api_post": "https://botsfordiscord.com/api/v1/bots/:id",
"api_field": "server_count",
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"bots.ondiscord.xyz": {
"api_docs": "https://bots.ondiscord.xyz/info/api",
"api_post": "https://bots.ondiscord.xyz/bot-api/bots/:id/guilds",
"api_field": "guildCount",
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"boatlist.ml": {
"api_docs": null,
"api_post": "https://boatlist.ml/api/bots/:id/stats",
"api_field": "server_count",
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"botlist.space": {
"api_docs": "https://botlist.space/docs/api",
"api_post": "https://botlist.space/api/bots/:id",
"api_field": "server_count",
"api_shard_id": null,
"api_shard_count": null,
"api_shards": "shards"
},
"carbonitex.net": {
"api_docs": null,
"api_post": null,
"api_field": null,
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"discordboats.club": {
"api_docs": null,
"api_post": "https://discordboats.club/api/public/bot/stats",
"api_field": "server_count",
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"discordbots.org": {
"api_docs": "https://discordbots.org/api/docs",
"api_post": "https://discordbots.org/api/bots/:id/stats",
"api_field": "server_count",
"api_shard_id": "shard_id",
"api_shard_count": "shard_count",
"api_shards": "shards"
},
"discordbot.world": {
"api_docs": "https://discordbot.world/docs",
"api_post": "https://discordbot.world/api/bot/:id/stats",
"api_field": "guild_count",
"api_shard_id": null,
"api_shard_count": null,
"api_shards": "shards"
},
"bots.discord.pw": {
"api_docs": "https://bots.discord.pw/api",
"api_post": "https://bots.discord.pw/api/bots/:id/stats",
"api_field": "server_count",
"api_shard_id": "shard_id",
"api_shard_count": "shard_count",
"api_shards": null
},
"discordbots.group": {
"api_docs": "https://discordbots.group/api/docs",
"api_post": "https://discordbots.group/api/bot/:id",
"api_field": "count",
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"discordbots.co.uk": {
"api_docs": null,
"api_post": null,
"api_field": null,
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"discordmusicbots.com": {
"api_docs": null,
"api_post": null,
"api_field": null,
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"discord.services": {
"api_docs": "http://discord.services/api/",
"api_post": "https://discord.services/api/bots/:id",
"api_field": "server_count",
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"botlist.co": {
"api_docs": null, | "api_post": null,
"api_field": null,
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"solutions.softonic.com": {
"api_docs": null,
"api_post": null,
"api_field": null,
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
},
"thereisabotforthat.com": {
"api_docs": null,
"api_post": null,
"api_field": null,
"api_shard_id": null,
"api_shard_count": null,
"api_shards": null
}
}; | |
main_test.go | package main
import (
"testing"
)
func TestAgeKey(t *testing.T) {
key, _ := GenerateIdentity()
encrypted, e := Encrypt(EncryptReq{
Recipients: []string{key.PublicKey},
Binary: false,
Content: "hello",
Passphrase: "",
})
if e != nil {
t.Fatalf("encrypt failed: %v", e)
}
decrypted, e := Decrypt(DecryptReq{
Identities: []string{key.PrivateKey},
Binary: false,
Content: encrypted.Content,
Passphrase: "",
})
if decrypted.Content != "hello" || e != nil {
t.Errorf("expected \"hello\" got %s, error %v", decrypted.Content, e)
}
}
func TestPass(t *testing.T) {
pass := GeneratePassphrase()
encrypted, e := Encrypt(EncryptReq{
Recipients: []string{},
Binary: false,
Content: "hello",
Passphrase: pass,
})
if e != nil {
t.Fatalf("encrypt failed: %v", e)
}
decrypted, e := Decrypt(DecryptReq{
Identities: []string{},
Binary: false,
Content: encrypted.Content,
Passphrase: pass,
})
if decrypted.Content != "hello" || e != nil {
t.Errorf("expected \"hello\" got %s, error %v", decrypted.Content, e)
}
}
test_parquet.py | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from collections import OrderedDict
import datetime
import decimal
import io
import json
import os
import six
import pickle
import pytest
import numpy as np
import pyarrow as pa
from pyarrow.compat import guid, u, BytesIO, unichar, PY2
from pyarrow.pandas_compat import _pandas_api
from pyarrow.tests import util
from pyarrow.filesystem import LocalFileSystem, FileSystem
try:
import pyarrow.parquet as pq
except ImportError:
pq = None
try:
import pandas as pd
import pandas.util.testing as tm
from .pandas_examples import dataframe_with_arrays, dataframe_with_lists
except ImportError:
pd = tm = None
# Marks all of the tests in this module
# Ignore these with pytest ... -m 'not parquet'
pytestmark = pytest.mark.parquet
@pytest.fixture(scope='module')
def datadir(datadir):
# scope the shared datadir fixture to the parquet test-data subdirectory
return datadir / 'parquet'
def _write_table(table, path, **kwargs):
# So we see the ImportError somewhere
import pyarrow.parquet as pq
if _pandas_api.is_data_frame(table):
table = pa.Table.from_pandas(table)
pq.write_table(table, path, **kwargs)
return table
def _read_table(*args, **kwargs):
return pq.read_table(*args, **kwargs)
def _roundtrip_table(table, read_table_kwargs=None,
write_table_kwargs=None):
read_table_kwargs = read_table_kwargs or {}
write_table_kwargs = write_table_kwargs or {}
buf = io.BytesIO()
_write_table(table, buf, **write_table_kwargs)
buf.seek(0)
return _read_table(buf, **read_table_kwargs)
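# Sketch of how the helpers above compose (comment only; names as defined
# in this module):
#   t = pa.table([pa.array([1, 2, 3])], names=['x'])
#   assert _roundtrip_table(t).equals(t)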
def _check_roundtrip(table, expected=None, read_table_kwargs=None,
**write_table_kwargs):
if expected is None:
expected = table
read_table_kwargs = read_table_kwargs or {}
# intentionally check twice
result = _roundtrip_table(table, read_table_kwargs=read_table_kwargs,
write_table_kwargs=write_table_kwargs)
assert result.equals(expected)
result = _roundtrip_table(result, read_table_kwargs=read_table_kwargs,
write_table_kwargs=write_table_kwargs)
assert result.equals(expected)
def _roundtrip_pandas_dataframe(df, write_kwargs):
table = pa.Table.from_pandas(df)
buf = io.BytesIO()
_write_table(table, buf, **write_kwargs)
buf.seek(0)
table1 = _read_table(buf)
return table1.to_pandas()
@pytest.mark.parametrize('dtype', [int, float])
def test_single_pylist_column_roundtrip(tempdir, dtype):
filename = tempdir / 'single_{}_column.parquet'.format(dtype.__name__)
data = [pa.array(list(map(dtype, range(5))))]
table = pa.Table.from_arrays(data, names=['a'])
_write_table(table, filename)
table_read = _read_table(filename)
for i in range(table.num_columns):
col_written = table[i]
col_read = table_read[i]
assert table.field(i).name == table_read.field(i).name
assert col_read.num_chunks == 1
data_written = col_written.chunk(0)
data_read = col_read.chunk(0)
assert data_written.equals(data_read)
def alltypes_sample(size=10000, seed=0, categorical=False):
np.random.seed(seed)
arrays = {
'uint8': np.arange(size, dtype=np.uint8),
'uint16': np.arange(size, dtype=np.uint16),
'uint32': np.arange(size, dtype=np.uint32),
'uint64': np.arange(size, dtype=np.uint64),
'int8': np.arange(size, dtype=np.int8),
'int16': np.arange(size, dtype=np.int16),
'int32': np.arange(size, dtype=np.int32),
'int64': np.arange(size, dtype=np.int64),
'float32': np.arange(size, dtype=np.float32),
'float64': np.arange(size, dtype=np.float64),
'bool': np.random.randn(size) > 0,
# TODO(wesm): Test other timestamp resolutions now that arrow supports
# them
'datetime': np.arange("2016-01-01T00:00:00.001", size,
dtype='datetime64[ms]'),
'str': pd.Series([str(x) for x in range(size)]),
'empty_str': [''] * size,
'str_with_nulls': [None] + [str(x) for x in range(size - 2)] + [None],
'null': [None] * size,
'null_list': [None] * 2 + [[None] * (x % 4) for x in range(size - 2)],
}
if categorical:
arrays['str_category'] = arrays['str'].astype('category')
return pd.DataFrame(arrays)
@pytest.mark.pandas
@pytest.mark.parametrize('chunk_size', [None, 1000])
def test_pandas_parquet_2_0_roundtrip(tempdir, chunk_size):
df = alltypes_sample(size=10000, categorical=True)
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df)
assert arrow_table.schema.pandas_metadata is not None
_write_table(arrow_table, filename, version="2.0",
coerce_timestamps='ms', chunk_size=chunk_size)
table_read = pq.read_pandas(filename)
assert table_read.schema.pandas_metadata is not None
assert arrow_table.schema.metadata == table_read.schema.metadata
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
def test_set_data_page_size():
arr = pa.array([1, 2, 3] * 1000000)
t = pa.Table.from_arrays([arr], names=['f0'])
# 128K, 256K, 512K
page_sizes = [2 << 16, 2 << 17, 2 << 18]
for target_page_size in page_sizes:
_check_roundtrip(t, data_page_size=target_page_size)
@pytest.mark.pandas
def test_chunked_table_write():
# ARROW-232
df = alltypes_sample(size=10)
batch = pa.RecordBatch.from_pandas(df)
table = pa.Table.from_batches([batch] * 3)
_check_roundtrip(table, version='2.0')
df, _ = dataframe_with_lists()
batch = pa.RecordBatch.from_pandas(df)
table = pa.Table.from_batches([batch] * 3)
_check_roundtrip(table, version='2.0')
@pytest.mark.pandas
def test_no_memory_map(tempdir):
df = alltypes_sample(size=10)
table = pa.Table.from_pandas(df)
_check_roundtrip(table, read_table_kwargs={'memory_map': False},
version='2.0')
filename = str(tempdir / 'tmp_file')
with open(filename, 'wb') as f:
_write_table(table, f, version='2.0')
table_read = pq.read_pandas(filename, memory_map=False)
assert table_read.equals(table)
def test_special_chars_filename(tempdir):
table = pa.Table.from_arrays([pa.array([42])], ["ints"])
filename = "foo # bar"
path = tempdir / filename
assert not path.exists()
_write_table(table, str(path))
assert path.exists()
table_read = _read_table(str(path))
assert table_read.equals(table)
@pytest.mark.pandas
def test_empty_table_roundtrip():
df = alltypes_sample(size=10)
# Create a non-empty table to infer the types correctly, then slice to 0
table = pa.Table.from_pandas(df)
table = pa.Table.from_arrays(
[col.chunk(0)[:0] for col in table.itercolumns()],
names=table.schema.names)
assert table.schema.field_by_name('null').type == pa.null()
assert table.schema.field_by_name('null_list').type == pa.list_(pa.null())
_check_roundtrip(table, version='2.0')
@pytest.mark.pandas
def test_empty_table_no_columns():
df = pd.DataFrame()
empty = pa.Table.from_pandas(df, preserve_index=False)
_check_roundtrip(empty)
def test_empty_lists_table_roundtrip():
# ARROW-2744: Shouldn't crash when writing an array of empty lists
arr = pa.array([[], []], type=pa.list_(pa.int32()))
table = pa.Table.from_arrays([arr], ["A"])
_check_roundtrip(table)
@pytest.mark.pandas
def test_pandas_parquet_datetime_tz():
s = pd.Series([datetime.datetime(2017, 9, 6)])
s = s.dt.tz_localize('utc')
s.index = s
# Both a column and an index to hit both use cases
df = pd.DataFrame({'tz_aware': s,
'tz_eastern': s.dt.tz_convert('US/Eastern')},
index=s)
f = BytesIO()
arrow_table = pa.Table.from_pandas(df)
_write_table(arrow_table, f, coerce_timestamps='ms')
f.seek(0)
table_read = pq.read_pandas(f)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
@pytest.mark.skipif(six.PY2, reason='datetime.timezone is available since '
'python version 3.2')
def test_datetime_timezone_tzinfo():
value = datetime.datetime(2018, 1, 1, 1, 23, 45,
tzinfo=datetime.timezone.utc)
df = pd.DataFrame({'foo': [value]})
_roundtrip_pandas_dataframe(df, write_kwargs={})
@pytest.mark.pandas
def test_pandas_parquet_custom_metadata(tempdir):
df = alltypes_sample(size=10000)
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df)
assert b'pandas' in arrow_table.schema.metadata
_write_table(arrow_table, filename, version='2.0', coerce_timestamps='ms')
metadata = pq.read_metadata(filename).metadata
assert b'pandas' in metadata
js = json.loads(metadata[b'pandas'].decode('utf8'))
assert js['index_columns'] == [{'kind': 'range',
'name': None,
'start': 0, 'stop': 10000,
'step': 1}]
@pytest.mark.pandas
def test_pandas_parquet_column_multiindex(tempdir):
df = alltypes_sample(size=10)
df.columns = pd.MultiIndex.from_tuples(
list(zip(df.columns, df.columns[::-1])),
names=['level_1', 'level_2']
)
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df)
assert arrow_table.schema.pandas_metadata is not None
_write_table(arrow_table, filename, version='2.0', coerce_timestamps='ms')
table_read = pq.read_pandas(filename)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_pandas_parquet_2_0_roundtrip_read_pandas_no_index_written(tempdir):
df = alltypes_sample(size=10000)
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df, preserve_index=False)
js = arrow_table.schema.pandas_metadata
assert not js['index_columns']
# ARROW-2170
# While index_columns should be empty, columns needs to be filled still.
assert js['columns']
_write_table(arrow_table, filename, version='2.0', coerce_timestamps='ms')
table_read = pq.read_pandas(filename)
js = table_read.schema.pandas_metadata
assert not js['index_columns']
assert arrow_table.schema.metadata == table_read.schema.metadata
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_pandas_parquet_1_0_roundtrip(tempdir):
size = 10000
np.random.seed(0)
df = pd.DataFrame({
'uint8': np.arange(size, dtype=np.uint8),
'uint16': np.arange(size, dtype=np.uint16),
'uint32': np.arange(size, dtype=np.uint32),
'uint64': np.arange(size, dtype=np.uint64),
'int8': np.arange(size, dtype=np.int8),
'int16': np.arange(size, dtype=np.int16),
'int32': np.arange(size, dtype=np.int32),
'int64': np.arange(size, dtype=np.int64),
'float32': np.arange(size, dtype=np.float32),
'float64': np.arange(size, dtype=np.float64),
'bool': np.random.randn(size) > 0,
'str': [str(x) for x in range(size)],
'str_with_nulls': [None] + [str(x) for x in range(size - 2)] + [None],
'empty_str': [''] * size
})
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df)
_write_table(arrow_table, filename, version='1.0')
table_read = _read_table(filename)
df_read = table_read.to_pandas()
# We pass uint32_t as int64_t if we write Parquet version 1.0
df['uint32'] = df['uint32'].values.astype(np.int64)
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_multiple_path_types(tempdir):
# Test compatibility with PEP 519 path-like objects
path = tempdir / 'zzz.parquet'
df = pd.DataFrame({'x': np.arange(10, dtype=np.int64)})
_write_table(df, path)
table_read = _read_table(path)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
# Test compatibility with plain string paths
path = str(tempdir) + 'zzz.parquet'
df = pd.DataFrame({'x': np.arange(10, dtype=np.int64)})
_write_table(df, path)
table_read = _read_table(path)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_pandas_column_selection(tempdir):
size = 10000
np.random.seed(0)
df = pd.DataFrame({
'uint8': np.arange(size, dtype=np.uint8),
'uint16': np.arange(size, dtype=np.uint16)
})
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df)
_write_table(arrow_table, filename)
table_read = _read_table(filename, columns=['uint8'])
df_read = table_read.to_pandas()
tm.assert_frame_equal(df[['uint8']], df_read)
# ARROW-4267: Selection of duplicate columns still leads to these columns
# being read uniquely.
table_read = _read_table(filename, columns=['uint8', 'uint8'])
df_read = table_read.to_pandas()
tm.assert_frame_equal(df[['uint8']], df_read)
def _random_integers(size, dtype):
# We do not generate integers outside the int64 range
platform_int_info = np.iinfo('int_')
iinfo = np.iinfo(dtype)
return np.random.randint(max(iinfo.min, platform_int_info.min),
min(iinfo.max, platform_int_info.max),
size=size).astype(dtype)
def _test_dataframe(size=10000, seed=0):
np.random.seed(seed)
df = pd.DataFrame({
'uint8': _random_integers(size, np.uint8),
'uint16': _random_integers(size, np.uint16),
'uint32': _random_integers(size, np.uint32),
'uint64': _random_integers(size, np.uint64),
'int8': _random_integers(size, np.int8),
'int16': _random_integers(size, np.int16),
'int32': _random_integers(size, np.int32),
'int64': _random_integers(size, np.int64),
'float32': np.random.randn(size).astype(np.float32),
'float64': np.arange(size, dtype=np.float64),
'bool': np.random.randn(size) > 0,
'strings': [tm.rands(10) for i in range(size)],
'all_none': [None] * size,
'all_none_category': [None] * size
})
# TODO(PARQUET-1015)
# df['all_none_category'] = df['all_none_category'].astype('category')
return df
@pytest.mark.pandas
def test_pandas_parquet_native_file_roundtrip(tempdir):
df = _test_dataframe(10000)
arrow_table = pa.Table.from_pandas(df)
imos = pa.BufferOutputStream()
_write_table(arrow_table, imos, version="2.0")
buf = imos.getvalue()
reader = pa.BufferReader(buf)
df_read = _read_table(reader).to_pandas()
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_parquet_incremental_file_build(tempdir):
df = _test_dataframe(100)
df['unique_id'] = 0
arrow_table = pa.Table.from_pandas(df, preserve_index=False)
out = pa.BufferOutputStream()
writer = pq.ParquetWriter(out, arrow_table.schema, version='2.0')
frames = []
for i in range(10):
df['unique_id'] = i
arrow_table = pa.Table.from_pandas(df, preserve_index=False)
writer.write_table(arrow_table)
frames.append(df.copy())
writer.close()
buf = out.getvalue()
result = _read_table(pa.BufferReader(buf))
expected = pd.concat(frames, ignore_index=True)
tm.assert_frame_equal(result.to_pandas(), expected)
@pytest.mark.pandas
def test_read_pandas_column_subset(tempdir):
df = _test_dataframe(10000)
arrow_table = pa.Table.from_pandas(df)
imos = pa.BufferOutputStream()
_write_table(arrow_table, imos, version="2.0")
buf = imos.getvalue()
reader = pa.BufferReader(buf)
df_read = pq.read_pandas(reader, columns=['strings', 'uint8']).to_pandas()
tm.assert_frame_equal(df[['strings', 'uint8']], df_read)
@pytest.mark.pandas
def test_pandas_parquet_empty_roundtrip(tempdir):
df = _test_dataframe(0)
arrow_table = pa.Table.from_pandas(df)
imos = pa.BufferOutputStream()
_write_table(arrow_table, imos, version="2.0")
buf = imos.getvalue()
reader = pa.BufferReader(buf)
df_read = _read_table(reader).to_pandas()
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_pandas_parquet_pyfile_roundtrip(tempdir):
filename = tempdir / 'pandas_pyfile_roundtrip.parquet'
size = 5
df = pd.DataFrame({
'int64': np.arange(size, dtype=np.int64),
'float32': np.arange(size, dtype=np.float32),
'float64': np.arange(size, dtype=np.float64),
'bool': np.random.randn(size) > 0,
'strings': ['foo', 'bar', None, 'baz', 'qux']
})
arrow_table = pa.Table.from_pandas(df)
with filename.open('wb') as f:
_write_table(arrow_table, f, version="1.0")
data = io.BytesIO(filename.read_bytes())
table_read = _read_table(data)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_pandas_parquet_configuration_options(tempdir):
size = 10000
np.random.seed(0)
df = pd.DataFrame({
'uint8': np.arange(size, dtype=np.uint8),
'uint16': np.arange(size, dtype=np.uint16),
'uint32': np.arange(size, dtype=np.uint32),
'uint64': np.arange(size, dtype=np.uint64),
'int8': np.arange(size, dtype=np.int8),
'int16': np.arange(size, dtype=np.int16),
'int32': np.arange(size, dtype=np.int32),
'int64': np.arange(size, dtype=np.int64),
'float32': np.arange(size, dtype=np.float32),
'float64': np.arange(size, dtype=np.float64),
'bool': np.random.randn(size) > 0
})
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df)
for use_dictionary in [True, False]:
_write_table(arrow_table, filename, version='2.0',
use_dictionary=use_dictionary)
table_read = _read_table(filename)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
for write_statistics in [True, False]:
_write_table(arrow_table, filename, version='2.0',
write_statistics=write_statistics)
table_read = _read_table(filename)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
for compression in ['NONE', 'SNAPPY', 'GZIP', 'LZ4', 'ZSTD']:
_write_table(arrow_table, filename, version='2.0',
compression=compression)
table_read = _read_table(filename)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
def make_sample_file(table_or_df):
if isinstance(table_or_df, pa.Table):
a_table = table_or_df
else:
a_table = pa.Table.from_pandas(table_or_df)
buf = io.BytesIO()
_write_table(a_table, buf, compression='SNAPPY', version='2.0',
coerce_timestamps='ms')
buf.seek(0)
return pq.ParquetFile(buf)
@pytest.mark.pandas
def test_parquet_metadata_api():
df = alltypes_sample(size=10000)
df = df.reindex(columns=sorted(df.columns))
df.index = np.random.randint(0, 1000000, size=len(df))
fileh = make_sample_file(df)
ncols = len(df.columns)
# Series of sniff tests
meta = fileh.metadata
repr(meta)
assert meta.num_rows == len(df)
assert meta.num_columns == ncols + 1 # +1 for index
assert meta.num_row_groups == 1
assert meta.format_version == '2.0'
assert 'parquet-cpp' in meta.created_by
assert isinstance(meta.serialized_size, int)
assert isinstance(meta.metadata, dict)
# Schema
schema = fileh.schema
assert meta.schema is schema
assert len(schema) == ncols + 1 # +1 for index
repr(schema)
col = schema[0]
repr(col)
assert col.name == df.columns[0]
assert col.max_definition_level == 1
assert col.max_repetition_level == 0
assert col.physical_type == 'BOOLEAN'
assert col.converted_type == 'NONE'
with pytest.raises(IndexError):
schema[ncols + 1] # +1 for index
with pytest.raises(IndexError):
schema[-1]
# Row group
for rg in range(meta.num_row_groups):
rg_meta = meta.row_group(rg)
assert isinstance(rg_meta, pq.RowGroupMetaData)
repr(rg_meta)
for col in range(rg_meta.num_columns):
col_meta = rg_meta.column(col)
assert isinstance(col_meta, pq.ColumnChunkMetaData)
repr(col_meta)
with pytest.raises(IndexError):
meta.row_group(-1)
with pytest.raises(IndexError):
meta.row_group(meta.num_row_groups + 1)
rg_meta = meta.row_group(0)
assert rg_meta.num_rows == len(df)
assert rg_meta.num_columns == ncols + 1 # +1 for index
assert rg_meta.total_byte_size > 0
with pytest.raises(IndexError):
col_meta = rg_meta.column(-1)
with pytest.raises(IndexError):
col_meta = rg_meta.column(ncols + 2)
col_meta = rg_meta.column(0)
assert col_meta.file_offset > 0
assert col_meta.file_path == '' # created from BytesIO
assert col_meta.physical_type == 'BOOLEAN'
assert col_meta.num_values == 10000
assert col_meta.path_in_schema == 'bool'
assert col_meta.is_stats_set is True
assert isinstance(col_meta.statistics, pq.Statistics)
assert col_meta.compression == 'SNAPPY'
assert col_meta.encodings == ('PLAIN', 'RLE')
assert col_meta.has_dictionary_page is False
assert col_meta.dictionary_page_offset is None
assert col_meta.data_page_offset > 0
assert col_meta.total_compressed_size > 0
assert col_meta.total_uncompressed_size > 0
with pytest.raises(NotImplementedError):
col_meta.has_index_page
with pytest.raises(NotImplementedError):
col_meta.index_page_offset
@pytest.mark.pandas
@pytest.mark.parametrize(
(
'data',
'type',
'physical_type',
'min_value',
'max_value',
'null_count',
'num_values',
'distinct_count'
),
[
([1, 2, 2, None, 4], pa.uint8(), 'INT32', 1, 4, 1, 4, 0),
([1, 2, 2, None, 4], pa.uint16(), 'INT32', 1, 4, 1, 4, 0),
([1, 2, 2, None, 4], pa.uint32(), 'INT32', 1, 4, 1, 4, 0),
([1, 2, 2, None, 4], pa.uint64(), 'INT64', 1, 4, 1, 4, 0),
([-1, 2, 2, None, 4], pa.int8(), 'INT32', -1, 4, 1, 4, 0),
([-1, 2, 2, None, 4], pa.int16(), 'INT32', -1, 4, 1, 4, 0),
([-1, 2, 2, None, 4], pa.int32(), 'INT32', -1, 4, 1, 4, 0),
([-1, 2, 2, None, 4], pa.int64(), 'INT64', -1, 4, 1, 4, 0),
(
[-1.1, 2.2, 2.3, None, 4.4], pa.float32(),
'FLOAT', -1.1, 4.4, 1, 4, 0
),
(
[-1.1, 2.2, 2.3, None, 4.4], pa.float64(),
'DOUBLE', -1.1, 4.4, 1, 4, 0
),
(
[u'', u'b', unichar(1000), None, u'aaa'], pa.binary(),
'BYTE_ARRAY', b'', unichar(1000).encode('utf-8'), 1, 4, 0
),
(
[True, False, False, True, True], pa.bool_(),
'BOOLEAN', False, True, 0, 5, 0
),
(
[b'\x00', b'b', b'12', None, b'aaa'], pa.binary(),
'BYTE_ARRAY', b'\x00', b'b', 1, 4, 0
),
]
)
def test_parquet_column_statistics_api(data, type, physical_type, min_value,
max_value, null_count, num_values,
distinct_count):
df = pd.DataFrame({'data': data})
schema = pa.schema([pa.field('data', type)])
table = pa.Table.from_pandas(df, schema=schema, safe=False)
fileh = make_sample_file(table)
meta = fileh.metadata
rg_meta = meta.row_group(0)
col_meta = rg_meta.column(0)
stat = col_meta.statistics
assert stat.has_min_max
assert _close(type, stat.min, min_value)
assert _close(type, stat.max, max_value)
assert stat.null_count == null_count
assert stat.num_values == num_values
# TODO(kszucs): until the parquet-cpp API exposes HasDistinctCount,
# a missing distinct_count is represented as zero instead of None
assert stat.distinct_count == distinct_count
assert stat.physical_type == physical_type
def _close(type, left, right):
if type == pa.float32():
return abs(left - right) < 1E-7
elif type == pa.float64():
return abs(left - right) < 1E-13
else:
return left == right
def test_statistics_convert_logical_types(tempdir):
# ARROW-5166, ARROW-4139
# (min, max, type)
cases = [(10, 11164359321221007157, pa.uint64()),
(10, 4294967295, pa.uint32()),
(u"ähnlich", u"öffentlich", pa.utf8()),
(datetime.time(10, 30, 0, 1000), datetime.time(15, 30, 0, 1000),
pa.time32('ms')),
(datetime.time(10, 30, 0, 1000), datetime.time(15, 30, 0, 1000),
pa.time64('us')),
(datetime.datetime(2019, 6, 24, 0, 0, 0, 1000),
datetime.datetime(2019, 6, 25, 0, 0, 0, 1000),
pa.timestamp('ms')),
(datetime.datetime(2019, 6, 24, 0, 0, 0, 1000),
datetime.datetime(2019, 6, 25, 0, 0, 0, 1000),
pa.timestamp('us'))]
for i, (min_val, max_val, typ) in enumerate(cases):
t = pa.Table.from_arrays([pa.array([min_val, max_val], type=typ)],
['col'])
path = str(tempdir / ('example{}.parquet'.format(i)))
pq.write_table(t, path, version='2.0')
pf = pq.ParquetFile(path)
stats = pf.metadata.row_group(0).column(0).statistics
assert stats.min == min_val
assert stats.max == max_val
def test_parquet_write_disable_statistics(tempdir):
table = pa.Table.from_pydict(
{'a': pa.array([1, 2, 3]), 'b': pa.array(['a', 'b', 'c'])})
_write_table(table, tempdir / 'data.parquet')
meta = pq.read_metadata(tempdir / 'data.parquet')
for col in [0, 1]:
cc = meta.row_group(0).column(col)
assert cc.is_stats_set is True
assert cc.statistics is not None
_write_table(table, tempdir / 'data2.parquet', write_statistics=False)
meta = pq.read_metadata(tempdir / 'data2.parquet')
for col in [0, 1]:
cc = meta.row_group(0).column(col)
assert cc.is_stats_set is False
assert cc.statistics is None
_write_table(table, tempdir / 'data3.parquet', write_statistics=['a'])
meta = pq.read_metadata(tempdir / 'data3.parquet')
cc_a = meta.row_group(0).column(0)
assert cc_a.is_stats_set is True
assert cc_a.statistics is not None
cc_b = meta.row_group(0).column(1)
assert cc_b.is_stats_set is False
assert cc_b.statistics is None
@pytest.mark.pandas
def test_compare_schemas():
df = alltypes_sample(size=10000)
fileh = make_sample_file(df)
fileh2 = make_sample_file(df)
fileh3 = make_sample_file(df[df.columns[::2]])
# ParquetSchema
assert isinstance(fileh.schema, pq.ParquetSchema)
assert fileh.schema.equals(fileh.schema)
assert fileh.schema == fileh.schema
assert fileh.schema.equals(fileh2.schema)
assert fileh.schema == fileh2.schema
assert fileh.schema != 'arbitrary object'
assert not fileh.schema.equals(fileh3.schema)
assert fileh.schema != fileh3.schema
# ColumnSchema
assert isinstance(fileh.schema[0], pq.ColumnSchema)
assert fileh.schema[0].equals(fileh.schema[0])
assert fileh.schema[0] == fileh.schema[0]
assert not fileh.schema[0].equals(fileh.schema[1])
assert fileh.schema[0] != fileh.schema[1]
assert fileh.schema[0] != 'arbitrary object'
def test_validate_schema_write_table(tempdir):
# ARROW-2926
simple_fields = [
pa.field('POS', pa.uint32()),
pa.field('desc', pa.string())
]
simple_schema = pa.schema(simple_fields)
# simple_table schema does not match simple_schema
simple_from_array = [pa.array([1]), pa.array(['bla'])]
simple_table = pa.Table.from_arrays(simple_from_array, ['POS', 'desc'])
path = tempdir / 'simple_validate_schema.parquet'
with pq.ParquetWriter(path, simple_schema,
version='2.0',
compression='snappy', flavor='spark') as w:
with pytest.raises(ValueError):
w.write_table(simple_table)
@pytest.mark.pandas
def test_column_of_arrays(tempdir):
df, schema = dataframe_with_arrays()
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df, schema=schema)
_write_table(arrow_table, filename, version="2.0", coerce_timestamps='ms')
table_read = _read_table(filename)
df_read = table_read.to_pandas()
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_coerce_timestamps(tempdir):
from collections import OrderedDict
# ARROW-622
arrays = OrderedDict()
fields = [pa.field('datetime64',
pa.list_(pa.timestamp('ms')))]
arrays['datetime64'] = [
np.array(['2007-07-13T01:23:34.123456789',
None,
'2010-08-13T05:46:57.437699912'],
dtype='datetime64[ms]'),
None,
None,
np.array(['2007-07-13T02',
None,
'2010-08-13T05:46:57.437699912'],
dtype='datetime64[ms]'),
]
df = pd.DataFrame(arrays)
schema = pa.schema(fields)
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df, schema=schema)
_write_table(arrow_table, filename, version="2.0", coerce_timestamps='us')
table_read = _read_table(filename)
df_read = table_read.to_pandas()
df_expected = df.copy()
for i, x in enumerate(df_expected['datetime64']):
if isinstance(x, np.ndarray):
df_expected['datetime64'][i] = x.astype('M8[us]')
tm.assert_frame_equal(df_expected, df_read)
with pytest.raises(ValueError):
_write_table(arrow_table, filename, version='2.0',
coerce_timestamps='unknown')
@pytest.mark.pandas
def test_coerce_timestamps_truncated(tempdir):
"""
ARROW-2555: Test that we can truncate timestamps when coercing if
explicitly allowed.
"""
dt_us = datetime.datetime(year=2017, month=1, day=1, hour=1, minute=1,
second=1, microsecond=1)
dt_ms = datetime.datetime(year=2017, month=1, day=1, hour=1, minute=1,
second=1)
fields_us = [pa.field('datetime64', pa.timestamp('us'))]
arrays_us = {'datetime64': [dt_us, dt_ms]}
df_us = pd.DataFrame(arrays_us)
schema_us = pa.schema(fields_us)
filename = tempdir / 'pandas_truncated.parquet'
table_us = pa.Table.from_pandas(df_us, schema=schema_us)
_write_table(table_us, filename, version="2.0", coerce_timestamps='ms',
allow_truncated_timestamps=True)
table_ms = _read_table(filename)
df_ms = table_ms.to_pandas()
arrays_expected = {'datetime64': [dt_ms, dt_ms]}
df_expected = pd.DataFrame(arrays_expected)
tm.assert_frame_equal(df_expected, df_ms)
@pytest.mark.pandas
def test_column_of_lists(tempdir):
df, schema = dataframe_with_lists(parquet_compatible=True)
filename = tempdir / 'pandas_roundtrip.parquet'
arrow_table = pa.Table.from_pandas(df, schema=schema)
_write_table(arrow_table, filename, version='2.0')
table_read = _read_table(filename)
df_read = table_read.to_pandas()
if PY2:
# assert_frame_equal fails when comparing datetime.date and
# np.datetime64, even with check_datetimelike_compat=True so
# convert the values to np.datetime64 instead
for col in ['date32[day]_list', 'date64[ms]_list']:
df[col] = df[col].apply(
lambda x: list(map(np.datetime64, x)) if x else x
)
tm.assert_frame_equal(df, df_read)
@pytest.mark.pandas
def test_date_time_types(tempdir):
t1 = pa.date32()
data1 = np.array([17259, 17260, 17261], dtype='int32')
a1 = pa.array(data1, type=t1)
t2 = pa.date64()
data2 = data1.astype('int64') * 86400000
a2 = pa.array(data2, type=t2)
t3 = pa.timestamp('us')
start = pd.Timestamp('2001-01-01').value / 1000
data3 = np.array([start, start + 1, start + 2], dtype='int64')
a3 = pa.array(data3, type=t3)
t4 = pa.time32('ms')
data4 = np.arange(3, dtype='i4')
a4 = pa.array(data4, type=t4)
t5 = pa.time64('us')
a5 = pa.array(data4.astype('int64'), type=t5)
t6 = pa.time32('s')
a6 = pa.array(data4, type=t6)
ex_t6 = pa.time32('ms')
ex_a6 = pa.array(data4 * 1000, type=ex_t6)
t7 = pa.timestamp('ns')
start = pd.Timestamp('2001-01-01').value
data7 = np.array([start, start + 1000, start + 2000],
dtype='int64')
a7 = pa.array(data7, type=t7)
table = pa.Table.from_arrays([a1, a2, a3, a4, a5, a6, a7],
['date32', 'date64', 'timestamp[us]',
'time32[s]', 'time64[us]',
'time32_from64[s]',
'timestamp[ns]'])
# date64 as date32
# time32[s] to time32[ms]
expected = pa.Table.from_arrays([a1, a1, a3, a4, a5, ex_a6, a7],
['date32', 'date64', 'timestamp[us]',
'time32[s]', 'time64[us]',
'time32_from64[s]',
'timestamp[ns]'])
_check_roundtrip(table, expected=expected, version='2.0')
t0 = pa.timestamp('ms')
data0 = np.arange(4, dtype='int64')
a0 = pa.array(data0, type=t0)
t1 = pa.timestamp('us')
data1 = np.arange(4, dtype='int64')
a1 = pa.array(data1, type=t1)
t2 = pa.timestamp('ns')
data2 = np.arange(4, dtype='int64')
a2 = pa.array(data2, type=t2)
table = pa.Table.from_arrays([a0, a1, a2],
['ts[ms]', 'ts[us]', 'ts[ns]'])
expected = pa.Table.from_arrays([a0, a1, a2],
['ts[ms]', 'ts[us]', 'ts[ns]'])
# int64 for all timestamps supported by default
filename = tempdir / 'int64_timestamps.parquet'
_write_table(table, filename, version='2.0')
parquet_schema = pq.ParquetFile(filename).schema
for i in range(3):
assert parquet_schema.column(i).physical_type == 'INT64'
read_table = _read_table(filename)
assert read_table.equals(expected)
t0_ns = pa.timestamp('ns')
data0_ns = np.array(data0 * 1000000, dtype='int64')
a0_ns = pa.array(data0_ns, type=t0_ns)
t1_ns = pa.timestamp('ns')
data1_ns = np.array(data1 * 1000, dtype='int64')
a1_ns = pa.array(data1_ns, type=t1_ns)
expected = pa.Table.from_arrays([a0_ns, a1_ns, a2],
['ts[ms]', 'ts[us]', 'ts[ns]'])
# int96 nanosecond timestamps produced upon request
filename = tempdir / 'explicit_int96_timestamps.parquet'
_write_table(table, filename, version='2.0',
use_deprecated_int96_timestamps=True)
parquet_schema = pq.ParquetFile(filename).schema
for i in range(3):
assert parquet_schema.column(i).physical_type == 'INT96'
read_table = _read_table(filename)
assert read_table.equals(expected)
# int96 nanosecond timestamps implied by flavor 'spark'
filename = tempdir / 'spark_int96_timestamps.parquet'
_write_table(table, filename, version='2.0',
flavor='spark')
parquet_schema = pq.ParquetFile(filename).schema
for i in range(3):
assert parquet_schema.column(i).physical_type == 'INT96'
read_table = _read_table(filename)
assert read_table.equals(expected)
def test_timestamp_restore_timezone():
# ARROW-5888, restore timezone from serialized metadata
ty = pa.timestamp('ms', tz='America/New_York')
arr = pa.array([1, 2, 3], type=ty)
t = pa.table([arr], names=['f0'])
_check_roundtrip(t)
@pytest.mark.pandas
def test_list_of_datetime_time_roundtrip():
# ARROW-4135
times = pd.to_datetime(['09:00', '09:30', '10:00', '10:30', '11:00',
'11:30', '12:00'])
df = pd.DataFrame({'time': [times.time]})
_roundtrip_pandas_dataframe(df, write_kwargs={})
@pytest.mark.pandas
def test_parquet_version_timestamp_differences():
i_s = pd.Timestamp('2010-01-01').value / 1000000000 # := 1262304000
d_s = np.arange(i_s, i_s + 10, 1, dtype='int64')
d_ms = d_s * 1000
d_us = d_ms * 1000
d_ns = d_us * 1000
a_s = pa.array(d_s, type=pa.timestamp('s'))
a_ms = pa.array(d_ms, type=pa.timestamp('ms'))
a_us = pa.array(d_us, type=pa.timestamp('us'))
a_ns = pa.array(d_ns, type=pa.timestamp('ns'))
names = ['ts:s', 'ts:ms', 'ts:us', 'ts:ns']
table = pa.Table.from_arrays([a_s, a_ms, a_us, a_ns], names)
# Using Parquet version 1.0, seconds should be coerced to milliseconds
# and nanoseconds should be coerced to microseconds by default
expected = pa.Table.from_arrays([a_ms, a_ms, a_us, a_us], names)
_check_roundtrip(table, expected)
# Using Parquet version 2.0, seconds should be coerced to milliseconds
# and nanoseconds should be retained by default
expected = pa.Table.from_arrays([a_ms, a_ms, a_us, a_ns], names)
_check_roundtrip(table, expected, version='2.0')
# Using Parquet version 1.0, coercing to milliseconds or microseconds
# is allowed
expected = pa.Table.from_arrays([a_ms, a_ms, a_ms, a_ms], names)
_check_roundtrip(table, expected, coerce_timestamps='ms')
# Using Parquet version 2.0, coercing to milliseconds or microseconds
# is allowed
expected = pa.Table.from_arrays([a_us, a_us, a_us, a_us], names)
_check_roundtrip(table, expected, version='2.0', coerce_timestamps='us')
# TODO: after pyarrow allows coerce_timestamps='ns', tests like the
# following should pass ...
# Using Parquet version 1.0, coercing to nanoseconds is not allowed
# expected = None
# with pytest.raises(NotImplementedError):
# _roundtrip_table(table, coerce_timestamps='ns')
# Using Parquet version 2.0, coercing to nanoseconds is allowed
# expected = pa.Table.from_arrays([a_ns, a_ns, a_ns, a_ns], names)
# _check_roundtrip(table, expected, version='2.0', coerce_timestamps='ns')
# For either Parquet version, coercing to nanoseconds is allowed
# if Int96 storage is used
expected = pa.Table.from_arrays([a_ns, a_ns, a_ns, a_ns], names)
_check_roundtrip(table, expected,
use_deprecated_int96_timestamps=True)
_check_roundtrip(table, expected, version='2.0',
use_deprecated_int96_timestamps=True)
def test_large_list_records():
# This was fixed in PARQUET-1100
list_lengths = np.random.randint(0, 500, size=50)
list_lengths[::10] = 0
list_values = [list(map(int, np.random.randint(0, 100, size=x)))
if i % 8 else None
for i, x in enumerate(list_lengths)]
a1 = pa.array(list_values)
table = pa.Table.from_arrays([a1], ['int_lists'])
_check_roundtrip(table)
def test_sanitized_spark_field_names():
a0 = pa.array([0, 1, 2, 3, 4])
name = 'prohib; ,\t{}'
table = pa.Table.from_arrays([a0], [name])
result = _roundtrip_table(table, write_table_kwargs={'flavor': 'spark'})
expected_name = 'prohib______'
assert result.schema[0].name == expected_name
@pytest.mark.pandas
def test_spark_flavor_preserves_pandas_metadata():
df = _test_dataframe(size=100)
df.index = np.arange(0, 10 * len(df), 10)
df.index.name = 'foo'
result = _roundtrip_pandas_dataframe(df, {'version': '2.0',
'flavor': 'spark'})
tm.assert_frame_equal(result, df)
def test_fixed_size_binary():
t0 = pa.binary(10)
data = [b'fooooooooo', None, b'barooooooo', b'quxooooooo']
a0 = pa.array(data, type=t0)
table = pa.Table.from_arrays([a0],
['binary[10]'])
_check_roundtrip(table)
@pytest.mark.pandas
def test_multithreaded_read():
df = alltypes_sample(size=10000)
table = pa.Table.from_pandas(df)
buf = io.BytesIO()
_write_table(table, buf, compression='SNAPPY', version='2.0')
buf.seek(0)
table1 = _read_table(buf, use_threads=True)
buf.seek(0)
table2 = _read_table(buf, use_threads=False)
assert table1.equals(table2)
@pytest.mark.pandas
def test_min_chunksize():
data = pd.DataFrame([np.arange(4)], columns=['A', 'B', 'C', 'D'])
table = pa.Table.from_pandas(data.reset_index())
buf = io.BytesIO()
_write_table(table, buf, chunk_size=-1)
buf.seek(0)
result = _read_table(buf)
assert result.equals(table)
with pytest.raises(ValueError):
_write_table(table, buf, chunk_size=0)
@pytest.mark.pandas
def test_pass_separate_metadata():
# ARROW-471
df = alltypes_sample(size=10000)
a_table = pa.Table.from_pandas(df)
buf = io.BytesIO()
_write_table(a_table, buf, compression='snappy', version='2.0')
buf.seek(0)
metadata = pq.read_metadata(buf)
buf.seek(0)
fileh = pq.ParquetFile(buf, metadata=metadata)
tm.assert_frame_equal(df, fileh.read().to_pandas())
@pytest.mark.pandas
def test_read_single_row_group():
# ARROW-471
N, K = 10000, 4
df = alltypes_sample(size=N)
a_table = pa.Table.from_pandas(df)
buf = io.BytesIO()
_write_table(a_table, buf, row_group_size=N / K,
compression='snappy', version='2.0')
buf.seek(0)
pf = pq.ParquetFile(buf)
assert pf.num_row_groups == K
row_groups = [pf.read_row_group(i) for i in range(K)]
result = pa.concat_tables(row_groups)
tm.assert_frame_equal(df, result.to_pandas())
@pytest.mark.pandas
def test_read_single_row_group_with_column_subset():
N, K = 10000, 4
df = alltypes_sample(size=N)
a_table = pa.Table.from_pandas(df)
buf = io.BytesIO()
_write_table(a_table, buf, row_group_size=N / K,
compression='snappy', version='2.0')
buf.seek(0)
pf = pq.ParquetFile(buf)
cols = list(df.columns[:2])
row_groups = [pf.read_row_group(i, columns=cols) for i in range(K)]
result = pa.concat_tables(row_groups)
tm.assert_frame_equal(df[cols], result.to_pandas())
# ARROW-4267: Selection of duplicate columns still leads to these columns
# being read uniquely.
row_groups = [pf.read_row_group(i, columns=cols + cols) for i in range(K)]
result = pa.concat_tables(row_groups)
tm.assert_frame_equal(df[cols], result.to_pandas())
@pytest.mark.pandas
def test_scan_contents():
N, K = 10000, 4
df = alltypes_sample(size=N)
a_table = pa.Table.from_pandas(df)
buf = io.BytesIO()
_write_table(a_table, buf, row_group_size=N / K,
compression='snappy', version='2.0')
buf.seek(0)
pf = pq.ParquetFile(buf)
assert pf.scan_contents() == 10000
assert pf.scan_contents(df.columns[:4]) == 10000
@pytest.mark.pandas
def test_parquet_piece_read(tempdir):
df = _test_dataframe(1000)
table = pa.Table.from_pandas(df)
path = tempdir / 'parquet_piece_read.parquet'
_write_table(table, path, version='2.0')
piece1 = pq.ParquetDatasetPiece(path)
result = piece1.read()
assert result.equals(table)
@pytest.mark.pandas
def test_parquet_piece_open_and_get_metadata(tempdir):
df = _test_dataframe(100)
table = pa.Table.from_pandas(df)
path = tempdir / 'parquet_piece_read.parquet'
_write_table(table, path, version='2.0')
piece = pq.ParquetDatasetPiece(path)
table1 = piece.read()
assert isinstance(table1, pa.Table)
meta1 = piece.get_metadata()
assert isinstance(meta1, pq.FileMetaData)
assert table == table1
def test_parquet_piece_basics():
path = '/baz.parq'
piece1 = pq.ParquetDatasetPiece(path)
piece2 = pq.ParquetDatasetPiece(path, row_group=1)
piece3 = pq.ParquetDatasetPiece(
path, row_group=1, partition_keys=[('foo', 0), ('bar', 1)])
assert str(piece1) == path
assert str(piece2) == '/baz.parq | row_group=1'
assert str(piece3) == 'partition[foo=0, bar=1] /baz.parq | row_group=1'
assert piece1 == piece1
assert piece2 == piece2
assert piece3 == piece3
assert piece1 != piece3
def test_partition_set_dictionary_type():
set1 = pq.PartitionSet('key1', [u('foo'), u('bar'), u('baz')])
set2 = pq.PartitionSet('key2', [2007, 2008, 2009])
assert isinstance(set1.dictionary, pa.StringArray)
assert isinstance(set2.dictionary, pa.IntegerArray)
set3 = pq.PartitionSet('key2', [datetime.datetime(2007, 1, 1)])
with pytest.raises(TypeError):
set3.dictionary
@pytest.mark.pandas
def test_read_partitioned_directory(tempdir):
fs = LocalFileSystem.get_instance()
_partition_test_for_filesystem(fs, tempdir)
@pytest.mark.pandas
def test_create_parquet_dataset_multi_threaded(tempdir):
fs = LocalFileSystem.get_instance()
base_path = tempdir
_partition_test_for_filesystem(fs, base_path)
manifest = pq.ParquetManifest(base_path, filesystem=fs,
metadata_nthreads=1)
dataset = pq.ParquetDataset(base_path, filesystem=fs, metadata_nthreads=16)
assert len(dataset.pieces) > 0
partitions = dataset.partitions
assert len(partitions.partition_names) > 0
assert partitions.partition_names == manifest.partitions.partition_names
assert len(partitions.levels) == len(manifest.partitions.levels)
@pytest.mark.pandas
def test_equivalency(tempdir):
fs = LocalFileSystem.get_instance()
base_path = tempdir
integer_keys = [0, 1]
string_keys = ['a', 'b', 'c']
boolean_keys = [True, False]
partition_spec = [
['integer', integer_keys],
['string', string_keys],
['boolean', boolean_keys]
]
df = pd.DataFrame({
'integer': np.array(integer_keys, dtype='i4').repeat(15),
'string': np.tile(np.tile(np.array(string_keys, dtype=object), 5), 2),
'boolean': np.tile(np.tile(np.array(boolean_keys, dtype='bool'), 5),
3),
}, columns=['integer', 'string', 'boolean'])
_generate_partition_directories(fs, base_path, partition_spec, df)
# Old filters syntax:
# integer == 1 AND string != b AND boolean == True
dataset = pq.ParquetDataset(
base_path, filesystem=fs,
filters=[('integer', '=', 1), ('string', '!=', 'b'),
('boolean', '==', True)]
)
table = dataset.read()
result_df = (table.to_pandas().reset_index(drop=True))
assert 0 not in result_df['integer'].values
assert 'b' not in result_df['string'].values
assert False not in result_df['boolean'].values
# filters in disjunctive normal form:
# (integer == 1 AND string != b AND boolean == True) OR
# (integer == 0 AND boolean == False)
# TODO(ARROW-3388): boolean columns are reconstructed as string
filters = [
[
('integer', '=', 1),
('string', '!=', 'b'),
('boolean', '==', 'True')
],
[('integer', '=', 0), ('boolean', '==', 'False')]
]
dataset = pq.ParquetDataset(base_path, filesystem=fs, filters=filters)
table = dataset.read()
result_df = table.to_pandas().reset_index(drop=True)
# Check that all rows in the DF fulfill the filter
# Pandas 0.23.x has problems with indexing constant memoryviews in
# categoricals. Thus we need to make an explicit copy here with np.array.
df_filter_1 = (np.array(result_df['integer']) == 1) \
& (np.array(result_df['string']) != 'b') \
& (np.array(result_df['boolean']) == 'True')
df_filter_2 = (np.array(result_df['integer']) == 0) \
& (np.array(result_df['boolean']) == 'False')
assert df_filter_1.sum() > 0
assert df_filter_2.sum() > 0
assert result_df.shape[0] == (df_filter_1.sum() + df_filter_2.sum())
# Check for \0 in predicate values. Until they are correctly implemented
# in ARROW-3391, they would otherwise lead to weird results with the
# current code.
with pytest.raises(NotImplementedError):
filters = [[('string', '==', b'1\0a')]]
pq.ParquetDataset(base_path, filesystem=fs, filters=filters)
with pytest.raises(NotImplementedError):
filters = [[('string', '==', u'1\0a')]]
pq.ParquetDataset(base_path, filesystem=fs, filters=filters)
@pytest.mark.pandas
def test_cutoff_exclusive_integer(tempdir):
fs = LocalFileSystem.get_instance()
base_path = tempdir
integer_keys = [0, 1, 2, 3, 4]
partition_spec = [
['integers', integer_keys],
]
N = 5
df = pd.DataFrame({
'index': np.arange(N),
'integers': np.array(integer_keys, dtype='i4'),
}, columns=['index', 'integers'])
_generate_partition_directories(fs, base_path, partition_spec, df)
dataset = pq.ParquetDataset(
base_path, filesystem=fs,
filters=[
('integers', '<', 4),
('integers', '>', 1),
]
)
table = dataset.read()
result_df = (table.to_pandas()
.sort_values(by='index')
.reset_index(drop=True))
result_list = list(map(int, result_df['integers'].values))
assert result_list == [2, 3]
@pytest.mark.pandas
@pytest.mark.xfail(
raises=TypeError,
reason='Loss of type information in creation of categoricals.'
)
def test_cutoff_exclusive_datetime(tempdir):
fs = LocalFileSystem.get_instance()
base_path = tempdir
date_keys = [
datetime.date(2018, 4, 9),
datetime.date(2018, 4, 10),
datetime.date(2018, 4, 11),
datetime.date(2018, 4, 12),
datetime.date(2018, 4, 13)
]
partition_spec = [
['dates', date_keys]
]
N = 5
df = pd.DataFrame({
'index': np.arange(N),
'dates': np.array(date_keys, dtype='datetime64'),
}, columns=['index', 'dates'])
_generate_partition_directories(fs, base_path, partition_spec, df)
dataset = pq.ParquetDataset(
base_path, filesystem=fs,
filters=[
('dates', '<', "2018-04-12"),
('dates', '>', "2018-04-10")
]
)
table = dataset.read()
result_df = (table.to_pandas()
.sort_values(by='index')
.reset_index(drop=True))
expected = pd.Categorical(
np.array([datetime.date(2018, 4, 11)], dtype='datetime64'),
categories=np.array(date_keys, dtype='datetime64'))
assert result_df['dates'].values == expected
@pytest.mark.pandas
def test_inclusive_integer(tempdir):
fs = LocalFileSystem.get_instance()
base_path = tempdir
integer_keys = [0, 1, 2, 3, 4]
partition_spec = [
['integers', integer_keys],
]
N = 5
df = pd.DataFrame({
'index': np.arange(N),
'integers': np.array(integer_keys, dtype='i4'),
}, columns=['index', 'integers'])
_generate_partition_directories(fs, base_path, partition_spec, df)
dataset = pq.ParquetDataset(
base_path, filesystem=fs,
filters=[
('integers', '<=', 3),
('integers', '>=', 2),
]
)
table = dataset.read()
result_df = (table.to_pandas()
.sort_values(by='index')
.reset_index(drop=True))
result_list = [int(x) for x in result_df['integers'].values]
assert result_list == [2, 3]
@pytest.mark.pandas
def test_inclusive_set(tempdir):
fs = LocalFileSystem.get_instance()
base_path = tempdir
integer_keys = [0, 1]
string_keys = ['a', 'b', 'c']
boolean_keys = [True, False]
partition_spec = [
['integer', integer_keys],
['string', string_keys],
['boolean', boolean_keys]
]
df = pd.DataFrame({
'integer': np.array(integer_keys, dtype='i4').repeat(15),
'string': np.tile(np.tile(np.array(string_keys, dtype=object), 5), 2),
'boolean': np.tile(np.tile(np.array(boolean_keys, dtype='bool'), 5),
3),
}, columns=['integer', 'string', 'boolean'])
_generate_partition_directories(fs, base_path, partition_spec, df)
dataset = pq.ParquetDataset(
base_path, filesystem=fs,
filters=[('integer', 'in', {1}), ('string', 'in', {'a', 'b'}),
('boolean', 'in', {True})]
)
table = dataset.read()
result_df = (table.to_pandas().reset_index(drop=True))
assert 0 not in result_df['integer'].values
assert 'c' not in result_df['string'].values
assert False not in result_df['boolean'].values
@pytest.mark.pandas
def test_invalid_pred_op(tempdir):
fs = LocalFileSystem.get_instance()
base_path = tempdir
integer_keys = [0, 1, 2, 3, 4]
partition_spec = [
['integers', integer_keys],
]
N = 5
df = pd.DataFrame({
'index': np.arange(N),
'integers': np.array(integer_keys, dtype='i4'),
}, columns=['index', 'integers'])
_generate_partition_directories(fs, base_path, partition_spec, df)
with pytest.raises(ValueError):
pq.ParquetDataset(base_path,
filesystem=fs,
filters=[
('integers', '=<', 3),
])
with pytest.raises(ValueError):
pq.ParquetDataset(base_path,
filesystem=fs,
filters=[
('integers', 'in', set()),
])
with pytest.raises(ValueError):
pq.ParquetDataset(base_path,
filesystem=fs,
filters=[
('integers', '!=', {3}),
])
@pytest.mark.pandas
def test_filters_read_table(tempdir):
# test that filters keyword is passed through in read_table
fs = LocalFileSystem.get_instance()
base_path = tempdir
integer_keys = [0, 1, 2, 3, 4]
partition_spec = [
['integers', integer_keys],
]
N = 5
df = pd.DataFrame({
'index': np.arange(N),
'integers': np.array(integer_keys, dtype='i4'),
}, columns=['index', 'integers'])
_generate_partition_directories(fs, base_path, partition_spec, df)
table = pq.read_table(
base_path, filesystem=fs, filters=[('integers', '<', 3)])
assert table.num_rows == 3
table = pq.read_table(
base_path, filesystem=fs, filters=[[('integers', '<', 3)]])
assert table.num_rows == 3
table = pq.read_pandas(
base_path, filters=[('integers', '<', 3)])
assert table.num_rows == 3
@pytest.yield_fixture
def s3_example():
access_key = os.environ['PYARROW_TEST_S3_ACCESS_KEY']
secret_key = os.environ['PYARROW_TEST_S3_SECRET_KEY']
bucket_name = os.environ['PYARROW_TEST_S3_BUCKET']
import s3fs
fs = s3fs.S3FileSystem(key=access_key, secret=secret_key)
test_dir = guid()
bucket_uri = 's3://{0}/{1}'.format(bucket_name, test_dir)
fs.mkdir(bucket_uri)
yield fs, bucket_uri
fs.rm(bucket_uri, recursive=True)
@pytest.mark.pandas
@pytest.mark.s3
def test_read_partitioned_directory_s3fs(s3_example):
from pyarrow.filesystem import S3FSWrapper
fs, bucket_uri = s3_example
wrapper = S3FSWrapper(fs)
_partition_test_for_filesystem(wrapper, bucket_uri)
# Check that we can auto-wrap
dataset = pq.ParquetDataset(bucket_uri, filesystem=fs)
dataset.read()
def _partition_test_for_filesystem(fs, base_path):
foo_keys = [0, 1]
bar_keys = ['a', 'b', 'c']
partition_spec = [
['foo', foo_keys],
['bar', bar_keys]
]
N = 30
df = pd.DataFrame({
'index': np.arange(N),
'foo': np.array(foo_keys, dtype='i4').repeat(15),
'bar': np.tile(np.tile(np.array(bar_keys, dtype=object), 5), 2),
'values': np.random.randn(N)
}, columns=['index', 'foo', 'bar', 'values'])
_generate_partition_directories(fs, base_path, partition_spec, df)
dataset = pq.ParquetDataset(base_path, filesystem=fs)
table = dataset.read()
result_df = (table.to_pandas()
.sort_values(by='index')
.reset_index(drop=True))
expected_df = (df.sort_values(by='index')
.reset_index(drop=True)
.reindex(columns=result_df.columns))
expected_df['foo'] = pd.Categorical(df['foo'], categories=foo_keys)
expected_df['bar'] = pd.Categorical(df['bar'], categories=bar_keys)
assert (result_df.columns == ['index', 'values', 'foo', 'bar']).all()
tm.assert_frame_equal(result_df, expected_df)
def _generate_partition_directories(fs, base_dir, partition_spec, df):
# partition_spec : list of lists, e.g. [['foo', [0, 1, 2],
# ['bar', ['a', 'b', 'c']]
# part_table : a pyarrow.Table to write to each partition
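# For the example spec above, _visit_level builds a Hive-style tree like
#   base_dir/foo=0/bar=a/<guid>, base_dir/foo=0/bar=b/<guid>, ...
# with each leaf file holding the rows of `df` that match its partition
# keys, plus a _SUCCESS marker written at every level.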
DEPTH = len(partition_spec)
def _visit_level(base_dir, level, part_keys):
name, values = partition_spec[level]
for value in values:
this_part_keys = part_keys + [(name, value)]
level_dir = base_dir / '{0}={1}'.format(name, value)
fs.mkdir(level_dir)
if level == DEPTH - 1:
# Generate example data
file_path = level_dir / guid()
filtered_df = _filter_partition(df, this_part_keys)
part_table = pa.Table.from_pandas(filtered_df)
with fs.open(file_path, 'wb') as f:
_write_table(part_table, f)
assert fs.exists(file_path)
(level_dir / '_SUCCESS').touch()
else:
_visit_level(level_dir, level + 1, this_part_keys)
(level_dir / '_SUCCESS').touch()
_visit_level(base_dir, 0, [])
def _test_read_common_metadata_files(fs, base_path):
N = 100
df = pd.DataFrame({
'index': np.arange(N),
'values': np.random.randn(N)
}, columns=['index', 'values'])
base_path = str(base_path)
data_path = os.path.join(base_path, 'data.parquet')
table = pa.Table.from_pandas(df)
with fs.open(data_path, 'wb') as f:
_write_table(table, f)
metadata_path = os.path.join(base_path, '_common_metadata')
with fs.open(metadata_path, 'wb') as f:
pq.write_metadata(table.schema, f)
dataset = pq.ParquetDataset(base_path, filesystem=fs)
assert dataset.common_metadata_path == str(metadata_path)
with fs.open(data_path) as f:
common_schema = pq.read_metadata(f).schema
assert dataset.schema.equals(common_schema)
# handle list of one directory
dataset2 = pq.ParquetDataset([base_path], filesystem=fs)
assert dataset2.schema.equals(dataset.schema)
@pytest.mark.pandas
def test_read_common_metadata_files(tempdir):
fs = LocalFileSystem.get_instance()
_test_read_common_metadata_files(fs, tempdir)
@pytest.mark.pandas
def test_read_metadata_files(tempdir):
fs = LocalFileSystem.get_instance()
N = 100
df = pd.DataFrame({
'index': np.arange(N),
'values': np.random.randn(N)
}, columns=['index', 'values'])
data_path = tempdir / 'data.parquet'
table = pa.Table.from_pandas(df)
with fs.open(data_path, 'wb') as f:
_write_table(table, f)
metadata_path = tempdir / '_metadata'
with fs.open(metadata_path, 'wb') as f:
pq.write_metadata(table.schema, f)
dataset = pq.ParquetDataset(tempdir, filesystem=fs)
assert dataset.metadata_path == str(metadata_path)
with fs.open(data_path) as f:
metadata_schema = pq.read_metadata(f).schema
assert dataset.schema.equals(metadata_schema)
@pytest.mark.pandas
def test_read_schema(tempdir):
N = 100
df = pd.DataFrame({
'index': np.arange(N),
'values': np.random.randn(N)
}, columns=['index', 'values'])
data_path = tempdir / 'test.parquet'
table = pa.Table.from_pandas(df)
_write_table(table, data_path)
read1 = pq.read_schema(data_path)
read2 = pq.read_schema(data_path, memory_map=True)
assert table.schema.equals(read1, check_metadata=False)
assert table.schema.equals(read2, check_metadata=False)
assert table.schema.metadata[b'pandas'] == read1.metadata[b'pandas']
def _filter_partition(df, part_keys):
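    # Keep only the rows matching every (name, value) partition key, then drop
    # the key columns (they are encoded in the directory structure instead).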
predicate = np.ones(len(df), dtype=bool)
to_drop = []
for name, value in part_keys:
to_drop.append(name)
# to avoid pandas warning
if isinstance(value, (datetime.date, datetime.datetime)):
value = pd.Timestamp(value)
predicate &= df[name] == value
return df[predicate].drop(to_drop, axis=1)
@pytest.mark.pandas
def test_read_multiple_files(tempdir):
nfiles = 10
size = 5
dirpath = tempdir / guid()
dirpath.mkdir()
test_data = []
paths = []
for i in range(nfiles):
df = _test_dataframe(size, seed=i)
# Hack so that we don't have a dtype cast in v1 files
df['uint32'] = df['uint32'].astype(np.int64)
path = dirpath / '{}.parquet'.format(i)
table = pa.Table.from_pandas(df)
_write_table(table, path)
test_data.append(table)
paths.append(path)
# Write a _SUCCESS.crc file
(dirpath / '_SUCCESS.crc').touch()
def read_multiple_files(paths, columns=None, use_threads=True, **kwargs):
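        # Helper: open the given paths as one ParquetDataset and read them back.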
dataset = pq.ParquetDataset(paths, **kwargs)
return dataset.read(columns=columns, use_threads=use_threads)
result = read_multiple_files(paths)
expected = pa.concat_tables(test_data)
assert result.equals(expected)
# Read with provided metadata
metadata = pq.read_metadata(paths[0])
result2 = read_multiple_files(paths, metadata=metadata)
assert result2.equals(expected)
result3 = pa.localfs.read_parquet(dirpath, schema=metadata.schema)
assert result3.equals(expected)
# Read column subset
to_read = [0, 2, 6, result.num_columns - 1]
col_names = [result.field(i).name for i in to_read]
out = pa.localfs.read_parquet(dirpath, columns=col_names)
expected = pa.Table.from_arrays([result.column(i) for i in to_read],
names=col_names,
metadata=result.schema.metadata)
assert out.equals(expected)
# Read with multiple threads
pa.localfs.read_parquet(dirpath, use_threads=True)
# Test failure modes with non-uniform metadata
bad_apple = _test_dataframe(size, seed=i).iloc[:, :4]
bad_apple_path = tempdir / '{}.parquet'.format(guid())
t = pa.Table.from_pandas(bad_apple)
_write_table(t, bad_apple_path)
bad_meta = pq.read_metadata(bad_apple_path)
with pytest.raises(ValueError):
read_multiple_files(paths + [bad_apple_path])
with pytest.raises(ValueError):
read_multiple_files(paths, metadata=bad_meta)
mixed_paths = [bad_apple_path, paths[0]]
with pytest.raises(ValueError):
read_multiple_files(mixed_paths, schema=bad_meta.schema)
with pytest.raises(ValueError):
read_multiple_files(mixed_paths)
@pytest.mark.pandas
def test_dataset_read_pandas(tempdir):
nfiles = 5
size = 5
dirpath = tempdir / guid()
dirpath.mkdir()
test_data = []
frames = []
paths = []
for i in range(nfiles):
df = _test_dataframe(size, seed=i)
df.index = np.arange(i * size, (i + 1) * size)
df.index.name = 'index'
path = dirpath / '{}.parquet'.format(i)
table = pa.Table.from_pandas(df)
_write_table(table, path)
test_data.append(table)
frames.append(df)
paths.append(path)
dataset = pq.ParquetDataset(dirpath)
columns = ['uint8', 'strings']
result = dataset.read_pandas(columns=columns).to_pandas()
expected = pd.concat([x[columns] for x in frames])
tm.assert_frame_equal(result, expected)
@pytest.mark.pandas
def test_dataset_no_memory_map(tempdir):
# ARROW-2627: Check that we can use ParquetDataset without memory-mapping
dirpath = tempdir / guid()
dirpath.mkdir()
df = _test_dataframe(10, seed=0)
path = dirpath / '{}.parquet'.format(0)
table = pa.Table.from_pandas(df)
_write_table(table, path, version='2.0')
# TODO(wesm): Not sure how to easily check that memory mapping is _not_
# used. Mocking is not especially easy for pa.memory_map
dataset = pq.ParquetDataset(dirpath, memory_map=False)
assert dataset.pieces[0].read().equals(table)
@pytest.mark.pandas
@pytest.mark.parametrize('preserve_index', [True, False, None])
def test_dataset_read_pandas_common_metadata(tempdir, preserve_index):
# ARROW-1103
nfiles = 5
size = 5
dirpath = tempdir / guid()
dirpath.mkdir()
test_data = []
frames = []
paths = []
for i in range(nfiles):
df = _test_dataframe(size, seed=i)
df.index = pd.Index(np.arange(i * size, (i + 1) * size), name='index')
path = dirpath / '{}.parquet'.format(i)
table = pa.Table.from_pandas(df, preserve_index=preserve_index)
# Obliterate metadata
table = table.replace_schema_metadata(None)
assert table.schema.metadata is None
_write_table(table, path)
test_data.append(table)
frames.append(df)
paths.append(path)
# Write _metadata common file
table_for_metadata = pa.Table.from_pandas(
df, preserve_index=preserve_index
)
pq.write_metadata(table_for_metadata.schema, dirpath / '_metadata')
dataset = pq.ParquetDataset(dirpath)
columns = ['uint8', 'strings']
result = dataset.read_pandas(columns=columns).to_pandas()
expected = pd.concat([x[columns] for x in frames])
expected.index.name = (
df.index.name if preserve_index is not False else None)
tm.assert_frame_equal(result, expected)
def _make_example_multifile_dataset(base_path, nfiles=10, file_nrows=5):
test_data = []
paths = []
for i in range(nfiles):
df = _test_dataframe(file_nrows, seed=i)
path = base_path / '{}.parquet'.format(i)
test_data.append(_write_table(df, path))
paths.append(path)
return paths
@pytest.mark.pandas
def test_ignore_private_directories(tempdir):
dirpath = tempdir / guid()
dirpath.mkdir()
paths = _make_example_multifile_dataset(dirpath, nfiles=10,
file_nrows=5)
# private directory
(dirpath / '_impala_staging').mkdir()
dataset = pq.ParquetDataset(dirpath)
assert set(map(str, paths)) == set(x.path for x in dataset.pieces)
@pytest.mark.pandas
def test_ignore_hidden_files_dot(tempdir):
dirpath = tempdir / guid()
dirpath.mkdir()
paths = _make_example_multifile_dataset(dirpath, nfiles=10,
file_nrows=5)
with (dirpath / '.DS_Store').open('wb') as f:
f.write(b'gibberish')
with (dirpath / '.private').open('wb') as f:
f.write(b'gibberish')
dataset = pq.ParquetDataset(dirpath)
assert set(map(str, paths)) == set(x.path for x in dataset.pieces)
@pytest.mark.pandas
def test_ignore_hidden_files_underscore(tempdir):
dirpath = tempdir / guid()
dirpath.mkdir()
paths = _make_example_multifile_dataset(dirpath, nfiles=10,
file_nrows=5)
with (dirpath / '_committed_123').open('wb') as f:
f.write(b'abcd')
with (dirpath / '_started_321').open('wb') as f:
f.write(b'abcd')
dataset = pq.ParquetDataset(dirpath)
assert set(map(str, paths)) == set(x.path for x in dataset.pieces)
@pytest.mark.pandas
def test_multiindex_duplicate_values(tempdir):
num_rows = 3
numbers = list(range(num_rows))
index = pd.MultiIndex.from_arrays(
[['foo', 'foo', 'bar'], numbers],
names=['foobar', 'some_numbers'],
)
df = pd.DataFrame({'numbers': numbers}, index=index)
table = pa.Table.from_pandas(df)
filename = tempdir / 'dup_multi_index_levels.parquet'
_write_table(table, filename)
result_table = _read_table(filename)
assert table.equals(result_table)
result_df = result_table.to_pandas()
tm.assert_frame_equal(result_df, df)
@pytest.mark.pandas
def test_write_error_deletes_incomplete_file(tempdir):
# ARROW-1285
df = pd.DataFrame({'a': list('abc'),
'b': list(range(1, 4)),
'c': np.arange(3, 6).astype('u1'),
'd': np.arange(4.0, 7.0, dtype='float64'),
'e': [True, False, True],
'f': pd.Categorical(list('abc')),
'g': pd.date_range('20130101', periods=3),
'h': pd.date_range('20130101', periods=3,
tz='US/Eastern'),
'i': pd.date_range('20130101', periods=3, freq='ns')})
pdf = pa.Table.from_pandas(df)
filename = tempdir / 'tmp_file'
try:
_write_table(pdf, filename)
except pa.ArrowException:
pass
assert not filename.exists()
@pytest.mark.pandas
def test_noncoerced_nanoseconds_written_without_exception(tempdir):
# ARROW-1957: the Parquet version 2.0 writer preserves Arrow
# nanosecond timestamps by default
n = 9
df = pd.DataFrame({'x': range(n)},
index=pd.DatetimeIndex(start='2017-01-01',
freq='1n',
periods=n))
tb = pa.Table.from_pandas(df)
filename = tempdir / 'written.parquet'
try:
pq.write_table(tb, filename, version='2.0')
except Exception:
pass
assert filename.exists()
recovered_table = pq.read_table(filename)
assert tb.equals(recovered_table)
    # Loss of data through coercion (without explicit override) is still an error
filename = tempdir / 'not_written.parquet'
with pytest.raises(ValueError):
pq.write_table(tb, filename, coerce_timestamps='ms', version='2.0')
def test_read_non_existent_file(tempdir):
    path = 'non-existent-file.parquet'
    with pytest.raises(Exception, match=path):
        pq.read_table(path)
def test_read_table_doesnt_warn(datadir):
with pytest.warns(None) as record:
pq.read_table(datadir / 'v0.7.1.parquet')
assert len(record) == 0
def _test_write_to_dataset_with_partitions(base_path,
filesystem=None,
schema=None,
index_name=None):
# ARROW-1400
output_df = pd.DataFrame({'group1': list('aaabbbbccc'),
'group2': list('eefeffgeee'),
'num': list(range(10)),
'nan': [pd.np.nan] * 10,
'date': np.arange('2017-01-01', '2017-01-11',
dtype='datetime64[D]')})
cols = output_df.columns.tolist()
partition_by = ['group1', 'group2']
output_table = pa.Table.from_pandas(output_df, schema=schema, safe=False,
preserve_index=False)
pq.write_to_dataset(output_table, base_path, partition_by,
filesystem=filesystem)
metadata_path = os.path.join(base_path, '_common_metadata')
if filesystem is not None:
with filesystem.open(metadata_path, 'wb') as f:
pq.write_metadata(output_table.schema, f)
else:
pq.write_metadata(output_table.schema, metadata_path)
# ARROW-2891: Ensure the output_schema is preserved when writing a
# partitioned dataset
dataset = pq.ParquetDataset(base_path,
filesystem=filesystem,
validate_schema=True)
# ARROW-2209: Ensure the dataset schema also includes the partition columns
dataset_cols = set(dataset.schema.to_arrow_schema().names)
assert dataset_cols == set(output_table.schema.names)
input_table = dataset.read()
input_df = input_table.to_pandas()
# Read data back in and compare with original DataFrame
# Partitioned columns added to the end of the DataFrame when read
input_df_cols = input_df.columns.tolist()
assert partition_by == input_df_cols[-1 * len(partition_by):]
# Partitioned columns become 'categorical' dtypes
input_df = input_df[cols]
for col in partition_by:
output_df[col] = output_df[col].astype('category')
assert output_df.equals(input_df)
def _test_write_to_dataset_no_partitions(base_path, filesystem=None):
# ARROW-1400
output_df = pd.DataFrame({'group1': list('aaabbbbccc'),
'group2': list('eefeffgeee'),
'num': list(range(10)),
'date': np.arange('2017-01-01', '2017-01-11',
dtype='datetime64[D]')})
cols = output_df.columns.tolist()
output_table = pa.Table.from_pandas(output_df)
if filesystem is None:
filesystem = LocalFileSystem.get_instance()
# Without partitions, append files to root_path
n = 5
for i in range(n):
pq.write_to_dataset(output_table, base_path,
filesystem=filesystem)
output_files = [file for file in filesystem.ls(base_path)
if file.endswith(".parquet")]
assert len(output_files) == n
    # Deduplicated incoming DataFrame should match
    # the original outgoing DataFrame
input_table = pq.ParquetDataset(base_path,
filesystem=filesystem).read()
input_df = input_table.to_pandas()
input_df = input_df.drop_duplicates()
input_df = input_df[cols]
assert output_df.equals(input_df)
@pytest.mark.pandas
def test_write_to_dataset_with_partitions(tempdir):
_test_write_to_dataset_with_partitions(str(tempdir))
@pytest.mark.pandas
def test_write_to_dataset_with_partitions_and_schema(tempdir):
schema = pa.schema([pa.field('group1', type=pa.string()),
pa.field('group2', type=pa.string()),
pa.field('num', type=pa.int64()),
pa.field('nan', type=pa.int32()),
pa.field('date', type=pa.timestamp(unit='us'))])
_test_write_to_dataset_with_partitions(str(tempdir), schema=schema)
@pytest.mark.pandas
def test_write_to_dataset_with_partitions_and_index_name(tempdir):
_test_write_to_dataset_with_partitions(str(tempdir),
index_name='index_name')
@pytest.mark.pandas
def test_write_to_dataset_no_partitions(tempdir):
_test_write_to_dataset_no_partitions(str(tempdir))
@pytest.mark.pandas
def test_write_to_dataset_with_partitions_and_custom_filenames(tempdir):
output_df = pd.DataFrame({'group1': list('aaabbbbccc'),
'group2': list('eefeffgeee'),
'num': list(range(10)),
'nan': [pd.np.nan] * 10,
'date': np.arange('2017-01-01', '2017-01-11',
dtype='datetime64[D]')})
partition_by = ['group1', 'group2']
output_table = pa.Table.from_pandas(output_df)
path = str(tempdir)
def partition_filename_callback(keys):
return "{0}-{1}.parquet".format(*keys)
pq.write_to_dataset(output_table, path,
partition_by, partition_filename_callback)
dataset = pq.ParquetDataset(path)
# ARROW-3538: Ensure partition filenames match the given pattern
# defined in the local function partition_filename_callback
expected_basenames = [
'a-e.parquet', 'a-f.parquet',
'b-e.parquet', 'b-f.parquet',
'b-g.parquet', 'c-e.parquet'
]
output_basenames = [os.path.basename(p.path) for p in dataset.pieces]
assert sorted(expected_basenames) == sorted(output_basenames)
@pytest.mark.large_memory
def test_large_table_int32_overflow():
size = np.iinfo('int32').max + 1
arr = np.ones(size, dtype='uint8')
parr = pa.array(arr, type=pa.uint8())
table = pa.Table.from_arrays([parr], names=['one'])
f = io.BytesIO()
_write_table(table, f)
def _simple_table_roundtrip(table):
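    # Round-trip a table through an in-memory Parquet buffer.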
stream = pa.BufferOutputStream()
_write_table(table, stream)
buf = stream.getvalue()
return _read_table(buf)
@pytest.mark.pandas
@pytest.mark.large_memory
def test_binary_array_overflow_to_chunked():
# ARROW-3762
# 2^31 + 1 bytes
values = [b'x'] + [
b'x' * (1 << 20)
] * 2 * (1 << 10)
df = pd.DataFrame({'byte_col': values})
tbl = pa.Table.from_pandas(df, preserve_index=False)
read_tbl = _simple_table_roundtrip(tbl)
col0_data = read_tbl[0]
assert isinstance(col0_data, pa.ChunkedArray)
# Split up into 2GB chunks
assert col0_data.num_chunks == 2
assert tbl.equals(read_tbl)
@pytest.mark.pandas
@pytest.mark.large_memory
def test_list_of_binary_large_cell():
# ARROW-4688
data = []
# TODO(wesm): handle chunked children
# 2^31 - 1 bytes in a single cell
# data.append([b'x' * (1 << 20)] * 2047 + [b'x' * ((1 << 20) - 1)])
    # A little under 2GB of cells, each containing approximately 10MB
data.extend([[b'x' * 1000000] * 10] * 214)
arr = pa.array(data)
table = pa.Table.from_arrays([arr], ['chunky_cells'])
read_table = _simple_table_roundtrip(table)
assert table.equals(read_table)
@pytest.mark.pandas
def test_index_column_name_duplicate(tempdir):
data = {
'close': {
pd.Timestamp('2017-06-30 01:31:00'): 154.99958999999998,
pd.Timestamp('2017-06-30 01:32:00'): 154.99958999999998,
},
'time': {
pd.Timestamp('2017-06-30 01:31:00'): pd.Timestamp(
'2017-06-30 01:31:00'
),
pd.Timestamp('2017-06-30 01:32:00'): pd.Timestamp(
'2017-06-30 01:32:00'
),
}
}
path = str(tempdir / 'data.parquet')
dfx = pd.DataFrame(data).set_index('time', drop=False)
tdfx = pa.Table.from_pandas(dfx)
_write_table(tdfx, path)
arrow_table = _read_table(path)
result_df = arrow_table.to_pandas()
tm.assert_frame_equal(result_df, dfx)
@pytest.mark.pandas
def test_parquet_nested_convenience(tempdir):
# ARROW-1684
df = pd.DataFrame({
'a': [[1, 2, 3], None, [4, 5], []],
'b': [[1.], None, None, [6., 7.]],
})
path = str(tempdir / 'nested_convenience.parquet')
table = pa.Table.from_pandas(df, preserve_index=False)
_write_table(table, path)
read = pq.read_table(path, columns=['a'])
tm.assert_frame_equal(read.to_pandas(), df[['a']])
read = pq.read_table(path, columns=['a', 'b'])
tm.assert_frame_equal(read.to_pandas(), df)
@pytest.mark.pandas
def test_backwards_compatible_index_naming(datadir):
expected_string = b"""\
carat cut color clarity depth table price x y z
0.23 Ideal E SI2 61.5 55.0 326 3.95 3.98 2.43
0.21 Premium E SI1 59.8 61.0 326 3.89 3.84 2.31
0.23 Good E VS1 56.9 65.0 327 4.05 4.07 2.31
0.29 Premium I VS2 62.4 58.0 334 4.20 4.23 2.63
0.31 Good J SI2 63.3 58.0 335 4.34 4.35 2.75
0.24 Very Good J VVS2 62.8 57.0 336 3.94 3.96 2.48
0.24 Very Good I VVS1 62.3 57.0 336 3.95 3.98 2.47
0.26 Very Good H SI1 61.9 55.0 337 4.07 4.11 2.53
0.22 Fair E VS2 65.1 61.0 337 3.87 3.78 2.49
0.23 Very Good H VS1 59.4 61.0 338 4.00 4.05 2.39"""
expected = pd.read_csv(io.BytesIO(expected_string), sep=r'\s{2,}',
index_col=None, header=0, engine='python')
table = _read_table(datadir / 'v0.7.1.parquet')
result = table.to_pandas()
tm.assert_frame_equal(result, expected)
@pytest.mark.pandas
def test_backwards_compatible_index_multi_level_named(datadir):
expected_string = b"""\
carat cut color clarity depth table price x y z
0.23 Ideal E SI2 61.5 55.0 326 3.95 3.98 2.43
0.21 Premium E SI1 59.8 61.0 326 3.89 3.84 2.31
0.23 Good E VS1 56.9 65.0 327 4.05 4.07 2.31
0.29 Premium I VS2 62.4 58.0 334 4.20 4.23 2.63
0.31 Good J SI2 63.3 58.0 335 4.34 4.35 2.75
0.24 Very Good J VVS2 62.8 57.0 336 3.94 3.96 2.48
0.24 Very Good I VVS1 62.3 57.0 336 3.95 3.98 2.47
0.26 Very Good H SI1 61.9 55.0 337 4.07 4.11 2.53
0.22 Fair E VS2 65.1 61.0 337 3.87 3.78 2.49
0.23 Very Good H VS1 59.4 61.0 338 4.00 4.05 2.39"""
expected = pd.read_csv(
io.BytesIO(expected_string), sep=r'\s{2,}',
index_col=['cut', 'color', 'clarity'],
header=0, engine='python'
).sort_index()
table = _read_table(datadir / 'v0.7.1.all-named-index.parquet')
result = table.to_pandas()
tm.assert_frame_equal(result, expected)
@pytest.mark.pandas
def test_backwards_compatible_index_multi_level_some_named(datadir):
expected_string = b"""\
carat cut color clarity depth table price x y z
0.23 Ideal E SI2 61.5 55.0 326 3.95 3.98 2.43
0.21 Premium E SI1 59.8 61.0 326 3.89 3.84 2.31
0.23 Good E VS1 56.9 65.0 327 4.05 4.07 2.31
0.29 Premium I VS2 62.4 58.0 334 4.20 4.23 2.63
0.31 Good J SI2 63.3 58.0 335 4.34 4.35 2.75
0.24 Very Good J VVS2 62.8 57.0 336 3.94 3.96 2.48
0.24 Very Good I VVS1 62.3 57.0 336 3.95 3.98 2.47
0.26 Very Good H SI1 61.9 55.0 337 4.07 4.11 2.53
0.22 Fair E VS2 65.1 61.0 337 3.87 3.78 2.49
0.23 Very Good H VS1 59.4 61.0 338 4.00 4.05 2.39"""
expected = pd.read_csv(
io.BytesIO(expected_string),
sep=r'\s{2,}', index_col=['cut', 'color', 'clarity'],
header=0, engine='python'
).sort_index()
expected.index = expected.index.set_names(['cut', None, 'clarity'])
table = _read_table(datadir / 'v0.7.1.some-named-index.parquet')
result = table.to_pandas()
tm.assert_frame_equal(result, expected)
@pytest.mark.pandas
def test_backwards_compatible_column_metadata_handling(datadir):
expected = pd.DataFrame(
{'a': [1, 2, 3], 'b': [.1, .2, .3],
'c': pd.date_range("2017-01-01", periods=3, tz='Europe/Brussels')})
expected.index = pd.MultiIndex.from_arrays(
[['a', 'b', 'c'],
pd.date_range("2017-01-01", periods=3, tz='Europe/Brussels')],
names=['index', None])
path = datadir / 'v0.7.1.column-metadata-handling.parquet'
table = _read_table(path)
result = table.to_pandas()
tm.assert_frame_equal(result, expected)
table = _read_table(path, columns=['a'])
result = table.to_pandas()
tm.assert_frame_equal(result, expected[['a']].reset_index(drop=True))
def _make_dataset_for_pickling(tempdir, N=100):
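    # Build a single-file dataset with three row groups plus a _metadata
    # sidecar, for the pickling tests below.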
path = tempdir / 'data.parquet'
fs = LocalFileSystem.get_instance()
df = pd.DataFrame({
'index': np.arange(N),
'values': np.random.randn(N)
}, columns=['index', 'values'])
table = pa.Table.from_pandas(df)
num_groups = 3
with pq.ParquetWriter(path, table.schema) as writer:
for i in range(num_groups):
writer.write_table(table)
reader = pq.ParquetFile(path)
assert reader.metadata.num_row_groups == num_groups
metadata_path = tempdir / '_metadata'
with fs.open(metadata_path, 'wb') as f:
pq.write_metadata(table.schema, f)
dataset = pq.ParquetDataset(tempdir, filesystem=fs)
assert dataset.metadata_path == str(metadata_path)
return dataset
@pytest.mark.pandas
@pytest.mark.parametrize('pickler', [
pytest.param(pickle, id='builtin'),
pytest.param(pytest.importorskip('cloudpickle'), id='cloudpickle')
])
def test_pickle_dataset(tempdir, datadir, pickler):
def is_pickleable(obj):
return obj == pickler.loads(pickler.dumps(obj))
dataset = _make_dataset_for_pickling(tempdir)
assert is_pickleable(dataset)
assert is_pickleable(dataset.metadata)
assert is_pickleable(dataset.metadata.schema)
assert len(dataset.metadata.schema)
for column in dataset.metadata.schema:
assert is_pickleable(column)
for piece in dataset.pieces:
assert is_pickleable(piece)
metadata = piece.get_metadata()
assert metadata.num_row_groups
for i in range(metadata.num_row_groups):
assert is_pickleable(metadata.row_group(i))
@pytest.mark.pandas
def test_decimal_roundtrip(tempdir):
num_values = 10
columns = {}
for precision in range(1, 39):
for scale in range(0, precision + 1):
with util.random_seed(0):
random_decimal_values = [
util.randdecimal(precision, scale)
for _ in range(num_values)
]
column_name = ('dec_precision_{:d}_scale_{:d}'
.format(precision, scale))
columns[column_name] = random_decimal_values
expected = pd.DataFrame(columns)
filename = tempdir / 'decimals.parquet'
string_filename = str(filename)
table = pa.Table.from_pandas(expected)
_write_table(table, string_filename)
result_table = _read_table(string_filename)
result = result_table.to_pandas()
tm.assert_frame_equal(result, expected)
@pytest.mark.pandas
@pytest.mark.xfail(
raises=pa.ArrowException, reason='Parquet does not support negative scale'
)
def test_decimal_roundtrip_negative_scale(tempdir):
expected = pd.DataFrame({'decimal_num': [decimal.Decimal('1.23E4')]})
filename = tempdir / 'decimals.parquet'
string_filename = str(filename)
t = pa.Table.from_pandas(expected)
_write_table(t, string_filename)
result_table = _read_table(string_filename)
result = result_table.to_pandas()
tm.assert_frame_equal(result, expected)
@pytest.mark.pandas
def test_parquet_writer_context_obj(tempdir):
df = _test_dataframe(100)
df['unique_id'] = 0
arrow_table = pa.Table.from_pandas(df, preserve_index=False)
out = pa.BufferOutputStream()
with pq.ParquetWriter(out, arrow_table.schema, version='2.0') as writer:
frames = []
for i in range(10):
df['unique_id'] = i
arrow_table = pa.Table.from_pandas(df, preserve_index=False)
writer.write_table(arrow_table)
frames.append(df.copy())
buf = out.getvalue()
result = _read_table(pa.BufferReader(buf))
expected = pd.concat(frames, ignore_index=True)
tm.assert_frame_equal(result.to_pandas(), expected)
@pytest.mark.pandas
def test_parquet_writer_context_obj_with_exception(tempdir):
df = _test_dataframe(100)
df['unique_id'] = 0
arrow_table = pa.Table.from_pandas(df, preserve_index=False)
out = pa.BufferOutputStream()
error_text = 'Artificial Error'
try:
with pq.ParquetWriter(out,
arrow_table.schema,
version='2.0') as writer:
frames = []
for i in range(10):
df['unique_id'] = i
arrow_table = pa.Table.from_pandas(df, preserve_index=False)
writer.write_table(arrow_table)
frames.append(df.copy())
if i == 5:
raise ValueError(error_text)
except Exception as e:
assert str(e) == error_text
buf = out.getvalue()
result = _read_table(pa.BufferReader(buf))
expected = pd.concat(frames, ignore_index=True)
tm.assert_frame_equal(result.to_pandas(), expected)
@pytest.mark.pandas
def test_zlib_compression_bug():
# ARROW-3514: "zlib deflate failed, output buffer too small"
table = pa.Table.from_arrays([pa.array(['abc', 'def'])], ['some_col'])
f = io.BytesIO()
pq.write_table(table, f, compression='gzip')
f.seek(0)
roundtrip = pq.read_table(f)
tm.assert_frame_equal(roundtrip.to_pandas(), table.to_pandas())
@pytest.mark.pandas
def test_merging_parquet_tables_with_different_pandas_metadata(tempdir):
# ARROW-3728: Merging Parquet Files - Pandas Meta in Schema Mismatch
schema = pa.schema([
pa.field('int', pa.int16()),
pa.field('float', pa.float32()),
pa.field('string', pa.string())
])
df1 = pd.DataFrame({
'int': np.arange(3, dtype=np.uint8),
'float': np.arange(3, dtype=np.float32),
'string': ['ABBA', 'EDDA', 'ACDC']
})
df2 = pd.DataFrame({
'int': [4, 5],
'float': [1.1, None],
'string': [None, None]
})
table1 = pa.Table.from_pandas(df1, schema=schema, preserve_index=False)
table2 = pa.Table.from_pandas(df2, schema=schema, preserve_index=False)
assert not table1.schema.equals(table2.schema)
assert table1.schema.equals(table2.schema, check_metadata=False)
writer = pq.ParquetWriter(tempdir / 'merged.parquet', schema=schema)
writer.write_table(table1)
writer.write_table(table2)
def test_empty_row_groups(tempdir):
# ARROW-3020
table = pa.Table.from_arrays([pa.array([], type='int32')], ['f0'])
path = tempdir / 'empty_row_groups.parquet'
num_groups = 3
with pq.ParquetWriter(path, table.schema) as writer:
for i in range(num_groups):
writer.write_table(table)
reader = pq.ParquetFile(path)
assert reader.metadata.num_row_groups == num_groups
for i in range(num_groups):
assert reader.read_row_group(i).equals(table)
@pytest.mark.pandas
def test_parquet_writer_with_caller_provided_filesystem():
out = pa.BufferOutputStream()
class CustomFS(FileSystem):
def __init__(self):
self.path = None
self.mode = None
def open(self, path, mode='rb'):
self.path = path
self.mode = mode
return out
fs = CustomFS()
fname = 'expected_fname.parquet'
df = _test_dataframe(100)
table = pa.Table.from_pandas(df, preserve_index=False)
with pq.ParquetWriter(fname, table.schema, filesystem=fs, version='2.0') \
as writer:
writer.write_table(table)
assert fs.path == fname
assert fs.mode == 'wb'
assert out.closed
buf = out.getvalue()
table_read = _read_table(pa.BufferReader(buf))
df_read = table_read.to_pandas()
tm.assert_frame_equal(df_read, df)
# Should raise ValueError when filesystem is passed with file-like object
with pytest.raises(ValueError) as err_info:
pq.ParquetWriter(pa.BufferOutputStream(), table.schema, filesystem=fs)
expected_msg = ("filesystem passed but where is file-like, so"
" there is nothing to open with filesystem.")
    assert str(err_info.value) == expected_msg
def test_writing_empty_lists():
# ARROW-2591: [Python] Segmentation fault issue in pq.write_table
arr1 = pa.array([[], []], pa.list_(pa.int32()))
table = pa.Table.from_arrays([arr1], ['list(int32)'])
_check_roundtrip(table)
def test_write_nested_zero_length_array_chunk_failure():
# Bug report in ARROW-3792
cols = OrderedDict(
int32=pa.int32(),
list_string=pa.list_(pa.string())
)
data = [[], [OrderedDict(int32=1, list_string=('G',)), ]]
# This produces a table with a column like
# <Column name='list_string' type=ListType(list<item: string>)>
# [
# [],
# [
# [
# "G"
# ]
# ]
# ]
#
# Each column is a ChunkedArray with 2 elements
my_arrays = [pa.array(batch, type=pa.struct(cols)).flatten()
for batch in data]
my_batches = [pa.RecordBatch.from_arrays(batch, pa.schema(cols))
for batch in my_arrays]
tbl = pa.Table.from_batches(my_batches, pa.schema(cols))
_check_roundtrip(tbl)
@pytest.mark.pandas
def test_partitioned_dataset(tempdir):
# ARROW-3208: Segmentation fault when reading a Parquet partitioned dataset
# to a Parquet file
path = tempdir / "ARROW-3208"
df = pd.DataFrame({
'one': [-1, 10, 2.5, 100, 1000, 1, 29.2],
'two': [-1, 10, 2, 100, 1000, 1, 11],
'three': [0, 0, 0, 0, 0, 0, 0]
})
table = pa.Table.from_pandas(df)
pq.write_to_dataset(table, root_path=str(path),
partition_cols=['one', 'two'])
table = pq.ParquetDataset(path).read()
pq.write_table(table, path / "output.parquet")
def test_read_column_invalid_index():
table = pa.table([pa.array([4, 5]), pa.array(["foo", "bar"])],
names=['ints', 'strs'])
bio = pa.BufferOutputStream()
pq.write_table(table, bio)
f = pq.ParquetFile(bio.getvalue())
assert f.reader.read_column(0).to_pylist() == [4, 5]
assert f.reader.read_column(1).to_pylist() == ["foo", "bar"]
for index in (-1, 2):
with pytest.raises((ValueError, IndexError)):
f.reader.read_column(index)
def test_direct_read_dictionary():
# ARROW-3325
repeats = 10
nunique = 5
data = [
[tm.rands(10) for i in range(nunique)] * repeats,
]
table = pa.table(data, names=['f0'])
bio = pa.BufferOutputStream()
pq.write_table(table, bio)
contents = bio.getvalue()
result = pq.read_table(pa.BufferReader(contents),
read_dictionary=['f0'])
# Compute dictionary-encoded subfield
expected = pa.table([table[0].dictionary_encode()], names=['f0'])
assert result.equals(expected)
def test_dataset_read_dictionary(tempdir):
path = tempdir / "ARROW-3325-dataset"
t1 = pa.table([[tm.rands(10) for i in range(5)] * 10], names=['f0'])
t2 = pa.table([[tm.rands(10) for i in range(5)] * 10], names=['f0'])
pq.write_to_dataset(t1, root_path=str(path))
pq.write_to_dataset(t2, root_path=str(path))
result = pq.ParquetDataset(path, read_dictionary=['f0']).read()
# The order of the chunks is non-deterministic
ex_chunks = [t1[0].chunk(0).dictionary_encode(),
t2[0].chunk(0).dictionary_encode()]
assert result[0].num_chunks == 2
c0, c1 = result[0].chunk(0), result[0].chunk(1)
if c0.equals(ex_chunks[0]):
assert c1.equals(ex_chunks[1])
else:
assert c0.equals(ex_chunks[1])
assert c1.equals(ex_chunks[0])
def test_direct_read_dictionary_subfield():
repeats = 10
nunique = 5
data = [
[[tm.rands(10)] for i in range(nunique)] * repeats,
]
table = pa.table(data, names=['f0'])
bio = pa.BufferOutputStream()
pq.write_table(table, bio)
contents = bio.getvalue()
result = pq.read_table(pa.BufferReader(contents),
read_dictionary=['f0.list.item'])
arr = pa.array(data[0])
values_as_dict = arr.values.dictionary_encode()
inner_indices = values_as_dict.indices.cast('int32')
new_values = pa.DictionaryArray.from_arrays(inner_indices,
values_as_dict.dictionary)
offsets = pa.array(range(51), type='int32')
expected_arr = pa.ListArray.from_arrays(offsets, new_values)
expected = pa.table([expected_arr], names=['f0'])
assert result.equals(expected)
assert result[0].num_chunks == 1
@pytest.mark.pandas
def test_dataset_metadata(tempdir):
path = tempdir / "ARROW-1983-dataset"
# create and write a test dataset
df = pd.DataFrame({
'one': [1, 2, 3],
'two': [-1, -2, -3],
'three': [[1, 2], [2, 3], [3, 4]],
})
table = pa.Table.from_pandas(df)
metadata_list = []
pq.write_to_dataset(table, root_path=str(path),
partition_cols=['one', 'two'],
metadata_collector=metadata_list)
# open the dataset and collect metadata from pieces:
dataset = pq.ParquetDataset(path)
metadata_list2 = [p.get_metadata() for p in dataset.pieces]
# compare metadata list content:
assert len(metadata_list) == len(metadata_list2)
for md, md2 in zip(metadata_list, metadata_list2):
d = md.to_dict()
d2 = md2.to_dict()
# serialized_size is initialized in the reader:
assert d.pop('serialized_size') == 0
assert d2.pop('serialized_size') > 0
assert d == d2
def test_parquet_file_too_small(tempdir):
path = str(tempdir / "test.parquet")
with pytest.raises(pa.ArrowIOError,
match='size is 0 bytes'):
with open(path, 'wb') as f:
pass
pq.read_table(path)
with pytest.raises(pa.ArrowIOError,
match='size is 4 bytes'):
with open(path, 'wb') as f:
f.write(b'ffff')
pq.read_table(path)
@pytest.mark.pandas
def test_categorical_index_survives_roundtrip():
# ARROW-3652, addressed by ARROW-3246
df = pd.DataFrame([['a', 'b'], ['c', 'd']], columns=['c1', 'c2'])
df['c1'] = df['c1'].astype('category')
df = df.set_index(['c1'])
table = pa.Table.from_pandas(df)
bos = pa.BufferOutputStream()
pq.write_table(table, bos)
ref_df = pq.read_pandas(bos.getvalue()).to_pandas()
assert isinstance(ref_df.index, pd.CategoricalIndex)
assert ref_df.index.equals(df.index)
def test_dictionary_array_automatically_read():
# ARROW-3246
# Make a large dictionary, a little over 4MB of data
dict_length = 4000
dict_values = pa.array([('x' * 1000 + '_{}'.format(i))
for i in range(dict_length)])
num_chunks = 10
chunk_size = 100
chunks = []
for i in range(num_chunks):
indices = np.random.randint(0, dict_length,
size=chunk_size).astype(np.int32)
chunks.append(pa.DictionaryArray.from_arrays(pa.array(indices),
dict_values))
table = pa.table([pa.chunked_array(chunks)], names=['f0'])
bio = pa.BufferOutputStream()
pq.write_table(table, bio)
contents = bio.getvalue()
result = pq.read_table(pa.BufferReader(contents))
assert result.equals(table)
# The only key in the metadata was the Arrow schema key
assert result.schema.metadata is None
@pytest.mark.pandas
def test_pandas_categorical_na_type_row_groups():
# ARROW-5085
df = pd.DataFrame({"col": [None] * 100, "int": [1.0] * 100})
df_category = df.astype({"col": "category", "int": "category"})
table = pa.Table.from_pandas(df)
table_cat = pa.Table.from_pandas(df_category)
buf = pa.BufferOutputStream()
# it works
pq.write_table(table_cat, buf, version="2.0", chunk_size=10)
result = pq.read_table(buf.getvalue())
# Result is non-categorical
assert result[0].equals(table[0])
assert result[1].equals(table[1])
@pytest.mark.pandas
def test_pandas_categorical_roundtrip():
# ARROW-5480, this was enabled by ARROW-3246
# Have one of the categories unobserved and include a null (-1)
codes = np.array([2, 0, 0, 2, 0, -1, 2], dtype='int32')
categories = ['foo', 'bar', 'baz']
df = pd.DataFrame({'x': pd.Categorical.from_codes(
codes, categories=categories)})
buf = pa.BufferOutputStream()
pq.write_table(pa.table(df), buf)
result = pq.read_table(buf.getvalue()).to_pandas()
assert result.x.dtype == 'category'
assert (result.x.cat.categories == categories).all()
tm.assert_frame_equal(result, df)
@pytest.mark.pandas
def test_multi_dataset_metadata(tempdir):
filenames = ["ARROW-1983-dataset.0", "ARROW-1983-dataset.1"]
metapath = str(tempdir / "_metadata")
# create a test dataset
df = pd.DataFrame({
'one': [1, 2, 3],
'two': [-1, -2, -3],
'three': [[1, 2], [2, 3], [3, 4]],
})
table = pa.Table.from_pandas(df)
# write dataset twice and collect/merge metadata
_meta = None
for filename in filenames:
meta = []
pq.write_table(table, str(tempdir / filename),
metadata_collector=meta)
meta[0].set_file_path(filename)
if _meta is None:
_meta = meta[0]
else:
_meta.append_row_groups(meta[0])
# Write merged metadata-only file
with open(metapath, "wb") as f:
_meta.write_metadata_file(f)
# Read back the metadata
meta = pq.read_metadata(metapath)
md = meta.to_dict()
_md = _meta.to_dict()
for key in _md:
if key != 'serialized_size':
assert _md[key] == md[key]
assert _md['num_columns'] == 3
assert _md['num_rows'] == 6
assert _md['num_row_groups'] == 2
assert _md['serialized_size'] == 0
assert md['serialized_size'] > 0
@pytest.mark.pandas
def test_filter_before_validate_schema(tempdir):
    # ARROW-4076: apply the filter before schema validation
    # to avoid checking unneeded schemas.
    # Create a partitioned dataset with mismatching schemas, which would
    # otherwise raise if all schemas were validated first.
dir1 = tempdir / 'A=0'
dir1.mkdir()
table1 = pa.Table.from_pandas(pd.DataFrame({'B': [1, 2, 3]}))
pq.write_table(table1, dir1 / 'data.parquet')
dir2 = tempdir / 'A=1'
dir2.mkdir()
table2 = pa.Table.from_pandas(pd.DataFrame({'B': ['a', 'b', 'c']}))
pq.write_table(table2, dir2 / 'data.parquet')
# read single file using filter
table = pq.read_table(tempdir, filters=[[('A', '==', 0)]])
assert table.column('B').equals(pa.chunked_array([[1, 2, 3]]))
@pytest.fixture
def datadir(datadir):
    return datadir / 'parquet'

opening-geometry.ts

import { FlipStatus } from '../entity/opening';
import { Geometry } from 'three';
import { BaseEntityGeometry } from './entity-geometry';
import { Opening, FixedWindowFlag, FloorWindowFlag, BayWindowFlag,
         OneDoorFlag, TwoDoorsFlag, SlidingDoorsFlag, DoorwayFlag } from '../entity/entity';
import { BaseGeometry, Polygon, Rect, Line, Point, Arc } from '../geometry/geometry';
import { BasePath, RectPath, LinePath, PolygonPath } from '../path/path';
export class OpeningGeometry extends BaseEntityGeometry {
static readonly RECT = 'rect';
static readonly LINE = 'line';
constructor(opening: Opening) {
super(opening);
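        // Geometry construction is currently commented out below; the concrete
        // flag subclasses in this file build their own shapes instead.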
/*const rect = new Rect(new Point(0.0, 0.0), opening.width(), opening.height());
this.geos_.set(OpeningGeometry.RECT, rect);
const tmpRect = new Rect(new Point(0.0, 0.0), opening.width(), opening.height());
const line = new Line(new Point(-length / 2.0, 0), new Point(length / 2.0, 0));
this.geos_.set(OpeningGeometry.LINE, line);*/
}
}
/**
 * Fixed window
 */
export class FixedWindowFlagGeometry extends BaseEntityGeometry {
static readonly RECT1 = 'rect1';
static readonly RECT2 = 'rect2';
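    // NOTE: geometries are stored via bracket indexing, which effectively sets
    // plain properties on the Map instance rather than Map entries (no
    // set/get); the same access pattern is used throughout this file.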
protected tmpGeos_: Map<string, BaseGeometry> = null;
constructor(fixedWindowFlag: FixedWindowFlag) {
super(fixedWindowFlag);
this.tmpGeos_ = new Map<string, BaseGeometry>();
const width = fixedWindowFlag.width();
const length = fixedWindowFlag.length();
        // Scale factors of width and length relative to the original model (120 x 800)
const widthMultiple = width/120;
const lengthMultiple = length/800;
this.tmpGeos_[FixedWindowFlagGeometry.RECT1] = new Rect(new Point(0,-30*widthMultiple), width/2, length);
this.tmpGeos_[FixedWindowFlagGeometry.RECT2] = new Rect(new Point(0,30*widthMultiple), width/2, length);
this.tmpGeos_['tmpRect'] = new Rect(new Point(0,0), width,length);
this.geos_[FixedWindowFlagGeometry.RECT1] = new RectPath(this.tmpGeos_[FixedWindowFlagGeometry.RECT1]);
this.geos_[FixedWindowFlagGeometry.RECT2] = new RectPath(this.tmpGeos_[FixedWindowFlagGeometry.RECT2]);
this.geos_['tmpRect'] = new RectPath(this.tmpGeos_['tmpRect']);
(<BasePath>this.geos_[FixedWindowFlagGeometry.RECT1]).style.strokeColor = '#333843';
(<BasePath>this.geos_[FixedWindowFlagGeometry.RECT2]).style.strokeColor = '#333843';
(<BasePath>this.geos_['tmpRect']).style.fillColor = '#fff';
(<BasePath>this.geos_['tmpRect']).opacity = 0.0;
}
protected fixedWindowFlag(): FixedWindowFlag {
return <FixedWindowFlag> this.refEntity_;
}
protected updateGeometry() {
const width = this.fixedWindowFlag().width();
const length = this.fixedWindowFlag().length();
const widthMultiple = width/120;
const lengthMultiple = length/800;
const rect1 = <Rect>this.tmpGeos_[FixedWindowFlagGeometry.RECT1];
rect1.setCenter(new Point(0,-30*widthMultiple));
rect1.setWidth(width/2);
rect1.setLength(length);
const rect2 = <Rect>this.tmpGeos_[FixedWindowFlagGeometry.RECT2];
rect2.setCenter(new Point(0,30*widthMultiple));
rect2.setWidth(width/2);
rect2.setLength(length);
const tmpRect = <Rect>this.tmpGeos_['tmpRect'];
tmpRect.setWidth(width);
tmpRect.setLength(length);
}
    // Get the set of boundary geometries
getOuterGeos():Array<BaseGeometry> {
const result = new Array<BaseGeometry>();
result.push(this.tmpGeos_[FixedWindowFlagGeometry.RECT1]);
result.push(this.tmpGeos_[FixedWindowFlagGeometry.RECT2]);
return result;
}
getflipStatus() : FlipStatus {
return this.fixedWindowFlag().getFlipStatus();
}
}
/**
 * Floor-to-ceiling window
 */
export class FloorWindowFlagGeometry extends BaseEntityGeometry {
static readonly RECT1 = 'rect1';
static readonly RECT2 = 'rect2';
static readonly RECT3 = 'rect3';
static readonly LINE1 = 'line1';
protected tmpGeos_: Map<string, BaseGeometry> = null;
constructor(floorWindowFlag: FloorWindowFlag) {
super(floorWindowFlag);
this.tmpGeos_ = new Map<string, BaseGeometry>();
const width = floorWindowFlag.width();
        const length = floorWindowFlag.length();
        // Scale factors of width and length relative to the original model (120 x 1500)
const widthMultiple = width/120;
const lengthMultiple = length/1500;
this.tmpGeos_[FloorWindowFlagGeometry.RECT1] = new Rect(new Point(0,-40*widthMultiple+0*40*widthMultiple),
width/3, length);
this.tmpGeos_[FloorWindowFlagGeometry.RECT2] = new Rect(new Point(0,-40*widthMultiple+1*40*widthMultiple),
width/3, length);
this.tmpGeos_[FloorWindowFlagGeometry.RECT3] = new Rect(new Point(0,-40*widthMultiple+2*40*widthMultiple),
width/3, length);
this.tmpGeos_[FloorWindowFlagGeometry.LINE1] = new Line(new Point(0,-20*widthMultiple), new Point(0,20*widthMultiple));
this.tmpGeos_['tmpRect'] = new Rect(new Point(0,0),width, length);
this.geos_[FloorWindowFlagGeometry.RECT1] = new RectPath(this.tmpGeos_[FloorWindowFlagGeometry.RECT1]);
this.geos_[FloorWindowFlagGeometry.RECT2] = new RectPath(this.tmpGeos_[FloorWindowFlagGeometry.RECT2]);
this.geos_[FloorWindowFlagGeometry.RECT3] = new RectPath(this.tmpGeos_[FloorWindowFlagGeometry.RECT3]);
this.geos_[FloorWindowFlagGeometry.LINE1] = new LinePath(this.tmpGeos_[FloorWindowFlagGeometry.LINE1]);
this.geos_['tmpRect'] = new RectPath(this.tmpGeos_['tmpRect']);
(<BasePath>this.geos_[FloorWindowFlagGeometry.RECT1]).style.strokeColor = '#333843';
(<BasePath>this.geos_[FloorWindowFlagGeometry.RECT2]).style.strokeColor = '#333843';
(<BasePath>this.geos_[FloorWindowFlagGeometry.RECT3]).style.strokeColor = '#333843';
(<BasePath>this.geos_[FloorWindowFlagGeometry.LINE1]).style.strokeColor = '#333843';
(<BasePath>this.geos_['tmpRect']).style.fillColor = 'black';
(<BasePath>this.geos_['tmpRect']).opacity = 0.0;
// const geoArray = new Array<BaseGeometry>();
// geoArray.push(this.tmpGeos_[FloorWindowFlagGeometry.RECT1]);
// geoArray.push(this.tmpGeos_[FloorWindowFlagGeometry.RECT2]);
// geoArray.push(this.tmpGeos_[FloorWindowFlagGeometry.RECT3]);
// geoArray.push(this.tmpGeos_[FloorWindowFlagGeometry.LINE1]);
// this.geos_[TwoDoorsGFlagGeometry.BACKGroundImg] = new ImagePath(geoArray,
// 'http://img1.bmlink.com/big/default/2012/6/11/15/421136796670564.jpg',length,width);
}
protected floorWindowFlag(): FloorWindowFlag {
return <FloorWindowFlag> this.refEntity_;
}
protected updateGeometry() {
const length = this.floorWindowFlag().length();
const width = this.floorWindowFlag().width();
        // Scale factors of width and length relative to the original model
const widthMultiple = width/120;
const lengthMultiple = length/1500;
const rectNameArray = [FloorWindowFlagGeometry.RECT1,
FloorWindowFlagGeometry.RECT2,
FloorWindowFlagGeometry.RECT3];
        for (let i = 0; i < 3; i++) {
            const rect = <Rect>this.tmpGeos_[rectNameArray[i]];
            rect.setCenter(new Point(0, -40*widthMultiple + i*40*widthMultiple));
            rect.setWidth(width/3);
            rect.setLength(length);
        }
        const line1 = <Line>this.tmpGeos_[FloorWindowFlagGeometry.LINE1];
line1.setStartPoint(new Point(0,-20*widthMultiple));
line1.setEndPoint(new Point(0,20*widthMultiple));
const tmpRect = <Rect>this.tmpGeos_['tmpRect'];
tmpRect.setWidth(width);
tmpRect.setLength(length);
}
getOuterGeos(): Array<BaseGeometry> {
const result = new Array<BaseGeometry>();
result.push(this.tmpGeos_[FloorWindowFlagGeometry.RECT1]);
result.push(this.tmpGeos_[FloorWindowFlagGeometry.RECT2]);
result.push(this.tmpGeos_[FloorWindowFlagGeometry.RECT3]);
result.push(this.tmpGeos_[FloorWindowFlagGeometry.LINE1]);
return result;
}
getflipStatus() : FlipStatus {
return this.floorWindowFlag().getFlipStatus();
}
}
/**
 * Bay window
 */
export class BayWindowFlagGeometry extends BaseEntityGeometry {
static readonly POLYGON1 = 'polygon1';
static readonly POLYGON2 = 'polygon2';
static readonly RECT = 'rect';
protected tmpGeos_: Map<string, BaseGeometry> = null;
constructor(bayWindowFlag: BayWindowFlag) {
super(bayWindowFlag);
this.tmpGeos_ = new Map<string, BaseGeometry>();
const width = bayWindowFlag.width();
const length = bayWindowFlag.length();
const depth = bayWindowFlag.depth();
        // Scale factors of width, length and depth relative to the original model (120 x 800 x 380)
const widthMultiple = width/120;
const lengthMultiple = length/800;
const depthMultiple = depth/380;
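        // POLYGON1 traces the outer frame outline (offset 12 from the opening)
        // and POLYGON2 the inner one (offset 6); both extend toward -y, i.e.
        // away from the wall line, by `depth`.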
this.tmpGeos_[BayWindowFlagGeometry.POLYGON1] = new Polygon(
[new Line(new Point(-(12+length/2),-(depth+12+width/2)),new Point(12+length/2,-(depth+12+width/2))),
new Line(new Point(12+length/2,-(depth+12+width/2)),new Point(12+length/2,-width/2)),
new Line(new Point(12+length/2,-width/2),new Point(6+length/2,-width/2)),
new Line(new Point(6+length/2,-width/2),new Point(6+length/2,-(depth+width/2+6))),
new Line(new Point(6+length/2,-(depth+width/2+6)),new Point(-(6+length/2),-(depth+width/2+6))),
new Line(new Point(-(6+length/2),-(depth+width/2+6)),new Point(-(6+length/2),-width/2)),
new Line(new Point(-(6+length/2),-width/2),new Point(-(12+length/2),-width/2)),
new Line(new Point(-(12+length/2),-width/2),new Point(-(12+length/2),-(depth+12+width/2)))]);
this.tmpGeos_[BayWindowFlagGeometry.POLYGON2] = new Polygon(
[new Line(new Point(-(6+length/2),-(depth+6+width/2)),new Point(6+length/2,-(depth+6+width/2))),
new Line(new Point(6+length/2,-(depth+6+width/2)),new Point(6+length/2,-width/2)),
new Line(new Point(6+length/2,-width/2),new Point(length/2,-width/2)),
new Line(new Point(length/2,-width/2),new Point(length/2,-(depth+width/2))),
new Line(new Point(length/2,-(depth+width/2)),new Point(-length/2,-(depth+width/2))),
new Line(new Point(-length/2,-(depth+width/2)),new Point(-length/2,-width/2)),
new Line(new Point(-length/2,-width/2),new Point(-(6+length/2),-width/2)),
new Line(new Point(-(6+length/2),-width/2),new Point(-(6+length/2),-(depth+6+width/2)))]);
this.tmpGeos_[BayWindowFlagGeometry.RECT] = new Rect(new Point(0,-190*depthMultiple), width+depth, length);
this.tmpGeos_['tmpRect'] = new Rect(new Point(0,-190*depthMultiple-6),width+depth+12,length+24);
this.geos_[BayWindowFlagGeometry.POLYGON1] = new PolygonPath(this.tmpGeos_[BayWindowFlagGeometry.POLYGON1]);
this.geos_[BayWindowFlagGeometry.POLYGON2] = new PolygonPath(this.tmpGeos_[BayWindowFlagGeometry.POLYGON2]);
this.geos_[BayWindowFlagGeometry.RECT] = new RectPath(this.tmpGeos_[BayWindowFlagGeometry.RECT]);
this.geos_['tmpRect'] = new RectPath(this.tmpGeos_['tmpRect']);
const geoArray = new Array<BaseGeometry>();
// geoArray.push(this.tmpGeos_[BayWindowFlagGeometry.POLYGON1]);
// geoArray.push(this.tmpGeos_[BayWindowFlagGeometry.POLYGON2]);
// geoArray.push(this.tmpGeos_[BayWindowFlagGeometry.RECT]);
// this.geos_[TwoDoorsGFlagGeometry.BACKGroundImg] = new ImagePath(geoArray,
// 'http://img1.bmlink.com/big/default/2012/6/11/15/421136796670564.jpg');
(<BasePath>this.geos_[BayWindowFlagGeometry.POLYGON1]).style.strokeColor = '#333843';
(<BasePath>this.geos_[BayWindowFlagGeometry.POLYGON2]).style.strokeColor = '#333843';
(<BasePath>this.geos_[BayWindowFlagGeometry.RECT]).style.strokeColor = '#333843';
(<BasePath>this.geos_['tmpRect']).style.fillColor = 'black';
(<BasePath>this.geos_['tmpRect']).opacity = 0.0;
}
protected bayWindowFlag(): BayWindowFlag {
return <BayWindowFlag> this.refEntity_;
}
updateGeometry() {
const length = this.bayWindowFlag().length();
const width = this.bayWindowFlag().width();
const depth = this.bayWindowFlag().depth();
        // Scale factors of width, length and depth relative to the original model
const widthMultiple = width/120;
const lengthMultiple = length/800;
const depthMultiple = depth/380;
const polygon1 = <Polygon> this.tmpGeos_[BayWindowFlagGeometry.POLYGON1];
const segments1 = polygon1.segments();
segments1[0].setStartPoint(new Point(-(12+length/2),-(depth+12+width/2)));
segments1[0].setEndPoint(new Point(12+length/2,-(depth+12+width/2)));
segments1[1].setStartPoint(new Point(12+length/2,-(depth+12+width/2)));
segments1[1].setEndPoint(new Point(12+length/2,-width/2));
segments1[2].setStartPoint(new Point(12+length/2,-width/2));
segments1[2].setEndPoint(new Point(6+length/2,-width/2));
segments1[3].setStartPoint(new Point(6+length/2,-width/2));
segments1[3].setEndPoint(new Point((6+length/2),-(depth+width/2+6)));
segments1[4].setStartPoint(new Point((6+length/2),-(depth+width/2+6)));
segments1[4].setEndPoint(new Point(-(6+length/2),-(depth+width/2+6)));
segments1[5].setStartPoint(new Point(-(6+length/2),-(depth+width/2+6)));
segments1[5].setEndPoint(new Point(-(6+length/2),-width/2));
segments1[6].setStartPoint(new Point(-(6+length/2),-width/2));
segments1[6].setEndPoint(new Point(-(12+length/2),-width/2));
segments1[7].setStartPoint(new Point(-(12+length/2),-width/2));
segments1[7].setEndPoint(new Point(-(12+length/2),-(depth+12+width/2)));
const polygon2 = <Polygon> this.tmpGeos_[BayWindowFlagGeometry.POLYGON2];
const segments2 = polygon2.segments();
segments2[0].setStartPoint(new Point(-(6+length/2),-(depth+6+width/2)));
segments2[0].setEndPoint(new Point(6+length/2,-(depth+6+width/2)));
segments2[1].setStartPoint(new Point(6+length/2,-(depth+6+width/2)));
segments2[1].setEndPoint(new Point(6+length/2,-width/2));
segments2[2].setStartPoint(new Point(6+length/2,-width/2));
segments2[2].setEndPoint(new Point(length/2,-width/2));
segments2[3].setStartPoint(new Point(length/2,-width/2));
segments2[3].setEndPoint(new Point(length/2,-(depth+width/2)));
segments2[4].setStartPoint(new Point(length/2,-(depth+width/2)));
segments2[4].setEndPoint(new Point(-length/2,-(depth+width/2)));
segments2[5].setStartPoint(new Point(-length/2,-(depth+width/2)));
segments2[5].setEndPoint(new Point(-length/2,-width/2));
segments2[6].setStartPoint(new Point(-length/2,-width/2));
segments2[6].setEndPoint(new Point(-(6+length/2),-width/2));
segments2[7].setStartPoint(new Point(-(6+length/2),-width/2));
segments2[7].setEndPoint(new Point(-(6+length/2),-(depth+6+width/2)));
const rect = <Rect>this.tmpGeos_[BayWindowFlagGeometry.RECT];
rect.setCenter(new Point(0,-190*depthMultiple));
rect.setWidth(width+depth);
rect.setLength(length);
const tmpRect = <Rect>this.tmpGeos_['tmpRect'];
tmpRect.setCenter(new Point(0,-190*depthMultiple-6));
tmpRect.setWidth(width+depth+12);
tmpRect.setLength(length+24);
}
getOuterGeos():Array<BaseGeometry> {
        const result = new Array<BaseGeometry>();
        result.push(this.tmpGeos_[BayWindowFlagGeometry.RECT]);
return result;
}
getflipStatus() : FlipStatus {
return this.bayWindowFlag().getFlipStatus();
}
}
/**
 * Single door
 */
export class OneDoorFlagGeometry extends BaseEntityGeometry {
static readonly POLYGON1 = 'polygon1';
static readonly POLYGON2 = 'polygon2';
static readonly RECT = 'rect';
static readonly BACKGroundImg = 'backgroundimage';
protected startori: Point;
protected tmpGeos_: Map<string, BaseGeometry> = null;
constructor(oneDoorFlag: OneDoorFlag) {
super(oneDoorFlag);
this.tmpGeos_ = new Map<string, BaseGeometry>();
const width = oneDoorFlag.width();
const length = oneDoorFlag.length();
        // Scale factors of width and length relative to the original model (120 x 900)
const widthMultiple = width/120;
const lengthMultiple = length/900;
this.startori = new Point(0,0);
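        // Shapes (by position): POLYGON1 = the swing-arc region of the leaf,
        // POLYGON2 = the frame band along the wall, RECT = the opened leaf.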
this.tmpGeos_[OneDoorFlagGeometry.POLYGON1] = new Polygon([
new Arc(new Point(-380*lengthMultiple,-(960*lengthMultiple-width/2)),new Point(length/2,-60*widthMultiple),new Point(-600*lengthMultiple,180*lengthMultiple),),
new Line(new Point(length/2,-60*widthMultiple),new Point(-380*lengthMultiple,-60*widthMultiple)),
new Line(new Point(-380*lengthMultiple,-60*widthMultiple), new Point(-380*lengthMultiple,-(960*lengthMultiple-width/2)))]);
this.tmpGeos_[OneDoorFlagGeometry.POLYGON2] = new Polygon([
new Line(new Point(-450*lengthMultiple,0*widthMultiple),new Point(-380*lengthMultiple,0*widthMultiple)),
new Line(new Point(-380*lengthMultiple,0*widthMultiple),new Point(-380*lengthMultiple,-60*widthMultiple)),
new Line(new Point(-380*lengthMultiple,-60*widthMultiple), new Point(length/2,-60*widthMultiple)),
new Line(new Point(length/2,-60*widthMultiple), new Point(length/2,60*widthMultiple)),
new Line( new Point(length/2,60*widthMultiple),new Point(-450*lengthMultiple,60*widthMultiple)),
new Line(new Point(-450*lengthMultiple,60*widthMultiple), new Point(-450*lengthMultiple,0*widthMultiple)),]);
this.tmpGeos_[OneDoorFlagGeometry.RECT] = new Rect(new Point(-415*lengthMultiple,-(960*lengthMultiple-width/2)/2),960*lengthMultiple-width/2, 70*lengthMultiple);
this.tmpGeos_['tmpRect'] = new Rect(new Point(0,-960*lengthMultiple/2+width/2),960*lengthMultiple, length);
this.geos_[OneDoorFlagGeometry.POLYGON1] = new PolygonPath(this.tmpGeos_[OneDoorFlagGeometry.POLYGON1]);
this.geos_[OneDoorFlagGeometry.POLYGON2] = new PolygonPath(this.tmpGeos_[OneDoorFlagGeometry.POLYGON2]);
this.geos_[OneDoorFlagGeometry.RECT] = new RectPath(this.tmpGeos_[OneDoorFlagGeometry.RECT]);
this.geos_['tmpRect'] = new RectPath(this.tmpGeos_['tmpRect']);
(<BasePath>this.geos_[OneDoorFlagGeometry.POLYGON1]).style.strokeColor = '#333843';
(<BasePath>this.geos_[OneDoorFlagGeometry.POLYGON2]).style.strokeColor = '#333843';
(<BasePath>this.geos_[OneDoorFlagGeometry.RECT]).style.strokeColor = '#333843';
(<BasePath>this.geos_['tmpRect']).style.fillColor = 'black';
(<BasePath>this.geos_['tmpRect']).opacity = 0.0;
// const geoArray = new Array<BaseGeometry>();
// geoArray.push(this.tmpGeos_[OneDoorFlagGeometry.POLYGON1]);
// geoArray.push(this.tmpGeos_[OneDoorFlagGeometry.POLYGON2]);
// geoArray.push(this.tmpGeos_[OneDoorFlagGeometry.RECT]);
// this.geos_[OneDoorFlagGeometry.BACKGroundImg] = new ImagePath(geoArray,
// 'http://img1.bmlink.com/big/default/2012/6/11/15/421136796670564.jpg');
}
protected oneDoorFlag(): OneDoorFlag {
return <OneDoorFlag> this.refEntity_;
}
protected updateGeometry() {
const width = this.oneDoorFlag().width();
const length = this.oneDoorFlag().length();
        // Scale factors of width and length relative to the original model
const widthMultiple = width/120;
const lengthMultiple = length/900;
const polygon1 = <Polygon> this.tmpGeos_[OneDoorFlagGeometry.POLYGON1];
const segments1 = polygon1.segments();
const arc1 = <Arc> segments1[0];
arc1.setStartPoint(new Point(-380*lengthMultiple,-(960*lengthMultiple-width/2)));
arc1.setEndPoint(new Point(length/2,-60*widthMultiple));
arc1.setCenter(new Point(-600*lengthMultiple,180*lengthMultiple));
segments1[1].setStartPoint(new Point(length/2,-60*widthMultiple));
segments1[1].setEndPoint(new Point(-380*lengthMultiple,-60*widthMultiple));
segments1[2].setStartPoint(new Point(-380*lengthMultiple,-60*widthMultiple));
segments1[2].setEndPoint(new Point(-380*lengthMultiple,-(960*lengthMultiple-width/2)));
const rect = <Rect>this.tmpGeos_[OneDoorFlagGeometry.RECT];
rect.setCenter(new Point(-415*lengthMultiple,-(960*lengthMultiple-width/2)/2));
rect.setWidth(960*lengthMultiple-width/2);
rect.setLength(70*lengthMultiple);
const polygon2 = <Polygon> this.tmpGeos_[OneDoorFlagGeometry.POLYGON2];
const segments2 = polygon2.segments();
segments2[0].setStartPoint(new Point(-450*lengthMultiple,0*widthMultiple));
segments2[0].setEndPoint(new Point(-380*lengthMultiple,0*widthMultiple));
segments2[1].setStartPoint(new Point(-380*lengthMultiple,0*widthMultiple));
segments2[1].setEndPoint(new Point(-380*lengthMultiple,-60*widthMultiple));
segments2[2].setStartPoint(new Point(-380*lengthMultiple,-60*widthMultiple));
segments2[2].setEndPoint( new Point(length/2,-60*widthMultiple));
segments2[3].setStartPoint( new Point(length/2,-60*widthMultiple));
segments2[3].setEndPoint(new Point(length/2,60*widthMultiple));
segments2[4].setStartPoint(new Point(length/2,60*widthMultiple));
segments2[4].setEndPoint(new Point(-450*lengthMultiple,60*widthMultiple));
segments2[5].setStartPoint(new Point(-450*lengthMultiple,60*widthMultiple));
segments2[5].setEndPoint(new Point(-450*lengthMultiple,0*widthMultiple));
const tmpRect = <Rect>this.tmpGeos_['tmpRect'];
tmpRect.setCenter(new Point(0,-960*lengthMultiple/2+width/2));
tmpRect.setWidth(960*lengthMultiple);
tmpRect.setLength(length);
}
getflipStatus() : FlipStatus {
return this.oneDoorFlag().getFlipStatus();
}
getOuterGeos():Array<BaseGeometry> {
const result = new Array<BaseGeometry>();
result.push(this.tmpGeos_[OneDoorFlagGeometry.POLYGON1]);
result.push(this.tmpGeos_[OneDoorFlagGeometry.POLYGON2]);
result.push(this.tmpGeos_[OneDoorFlagGeometry.RECT]);
return result;
}
}
/**
 * Double door
 */
export class TwoDoorsGFlagGeometry extends BaseEntityGeometry {
static readonly POLYGON1 = 'polygon1';
static readonly POLYGON2 = 'polygon2';
static readonly POLYGON3 = 'polygon3';
static readonly RECT1 = 'rect1';
static readonly RECT2 = 'rect2';
static readonly BACKGroundImg = 'backgroundimage';
protected tmpGeos_: Map<string, BaseGeometry> = null;
constructor(twoDoorsFlag: TwoDoorsFlag) {
super(twoDoorsFlag);
this.tmpGeos_ = new Map<string, BaseGeometry>();
const width = twoDoorsFlag.width();
const length = twoDoorsFlag.length();
        // Scale factors of width and length relative to the original model (120 x 1500)
const widthMultiple = width/120;
const lengthMultiple = length/1500;
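        // Two mirrored leaves: POLYGON1 + RECT1 form the left leaf and its
        // swing arc, POLYGON2 + RECT2 the right one; they meet at x = 0.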
this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON1] = new Polygon([
new Arc(new Point(-680*lengthMultiple,-(810*lengthMultiple-width/2)),new Point(0,-60*widthMultiple),
new Point(-800*lengthMultiple,-45*lengthMultiple),),
new Line(new Point(0,-60*widthMultiple),new Point(-680*lengthMultiple,-60*widthMultiple)),
new Line(new Point(-680*lengthMultiple,-60*widthMultiple), new Point(-680*lengthMultiple,-(810*lengthMultiple-width/2)))]);
this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON2] = new Polygon([
new Arc(new Point(0,-60*widthMultiple),new Point(680*lengthMultiple,-(810*lengthMultiple-width/2)),
new Point(500*lengthMultiple,-45*lengthMultiple),),
new Line(new Point(680*lengthMultiple,-(810*lengthMultiple-width/2)),new Point(680*lengthMultiple,-60*widthMultiple)),
new Line(new Point(680*lengthMultiple,-60*widthMultiple), new Point(0,-60*widthMultiple))]);
this.tmpGeos_[TwoDoorsGFlagGeometry.RECT1] = new Rect(new Point(-715*lengthMultiple,-(810*lengthMultiple-width/2)/2),
810*lengthMultiple-width/2, 70*lengthMultiple);
this.tmpGeos_[TwoDoorsGFlagGeometry.RECT2] = new Rect(new Point(715*lengthMultiple,-(810*lengthMultiple-width/2)/2),
810*lengthMultiple-width/2, 70*lengthMultiple);
this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON3] = new Polygon([
new Line(new Point(-750*lengthMultiple,0*widthMultiple),new Point(-680*lengthMultiple,0*widthMultiple)),
new Line(new Point(-680*lengthMultiple,0*widthMultiple),new Point(-680*lengthMultiple,-60*widthMultiple)),
new Line(new Point(-680*lengthMultiple,-60*widthMultiple),new Point(680*lengthMultiple,-60*widthMultiple)),
new Line(new Point(680*lengthMultiple,-60*widthMultiple),new Point(680*lengthMultiple,0*widthMultiple)),
new Line(new Point(680*lengthMultiple,0*widthMultiple), new Point(length/2,0*widthMultiple)),
new Line(new Point(length/2,0*widthMultiple), new Point(length/2,60*widthMultiple)),
new Line( new Point(length/2,60*widthMultiple),new Point(-750*lengthMultiple,60*widthMultiple)),
new Line(new Point(-750*lengthMultiple,60*widthMultiple), new Point(-750*lengthMultiple,0*widthMultiple)), ]);
this.tmpGeos_['tmpRect'] = new Rect(new Point(0,-810*lengthMultiple/2+width/2), 810*lengthMultiple, length);
this.geos_[TwoDoorsGFlagGeometry.POLYGON1] = new PolygonPath(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON1]);
this.geos_[TwoDoorsGFlagGeometry.POLYGON2] = new PolygonPath(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON2]);
this.geos_[TwoDoorsGFlagGeometry.POLYGON3] = new PolygonPath(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON3]);
this.geos_[TwoDoorsGFlagGeometry.RECT1] = new RectPath(this.tmpGeos_[TwoDoorsGFlagGeometry.RECT1]);
this.geos_[TwoDoorsGFlagGeometry.RECT2] = new RectPath(this.tmpGeos_[TwoDoorsGFlagGeometry.RECT2]);
this.geos_['tmpRect'] = new RectPath(this.tmpGeos_['tmpRect']);
// const geoArray = new Array<BaseGeometry>();
// geoArray.push(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON1]);
// geoArray.push(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON2]);
// geoArray.push(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON3]);
// geoArray.push(this.tmpGeos_[TwoDoorsGFlagGeometry.RECT1]);
// geoArray.push(this.tmpGeos_[TwoDoorsGFlagGeometry.RECT2]);
// this.geos_[TwoDoorsGFlagGeometry.BACKGroundImg] = new ImagePath(geoArray,
// 'http://img1.bmlink.com/big/default/2012/6/11/15/421136796670564.jpg'
// ,150,150);
/// END!!!
(<BasePath>this.geos_[TwoDoorsGFlagGeometry.POLYGON1]).style.strokeColor = '#333843';
(<BasePath>this.geos_[TwoDoorsGFlagGeometry.POLYGON2]).style.strokeColor = '#333843';
(<BasePath>this.geos_[TwoDoorsGFlagGeometry.POLYGON3]).style.strokeColor = '#333843';
(<BasePath>this.geos_[TwoDoorsGFlagGeometry.RECT1]).style.strokeColor = '#333843';
(<BasePath>this.geos_[TwoDoorsGFlagGeometry.RECT2]).style.strokeColor = '#333843';
(<BasePath>this.geos_['tmpRect']).style.fillColor = 'black';
(<BasePath>this.geos_['tmpRect']).opacity = 0.0;
}
protected twoDoorsFlag(): TwoDoorsFlag {
return <TwoDoorsFlag> this.refEntity_;
}
getflipStatus() : FlipStatus {
return this.twoDoorsFlag().getFlipStatus();
}
protected updateGeometry() {
const width = this.twoDoorsFlag().width();
const length = this.twoDoorsFlag().length();
        // Ratio of the given width and length to the original model's dimensions
const widthMultiple = width/120;
const lengthMultiple = length/1500;
const polygon1 = <Polygon> this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON1];
const segments1 = polygon1.segments();
const arc1 = <Arc> segments1[0];
arc1.setStartPoint(new Point(-680*lengthMultiple,-(810*lengthMultiple-width/2)));
arc1.setEndPoint(new Point(0,-60*widthMultiple));
arc1.setCenter(new Point(-800*lengthMultiple,-45*lengthMultiple));
segments1[1].setStartPoint(new Point(0,-60*widthMultiple));
segments1[1].setEndPoint(new Point(-680*lengthMultiple,-60*widthMultiple));
segments1[2].setStartPoint(new Point(-680*lengthMultiple,-60*widthMultiple));
segments1[2].setEndPoint(new Point(-680*lengthMultiple,-(810*lengthMultiple-width/2)));
const polygon2 = <Polygon> this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON2];
const segments2 = polygon2.segments();
const arc2 = <Arc> segments2[0];
arc2.setStartPoint(new Point(0,-60*widthMultiple));
arc2.setEndPoint(new Point(680*lengthMultiple,-(810*lengthMultiple-width/2)));
arc2.setCenter(new Point(500*lengthMultiple,-45*lengthMultiple));
segments2[1].setStartPoint(new Point(680*lengthMultiple,-(810*lengthMultiple-width/2)));
segments2[1].setEndPoint(new Point(680*lengthMultiple,-60*widthMultiple));
segments2[2].setStartPoint(new Point(680*lengthMultiple,-60*widthMultiple));
segments2[2].setEndPoint(new Point(0,-60*widthMultiple));
const rect1 = <Rect>this.tmpGeos_[TwoDoorsGFlagGeometry.RECT1];
rect1.setCenter(new Point(-715*lengthMultiple,-(810*lengthMultiple-width/2)/2));
rect1.setWidth(810*lengthMultiple-width/2);
rect1.setLength(70*lengthMultiple);
const rect2 = <Rect>this.tmpGeos_[TwoDoorsGFlagGeometry.RECT2];
rect2.setCenter(new Point(715*lengthMultiple,-(810*lengthMultiple-width/2)/2));
rect2.setWidth(810*lengthMultiple-width/2);
rect2.setLength(70*lengthMultiple);
const polygon3 = <Polygon> this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON3];
const segments3 = polygon3.segments();
segments3[0].setStartPoint(new Point(-750*lengthMultiple,0*widthMultiple));
segments3[0].setEndPoint(new Point(-680*lengthMultiple,0*widthMultiple));
segments3[1].setStartPoint(new Point(-680*lengthMultiple,0*widthMultiple));
segments3[1].setEndPoint(new Point(-680*lengthMultiple,-60*widthMultiple));
segments3[2].setStartPoint(new Point(-680*lengthMultiple,-60*widthMultiple));
segments3[2].setEndPoint(new Point(680*lengthMultiple,-60*widthMultiple));
segments3[3].setStartPoint(new Point(680*lengthMultiple,-60*widthMultiple));
segments3[3].setEndPoint(new Point(680*lengthMultiple,0*widthMultiple));
segments3[4].setStartPoint(new Point(680*lengthMultiple,0*widthMultiple));
segments3[4].setEndPoint(new Point(length/2,0*widthMultiple));
segments3[5].setStartPoint(new Point(length/2,0*widthMultiple));
segments3[5].setEndPoint(new Point(length/2,60*widthMultiple));
segments3[6].setStartPoint(new Point(length/2,60*widthMultiple));
segments3[6].setEndPoint(new Point(-750*lengthMultiple,60*widthMultiple));
segments3[7].setStartPoint(new Point(-750*lengthMultiple,60*widthMultiple));
segments3[7].setEndPoint(new Point(-750*lengthMultiple,0*widthMultiple));
const tmpRect = <Rect>this.tmpGeos_['tmpRect'];
tmpRect.setCenter(new Point(0,-810*lengthMultiple/2+width/2));
tmpRect.setWidth(810*lengthMultiple);
tmpRect.setLength(length);
}
getOuterGeos():Array<BaseGeometry> {
const result = new Array<BaseGeometry>();
result.push(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON1]);
result.push(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON2]);
result.push(this.tmpGeos_[TwoDoorsGFlagGeometry.POLYGON3]);
result.push(this.tmpGeos_[TwoDoorsGFlagGeometry.RECT1]);
result.push(this.tmpGeos_[TwoDoorsGFlagGeometry.RECT2]);
return result;
}
}
/**
 * Sliding door
*/
export class SlidingDoorsFlagGeometry extends BaseEntityGeometry {
static readonly RECT1 = 'rect1';
static readonly RECT2 = 'rect2';
static readonly RECT3 = 'rect3';
protected tmpGeos_: Map<string, BaseGeometry> = null;
constructor(slidingDoorsFlag: SlidingDoorsFlag) {
super(slidingDoorsFlag);
this.tmpGeos_ = new Map<string, BaseGeometry>();
const width = slidingDoorsFlag.width();
const length = slidingDoorsFlag.length();
        // Ratio of the given width and length to the original model's dimensions
const widthMultiple = width/120;
const lengthMultiple = length/1500;
this.tmpGeos_[SlidingDoorsFlagGeometry.RECT1] = new Rect(new Point(0,0), width, length);
this.tmpGeos_[SlidingDoorsFlagGeometry.RECT2] = new Rect(new Point(lengthMultiple*(-237),widthMultiple*(-15)),widthMultiple*(30),lengthMultiple*1000);
this.tmpGeos_[SlidingDoorsFlagGeometry.RECT3] = new Rect(new Point(lengthMultiple*237,widthMultiple*15), widthMultiple*(30),lengthMultiple*1000);
this.tmpGeos_['tmpRect'] = new Rect(new Point(0,0),width, length);
this.geos_[SlidingDoorsFlagGeometry.RECT1] = new RectPath(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT1]);
this.geos_[SlidingDoorsFlagGeometry.RECT2] = new RectPath(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT2]);
this.geos_[SlidingDoorsFlagGeometry.RECT3] = new RectPath(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT3]);
this.geos_['tmpRect'] = new RectPath(this.tmpGeos_['tmpRect']);
(<BasePath>this.geos_[SlidingDoorsFlagGeometry.RECT1]).style.strokeColor = '#333843';
(<BasePath>this.geos_[SlidingDoorsFlagGeometry.RECT2]).style.strokeColor = '#333843';
(<BasePath>this.geos_[SlidingDoorsFlagGeometry.RECT3]).style.strokeColor = '#333843';
(<BasePath>this.geos_['tmpRect']).style.fillColor = 'black';
(<BasePath>this.geos_['tmpRect']).opacity = 0.0;
// const geoArray = new Array<BaseGeometry>();
// geoArray.push(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT1]);
// geoArray.push(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT2]);
// geoArray.push(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT3]);
// this.geos_[OneDoorFlagGeometry.BACKGroundImg] = new ImagePath(geoArray,
// 'http://img1.bmlink.com/big/default/2012/6/11/15/421136796670564.jpg');
}
protected slidingDoorsFlag(): SlidingDoorsFlag{
return <SlidingDoorsFlag> this.refEntity_;
}
protected updateGeometry() {
const width = this.slidingDoorsFlag().width();
const length = this.slidingDoorsFlag().length();
        // Ratio of the given width and length to the original model's dimensions
const widthMultiple = width/120;
const lengthMultiple = length/1500;
const rect1 = <Rect>this.tmpGeos_[SlidingDoorsFlagGeometry.RECT1];
rect1.setWidth(width);
rect1.setLength(length);
const rect2 = <Rect>this.tmpGeos_[SlidingDoorsFlagGeometry.RECT2];
rect2.setCenter(new Point(lengthMultiple*(-237),widthMultiple*(-15)));
rect2.setWidth(widthMultiple*30);
rect2.setLength(lengthMultiple*1000);
const rect3 = <Rect>this.tmpGeos_[SlidingDoorsFlagGeometry.RECT3];
rect3.setCenter(new Point(lengthMultiple*237,widthMultiple*15));
rect3.setWidth(widthMultiple*30);
rect3.setLength(lengthMultiple*1000);
const tmpRect = <Rect>this.tmpGeos_['tmpRect'];
tmpRect.setWidth(width);
tmpRect.setLength(length);
}
getflipStatus() : FlipStatus {
return this.slidingDoorsFlag().getFlipStatus();
}
getOuterGeos():Array<BaseGeometry> {
const result = new Array<BaseGeometry>();
result.push(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT1]);
result.push(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT2]);
result.push(this.tmpGeos_[SlidingDoorsFlagGeometry.RECT3]);
return result;
}
}
export class DoorwayFlagGeometry extends BaseEntityGeometry {
static readonly RECT = 'rect';
static readonly LINE = 'line';
protected tmpGeos_: Map<string, BaseGeometry> = null;
constructor(doorWayFlag: DoorwayFlag) {
super(doorWayFlag);
this.tmpGeos_ = new Map<string, BaseGeometry>();
const width = doorWayFlag.width();
const length = doorWayFlag.length();
this.tmpGeos_['rect'] = new Rect(new Point(0.0, 0.0), width, length);
this.tmpGeos_['tmpRect'] = new Rect(new Point(0.0, 0.0), width, length);
this.tmpGeos_['line'] = new Line(new Point(-length / 2.0, 0), new Point(length / 2.0, 0));
this.geos_['rect'] = new RectPath(this.tmpGeos_['rect']);
this.geos_['line'] = new LinePath(this.tmpGeos_['line']);
this.geos_['tmpRect'] = new RectPath(this.tmpGeos_['tmpRect']);
(<BasePath>this.geos_['rect']).style.strokeColor = '#333843';
(<BasePath>this.geos_['line']).style.strokeColor = '#333843';
(<BasePath>this.geos_['tmpRect']).style.fillColor = 'black';
(<BasePath>this.geos_['tmpRect']).opacity = 0.0;
// const geoArray = new Array<BaseGeometry>();
// geoArray.push(this.tmpGeos_['rect']);
// geoArray.push(this.tmpGeos_['line']);
// this.geos_[OneDoorFlagGeometry.BACKGroundImg] = new ImagePath(geoArray,
// 'http://img1.bmlink.com/big/default/2012/6/11/15/421136796670564.jpg');
}
getflipStatus() : FlipStatus {
return (<DoorwayFlag>this.refEntity()).getFlipStatus();
}
getOuterGeos():Array<BaseGeometry> {
const result = new Array<BaseGeometry>();
result.push(this.tmpGeos_['rect']);
result.push(this.tmpGeos_['line']);
return result;
}
} |
result.push(this.tmpGeos_[BayWindowFlagGeometry.POLYGON1]);
result.push(this.tmpGeos_[BayWindowFlagGeometry.POLYGON2]); |
filterGrid.js | //Operator: Or = 0, And = 1, Not = 2
//Condition:
// Content = 0,
// StartWith = 1,
// EndWith = 2,
// Equals = 3,
// DoesNotEqual = 4,
// GreaterThan = 5,
// GreaterThanOrEqual = 6,
// LessThan = 7,
// LessThanOrEqual = 8,
//DataType: String = 0, Number = 1, Date = 2, Boolean = 3
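// A sketch of the payload getCondition below produces, for a hypothetical
// string column "Name" filtered by the term "foo":
// [{ Op: 0, Clauses: [{ Op: 0,
//    Operation: { Condition: 0, DataType: 0, Field: 'Name', Value: 'foo' } }] }]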
export default function | () {
var self = this;
    self.getCondition = function(gridApi) {
        var filters = [];
        var filter = {
            Clauses: [],
            Op: 0
        };
        var clauses = [];
        var operation = {
            Condition: 1,
            DataType: 0,
            Field: '',
            Value: ''
        };
        if (gridApi && gridApi.grid) {
            var grid = gridApi.grid;
            for (var i = 0; i < grid.columns.length; i++) {
                var col = grid.columns[i];
                // Named `colFilter` so it does not shadow the outer `filter`
                // payload object, which a redeclared `var filter` would
                // silently overwrite.
                var colFilter = col.filters[0];
                if (angular.isDefined(colFilter.term) && colFilter.term !== null && colFilter.term !== '') {
                    operation = {
                        Condition: getConditionByDataType(col.colDef.dataType),
                        DataType: !col.colDef.dataType ? 0 : col.colDef.dataType,
                        Field: col.field,
                        Value: colFilter.term
                    };
                    // Push a fresh clause per column; reusing a single clause
                    // object would make every entry alias the last operation.
                    clauses.push({
                        Operation: angular.copy(operation),
                        Op: 0
                    });
                }
            }
            if (clauses.length > 0) {
                filter.Clauses = angular.copy(clauses);
                filters.push(filter);
            }
        }
        return filters.length === 0 ? null : filters;
    }
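    // Usage sketch (assuming the default export is imported as `FilterGrid`
    // and a ui-grid style `gridApi` is in scope):
    //   var condition = new FilterGrid().getCondition(gridApi);
    //   // -> null when no column has an active filter term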
function getConditionByDataType(datatype) {
switch (datatype) {
case 1:
case 2:
case 3:
return 3;
}
return 0;
}
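    // For example, getConditionByDataType(1) (and likewise 2 or 3) yields 3
    // (Equals) for numbers, dates and booleans, while strings and undefined
    // data types fall back to 0 (Content).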
}
| FilterGrid |
region.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
This file actually contains two passes related to regions. The first
pass builds up the `region_map`, which describes the parent links in
the region hierarchy. The second pass infers which types must be
region parameterized.
*/
use driver::session::Session;
use metadata::csearch;
use middle::ty::{region_variance, rv_covariant, rv_invariant};
use middle::ty::{rv_contravariant};
use middle::ty;
use core::dvec::DVec;
use std::list;
use std::list::list;
use std::map::HashMap;
use syntax::ast_map;
use syntax::codemap::span;
use syntax::print::pprust;
use syntax::{ast, visit};
type parent = Option<ast::node_id>;
/* Records the parameter ID of a region name. */
type binding = {node_id: ast::node_id,
name: ~str,
br: ty::bound_region};
/**
Encodes the bounding lifetime for a given AST node:
- Expressions are mapped to the expression or block encoding the maximum
(static) lifetime of a value produced by that expression. This is
generally the innermost call, statement, match, or block.
- Variables and bindings are mapped to the block in which they are declared.
*/
type region_map = HashMap<ast::node_id, ast::node_id>;
struct ctxt {
sess: Session,
def_map: resolve::DefMap,
// Generated maps:
region_map: region_map,
// Generally speaking, expressions are parented to their innermost
// enclosing block. But some kinds of expressions serve as
// parents: calls, methods, etc. In addition, some expressions
// serve as parents by virtue of where they appear. For example,
// the condition in a while loop is always a parent. In those
// cases, we add the node id of such an expression to this set so
// that when we visit it we can view it as a parent.
root_exprs: HashMap<ast::node_id, ()>,
// The parent scope is the innermost block, statement, call, or alt
// expression during the execution of which the current expression
// will be evaluated. Generally speaking, the innermost parent
// scope is also the closest suitable ancestor in the AST tree.
//
// There is a subtle point concerning call arguments. Imagine
// you have a call:
//
// { // block a
// foo( // call b
// x,
// y);
// }
//
// In what lifetime are the expressions `x` and `y` evaluated? At
    // first, I imagined the answer was the block `a`, as the arguments
// are evaluated before the call takes place. But this turns out
// to be wrong. The lifetime of the call must encompass the
// argument evaluation as well.
//
// The reason is that evaluation of an earlier argument could
// create a borrow which exists during the evaluation of later
// arguments. Consider this torture test, for example,
//
// fn test1(x: @mut ~int) {
// foo(&**x, *x = ~5);
// }
//
// Here, the first argument `&**x` will be a borrow of the `~int`,
// but the second argument overwrites that very value! Bad.
// (This test is borrowck-pure-scope-in-call.rs, btw)
parent: parent,
}
/// Returns true if `subscope` is equal to or is lexically nested inside | fn scope_contains(region_map: region_map, superscope: ast::node_id,
subscope: ast::node_id) -> bool {
let mut subscope = subscope;
while superscope != subscope {
match region_map.find(subscope) {
None => return false,
Some(scope) => subscope = scope
}
}
return true;
}
/// Determines whether one region is a subregion of another. This is
/// intended to run *after inference* and sadly the logic is somewhat
/// duplicated with the code in infer.rs.
fn is_subregion_of(region_map: region_map,
sub_region: ty::Region,
super_region: ty::Region) -> bool {
sub_region == super_region ||
match (sub_region, super_region) {
(_, ty::re_static) => {
true
}
(ty::re_scope(sub_scope), ty::re_scope(super_scope)) |
(ty::re_scope(sub_scope), ty::re_free(super_scope, _)) => {
scope_contains(region_map, super_scope, sub_scope)
}
_ => {
false
}
}
}
/// Finds the nearest common ancestor (if any) of two scopes. That
/// is, finds the smallest scope which is greater than or equal to
/// both `scope_a` and `scope_b`.
fn nearest_common_ancestor(region_map: region_map, scope_a: ast::node_id,
scope_b: ast::node_id) -> Option<ast::node_id> {
fn ancestors_of(region_map: region_map, scope: ast::node_id)
-> ~[ast::node_id] {
let mut result = ~[scope];
let mut scope = scope;
loop {
match region_map.find(scope) {
None => return result,
Some(superscope) => {
result.push(superscope);
scope = superscope;
}
}
}
}
if scope_a == scope_b { return Some(scope_a); }
let a_ancestors = ancestors_of(region_map, scope_a);
let b_ancestors = ancestors_of(region_map, scope_b);
let mut a_index = vec::len(a_ancestors) - 1u;
let mut b_index = vec::len(b_ancestors) - 1u;
// Here, ~[ab]_ancestors is a vector going from narrow to broad.
// The end of each vector will be the item where the scope is
// defined; if there are any common ancestors, then the tails of
// the vector will be the same. So basically we want to walk
// backwards from the tail of each vector and find the first point
// where they diverge. If one vector is a suffix of the other,
// then the corresponding scope is a superscope of the other.
if a_ancestors[a_index] != b_ancestors[b_index] {
return None;
}
loop {
// Loop invariant: a_ancestors[a_index] == b_ancestors[b_index]
// for all indices between a_index and the end of the array
if a_index == 0u { return Some(scope_a); }
if b_index == 0u { return Some(scope_b); }
a_index -= 1u;
b_index -= 1u;
if a_ancestors[a_index] != b_ancestors[b_index] {
return Some(a_ancestors[a_index + 1u]);
}
}
}
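// Worked example (hypothetical node ids): with a region_map mapping
// 4 -> 3, 3 -> 1 and 5 -> 1, ancestors_of(4) is ~[4, 3, 1] and
// ancestors_of(5) is ~[5, 1]. Walking backwards from the shared tail,
// the vectors diverge right after scope 1, so
// nearest_common_ancestor(region_map, 4, 5) returns Some(1).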
/// Extracts the current parent from cx, failing if there is none.
fn parent_id(cx: ctxt, span: span) -> ast::node_id {
match cx.parent {
None => {
cx.sess.span_bug(span, ~"crate should not be parent here");
}
Some(parent_id) => {
parent_id
}
}
}
/// Records the current parent (if any) as the parent of `child_id`.
fn record_parent(cx: ctxt, child_id: ast::node_id) {
for cx.parent.each |parent_id| {
debug!("parent of node %d is node %d", child_id, *parent_id);
cx.region_map.insert(child_id, *parent_id);
}
}
fn resolve_block(blk: ast::blk, cx: ctxt, visitor: visit::vt<ctxt>) {
// Record the parent of this block.
record_parent(cx, blk.node.id);
// Descend.
let new_cx: ctxt = ctxt {parent: Some(blk.node.id),.. cx};
visit::visit_block(blk, new_cx, visitor);
}
fn resolve_arm(arm: ast::arm, cx: ctxt, visitor: visit::vt<ctxt>) {
visit::visit_arm(arm, cx, visitor);
}
fn resolve_pat(pat: @ast::pat, cx: ctxt, visitor: visit::vt<ctxt>) {
match pat.node {
ast::pat_ident(*) => {
let defn_opt = cx.def_map.find(pat.id);
match defn_opt {
Some(ast::def_variant(_,_)) => {
/* Nothing to do; this names a variant. */
}
_ => {
/* This names a local. Bind it to the containing scope. */
record_parent(cx, pat.id);
}
}
}
_ => { /* no-op */ }
}
visit::visit_pat(pat, cx, visitor);
}
fn resolve_stmt(stmt: @ast::stmt, cx: ctxt, visitor: visit::vt<ctxt>) {
match stmt.node {
ast::stmt_decl(*) => {
visit::visit_stmt(stmt, cx, visitor);
}
ast::stmt_expr(_, stmt_id) |
ast::stmt_semi(_, stmt_id) => {
record_parent(cx, stmt_id);
let mut expr_cx = cx;
expr_cx.parent = Some(stmt_id);
visit::visit_stmt(stmt, expr_cx, visitor);
}
ast::stmt_mac(*) => cx.sess.bug(~"unexpanded macro")
}
}
fn resolve_expr(expr: @ast::expr, cx: ctxt, visitor: visit::vt<ctxt>) {
record_parent(cx, expr.id);
let mut new_cx = cx;
match expr.node {
// Calls or overloadable operators
// FIXME #3387
// ast::expr_index(*) | ast::expr_binary(*) |
// ast::expr_unary(*) |
ast::expr_call(*) | ast::expr_method_call(*) => {
debug!("node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr()));
new_cx.parent = Some(expr.id);
}
ast::expr_match(*) => {
debug!("node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr()));
new_cx.parent = Some(expr.id);
}
ast::expr_fn(_, _, _, cap_clause) |
ast::expr_fn_block(_, _, cap_clause) => {
// although the capture items are not expressions per se, they
// do get "evaluated" in some sense as copies or moves of the
// relevant variables so we parent them like an expression
for (*cap_clause).each |cap_item| {
record_parent(new_cx, cap_item.id);
}
}
ast::expr_while(cond, _) => {
new_cx.root_exprs.insert(cond.id, ());
}
_ => {}
};
if new_cx.root_exprs.contains_key(expr.id) {
new_cx.parent = Some(expr.id);
}
visit::visit_expr(expr, new_cx, visitor);
}
fn resolve_local(local: @ast::local, cx: ctxt, visitor: visit::vt<ctxt>) {
record_parent(cx, local.node.id);
visit::visit_local(local, cx, visitor);
}
fn resolve_item(item: @ast::item, cx: ctxt, visitor: visit::vt<ctxt>) {
// Items create a new outer block scope as far as we're concerned.
let new_cx: ctxt = ctxt {parent: None,.. cx};
visit::visit_item(item, new_cx, visitor);
}
fn resolve_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
sp: span, id: ast::node_id, cx: ctxt,
visitor: visit::vt<ctxt>) {
let fn_cx = match fk {
visit::fk_item_fn(*) | visit::fk_method(*) |
visit::fk_dtor(*) => {
// Top-level functions are a root scope.
ctxt {parent: Some(id),.. cx}
}
visit::fk_anon(*) | visit::fk_fn_block(*) => {
// Closures continue with the inherited scope.
cx
}
};
debug!("visiting fn with body %d. cx.parent: %? \
fn_cx.parent: %?",
body.node.id, cx.parent, fn_cx.parent);
for decl.inputs.each |input| {
cx.region_map.insert(input.id, body.node.id);
}
visit::visit_fn(fk, decl, body, sp, id, fn_cx, visitor);
}
fn resolve_crate(sess: Session, def_map: resolve::DefMap,
crate: @ast::crate) -> region_map {
let cx: ctxt = ctxt {sess: sess,
def_map: def_map,
region_map: HashMap(),
root_exprs: HashMap(),
parent: None};
let visitor = visit::mk_vt(@{
visit_block: resolve_block,
visit_item: resolve_item,
visit_fn: resolve_fn,
visit_arm: resolve_arm,
visit_pat: resolve_pat,
visit_stmt: resolve_stmt,
visit_expr: resolve_expr,
visit_local: resolve_local,
.. *visit::default_visitor()
});
visit::visit_crate(*crate, cx, visitor);
return cx.region_map;
}
// ___________________________________________________________________________
// Determining region parameterization
//
// Infers which type defns must be region parameterized---this is done
// by scanning their contents to see whether they reference a region
// type, directly or indirectly. This is a fixed-point computation.
//
// We do it in two passes. First we walk the AST and construct a map
// from each type defn T1 to other defns which make use of it. For example,
// if we have a type like:
//
// type S = *int;
// type T = S;
//
// Then there would be a map entry from S to T. During the same walk,
// we also add any types that reference regions to a set and
// a worklist. We can then process the worklist, propagating indirect
// dependencies until a fixed point is reached.
type region_paramd_items = HashMap<ast::node_id, region_variance>;
type region_dep = {ambient_variance: region_variance, id: ast::node_id};
type dep_map = HashMap<ast::node_id, @DVec<region_dep>>;
impl region_dep : cmp::Eq {
pure fn eq(&self, other: ®ion_dep) -> bool {
(*self).ambient_variance == (*other).ambient_variance &&
(*self).id == (*other).id
}
pure fn ne(&self, other: ®ion_dep) -> bool { !(*self).eq(other) }
}
type determine_rp_ctxt_ = {
sess: Session,
ast_map: ast_map::map,
def_map: resolve::DefMap,
region_paramd_items: region_paramd_items,
dep_map: dep_map,
worklist: DVec<ast::node_id>,
// the innermost enclosing item id
mut item_id: ast::node_id,
// true when we are within an item but not within a method.
// see long discussion on region_is_relevant()
mut anon_implies_rp: bool,
// encodes the context of the current type; invariant if
// mutable, covariant otherwise
mut ambient_variance: region_variance,
};
enum determine_rp_ctxt {
determine_rp_ctxt_(@determine_rp_ctxt_)
}
fn join_variance(++variance1: region_variance,
++variance2: region_variance) -> region_variance{
match (variance1, variance2) {
(rv_invariant, _) => {rv_invariant}
(_, rv_invariant) => {rv_invariant}
(rv_covariant, rv_contravariant) => {rv_invariant}
(rv_contravariant, rv_covariant) => {rv_invariant}
(rv_covariant, rv_covariant) => {rv_covariant}
(rv_contravariant, rv_contravariant) => {rv_contravariant}
}
}
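// For example, join_variance(rv_covariant, rv_contravariant) yields
// rv_invariant: once an item is used in both a covariant and a
// contravariant position, only invariance is sound for both uses.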
/// Combines the ambient variance with the variance of a
/// particular site to yield the final variance of the reference.
///
/// Example: if we are checking function arguments then the ambient
/// variance is contravariant. If we then find a `&r/T` pointer, `r`
/// appears in a co-variant position. This implies that this
/// occurrence of `r` is contra-variant with respect to the current
/// item, and hence the function returns `rv_contravariant`.
fn add_variance(+ambient_variance: region_variance,
+variance: region_variance) -> region_variance {
match (ambient_variance, variance) {
(rv_invariant, _) => rv_invariant,
(_, rv_invariant) => rv_invariant,
(rv_covariant, c) => c,
(c, rv_covariant) => c,
(rv_contravariant, rv_contravariant) => rv_covariant
}
}
impl determine_rp_ctxt {
fn add_variance(variance: region_variance) -> region_variance {
add_variance(self.ambient_variance, variance)
}
/// Records that item `id` is region-parameterized with the
/// variance `variance`. If `id` was already parameterized, then
/// the new variance is joined with the old variance.
fn add_rp(id: ast::node_id, variance: region_variance) {
assert id != 0;
let old_variance = self.region_paramd_items.find(id);
let joined_variance = match old_variance {
None => variance,
Some(v) => join_variance(v, variance)
};
debug!("add_rp() variance for %s: %? == %? ^ %?",
ast_map::node_id_to_str(self.ast_map, id,
self.sess.parse_sess.interner),
joined_variance, old_variance, variance);
if Some(joined_variance) != old_variance {
self.region_paramd_items.insert(id, joined_variance);
self.worklist.push(id);
}
}
/// Indicates that the region-parameterization of the current item
/// is dependent on the region-parameterization of the item
/// `from`. Put another way, it indicates that the current item
/// contains a value of type `from`, so if `from` is
/// region-parameterized, so is the current item.
fn add_dep(from: ast::node_id) {
debug!("add dependency from %d -> %d (%s -> %s) with variance %?",
from, self.item_id,
ast_map::node_id_to_str(self.ast_map, from,
self.sess.parse_sess.interner),
ast_map::node_id_to_str(self.ast_map, self.item_id,
self.sess.parse_sess.interner),
copy self.ambient_variance);
let vec = match self.dep_map.find(from) {
Some(vec) => vec,
None => {
let vec = @DVec();
self.dep_map.insert(from, vec);
vec
}
};
let dep = {ambient_variance: self.ambient_variance, id: self.item_id};
if !vec.contains(&dep) { vec.push(dep); }
}
// Determines whether a reference to a region that appears in the
// AST implies that the enclosing type is region-parameterized.
//
// This point is subtle. Here are four examples to make it more
// concrete.
//
// 1. impl foo for &int { ... }
// 2. impl foo for &self/int { ... }
// 3. impl foo for bar { fn m() -> &self/int { ... } }
// 4. impl foo for bar { fn m() -> &int { ... } }
//
// In case 1, the anonymous region is being referenced,
// but it appears in a context where the anonymous region
// resolves to self, so the impl foo is region-parameterized.
//
// In case 2, the self parameter is written explicitly.
//
// In case 3, the method refers to self, so that implies that the
// impl must be region parameterized. (If the type bar is not
// region parameterized, that is an error, because the self region
// is effectively unconstrained, but that is detected elsewhere).
//
    // In case 4, the anonymous region is referenced, but it is
    // bound by the method, so it does not refer to self. This impl
// need not be region parameterized.
//
// So the rules basically are: the `self` region always implies
// that the enclosing type is region parameterized. The anonymous
// region also does, unless it appears within a method, in which
// case it is bound. We handle this by setting a flag
// (anon_implies_rp) to true when we enter an item and setting
// that flag to false when we enter a method.
fn region_is_relevant(r: @ast::region) -> bool {
match r.node {
ast::re_static => false,
ast::re_anon => self.anon_implies_rp,
ast::re_self => true,
ast::re_named(_) => false
}
}
// For named types like Foo, if there is no explicit region
// parameter, then we will add the anonymous region, so there is
// a dependency if the anonymous region implies rp.
//
    // If the region is explicitly specified, then we follow the
// normal rules.
fn opt_region_is_relevant(opt_r: Option<@ast::region>) -> bool {
debug!("opt_region_is_relevant: %? (anon_implies_rp=%b)",
opt_r, self.anon_implies_rp);
match opt_r {
None => self.anon_implies_rp,
Some(r) => self.region_is_relevant(r)
}
}
fn with(item_id: ast::node_id,
anon_implies_rp: bool,
f: fn()) {
let old_item_id = self.item_id;
let old_anon_implies_rp = self.anon_implies_rp;
self.item_id = item_id;
self.anon_implies_rp = anon_implies_rp;
debug!("with_item_id(%d, %b)", item_id, anon_implies_rp);
let _i = util::common::indenter();
f();
self.item_id = old_item_id;
self.anon_implies_rp = old_anon_implies_rp;
}
fn with_ambient_variance(variance: region_variance, f: fn()) {
let old_ambient_variance = self.ambient_variance;
self.ambient_variance = self.add_variance(variance);
f();
self.ambient_variance = old_ambient_variance;
}
}
fn determine_rp_in_item(item: @ast::item,
&&cx: determine_rp_ctxt,
visitor: visit::vt<determine_rp_ctxt>) {
do cx.with(item.id, true) {
visit::visit_item(item, cx, visitor);
}
}
fn determine_rp_in_fn(fk: visit::fn_kind,
decl: ast::fn_decl,
body: ast::blk,
_sp: span,
_id: ast::node_id,
&&cx: determine_rp_ctxt,
visitor: visit::vt<determine_rp_ctxt>) {
do cx.with(cx.item_id, false) {
do cx.with_ambient_variance(rv_contravariant) {
for decl.inputs.each |a| {
(visitor.visit_ty)(a.ty, cx, visitor);
}
}
(visitor.visit_ty)(decl.output, cx, visitor);
(visitor.visit_ty_params)(visit::tps_of_fn(fk), cx, visitor);
(visitor.visit_block)(body, cx, visitor);
}
}
fn determine_rp_in_ty_method(ty_m: ast::ty_method,
&&cx: determine_rp_ctxt,
visitor: visit::vt<determine_rp_ctxt>) {
do cx.with(cx.item_id, false) {
visit::visit_ty_method(ty_m, cx, visitor);
}
}
fn determine_rp_in_ty(ty: @ast::Ty,
&&cx: determine_rp_ctxt,
visitor: visit::vt<determine_rp_ctxt>) {
    // We are only interested in types that will require an item to
    // be region-parameterized. If cx.item_id is zero, then this type
    // is not a member of a type defn nor is it a constituent of an
    // impl etc. So we can ignore it and its components.
if cx.item_id == 0 { return; }
// if this type directly references a region pointer like &r/ty,
// add to the worklist/set. Note that &r/ty is contravariant with
    // respect to &r, because &r/ty can be used wherever a *smaller*
// region is expected (and hence is a supertype of those
// locations)
match ty.node {
ast::ty_rptr(r, _) => {
debug!("referenced rptr type %s",
pprust::ty_to_str(ty, cx.sess.intr()));
if cx.region_is_relevant(r) {
cx.add_rp(cx.item_id, cx.add_variance(rv_contravariant))
}
}
ast::ty_fn(f) => {
debug!("referenced fn type: %s",
pprust::ty_to_str(ty, cx.sess.intr()));
match f.region {
Some(r) => {
if cx.region_is_relevant(r) {
cx.add_rp(cx.item_id,
cx.add_variance(rv_contravariant))
}
}
None => {
if f.proto == ast::ProtoBorrowed && cx.anon_implies_rp {
cx.add_rp(cx.item_id,
cx.add_variance(rv_contravariant));
}
}
}
}
_ => {}
}
// if this references another named type, add the dependency
// to the dep_map. If the type is not defined in this crate,
// then check whether it is region-parameterized and consider
// that as a direct dependency.
match ty.node {
ast::ty_path(path, id) => {
match cx.def_map.find(id) {
Some(ast::def_ty(did)) | Some(ast::def_struct(did)) => {
if did.crate == ast::local_crate {
if cx.opt_region_is_relevant(path.rp) {
cx.add_dep(did.node);
}
} else {
let cstore = cx.sess.cstore;
match csearch::get_region_param(cstore, did) {
None => {}
Some(variance) => {
debug!("reference to external, rp'd type %s",
pprust::ty_to_str(ty, cx.sess.intr()));
if cx.opt_region_is_relevant(path.rp) {
cx.add_rp(cx.item_id, cx.add_variance(variance))
}
}
}
}
}
_ => {}
}
}
_ => {}
}
match ty.node {
ast::ty_box(mt) | ast::ty_uniq(mt) | ast::ty_vec(mt) |
ast::ty_rptr(_, mt) | ast::ty_ptr(mt) => {
visit_mt(mt, cx, visitor);
}
ast::ty_rec(ref fields) => {
for (*fields).each |field| {
visit_mt(field.node.mt, cx, visitor);
}
}
ast::ty_path(path, _) => {
// type parameters are---for now, anyway---always invariant
do cx.with_ambient_variance(rv_invariant) {
for path.types.each |tp| {
(visitor.visit_ty)(*tp, cx, visitor);
}
}
}
ast::ty_fn(f) => {
// fn() binds the & region, so do not consider &T types that
// appear *inside* a fn() type to affect the enclosing item:
do cx.with(cx.item_id, false) {
// parameters are contravariant
do cx.with_ambient_variance(rv_contravariant) {
for f.decl.inputs.each |a| {
(visitor.visit_ty)(a.ty, cx, visitor);
}
}
visit::visit_ty_param_bounds(f.bounds, cx, visitor);
(visitor.visit_ty)(f.decl.output, cx, visitor);
}
}
_ => {
visit::visit_ty(ty, cx, visitor);
}
}
fn visit_mt(mt: ast::mt, &&cx: determine_rp_ctxt,
visitor: visit::vt<determine_rp_ctxt>) {
// mutability is invariant
if mt.mutbl == ast::m_mutbl {
do cx.with_ambient_variance(rv_invariant) {
(visitor.visit_ty)(mt.ty, cx, visitor);
}
} else {
(visitor.visit_ty)(mt.ty, cx, visitor);
}
}
}
fn determine_rp_in_struct_field(cm: @ast::struct_field,
&&cx: determine_rp_ctxt,
visitor: visit::vt<determine_rp_ctxt>) {
match cm.node.kind {
ast::named_field(_, ast::struct_mutable, _) => {
do cx.with_ambient_variance(rv_invariant) {
visit::visit_struct_field(cm, cx, visitor);
}
}
ast::named_field(_, ast::struct_immutable, _) |
ast::unnamed_field => {
visit::visit_struct_field(cm, cx, visitor);
}
}
}
fn determine_rp_in_crate(sess: Session,
ast_map: ast_map::map,
def_map: resolve::DefMap,
crate: @ast::crate) -> region_paramd_items {
let cx = determine_rp_ctxt_(@{sess: sess,
ast_map: ast_map,
def_map: def_map,
region_paramd_items: HashMap(),
dep_map: HashMap(),
worklist: DVec(),
mut item_id: 0,
mut anon_implies_rp: false,
mut ambient_variance: rv_covariant});
// Gather up the base set, worklist and dep_map
let visitor = visit::mk_vt(@{
visit_fn: determine_rp_in_fn,
visit_item: determine_rp_in_item,
visit_ty: determine_rp_in_ty,
visit_ty_method: determine_rp_in_ty_method,
visit_struct_field: determine_rp_in_struct_field,
.. *visit::default_visitor()
});
visit::visit_crate(*crate, cx, visitor);
// Propagate indirect dependencies
//
// Each entry in the worklist is the id of an item C whose region
// parameterization has been updated. So we pull ids off of the
// worklist, find the current variance, and then iterate through
// all of the dependent items (that is, those items that reference
// C). For each dependent item D, we combine the variance of C
// with the ambient variance where the reference occurred and then
// update the region-parameterization of D to reflect the result.
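    // Hypothetical run: suppose item S was just marked rv_covariant and
    // item T references S from a contravariant position. Popping S finds
    // the dep {ambient_variance: rv_contravariant, id: T} and calls
    // add_rp(T, add_variance(rv_contravariant, rv_covariant)), i.e.
    // add_rp(T, rv_contravariant), which may push T in turn.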
while cx.worklist.len() != 0 {
let c_id = cx.worklist.pop();
let c_variance = cx.region_paramd_items.get(c_id);
debug!("popped %d from worklist", c_id);
match cx.dep_map.find(c_id) {
None => {}
Some(deps) => {
for deps.each |dep| {
let v = add_variance(dep.ambient_variance, c_variance);
cx.add_rp(dep.id, v);
}
}
}
}
debug!("%s", {
debug!("Region variance results:");
for cx.region_paramd_items.each |key, value| {
debug!("item %? (%s) is parameterized with variance %?",
key,
ast_map::node_id_to_str(ast_map, key,
sess.parse_sess.interner),
value);
}
"----"
});
// return final set
return cx.region_paramd_items;
} | /// `superscope` and false otherwise. |
_setup.py | import glob
import yaml
import json
def | (path):
# print(f"path: {path}")
stem = path.rsplit(".", 1)[0]
data = yaml.load(open(path, "r", encoding="UTF-8"), Loader=yaml.FullLoader)
print(f"stem: {stem}.json")
json.dump(data, open(f"{stem}.json", "w", encoding="utf-8"), ensure_ascii=False, indent=4)
for path in [f.replace("\\", "/") for f in glob.glob("./configs/*.yml")]:
to_json(path) | to_json |
segment_postings.rs | use crate::common::BitSet;
use crate::common::HasLen;
use crate::common::{BinarySerializable, VInt};
use crate::docset::{DocSet, SkipResult};
use crate::positions::PositionReader;
use crate::postings::compression::{compressed_block_size, AlignedBuffer};
use crate::postings::compression::{BlockDecoder, VIntDecoder, COMPRESSION_BLOCK_SIZE};
use crate::postings::serializer::PostingsSerializer;
use crate::postings::BlockSearcher;
use crate::postings::FreqReadingOption;
use crate::postings::Postings;
use crate::postings::SkipReader;
use crate::postings::USE_SKIP_INFO_LIMIT;
use crate::schema::IndexRecordOption;
use crate::DocId;
use owned_read::OwnedRead;
use std::cmp::Ordering;
use tantivy_fst::Streamer;
struct PositionComputer {
    // Stores the number of positions to skip
    // before reading positions.
    //
    // If zero, positions are already loaded in
    // the positions vec.
position_to_skip: usize,
position_reader: PositionReader,
}
impl PositionComputer {
pub fn new(position_reader: PositionReader) -> PositionComputer {
PositionComputer {
position_to_skip: 0,
position_reader,
}
}
pub fn add_skip(&mut self, num_skip: usize) {
self.position_to_skip += num_skip;
}
// Positions can only be read once.
pub fn positions_with_offset(&mut self, offset: u32, output: &mut [u32]) {
self.position_reader.skip(self.position_to_skip);
self.position_to_skip = 0;
self.position_reader.read(output);
let mut cum = offset;
for output_mut in output.iter_mut() {
cum += *output_mut;
*output_mut = cum;
}
}
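    // Worked example (hypothetical values): with pending deltas
    // [3, 1, 4] read into `output` and offset == 10, the cumulative
    // pass above rewrites `output` to [13, 14, 18].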
}
/// `SegmentPostings` represents the inverted list or postings associated to
/// a term in a `Segment`.
///
/// As we iterate through the `SegmentPostings`, the frequencies are optionally decoded.
/// Positions, on the other hand, are optionally decoded entirely upfront.
pub struct SegmentPostings {
block_cursor: BlockSegmentPostings,
cur: usize,
position_computer: Option<PositionComputer>,
block_searcher: BlockSearcher,
}
impl SegmentPostings {
/// Returns an empty segment postings object
pub fn empty() -> Self {
let empty_block_cursor = BlockSegmentPostings::empty();
SegmentPostings {
block_cursor: empty_block_cursor,
cur: COMPRESSION_BLOCK_SIZE,
position_computer: None,
block_searcher: BlockSearcher::default(),
}
}
/// Creates a segment postings object with the given documents
/// and no frequency encoded.
///
/// This method is mostly useful for unit tests.
///
/// It serializes the doc ids using tantivy's codec
/// and returns a `SegmentPostings` object that embeds a
/// buffer with the serialized data.
pub fn create_from_docs(docs: &[u32]) -> SegmentPostings {
let mut buffer = Vec::new();
{
let mut postings_serializer = PostingsSerializer::new(&mut buffer, false, false);
for &doc in docs {
postings_serializer.write_doc(doc, 1u32);
}
postings_serializer
.close_term(docs.len() as u32)
.expect("In memory Serialization should never fail.");
}
let block_segment_postings = BlockSegmentPostings::from_data(
docs.len() as u32,
OwnedRead::new(buffer),
IndexRecordOption::Basic,
IndexRecordOption::Basic,
);
SegmentPostings::from_block_postings(block_segment_postings, None)
}
}
impl SegmentPostings {
/// Reads a Segment postings from an &[u8]
///
    /// * `len` - number of documents in the posting lists.
/// * `data` - data array. The complete data is not necessarily used.
/// * `freq_handler` - the freq handler is in charge of decoding
/// frequencies and/or positions
pub(crate) fn from_block_postings(
segment_block_postings: BlockSegmentPostings,
positions_stream_opt: Option<PositionReader>,
) -> SegmentPostings |
}
impl DocSet for SegmentPostings {
    // Goes to the next element.
    // `advance` needs to be called a first time before the cursor points to a valid element.
#[inline]
fn advance(&mut self) -> bool {
if self.position_computer.is_some() && self.cur < COMPRESSION_BLOCK_SIZE {
let term_freq = self.term_freq() as usize;
if let Some(position_computer) = self.position_computer.as_mut() {
position_computer.add_skip(term_freq);
}
}
self.cur += 1;
if self.cur >= self.block_cursor.block_len() {
self.cur = 0;
if !self.block_cursor.advance() {
self.cur = COMPRESSION_BLOCK_SIZE;
return false;
}
}
true
}
fn skip_next(&mut self, target: DocId) -> SkipResult {
if !self.advance() {
return SkipResult::End;
}
match self.doc().cmp(&target) {
Ordering::Equal => {
return SkipResult::Reached;
}
Ordering::Greater => {
return SkipResult::OverStep;
}
_ => {
// ...
}
}
// In the following, thanks to the call to advance above,
// we know that the position is not loaded and we need
// to skip every doc_freq we cross.
// skip blocks until one that might contain the target
// check if we need to go to the next block
let mut sum_freqs_skipped: u32 = 0;
if !self
.block_cursor
.docs()
.last()
.map(|doc| *doc >= target)
.unwrap_or(false)
            // there should always be at least one document in the block
            // since advance returned true.
{
// we are not in the right block.
//
// First compute all of the freqs skipped from the current block.
if self.position_computer.is_some() {
sum_freqs_skipped = self.block_cursor.freqs()[self.cur..].iter().sum();
match self.block_cursor.skip_to(target) {
BlockSegmentPostingsSkipResult::Success(block_skip_freqs) => {
sum_freqs_skipped += block_skip_freqs;
}
BlockSegmentPostingsSkipResult::Terminated => {
return SkipResult::End;
}
}
} else if self.block_cursor.skip_to(target)
== BlockSegmentPostingsSkipResult::Terminated
{
// no positions needed. no need to sum freqs.
return SkipResult::End;
}
self.cur = 0;
}
let cur = self.cur;
// we're in the right block now, start with an exponential search
let (output, len) = self.block_cursor.docs_aligned();
let new_cur = self
.block_searcher
.search_in_block(&output, len, cur, target);
if let Some(position_computer) = self.position_computer.as_mut() {
sum_freqs_skipped += self.block_cursor.freqs()[cur..new_cur].iter().sum::<u32>();
position_computer.add_skip(sum_freqs_skipped as usize);
}
self.cur = new_cur;
// `doc` is now the first element >= `target`
let doc = output.0[new_cur];
debug_assert!(doc >= target);
if doc == target {
SkipResult::Reached
} else {
SkipResult::OverStep
}
}
/// Return the current document's `DocId`.
///
/// # Panics
///
    /// Will panic if called without having called advance before.
#[inline]
fn doc(&self) -> DocId {
let docs = self.block_cursor.docs();
debug_assert!(
self.cur < docs.len(),
"Have you forgotten to call `.advance()` at least once before calling `.doc()` ."
);
docs[self.cur]
}
fn size_hint(&self) -> u32 {
self.len() as u32
}
fn append_to_bitset(&mut self, bitset: &mut BitSet) {
// finish the current block
if self.advance() {
for &doc in &self.block_cursor.docs()[self.cur..] {
bitset.insert(doc);
}
// ... iterate through the remaining blocks.
while self.block_cursor.advance() {
for &doc in self.block_cursor.docs() {
bitset.insert(doc);
}
}
}
}
}
impl HasLen for SegmentPostings {
fn len(&self) -> usize {
self.block_cursor.doc_freq()
}
}
impl Postings for SegmentPostings {
/// Returns the frequency associated to the current document.
    /// If the schema is set up so that no frequencies have been encoded,
/// this method should always return 1.
///
/// # Panics
///
    /// Will panic if called without having called advance before.
fn term_freq(&self) -> u32 {
debug_assert!(
// Here we do not use the len of `freqs()`
// because it is actually ok to request for the freq of doc
// even if no frequency were encoded for the field.
//
// In that case we hit the block just as if the frequency had been
// decoded. The block is simply prefilled by the value 1.
self.cur < COMPRESSION_BLOCK_SIZE,
"Have you forgotten to call `.advance()` at least once before calling \
`.term_freq()`."
);
self.block_cursor.freq(self.cur)
}
fn positions_with_offset(&mut self, offset: u32, output: &mut Vec<u32>) {
let term_freq = self.term_freq() as usize;
if let Some(position_comp) = self.position_computer.as_mut() {
output.resize(term_freq, 0u32);
position_comp.positions_with_offset(offset, &mut output[..]);
} else {
output.clear();
}
}
}
/// `BlockSegmentPostings` is a cursor iterating over blocks
/// of documents.
///
/// # Warning
///
/// While it is useful for some very specific high-performance
/// use cases, you should prefer using `SegmentPostings` for most usage.
pub struct BlockSegmentPostings {
doc_decoder: BlockDecoder,
freq_decoder: BlockDecoder,
freq_reading_option: FreqReadingOption,
doc_freq: usize,
doc_offset: DocId,
num_vint_docs: usize,
remaining_data: OwnedRead,
skip_reader: SkipReader,
}
fn split_into_skips_and_postings(
doc_freq: u32,
mut data: OwnedRead,
) -> (Option<OwnedRead>, OwnedRead) {
if doc_freq >= USE_SKIP_INFO_LIMIT {
let skip_len = VInt::deserialize(&mut data).expect("Data corrupted").0 as usize;
let mut postings_data = data.clone();
postings_data.advance(skip_len);
data.clip(skip_len);
(Some(data), postings_data)
} else {
(None, data)
}
}
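// Sketch of the layout split_into_skips_and_postings assumes when
// doc_freq >= USE_SKIP_INFO_LIMIT:
//   [VInt skip_len][skip_len bytes of skip info][postings bytes]
// The original read is clipped to the skip info, while the clone
// advanced past it becomes the postings reader.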
#[derive(Debug, Eq, PartialEq)]
pub enum BlockSegmentPostingsSkipResult {
Terminated,
Success(u32), //< number of term freqs to skip
}
impl BlockSegmentPostings {
pub(crate) fn from_data(
doc_freq: u32,
data: OwnedRead,
record_option: IndexRecordOption,
requested_option: IndexRecordOption,
) -> BlockSegmentPostings {
let freq_reading_option = match (record_option, requested_option) {
(IndexRecordOption::Basic, _) => FreqReadingOption::NoFreq,
(_, IndexRecordOption::Basic) => FreqReadingOption::SkipFreq,
(_, _) => FreqReadingOption::ReadFreq,
};
let (skip_data_opt, postings_data) = split_into_skips_and_postings(doc_freq, data);
let skip_reader = match skip_data_opt {
Some(skip_data) => SkipReader::new(skip_data, record_option),
None => SkipReader::new(OwnedRead::new(&[][..]), record_option),
};
let doc_freq = doc_freq as usize;
let num_vint_docs = doc_freq % COMPRESSION_BLOCK_SIZE;
BlockSegmentPostings {
num_vint_docs,
doc_decoder: BlockDecoder::new(),
freq_decoder: BlockDecoder::with_val(1),
freq_reading_option,
doc_offset: 0,
doc_freq,
remaining_data: postings_data,
skip_reader,
}
}
// Resets the block segment postings on another position
// in the postings file.
//
// This is useful for enumerating through a list of terms,
// and consuming the associated posting lists while avoiding
// reallocating a `BlockSegmentPostings`.
//
// # Warning
//
// This does not reset the positions list.
pub(crate) fn reset(&mut self, doc_freq: u32, postings_data: OwnedRead) {
let (skip_data_opt, postings_data) = split_into_skips_and_postings(doc_freq, postings_data);
let num_vint_docs = (doc_freq as usize) & (COMPRESSION_BLOCK_SIZE - 1);
self.num_vint_docs = num_vint_docs;
self.remaining_data = postings_data;
if let Some(skip_data) = skip_data_opt {
self.skip_reader.reset(skip_data);
} else {
self.skip_reader.reset(OwnedRead::new(&[][..]))
}
self.doc_offset = 0;
self.doc_freq = doc_freq as usize;
}
/// Returns the document frequency associated to this block postings.
///
    /// This `doc_freq` is simply the sum of the lengths of all of the
    /// blocks, and it does not take into account deleted documents.
pub fn doc_freq(&self) -> usize {
self.doc_freq
}
/// Returns the array of docs in the current block.
///
/// Before the first call to `.advance()`, the block
/// returned by `.docs()` is empty.
#[inline]
pub fn docs(&self) -> &[DocId] {
self.doc_decoder.output_array()
}
pub(crate) fn docs_aligned(&self) -> (&AlignedBuffer, usize) {
self.doc_decoder.output_aligned()
}
/// Return the document at index `idx` of the block.
#[inline]
pub fn doc(&self, idx: usize) -> u32 {
self.doc_decoder.output(idx)
}
/// Return the array of `term freq` in the block.
#[inline]
pub fn freqs(&self) -> &[u32] {
self.freq_decoder.output_array()
}
/// Return the frequency at index `idx` of the block.
#[inline]
pub fn freq(&self, idx: usize) -> u32 {
self.freq_decoder.output(idx)
}
/// Returns the length of the current block.
///
/// All blocks have a length of `NUM_DOCS_PER_BLOCK`,
/// except the last block that may have a length
/// of any number between 1 and `NUM_DOCS_PER_BLOCK - 1`
#[inline]
fn block_len(&self) -> usize {
self.doc_decoder.output_len
}
    /// Positions on a block that may contain `doc_id`.
    /// Always advances the current block.
///
/// Returns true if a block that has an element greater or equal to the target is found.
/// Returning true does not guarantee that the smallest element of the block is smaller
/// than the target. It only guarantees that the last element is greater or equal.
///
    /// Returns false iff all of the documents remaining are smaller than
    /// `doc_id`. In that case, all of these documents are consumed.
///
pub fn skip_to(&mut self, target_doc: DocId) -> BlockSegmentPostingsSkipResult {
let mut skip_freqs = 0u32;
while self.skip_reader.advance() {
if self.skip_reader.doc() >= target_doc {
// the last document of the current block is larger
// than the target.
//
// We found our block!
let num_bits = self.skip_reader.doc_num_bits();
let num_consumed_bytes = self.doc_decoder.uncompress_block_sorted(
self.remaining_data.as_ref(),
self.doc_offset,
num_bits,
);
self.remaining_data.advance(num_consumed_bytes);
let tf_num_bits = self.skip_reader.tf_num_bits();
match self.freq_reading_option {
FreqReadingOption::NoFreq => {}
FreqReadingOption::SkipFreq => {
let num_bytes_to_skip = compressed_block_size(tf_num_bits);
self.remaining_data.advance(num_bytes_to_skip);
}
FreqReadingOption::ReadFreq => {
let num_consumed_bytes = self
.freq_decoder
.uncompress_block_unsorted(self.remaining_data.as_ref(), tf_num_bits);
self.remaining_data.advance(num_consumed_bytes);
}
}
self.doc_offset = self.skip_reader.doc();
return BlockSegmentPostingsSkipResult::Success(skip_freqs);
} else {
skip_freqs += self.skip_reader.tf_sum();
let advance_len = self.skip_reader.total_block_len();
self.doc_offset = self.skip_reader.doc();
self.remaining_data.advance(advance_len);
}
}
// we are now on the last, incomplete, variable encoded block.
if self.num_vint_docs > 0 {
let num_compressed_bytes = self.doc_decoder.uncompress_vint_sorted(
self.remaining_data.as_ref(),
self.doc_offset,
self.num_vint_docs,
);
self.remaining_data.advance(num_compressed_bytes);
match self.freq_reading_option {
FreqReadingOption::NoFreq | FreqReadingOption::SkipFreq => {}
FreqReadingOption::ReadFreq => {
self.freq_decoder
.uncompress_vint_unsorted(self.remaining_data.as_ref(), self.num_vint_docs);
}
}
self.num_vint_docs = 0;
return self
.docs()
.last()
.map(|last_doc| {
if *last_doc >= target_doc {
BlockSegmentPostingsSkipResult::Success(skip_freqs)
} else {
BlockSegmentPostingsSkipResult::Terminated
}
})
.unwrap_or(BlockSegmentPostingsSkipResult::Terminated);
}
BlockSegmentPostingsSkipResult::Terminated
}
/// Advance to the next block.
///
    /// Returns false iff there are no remaining blocks.
pub fn advance(&mut self) -> bool {
if self.skip_reader.advance() {
let num_bits = self.skip_reader.doc_num_bits();
let num_consumed_bytes = self.doc_decoder.uncompress_block_sorted(
self.remaining_data.as_ref(),
self.doc_offset,
num_bits,
);
self.remaining_data.advance(num_consumed_bytes);
let tf_num_bits = self.skip_reader.tf_num_bits();
match self.freq_reading_option {
FreqReadingOption::NoFreq => {}
FreqReadingOption::SkipFreq => {
let num_bytes_to_skip = compressed_block_size(tf_num_bits);
self.remaining_data.advance(num_bytes_to_skip);
}
FreqReadingOption::ReadFreq => {
let num_consumed_bytes = self
.freq_decoder
.uncompress_block_unsorted(self.remaining_data.as_ref(), tf_num_bits);
self.remaining_data.advance(num_consumed_bytes);
}
}
// it will be used as the next offset.
self.doc_offset = self.doc_decoder.output(COMPRESSION_BLOCK_SIZE - 1);
true
} else if self.num_vint_docs > 0 {
let num_compressed_bytes = self.doc_decoder.uncompress_vint_sorted(
self.remaining_data.as_ref(),
self.doc_offset,
self.num_vint_docs,
);
self.remaining_data.advance(num_compressed_bytes);
match self.freq_reading_option {
FreqReadingOption::NoFreq | FreqReadingOption::SkipFreq => {}
FreqReadingOption::ReadFreq => {
self.freq_decoder
.uncompress_vint_unsorted(self.remaining_data.as_ref(), self.num_vint_docs);
}
}
self.num_vint_docs = 0;
true
} else {
false
}
}
/// Returns an empty segment postings object
pub fn empty() -> BlockSegmentPostings {
BlockSegmentPostings {
num_vint_docs: 0,
doc_decoder: BlockDecoder::new(),
freq_decoder: BlockDecoder::with_val(1),
freq_reading_option: FreqReadingOption::NoFreq,
doc_offset: 0,
doc_freq: 0,
remaining_data: OwnedRead::new(vec![]),
skip_reader: SkipReader::new(OwnedRead::new(vec![]), IndexRecordOption::Basic),
}
}
}
impl<'b> Streamer<'b> for BlockSegmentPostings {
type Item = &'b [DocId];
fn next(&'b mut self) -> Option<&'b [DocId]> {
if self.advance() {
Some(self.docs())
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use super::BlockSegmentPostings;
use super::BlockSegmentPostingsSkipResult;
use super::SegmentPostings;
use crate::common::HasLen;
use crate::core::Index;
use crate::docset::DocSet;
use crate::postings::postings::Postings;
use crate::schema::IndexRecordOption;
use crate::schema::Schema;
use crate::schema::Term;
use crate::schema::INDEXED;
use crate::DocId;
use crate::SkipResult;
use tantivy_fst::Streamer;
#[test]
fn test_empty_segment_postings() {
let mut postings = SegmentPostings::empty();
assert!(!postings.advance());
assert!(!postings.advance());
assert_eq!(postings.len(), 0);
}
#[test]
#[should_panic(expected = "Have you forgotten to call `.advance()`")]
fn test_panic_if_doc_called_before_advance() {
SegmentPostings::empty().doc();
}
#[test]
#[should_panic(expected = "Have you forgotten to call `.advance()`")]
fn test_panic_if_freq_called_before_advance() {
SegmentPostings::empty().term_freq();
}
#[test]
fn test_empty_block_segment_postings() {
let mut postings = BlockSegmentPostings::empty();
assert!(!postings.advance());
assert_eq!(postings.doc_freq(), 0);
}
#[test]
fn test_block_segment_postings() {
let mut block_segments = build_block_postings(&(0..100_000).collect::<Vec<u32>>());
let mut offset: u32 = 0u32;
// checking that the block before calling advance is empty
assert!(block_segments.docs().is_empty());
// checking that the `doc_freq` is correct
assert_eq!(block_segments.doc_freq(), 100_000);
while let Some(block) = block_segments.next() {
for (i, doc) in block.iter().cloned().enumerate() {
assert_eq!(offset + (i as u32), doc);
}
offset += block.len() as u32;
}
}
#[test]
fn test_skip_right_at_new_block() {
let mut doc_ids = (0..128).collect::<Vec<u32>>();
doc_ids.push(129);
doc_ids.push(130);
{
let block_segments = build_block_postings(&doc_ids);
let mut docset = SegmentPostings::from_block_postings(block_segments, None);
assert_eq!(docset.skip_next(128), SkipResult::OverStep);
assert_eq!(docset.doc(), 129);
assert!(docset.advance());
assert_eq!(docset.doc(), 130);
assert!(!docset.advance());
}
{
let block_segments = build_block_postings(&doc_ids);
let mut docset = SegmentPostings::from_block_postings(block_segments, None);
assert_eq!(docset.skip_next(129), SkipResult::Reached);
assert_eq!(docset.doc(), 129);
assert!(docset.advance());
assert_eq!(docset.doc(), 130);
assert!(!docset.advance());
}
{
let block_segments = build_block_postings(&doc_ids);
let mut docset = SegmentPostings::from_block_postings(block_segments, None);
assert_eq!(docset.skip_next(131), SkipResult::End);
}
}
fn build_block_postings(docs: &[DocId]) -> BlockSegmentPostings {
let mut schema_builder = Schema::builder();
let int_field = schema_builder.add_u64_field("id", INDEXED);
let schema = schema_builder.build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap();
let mut last_doc = 0u32;
for &doc in docs {
for _ in last_doc..doc {
index_writer.add_document(doc!(int_field=>1u64));
}
index_writer.add_document(doc!(int_field=>0u64));
last_doc = doc + 1;
}
index_writer.commit().unwrap();
let searcher = index.reader().unwrap().searcher();
let segment_reader = searcher.segment_reader(0);
let inverted_index = segment_reader.inverted_index(int_field);
let term = Term::from_field_u64(int_field, 0u64);
let term_info = inverted_index.get_term_info(&term).unwrap();
inverted_index.read_block_postings_from_terminfo(&term_info, IndexRecordOption::Basic)
}
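// A small extra check, sketched along the same lines as the tests above: draining
// the postings through the `Streamer` impl must yield every indexed doc id exactly
// once, mirroring the block loop in `test_block_segment_postings`.
#[test]
fn test_streamer_yields_all_docs() {
let expected: Vec<u32> = (0..300).collect();
let mut postings = build_block_postings(&expected);
let mut collected: Vec<u32> = Vec::new();
while let Some(block) = postings.next() {
collected.extend_from_slice(block);
}
assert_eq!(collected, expected);
}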
#[test]
fn test_block_segment_postings_skip() {
for i in 0..4 {
let mut block_postings = build_block_postings(&[3]);
assert_eq!(
block_postings.skip_to(i),
BlockSegmentPostingsSkipResult::Success(0u32)
);
assert_eq!(
block_postings.skip_to(i),
BlockSegmentPostingsSkipResult::Terminated
);
}
let mut block_postings = build_block_postings(&[3]);
assert_eq!(
block_postings.skip_to(4u32),
BlockSegmentPostingsSkipResult::Terminated
);
}
#[test]
fn test_block_segment_postings_skip2() {
let mut docs = vec![0];
for i in 0..1300 {
docs.push((i * i / 100) + i);
}
let mut block_postings = build_block_postings(&docs[..]);
for i in vec![0, 424, 10000] {
assert_eq!(
block_postings.skip_to(i),
BlockSegmentPostingsSkipResult::Success(0u32)
);
let docs = block_postings.docs();
assert!(docs[0] <= i);
assert!(docs.last().cloned().unwrap_or(0u32) >= i);
}
assert_eq!(
block_postings.skip_to(100_000),
BlockSegmentPostingsSkipResult::Terminated
);
assert_eq!(
block_postings.skip_to(101_000),
BlockSegmentPostingsSkipResult::Terminated
);
}
#[test]
fn test_reset_block_segment_postings() {
let mut schema_builder = Schema::builder();
let int_field = schema_builder.add_u64_field("id", INDEXED);
let schema = schema_builder.build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap();
// create two postings lists: one containing even numbers,
// the other containing odd numbers
for i in 0..6 {
let doc = doc!(int_field=> (i % 2) as u64);
index_writer.add_document(doc);
}
index_writer.commit().unwrap();
let searcher = index.reader().unwrap().searcher();
let segment_reader = searcher.segment_reader(0);
let mut block_segments;
{
let term = Term::from_field_u64(int_field, 0u64);
let inverted_index = segment_reader.inverted_index(int_field);
let term_info = inverted_index.get_term_info(&term).unwrap();
block_segments = inverted_index
.read_block_postings_from_terminfo(&term_info, IndexRecordOption::Basic);
}
assert!(block_segments.advance());
assert_eq!(block_segments.docs(), &[0, 2, 4]);
{
let term = Term::from_field_u64(int_field, 1u64);
let inverted_index = segment_reader.inverted_index(int_field);
let term_info = inverted_index.get_term_info(&term).unwrap();
inverted_index.reset_block_postings_from_terminfo(&term_info, &mut block_segments);
}
assert!(block_segments.advance());
assert_eq!(block_segments.docs(), &[1, 3, 5]);
}
}
| {
SegmentPostings {
block_cursor: segment_block_postings,
cur: COMPRESSION_BLOCK_SIZE, // cursor within the block
position_computer: positions_stream_opt.map(PositionComputer::new),
block_searcher: BlockSearcher::default(),
}
} |
message.go | package models
import (
"github.com/TopHatCroat/CryptoChat-server/database"
"github.com/TopHatCroat/CryptoChat-server/protocol"
)
type Message struct {
ID int64
SenderID int64
RecieverID int64
Content string
KeyHash string
CreatedAt int64
}
func (msg *Message) Save() error {
db := database.GetDatabase()
preparedStatement, err := db.Prepare("INSERT OR REPLACE INTO messages (sender_id, reciever_id," +
" content, key_hash, created_at) VALUES(?,?,?,?,?)")
if err != nil {
return err
}
defer preparedStatement.Close()
_, err = preparedStatement.Exec(msg.SenderID, msg.RecieverID, msg.Content, msg.KeyHash, msg.CreatedAt)
if err != nil {
return err
}
return nil
}
func (msg *Message) Delete() error {
db := database.GetDatabase()
preparedStatement, err := db.Prepare("DELETE FROM messages WHERE id = ?")
if err != nil {
return err
}
defer preparedStatement.Close()
_, err = preparedStatement.Exec(msg.ID)
if err != nil {
return err
}
return nil
}
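// Minimal usage sketch (a hypothetical helper, not part of the original file):
// it shows how a caller would populate and persist a Message. `createdAt` is a
// Unix timestamp passed in by the caller to avoid pulling in extra imports.
func SaveNewMessage(senderID, receiverID, createdAt int64, content, keyHash string) error {
msg := Message{
SenderID: senderID,
RecieverID: receiverID, // field spelling follows the existing struct definition
Content: content,
KeyHash: keyHash,
CreatedAt: createdAt,
}
return msg.Save()
}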
func GetNewMessagesForUser(user User, timestamp int64) (messages []protocol.MessageData, err error) | {
db := database.GetDatabase()
preparedStatement, err := db.Prepare("SELECT users.username, messages.content, messages.key_hash, messages.created_at " +
"FROM messages JOIN users on messages.sender_id = users.id " +
"WHERE messages.reciever_id = ? AND messages.created_at > ?")
if err != nil {
return messages, err
}
defer preparedStatement.Close()
rows, err := preparedStatement.Query(user.ID, timestamp)
if err != nil {
return messages, err
}
defer rows.Close()
for rows.Next() {
var message protocol.MessageData
if err = rows.Scan(&message.Sender, &message.Content, &message.KeyHash, &message.Timestamp); err != nil {
return messages, err
}
messages = append(messages, message)
}
if rows.Err() != nil {
return messages, rows.Err()
}
return messages, nil
} |
|
preprocessors.py | # -*- coding: utf-8 -*-
"""
Statement pre-processors.
"""
def | (chatbot, statement):
"""
Remove any consecutive whitespace characters from the statement text.
"""
import re
# Replace linebreaks and tabs with spaces
statement.text = statement.text.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ')
# Remove any leading or trailing whitespace
statement.text = statement.text.strip()
# Remove consecutive spaces
statement.text = re.sub(' +', ' ', statement.text)
return statement
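# Usage sketch (assumes the function name given by this row's `middle` column,
# `clean_whitespace`, and a minimal stand-in for chatterbot's Statement, which
# only needs a `.text` attribute here):
#
# class FakeStatement:
#     def __init__(self, text):
#         self.text = text
#
# assert clean_whitespace(None, FakeStatement(" a\tb \n c ")).text == "a b c"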
def unescape_html(chatbot, statement):
"""
Convert escaped html characters into unescaped html characters.
For example: "<b>" becomes "<b>".
"""
import sys
# Replace HTML escape characters
if sys.version_info[0] < 3:
from HTMLParser import HTMLParser
html = HTMLParser()
else:
import html
statement.text = html.unescape(statement.text)
return statement
def convert_to_ascii(chatbot, statement):
"""
Converts unicode characters to ASCII character equivalents.
For example: "på fédéral" becomes "pa federal".
"""
import unicodedata
import sys
# Normalize unicode characters
if sys.version_info[0] < 3:
statement.text = unicode(statement.text) # NOQA
text = unicodedata.normalize('NFKD', statement.text)
text = text.encode('ascii', 'ignore').decode('utf-8')
statement.text = str(text)
return statement
| clean_whitespace |
Aws_json1_1.ts | import {
BatchCreatePartitionCommandInput,
BatchCreatePartitionCommandOutput,
} from "../commands/BatchCreatePartitionCommand.ts";
import {
BatchDeleteConnectionCommandInput,
BatchDeleteConnectionCommandOutput,
} from "../commands/BatchDeleteConnectionCommand.ts";
import {
BatchDeletePartitionCommandInput,
BatchDeletePartitionCommandOutput,
} from "../commands/BatchDeletePartitionCommand.ts";
import { BatchDeleteTableCommandInput, BatchDeleteTableCommandOutput } from "../commands/BatchDeleteTableCommand.ts";
import {
BatchDeleteTableVersionCommandInput,
BatchDeleteTableVersionCommandOutput,
} from "../commands/BatchDeleteTableVersionCommand.ts";
import { BatchGetBlueprintsCommandInput, BatchGetBlueprintsCommandOutput } from "../commands/BatchGetBlueprintsCommand.ts";
import { BatchGetCrawlersCommandInput, BatchGetCrawlersCommandOutput } from "../commands/BatchGetCrawlersCommand.ts";
import {
BatchGetDevEndpointsCommandInput,
BatchGetDevEndpointsCommandOutput,
} from "../commands/BatchGetDevEndpointsCommand.ts";
import { BatchGetJobsCommandInput, BatchGetJobsCommandOutput } from "../commands/BatchGetJobsCommand.ts";
import { BatchGetPartitionCommandInput, BatchGetPartitionCommandOutput } from "../commands/BatchGetPartitionCommand.ts";
import { BatchGetTriggersCommandInput, BatchGetTriggersCommandOutput } from "../commands/BatchGetTriggersCommand.ts";
import { BatchGetWorkflowsCommandInput, BatchGetWorkflowsCommandOutput } from "../commands/BatchGetWorkflowsCommand.ts";
import { BatchStopJobRunCommandInput, BatchStopJobRunCommandOutput } from "../commands/BatchStopJobRunCommand.ts";
import {
BatchUpdatePartitionCommandInput,
BatchUpdatePartitionCommandOutput,
} from "../commands/BatchUpdatePartitionCommand.ts";
import { CancelMLTaskRunCommandInput, CancelMLTaskRunCommandOutput } from "../commands/CancelMLTaskRunCommand.ts";
import {
CheckSchemaVersionValidityCommandInput,
CheckSchemaVersionValidityCommandOutput,
} from "../commands/CheckSchemaVersionValidityCommand.ts";
import { CreateBlueprintCommandInput, CreateBlueprintCommandOutput } from "../commands/CreateBlueprintCommand.ts";
import { CreateClassifierCommandInput, CreateClassifierCommandOutput } from "../commands/CreateClassifierCommand.ts";
import { CreateConnectionCommandInput, CreateConnectionCommandOutput } from "../commands/CreateConnectionCommand.ts";
import { CreateCrawlerCommandInput, CreateCrawlerCommandOutput } from "../commands/CreateCrawlerCommand.ts";
import { CreateDatabaseCommandInput, CreateDatabaseCommandOutput } from "../commands/CreateDatabaseCommand.ts";
import { CreateDevEndpointCommandInput, CreateDevEndpointCommandOutput } from "../commands/CreateDevEndpointCommand.ts";
import { CreateJobCommandInput, CreateJobCommandOutput } from "../commands/CreateJobCommand.ts";
import { CreateMLTransformCommandInput, CreateMLTransformCommandOutput } from "../commands/CreateMLTransformCommand.ts";
import { CreatePartitionCommandInput, CreatePartitionCommandOutput } from "../commands/CreatePartitionCommand.ts";
import {
CreatePartitionIndexCommandInput,
CreatePartitionIndexCommandOutput,
} from "../commands/CreatePartitionIndexCommand.ts";
import { CreateRegistryCommandInput, CreateRegistryCommandOutput } from "../commands/CreateRegistryCommand.ts";
import { CreateSchemaCommandInput, CreateSchemaCommandOutput } from "../commands/CreateSchemaCommand.ts";
import { CreateScriptCommandInput, CreateScriptCommandOutput } from "../commands/CreateScriptCommand.ts";
import {
CreateSecurityConfigurationCommandInput,
CreateSecurityConfigurationCommandOutput,
} from "../commands/CreateSecurityConfigurationCommand.ts";
import { CreateTableCommandInput, CreateTableCommandOutput } from "../commands/CreateTableCommand.ts";
import { CreateTriggerCommandInput, CreateTriggerCommandOutput } from "../commands/CreateTriggerCommand.ts";
import {
CreateUserDefinedFunctionCommandInput,
CreateUserDefinedFunctionCommandOutput,
} from "../commands/CreateUserDefinedFunctionCommand.ts";
import { CreateWorkflowCommandInput, CreateWorkflowCommandOutput } from "../commands/CreateWorkflowCommand.ts";
import { DeleteBlueprintCommandInput, DeleteBlueprintCommandOutput } from "../commands/DeleteBlueprintCommand.ts";
import { DeleteClassifierCommandInput, DeleteClassifierCommandOutput } from "../commands/DeleteClassifierCommand.ts";
import {
DeleteColumnStatisticsForPartitionCommandInput,
DeleteColumnStatisticsForPartitionCommandOutput,
} from "../commands/DeleteColumnStatisticsForPartitionCommand.ts";
import {
DeleteColumnStatisticsForTableCommandInput,
DeleteColumnStatisticsForTableCommandOutput,
} from "../commands/DeleteColumnStatisticsForTableCommand.ts";
import { DeleteConnectionCommandInput, DeleteConnectionCommandOutput } from "../commands/DeleteConnectionCommand.ts";
import { DeleteCrawlerCommandInput, DeleteCrawlerCommandOutput } from "../commands/DeleteCrawlerCommand.ts";
import { DeleteDatabaseCommandInput, DeleteDatabaseCommandOutput } from "../commands/DeleteDatabaseCommand.ts";
import { DeleteDevEndpointCommandInput, DeleteDevEndpointCommandOutput } from "../commands/DeleteDevEndpointCommand.ts";
import { DeleteJobCommandInput, DeleteJobCommandOutput } from "../commands/DeleteJobCommand.ts";
import { DeleteMLTransformCommandInput, DeleteMLTransformCommandOutput } from "../commands/DeleteMLTransformCommand.ts";
import { DeletePartitionCommandInput, DeletePartitionCommandOutput } from "../commands/DeletePartitionCommand.ts";
import {
DeletePartitionIndexCommandInput,
DeletePartitionIndexCommandOutput,
} from "../commands/DeletePartitionIndexCommand.ts";
import { DeleteRegistryCommandInput, DeleteRegistryCommandOutput } from "../commands/DeleteRegistryCommand.ts";
import {
DeleteResourcePolicyCommandInput,
DeleteResourcePolicyCommandOutput,
} from "../commands/DeleteResourcePolicyCommand.ts";
import { DeleteSchemaCommandInput, DeleteSchemaCommandOutput } from "../commands/DeleteSchemaCommand.ts";
import {
DeleteSchemaVersionsCommandInput,
DeleteSchemaVersionsCommandOutput,
} from "../commands/DeleteSchemaVersionsCommand.ts";
import {
DeleteSecurityConfigurationCommandInput,
DeleteSecurityConfigurationCommandOutput,
} from "../commands/DeleteSecurityConfigurationCommand.ts";
import { DeleteTableCommandInput, DeleteTableCommandOutput } from "../commands/DeleteTableCommand.ts";
import { DeleteTableVersionCommandInput, DeleteTableVersionCommandOutput } from "../commands/DeleteTableVersionCommand.ts";
import { DeleteTriggerCommandInput, DeleteTriggerCommandOutput } from "../commands/DeleteTriggerCommand.ts";
import {
DeleteUserDefinedFunctionCommandInput,
DeleteUserDefinedFunctionCommandOutput,
} from "../commands/DeleteUserDefinedFunctionCommand.ts";
import { DeleteWorkflowCommandInput, DeleteWorkflowCommandOutput } from "../commands/DeleteWorkflowCommand.ts";
import { GetBlueprintCommandInput, GetBlueprintCommandOutput } from "../commands/GetBlueprintCommand.ts";
import { GetBlueprintRunCommandInput, GetBlueprintRunCommandOutput } from "../commands/GetBlueprintRunCommand.ts";
import { GetBlueprintRunsCommandInput, GetBlueprintRunsCommandOutput } from "../commands/GetBlueprintRunsCommand.ts";
import {
GetCatalogImportStatusCommandInput,
GetCatalogImportStatusCommandOutput,
} from "../commands/GetCatalogImportStatusCommand.ts";
import { GetClassifierCommandInput, GetClassifierCommandOutput } from "../commands/GetClassifierCommand.ts";
import { GetClassifiersCommandInput, GetClassifiersCommandOutput } from "../commands/GetClassifiersCommand.ts";
import {
GetColumnStatisticsForPartitionCommandInput,
GetColumnStatisticsForPartitionCommandOutput,
} from "../commands/GetColumnStatisticsForPartitionCommand.ts";
import {
GetColumnStatisticsForTableCommandInput,
GetColumnStatisticsForTableCommandOutput,
} from "../commands/GetColumnStatisticsForTableCommand.ts";
import { GetConnectionCommandInput, GetConnectionCommandOutput } from "../commands/GetConnectionCommand.ts";
import { GetConnectionsCommandInput, GetConnectionsCommandOutput } from "../commands/GetConnectionsCommand.ts";
import { GetCrawlerCommandInput, GetCrawlerCommandOutput } from "../commands/GetCrawlerCommand.ts";
import { GetCrawlerMetricsCommandInput, GetCrawlerMetricsCommandOutput } from "../commands/GetCrawlerMetricsCommand.ts";
import { GetCrawlersCommandInput, GetCrawlersCommandOutput } from "../commands/GetCrawlersCommand.ts";
import {
GetDataCatalogEncryptionSettingsCommandInput,
GetDataCatalogEncryptionSettingsCommandOutput,
} from "../commands/GetDataCatalogEncryptionSettingsCommand.ts";
import { GetDatabaseCommandInput, GetDatabaseCommandOutput } from "../commands/GetDatabaseCommand.ts";
import { GetDatabasesCommandInput, GetDatabasesCommandOutput } from "../commands/GetDatabasesCommand.ts";
import { GetDataflowGraphCommandInput, GetDataflowGraphCommandOutput } from "../commands/GetDataflowGraphCommand.ts";
import { GetDevEndpointCommandInput, GetDevEndpointCommandOutput } from "../commands/GetDevEndpointCommand.ts";
import { GetDevEndpointsCommandInput, GetDevEndpointsCommandOutput } from "../commands/GetDevEndpointsCommand.ts";
import { GetJobBookmarkCommandInput, GetJobBookmarkCommandOutput } from "../commands/GetJobBookmarkCommand.ts";
import { GetJobCommandInput, GetJobCommandOutput } from "../commands/GetJobCommand.ts";
import { GetJobRunCommandInput, GetJobRunCommandOutput } from "../commands/GetJobRunCommand.ts";
import { GetJobRunsCommandInput, GetJobRunsCommandOutput } from "../commands/GetJobRunsCommand.ts";
import { GetJobsCommandInput, GetJobsCommandOutput } from "../commands/GetJobsCommand.ts";
import { GetMLTaskRunCommandInput, GetMLTaskRunCommandOutput } from "../commands/GetMLTaskRunCommand.ts";
import { GetMLTaskRunsCommandInput, GetMLTaskRunsCommandOutput } from "../commands/GetMLTaskRunsCommand.ts";
import { GetMLTransformCommandInput, GetMLTransformCommandOutput } from "../commands/GetMLTransformCommand.ts";
import { GetMLTransformsCommandInput, GetMLTransformsCommandOutput } from "../commands/GetMLTransformsCommand.ts";
import { GetMappingCommandInput, GetMappingCommandOutput } from "../commands/GetMappingCommand.ts";
import { GetPartitionCommandInput, GetPartitionCommandOutput } from "../commands/GetPartitionCommand.ts";
import {
GetPartitionIndexesCommandInput,
GetPartitionIndexesCommandOutput,
} from "../commands/GetPartitionIndexesCommand.ts";
import { GetPartitionsCommandInput, GetPartitionsCommandOutput } from "../commands/GetPartitionsCommand.ts";
import { GetPlanCommandInput, GetPlanCommandOutput } from "../commands/GetPlanCommand.ts";
import { GetRegistryCommandInput, GetRegistryCommandOutput } from "../commands/GetRegistryCommand.ts";
import {
GetResourcePoliciesCommandInput,
GetResourcePoliciesCommandOutput,
} from "../commands/GetResourcePoliciesCommand.ts";
import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "../commands/GetResourcePolicyCommand.ts";
import {
GetSchemaByDefinitionCommandInput,
GetSchemaByDefinitionCommandOutput,
} from "../commands/GetSchemaByDefinitionCommand.ts";
import { GetSchemaCommandInput, GetSchemaCommandOutput } from "../commands/GetSchemaCommand.ts";
import { GetSchemaVersionCommandInput, GetSchemaVersionCommandOutput } from "../commands/GetSchemaVersionCommand.ts";
import {
GetSchemaVersionsDiffCommandInput,
GetSchemaVersionsDiffCommandOutput,
} from "../commands/GetSchemaVersionsDiffCommand.ts";
import {
GetSecurityConfigurationCommandInput,
GetSecurityConfigurationCommandOutput,
} from "../commands/GetSecurityConfigurationCommand.ts";
import {
GetSecurityConfigurationsCommandInput,
GetSecurityConfigurationsCommandOutput,
} from "../commands/GetSecurityConfigurationsCommand.ts";
import { GetTableCommandInput, GetTableCommandOutput } from "../commands/GetTableCommand.ts";
import { GetTableVersionCommandInput, GetTableVersionCommandOutput } from "../commands/GetTableVersionCommand.ts";
import { GetTableVersionsCommandInput, GetTableVersionsCommandOutput } from "../commands/GetTableVersionsCommand.ts";
import { GetTablesCommandInput, GetTablesCommandOutput } from "../commands/GetTablesCommand.ts";
import { GetTagsCommandInput, GetTagsCommandOutput } from "../commands/GetTagsCommand.ts";
import { GetTriggerCommandInput, GetTriggerCommandOutput } from "../commands/GetTriggerCommand.ts";
import { GetTriggersCommandInput, GetTriggersCommandOutput } from "../commands/GetTriggersCommand.ts";
import {
GetUserDefinedFunctionCommandInput,
GetUserDefinedFunctionCommandOutput,
} from "../commands/GetUserDefinedFunctionCommand.ts";
import {
GetUserDefinedFunctionsCommandInput,
GetUserDefinedFunctionsCommandOutput,
} from "../commands/GetUserDefinedFunctionsCommand.ts";
import { GetWorkflowCommandInput, GetWorkflowCommandOutput } from "../commands/GetWorkflowCommand.ts";
import { GetWorkflowRunCommandInput, GetWorkflowRunCommandOutput } from "../commands/GetWorkflowRunCommand.ts";
import {
GetWorkflowRunPropertiesCommandInput,
GetWorkflowRunPropertiesCommandOutput,
} from "../commands/GetWorkflowRunPropertiesCommand.ts";
import { GetWorkflowRunsCommandInput, GetWorkflowRunsCommandOutput } from "../commands/GetWorkflowRunsCommand.ts";
import {
ImportCatalogToGlueCommandInput,
ImportCatalogToGlueCommandOutput,
} from "../commands/ImportCatalogToGlueCommand.ts";
import { ListBlueprintsCommandInput, ListBlueprintsCommandOutput } from "../commands/ListBlueprintsCommand.ts";
import { ListCrawlersCommandInput, ListCrawlersCommandOutput } from "../commands/ListCrawlersCommand.ts";
import { ListDevEndpointsCommandInput, ListDevEndpointsCommandOutput } from "../commands/ListDevEndpointsCommand.ts";
import { ListJobsCommandInput, ListJobsCommandOutput } from "../commands/ListJobsCommand.ts";
import { ListMLTransformsCommandInput, ListMLTransformsCommandOutput } from "../commands/ListMLTransformsCommand.ts";
import { ListRegistriesCommandInput, ListRegistriesCommandOutput } from "../commands/ListRegistriesCommand.ts";
import { ListSchemaVersionsCommandInput, ListSchemaVersionsCommandOutput } from "../commands/ListSchemaVersionsCommand.ts";
import { ListSchemasCommandInput, ListSchemasCommandOutput } from "../commands/ListSchemasCommand.ts";
import { ListTriggersCommandInput, ListTriggersCommandOutput } from "../commands/ListTriggersCommand.ts";
import { ListWorkflowsCommandInput, ListWorkflowsCommandOutput } from "../commands/ListWorkflowsCommand.ts";
import {
PutDataCatalogEncryptionSettingsCommandInput,
PutDataCatalogEncryptionSettingsCommandOutput,
} from "../commands/PutDataCatalogEncryptionSettingsCommand.ts";
import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "../commands/PutResourcePolicyCommand.ts";
import {
PutSchemaVersionMetadataCommandInput,
PutSchemaVersionMetadataCommandOutput,
} from "../commands/PutSchemaVersionMetadataCommand.ts";
import {
PutWorkflowRunPropertiesCommandInput,
PutWorkflowRunPropertiesCommandOutput,
} from "../commands/PutWorkflowRunPropertiesCommand.ts";
import {
QuerySchemaVersionMetadataCommandInput,
QuerySchemaVersionMetadataCommandOutput,
} from "../commands/QuerySchemaVersionMetadataCommand.ts";
import {
RegisterSchemaVersionCommandInput,
RegisterSchemaVersionCommandOutput,
} from "../commands/RegisterSchemaVersionCommand.ts";
import {
RemoveSchemaVersionMetadataCommandInput,
RemoveSchemaVersionMetadataCommandOutput,
} from "../commands/RemoveSchemaVersionMetadataCommand.ts";
import { ResetJobBookmarkCommandInput, ResetJobBookmarkCommandOutput } from "../commands/ResetJobBookmarkCommand.ts";
import { ResumeWorkflowRunCommandInput, ResumeWorkflowRunCommandOutput } from "../commands/ResumeWorkflowRunCommand.ts";
import { SearchTablesCommandInput, SearchTablesCommandOutput } from "../commands/SearchTablesCommand.ts";
import { StartBlueprintRunCommandInput, StartBlueprintRunCommandOutput } from "../commands/StartBlueprintRunCommand.ts";
import { StartCrawlerCommandInput, StartCrawlerCommandOutput } from "../commands/StartCrawlerCommand.ts";
import {
StartCrawlerScheduleCommandInput,
StartCrawlerScheduleCommandOutput,
} from "../commands/StartCrawlerScheduleCommand.ts";
import {
StartExportLabelsTaskRunCommandInput,
StartExportLabelsTaskRunCommandOutput,
} from "../commands/StartExportLabelsTaskRunCommand.ts";
import {
StartImportLabelsTaskRunCommandInput,
StartImportLabelsTaskRunCommandOutput,
} from "../commands/StartImportLabelsTaskRunCommand.ts";
import { StartJobRunCommandInput, StartJobRunCommandOutput } from "../commands/StartJobRunCommand.ts";
import {
StartMLEvaluationTaskRunCommandInput,
StartMLEvaluationTaskRunCommandOutput,
} from "../commands/StartMLEvaluationTaskRunCommand.ts";
import {
StartMLLabelingSetGenerationTaskRunCommandInput,
StartMLLabelingSetGenerationTaskRunCommandOutput,
} from "../commands/StartMLLabelingSetGenerationTaskRunCommand.ts";
import { StartTriggerCommandInput, StartTriggerCommandOutput } from "../commands/StartTriggerCommand.ts";
import { StartWorkflowRunCommandInput, StartWorkflowRunCommandOutput } from "../commands/StartWorkflowRunCommand.ts";
import { StopCrawlerCommandInput, StopCrawlerCommandOutput } from "../commands/StopCrawlerCommand.ts";
import {
StopCrawlerScheduleCommandInput,
StopCrawlerScheduleCommandOutput,
} from "../commands/StopCrawlerScheduleCommand.ts";
import { StopTriggerCommandInput, StopTriggerCommandOutput } from "../commands/StopTriggerCommand.ts";
import { StopWorkflowRunCommandInput, StopWorkflowRunCommandOutput } from "../commands/StopWorkflowRunCommand.ts";
import { TagResourceCommandInput, TagResourceCommandOutput } from "../commands/TagResourceCommand.ts";
import { UntagResourceCommandInput, UntagResourceCommandOutput } from "../commands/UntagResourceCommand.ts";
import { UpdateBlueprintCommandInput, UpdateBlueprintCommandOutput } from "../commands/UpdateBlueprintCommand.ts";
import { UpdateClassifierCommandInput, UpdateClassifierCommandOutput } from "../commands/UpdateClassifierCommand.ts";
import {
UpdateColumnStatisticsForPartitionCommandInput,
UpdateColumnStatisticsForPartitionCommandOutput,
} from "../commands/UpdateColumnStatisticsForPartitionCommand.ts";
import {
UpdateColumnStatisticsForTableCommandInput,
UpdateColumnStatisticsForTableCommandOutput,
} from "../commands/UpdateColumnStatisticsForTableCommand.ts";
import { UpdateConnectionCommandInput, UpdateConnectionCommandOutput } from "../commands/UpdateConnectionCommand.ts";
import { UpdateCrawlerCommandInput, UpdateCrawlerCommandOutput } from "../commands/UpdateCrawlerCommand.ts";
import {
UpdateCrawlerScheduleCommandInput,
UpdateCrawlerScheduleCommandOutput,
} from "../commands/UpdateCrawlerScheduleCommand.ts";
import { UpdateDatabaseCommandInput, UpdateDatabaseCommandOutput } from "../commands/UpdateDatabaseCommand.ts";
import { UpdateDevEndpointCommandInput, UpdateDevEndpointCommandOutput } from "../commands/UpdateDevEndpointCommand.ts";
import { UpdateJobCommandInput, UpdateJobCommandOutput } from "../commands/UpdateJobCommand.ts";
import { UpdateMLTransformCommandInput, UpdateMLTransformCommandOutput } from "../commands/UpdateMLTransformCommand.ts";
import { UpdatePartitionCommandInput, UpdatePartitionCommandOutput } from "../commands/UpdatePartitionCommand.ts";
import { UpdateRegistryCommandInput, UpdateRegistryCommandOutput } from "../commands/UpdateRegistryCommand.ts";
import { UpdateSchemaCommandInput, UpdateSchemaCommandOutput } from "../commands/UpdateSchemaCommand.ts";
import { UpdateTableCommandInput, UpdateTableCommandOutput } from "../commands/UpdateTableCommand.ts";
import { UpdateTriggerCommandInput, UpdateTriggerCommandOutput } from "../commands/UpdateTriggerCommand.ts";
import {
UpdateUserDefinedFunctionCommandInput,
UpdateUserDefinedFunctionCommandOutput,
} from "../commands/UpdateUserDefinedFunctionCommand.ts";
import { UpdateWorkflowCommandInput, UpdateWorkflowCommandOutput } from "../commands/UpdateWorkflowCommand.ts";
import {
AccessDeniedException,
Action,
AlreadyExistsException,
BatchCreatePartitionRequest,
BatchCreatePartitionResponse,
BatchDeleteConnectionRequest,
BatchDeleteConnectionResponse,
BatchDeletePartitionRequest,
BatchDeletePartitionResponse,
BatchDeleteTableRequest,
BatchDeleteTableResponse,
BatchDeleteTableVersionRequest,
BatchDeleteTableVersionResponse,
BatchGetBlueprintsRequest,
BatchGetBlueprintsResponse,
BatchGetCrawlersRequest,
BatchGetCrawlersResponse,
BatchGetDevEndpointsRequest,
BatchGetDevEndpointsResponse,
BatchGetJobsRequest,
BatchGetJobsResponse,
BatchGetPartitionRequest,
BatchGetPartitionResponse,
BatchGetTriggersRequest,
BatchGetTriggersResponse,
BatchGetWorkflowsRequest,
BatchGetWorkflowsResponse,
BatchStopJobRunError,
BatchStopJobRunRequest,
BatchStopJobRunResponse,
BatchStopJobRunSuccessfulSubmission,
BatchUpdatePartitionFailureEntry,
BatchUpdatePartitionRequest,
BatchUpdatePartitionRequestEntry,
BatchUpdatePartitionResponse,
BinaryColumnStatisticsData,
Blueprint,
BlueprintDetails,
BlueprintRun,
BooleanColumnStatisticsData,
CancelMLTaskRunRequest,
CancelMLTaskRunResponse,
CatalogImportStatus,
CatalogTarget,
CheckSchemaVersionValidityInput,
CheckSchemaVersionValidityResponse,
Classifier,
CloudWatchEncryption,
CodeGenEdge,
CodeGenNode,
CodeGenNodeArg,
Column,
ColumnError,
ColumnStatistics,
ColumnStatisticsData,
ConcurrentModificationException,
Condition,
ConditionCheckFailureException,
ConflictException,
Connection,
ConnectionInput,
ConnectionPropertyKey,
ConnectionsList,
Crawl,
Crawler,
CrawlerMetrics,
CrawlerNodeDetails,
CrawlerRunningException,
CrawlerTargets,
CreateBlueprintRequest,
CreateBlueprintResponse,
CreateClassifierRequest,
CreateClassifierResponse,
CreateConnectionRequest,
CreateConnectionResponse,
CreateCrawlerRequest,
CreateCrawlerResponse,
CreateCsvClassifierRequest,
CreateDatabaseRequest,
CreateDatabaseResponse,
CreateDevEndpointRequest,
CreateDevEndpointResponse,
CreateGrokClassifierRequest,
CreateJobRequest,
CreateJobResponse,
CreateJsonClassifierRequest,
CreateMLTransformRequest,
CreateMLTransformResponse,
CreatePartitionIndexRequest,
CreatePartitionIndexResponse,
CreatePartitionRequest,
CreatePartitionResponse,
CreateRegistryInput,
CreateRegistryResponse,
CreateSchemaInput,
CreateSchemaResponse,
CreateScriptRequest,
CreateScriptResponse,
CreateSecurityConfigurationRequest,
CreateSecurityConfigurationResponse,
CreateTableRequest,
CreateTableResponse,
CreateTriggerRequest,
CreateTriggerResponse,
CreateUserDefinedFunctionRequest,
CreateUserDefinedFunctionResponse,
CreateWorkflowRequest,
CreateWorkflowResponse,
CreateXMLClassifierRequest,
CsvClassifier,
DataLakePrincipal,
Database,
DatabaseIdentifier,
DatabaseInput,
DateColumnStatisticsData,
DecimalColumnStatisticsData,
DecimalNumber,
DeleteBlueprintRequest,
DeleteBlueprintResponse,
DeleteClassifierRequest,
DeleteClassifierResponse,
DeleteColumnStatisticsForPartitionRequest,
DeleteColumnStatisticsForPartitionResponse,
DeleteColumnStatisticsForTableRequest,
DeleteColumnStatisticsForTableResponse,
DeleteConnectionRequest,
DeleteConnectionResponse,
DeleteCrawlerRequest,
DeleteCrawlerResponse,
DeleteDatabaseRequest,
DeleteDatabaseResponse,
DeleteDevEndpointRequest,
DeleteDevEndpointResponse,
DeleteJobRequest,
DeleteJobResponse,
DeleteMLTransformRequest,
DeleteMLTransformResponse,
DeletePartitionIndexRequest,
DeletePartitionIndexResponse,
DeletePartitionRequest,
DeletePartitionResponse,
DeleteRegistryInput,
DeleteRegistryResponse,
DeleteResourcePolicyRequest,
DeleteResourcePolicyResponse,
DeleteSchemaInput,
DeleteSchemaResponse,
DeleteSchemaVersionsInput,
DeleteSchemaVersionsResponse,
DeleteSecurityConfigurationRequest,
DeleteSecurityConfigurationResponse,
DeleteTableRequest,
DeleteTableResponse,
DeleteTableVersionRequest,
DeleteTableVersionResponse,
DeleteTriggerRequest,
DeleteTriggerResponse,
DeleteUserDefinedFunctionRequest,
DeleteUserDefinedFunctionResponse,
DeleteWorkflowRequest,
DeleteWorkflowResponse,
DevEndpoint,
DoubleColumnStatisticsData,
DynamoDBTarget,
Edge,
EncryptionConfiguration,
EntityNotFoundException,
ErrorDetail,
ErrorDetails,
EventBatchingCondition,
ExecutionProperty,
FindMatchesParameters,
GetBlueprintRequest,
GetBlueprintResponse,
GetBlueprintRunRequest,
GetBlueprintRunResponse,
GetBlueprintRunsRequest,
GetBlueprintRunsResponse,
GetCatalogImportStatusRequest,
GetCatalogImportStatusResponse,
GetClassifierRequest,
GetClassifierResponse,
GetClassifiersRequest,
GetClassifiersResponse,
GetColumnStatisticsForPartitionRequest,
GetColumnStatisticsForPartitionResponse,
GetColumnStatisticsForTableRequest,
GetColumnStatisticsForTableResponse,
GetConnectionRequest,
GetConnectionResponse,
GetConnectionsFilter,
GetConnectionsRequest,
GetConnectionsResponse,
GetCrawlerMetricsRequest,
GetCrawlerMetricsResponse,
GetCrawlerRequest,
GetCrawlerResponse,
GetCrawlersRequest,
GetCrawlersResponse,
GetDatabaseRequest,
GlueEncryptionException,
GlueTable,
GrokClassifier,
IdempotentParameterMismatchException,
InternalServiceException,
InvalidInputException,
JdbcTarget,
Job,
JobBookmarksEncryption,
JobCommand,
JobNodeDetails,
JobRun,
JsonClassifier,
LastActiveDefinition,
LastCrawlInfo,
LineageConfiguration,
LongColumnStatisticsData,
MLUserDataEncryption,
MongoDBTarget,
Node,
NotificationProperty,
OperationTimeoutException,
Order,
Partition,
PartitionError,
PartitionIndex,
PartitionInput,
PartitionValueList,
Permission,
PhysicalConnectionRequirements,
Predecessor,
Predicate,
PrincipalPermissions,
RecrawlPolicy,
RegistryId,
ResourceNumberLimitExceededException,
ResourceUri,
S3Encryption,
S3Target,
Schedule,
SchedulerTransitioningException,
SchemaChangePolicy,
SchemaId,
SchemaReference,
SchemaVersionErrorItem,
SerDeInfo,
SkewedInfo,
StartingEventBatchCondition,
StorageDescriptor,
StringColumnStatisticsData,
TableError,
TableIdentifier,
TableInput,
TableVersionError,
TransformEncryption,
TransformParameters,
Trigger,
TriggerNodeDetails,
UserDefinedFunctionInput,
ValidationException,
Workflow,
WorkflowGraph,
WorkflowRun,
WorkflowRunStatistics,
XMLClassifier,
} from "../models/models_0.ts";
import {
BackfillError,
CatalogEntry,
ColumnImportance,
ColumnStatisticsError,
ConcurrentRunsExceededException,
ConfusionMatrix,
ConnectionPasswordEncryption,
CrawlerNotRunningException,
CrawlerStoppingException,
DataCatalogEncryptionSettings,
DevEndpointCustomLibraries,
EncryptionAtRest,
EvaluationMetrics,
ExportLabelsTaskRunProperties,
FindMatchesMetrics,
FindMatchesTaskRunProperties,
GetDataCatalogEncryptionSettingsRequest,
GetDataCatalogEncryptionSettingsResponse,
GetDatabaseResponse,
GetDatabasesRequest,
GetDatabasesResponse,
GetDataflowGraphRequest,
GetDataflowGraphResponse,
GetDevEndpointRequest,
GetDevEndpointResponse,
GetDevEndpointsRequest,
GetDevEndpointsResponse,
GetJobBookmarkRequest,
GetJobBookmarkResponse,
GetJobRequest,
GetJobResponse,
GetJobRunRequest,
GetJobRunResponse,
GetJobRunsRequest,
GetJobRunsResponse,
GetJobsRequest,
GetJobsResponse,
GetMLTaskRunRequest,
GetMLTaskRunResponse,
GetMLTaskRunsRequest,
GetMLTaskRunsResponse,
GetMLTransformRequest,
GetMLTransformResponse,
GetMLTransformsRequest,
GetMLTransformsResponse,
GetMappingRequest,
GetMappingResponse,
GetPartitionIndexesRequest,
GetPartitionIndexesResponse,
GetPartitionRequest,
GetPartitionResponse,
GetPartitionsRequest,
GetPartitionsResponse,
GetPlanRequest,
GetPlanResponse,
GetRegistryInput,
GetRegistryResponse,
GetResourcePoliciesRequest,
GetResourcePoliciesResponse,
GetResourcePolicyRequest,
GetResourcePolicyResponse,
GetSchemaByDefinitionInput,
GetSchemaByDefinitionResponse,
GetSchemaInput,
GetSchemaResponse,
GetSchemaVersionInput,
GetSchemaVersionResponse,
GetSchemaVersionsDiffInput,
GetSchemaVersionsDiffResponse,
GetSecurityConfigurationRequest,
GetSecurityConfigurationResponse,
GetSecurityConfigurationsRequest,
GetSecurityConfigurationsResponse,
GetTableRequest,
GetTableResponse,
GetTableVersionRequest,
GetTableVersionResponse,
GetTableVersionsRequest,
GetTableVersionsResponse,
GetTablesRequest,
GetTablesResponse,
GetTagsRequest,
GetTagsResponse,
GetTriggerRequest,
GetTriggerResponse,
GetTriggersRequest,
GetTriggersResponse,
GetUserDefinedFunctionRequest,
GetUserDefinedFunctionResponse,
GetUserDefinedFunctionsRequest,
GetUserDefinedFunctionsResponse,
GetWorkflowRequest,
GetWorkflowResponse,
GetWorkflowRunPropertiesRequest,
GetWorkflowRunPropertiesResponse,
GetWorkflowRunRequest,
GetWorkflowRunResponse,
GetWorkflowRunsRequest,
GetWorkflowRunsResponse,
GluePolicy,
IllegalBlueprintStateException,
IllegalWorkflowStateException,
ImportCatalogToGlueRequest,
ImportCatalogToGlueResponse,
ImportLabelsTaskRunProperties,
JobBookmarkEntry,
JobUpdate,
KeySchemaElement,
LabelingSetGenerationTaskRunProperties,
ListBlueprintsRequest,
ListBlueprintsResponse,
ListCrawlersRequest,
ListCrawlersResponse,
ListDevEndpointsRequest,
ListDevEndpointsResponse,
ListJobsRequest,
ListJobsResponse,
ListMLTransformsRequest,
ListMLTransformsResponse,
ListRegistriesInput,
ListRegistriesResponse,
ListSchemaVersionsInput,
ListSchemaVersionsResponse,
ListSchemasInput,
ListSchemasResponse,
ListTriggersRequest,
ListTriggersResponse,
ListWorkflowsRequest,
ListWorkflowsResponse,
Location,
MLTransform,
MLTransformNotReadyException,
MappingEntry,
MetadataInfo,
MetadataKeyValuePair,
NoScheduleException,
OtherMetadataValueListItem,
PartitionIndexDescriptor,
PropertyPredicate,
PutDataCatalogEncryptionSettingsRequest,
PutDataCatalogEncryptionSettingsResponse,
PutResourcePolicyRequest,
PutResourcePolicyResponse,
PutSchemaVersionMetadataInput,
PutSchemaVersionMetadataResponse,
PutWorkflowRunPropertiesRequest,
PutWorkflowRunPropertiesResponse,
QuerySchemaVersionMetadataInput,
QuerySchemaVersionMetadataResponse,
RegisterSchemaVersionInput,
RegisterSchemaVersionResponse,
RegistryListItem,
RemoveSchemaVersionMetadataInput,
RemoveSchemaVersionMetadataResponse,
ResetJobBookmarkRequest,
ResetJobBookmarkResponse,
ResumeWorkflowRunRequest,
ResumeWorkflowRunResponse,
SchedulerNotRunningException,
SchedulerRunningException,
SchemaColumn,
SchemaListItem,
SchemaVersionListItem,
SchemaVersionNumber,
SearchTablesRequest,
SearchTablesResponse,
SecurityConfiguration,
Segment,
SortCriterion,
StartBlueprintRunRequest,
StartBlueprintRunResponse,
StartCrawlerRequest,
StartCrawlerResponse,
StartCrawlerScheduleRequest,
StartCrawlerScheduleResponse,
StartExportLabelsTaskRunRequest,
StartExportLabelsTaskRunResponse,
StartImportLabelsTaskRunRequest,
StartImportLabelsTaskRunResponse,
StartJobRunRequest,
StartJobRunResponse,
StartMLEvaluationTaskRunRequest,
StartMLEvaluationTaskRunResponse,
StartMLLabelingSetGenerationTaskRunRequest,
StartMLLabelingSetGenerationTaskRunResponse,
StartTriggerRequest,
StartTriggerResponse,
StartWorkflowRunRequest,
StartWorkflowRunResponse,
StopCrawlerRequest,
StopCrawlerResponse,
StopCrawlerScheduleRequest,
StopCrawlerScheduleResponse,
StopTriggerRequest,
StopTriggerResponse,
StopWorkflowRunRequest,
StopWorkflowRunResponse,
Table,
TableVersion,
TagResourceRequest,
TagResourceResponse,
TaskRun,
TaskRunFilterCriteria,
TaskRunProperties,
TaskRunSortCriteria,
TransformFilterCriteria,
TransformSortCriteria,
TriggerUpdate,
UntagResourceRequest,
UntagResourceResponse,
UpdateBlueprintRequest,
UpdateBlueprintResponse,
UpdateClassifierRequest,
UpdateClassifierResponse,
UpdateColumnStatisticsForPartitionRequest,
UpdateColumnStatisticsForPartitionResponse,
UpdateColumnStatisticsForTableRequest,
UpdateColumnStatisticsForTableResponse,
UpdateConnectionRequest,
UpdateConnectionResponse,
UpdateCrawlerRequest,
UpdateCrawlerResponse,
UpdateCrawlerScheduleRequest,
UpdateCrawlerScheduleResponse,
UpdateCsvClassifierRequest,
UpdateDatabaseRequest,
UpdateDatabaseResponse,
UpdateDevEndpointRequest,
UpdateDevEndpointResponse,
UpdateGrokClassifierRequest,
UpdateJobRequest,
UpdateJobResponse,
UpdateJsonClassifierRequest,
UpdateMLTransformRequest,
UpdateMLTransformResponse,
UpdatePartitionRequest,
UpdatePartitionResponse,
UpdateRegistryInput,
UpdateRegistryResponse,
UpdateSchemaInput,
UpdateSchemaResponse,
UpdateTableRequest,
UpdateTableResponse,
UpdateTriggerRequest,
UpdateTriggerResponse,
UpdateUserDefinedFunctionRequest,
UpdateUserDefinedFunctionResponse,
UpdateWorkflowRequest,
UpdateWorkflowResponse,
UpdateXMLClassifierRequest,
UserDefinedFunction,
VersionMismatchException,
} from "../models/models_1.ts";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "../../protocol-http/mod.ts";
import {
expectBoolean as __expectBoolean,
expectInt32 as __expectInt32,
expectLong as __expectLong,
expectNonNull as __expectNonNull,
expectNumber as __expectNumber,
expectString as __expectString,
limitedParseDouble as __limitedParseDouble,
parseEpochTimestamp as __parseEpochTimestamp,
serializeFloat as __serializeFloat,
} from "../../smithy-client/mod.ts";
import {
Endpoint as __Endpoint,
HeaderBag as __HeaderBag,
MetadataBearer as __MetadataBearer,
ResponseMetadata as __ResponseMetadata,
SerdeContext as __SerdeContext,
SmithyException as __SmithyException,
} from "../../types/mod.ts";
export const serializeAws_json1_1BatchCreatePartitionCommand = async (
input: BatchCreatePartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchCreatePartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchCreatePartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
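// Every serializer in this file follows the same shape; the sketch below
// (a hypothetical refactor, not part of the generated code) captures the
// pattern each of these functions expands:
//
// const serializeGlueCommand = async (
//   target: string,
//   serializedInput: any,
//   context: __SerdeContext
// ): Promise<__HttpRequest> => {
//   const headers: __HeaderBag = {
//     "content-type": "application/x-amz-json-1.1",
//     "x-amz-target": `AWSGlue.${target}`,
//   };
//   return buildHttpRpcRequest(context, headers, "/", undefined, JSON.stringify(serializedInput));
// };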
export const serializeAws_json1_1BatchDeleteConnectionCommand = async (
input: BatchDeleteConnectionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchDeleteConnection",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchDeleteConnectionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchDeletePartitionCommand = async (
input: BatchDeletePartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchDeletePartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchDeletePartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchDeleteTableCommand = async (
input: BatchDeleteTableCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchDeleteTable",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchDeleteTableRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchDeleteTableVersionCommand = async (
input: BatchDeleteTableVersionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchDeleteTableVersion",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchDeleteTableVersionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchGetBlueprintsCommand = async (
input: BatchGetBlueprintsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchGetBlueprints",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchGetBlueprintsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchGetCrawlersCommand = async (
input: BatchGetCrawlersCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchGetCrawlers",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchGetCrawlersRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchGetDevEndpointsCommand = async (
input: BatchGetDevEndpointsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchGetDevEndpoints",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchGetDevEndpointsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchGetJobsCommand = async (
input: BatchGetJobsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchGetJobs",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchGetJobsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchGetPartitionCommand = async (
input: BatchGetPartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchGetPartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchGetPartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchGetTriggersCommand = async (
input: BatchGetTriggersCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchGetTriggers",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchGetTriggersRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchGetWorkflowsCommand = async (
input: BatchGetWorkflowsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchGetWorkflows",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchGetWorkflowsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchStopJobRunCommand = async (
input: BatchStopJobRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchStopJobRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchStopJobRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1BatchUpdatePartitionCommand = async (
input: BatchUpdatePartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.BatchUpdatePartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1BatchUpdatePartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CancelMLTaskRunCommand = async (
input: CancelMLTaskRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CancelMLTaskRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CancelMLTaskRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CheckSchemaVersionValidityCommand = async (
input: CheckSchemaVersionValidityCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CheckSchemaVersionValidity",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CheckSchemaVersionValidityInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateBlueprintCommand = async (
input: CreateBlueprintCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateBlueprint",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateBlueprintRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateClassifierCommand = async (
input: CreateClassifierCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateClassifier",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateClassifierRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateConnectionCommand = async (
input: CreateConnectionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateConnection",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateConnectionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateCrawlerCommand = async (
input: CreateCrawlerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateCrawler",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateCrawlerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateDatabaseCommand = async (
input: CreateDatabaseCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateDatabase",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateDatabaseRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateDevEndpointCommand = async (
input: CreateDevEndpointCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateDevEndpoint",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateDevEndpointRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateJobCommand = async (
input: CreateJobCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateJob",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateJobRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateMLTransformCommand = async (
input: CreateMLTransformCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateMLTransform",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateMLTransformRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreatePartitionCommand = async (
input: CreatePartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreatePartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreatePartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreatePartitionIndexCommand = async (
input: CreatePartitionIndexCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreatePartitionIndex",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreatePartitionIndexRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateRegistryCommand = async (
input: CreateRegistryCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateRegistry",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateRegistryInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateSchemaCommand = async (
input: CreateSchemaCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateSchema",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateSchemaInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateScriptCommand = async (
input: CreateScriptCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateScript",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateScriptRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateSecurityConfigurationCommand = async (
input: CreateSecurityConfigurationCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateSecurityConfiguration",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateSecurityConfigurationRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateTableCommand = async (
input: CreateTableCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateTable",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateTableRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateTriggerCommand = async (
input: CreateTriggerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateTrigger",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateTriggerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateUserDefinedFunctionCommand = async (
input: CreateUserDefinedFunctionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateUserDefinedFunction",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateUserDefinedFunctionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1CreateWorkflowCommand = async (
input: CreateWorkflowCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.CreateWorkflow",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1CreateWorkflowRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteBlueprintCommand = async (
input: DeleteBlueprintCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteBlueprint",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteBlueprintRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteClassifierCommand = async (
input: DeleteClassifierCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteClassifier",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteClassifierRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteColumnStatisticsForPartitionCommand = async (
input: DeleteColumnStatisticsForPartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteColumnStatisticsForPartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteColumnStatisticsForPartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteColumnStatisticsForTableCommand = async (
input: DeleteColumnStatisticsForTableCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteColumnStatisticsForTable",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteColumnStatisticsForTableRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteConnectionCommand = async (
input: DeleteConnectionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteConnection",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteConnectionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteCrawlerCommand = async (
input: DeleteCrawlerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteCrawler",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteCrawlerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteDatabaseCommand = async (
input: DeleteDatabaseCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteDatabase",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteDatabaseRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteDevEndpointCommand = async (
input: DeleteDevEndpointCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteDevEndpoint",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteDevEndpointRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteJobCommand = async (
input: DeleteJobCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteJob",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteJobRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteMLTransformCommand = async (
input: DeleteMLTransformCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteMLTransform",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteMLTransformRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeletePartitionCommand = async (
input: DeletePartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeletePartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeletePartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeletePartitionIndexCommand = async (
input: DeletePartitionIndexCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeletePartitionIndex",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeletePartitionIndexRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteRegistryCommand = async (
input: DeleteRegistryCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteRegistry",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteRegistryInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteResourcePolicyCommand = async (
input: DeleteResourcePolicyCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteResourcePolicy",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteResourcePolicyRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteSchemaCommand = async (
input: DeleteSchemaCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteSchema",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteSchemaInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteSchemaVersionsCommand = async (
input: DeleteSchemaVersionsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteSchemaVersions",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteSchemaVersionsInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteSecurityConfigurationCommand = async (
input: DeleteSecurityConfigurationCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteSecurityConfiguration",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteSecurityConfigurationRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteTableCommand = async (
input: DeleteTableCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteTable",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteTableRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteTableVersionCommand = async (
input: DeleteTableVersionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteTableVersion",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteTableVersionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteTriggerCommand = async (
input: DeleteTriggerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteTrigger",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteTriggerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteUserDefinedFunctionCommand = async (
input: DeleteUserDefinedFunctionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteUserDefinedFunction",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteUserDefinedFunctionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1DeleteWorkflowCommand = async (
input: DeleteWorkflowCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.DeleteWorkflow",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1DeleteWorkflowRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetBlueprintCommand = async (
input: GetBlueprintCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetBlueprint",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetBlueprintRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetBlueprintRunCommand = async (
input: GetBlueprintRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetBlueprintRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetBlueprintRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetBlueprintRunsCommand = async (
input: GetBlueprintRunsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetBlueprintRuns",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetBlueprintRunsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetCatalogImportStatusCommand = async (
input: GetCatalogImportStatusCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetCatalogImportStatus",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetCatalogImportStatusRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetClassifierCommand = async (
input: GetClassifierCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetClassifier",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetClassifierRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetClassifiersCommand = async (
input: GetClassifiersCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetClassifiers",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetClassifiersRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetColumnStatisticsForPartitionCommand = async (
input: GetColumnStatisticsForPartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetColumnStatisticsForPartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetColumnStatisticsForPartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetColumnStatisticsForTableCommand = async (
input: GetColumnStatisticsForTableCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetColumnStatisticsForTable",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetColumnStatisticsForTableRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetConnectionCommand = async (
input: GetConnectionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetConnection",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetConnectionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetConnectionsCommand = async (
input: GetConnectionsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetConnections",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetConnectionsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetCrawlerCommand = async (
input: GetCrawlerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetCrawler",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetCrawlerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetCrawlerMetricsCommand = async (
input: GetCrawlerMetricsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetCrawlerMetrics",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetCrawlerMetricsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetCrawlersCommand = async (
input: GetCrawlersCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetCrawlers",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetCrawlersRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetDatabaseCommand = async (
input: GetDatabaseCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetDatabase",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetDatabaseRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetDatabasesCommand = async (
input: GetDatabasesCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetDatabases",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetDatabasesRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetDataCatalogEncryptionSettingsCommand = async (
input: GetDataCatalogEncryptionSettingsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetDataCatalogEncryptionSettings",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetDataCatalogEncryptionSettingsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetDataflowGraphCommand = async (
input: GetDataflowGraphCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetDataflowGraph",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetDataflowGraphRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetDevEndpointCommand = async (
input: GetDevEndpointCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetDevEndpoint",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetDevEndpointRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetDevEndpointsCommand = async (
input: GetDevEndpointsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetDevEndpoints",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetDevEndpointsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetJobCommand = async (
input: GetJobCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetJob",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetJobRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetJobBookmarkCommand = async (
input: GetJobBookmarkCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetJobBookmark",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetJobBookmarkRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetJobRunCommand = async (
input: GetJobRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetJobRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetJobRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetJobRunsCommand = async (
input: GetJobRunsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetJobRuns",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetJobRunsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetJobsCommand = async (
input: GetJobsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetJobs",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetJobsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetMappingCommand = async (
input: GetMappingCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetMapping",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetMappingRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetMLTaskRunCommand = async (
input: GetMLTaskRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetMLTaskRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetMLTaskRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetMLTaskRunsCommand = async (
input: GetMLTaskRunsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetMLTaskRuns",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetMLTaskRunsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetMLTransformCommand = async (
input: GetMLTransformCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetMLTransform",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetMLTransformRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetMLTransformsCommand = async (
input: GetMLTransformsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetMLTransforms",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetMLTransformsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetPartitionCommand = async (
input: GetPartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetPartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetPartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetPartitionIndexesCommand = async (
input: GetPartitionIndexesCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetPartitionIndexes",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetPartitionIndexesRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetPartitionsCommand = async (
input: GetPartitionsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetPartitions",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetPartitionsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetPlanCommand = async (
input: GetPlanCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetPlan",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetPlanRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetRegistryCommand = async (
input: GetRegistryCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetRegistry",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetRegistryInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetResourcePoliciesCommand = async (
input: GetResourcePoliciesCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetResourcePolicies",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetResourcePoliciesRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetResourcePolicyCommand = async (
input: GetResourcePolicyCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetResourcePolicy",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetResourcePolicyRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetSchemaCommand = async (
input: GetSchemaCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetSchema",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetSchemaInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetSchemaByDefinitionCommand = async (
input: GetSchemaByDefinitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetSchemaByDefinition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetSchemaByDefinitionInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetSchemaVersionCommand = async (
input: GetSchemaVersionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetSchemaVersion",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetSchemaVersionInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetSchemaVersionsDiffCommand = async (
input: GetSchemaVersionsDiffCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetSchemaVersionsDiff",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetSchemaVersionsDiffInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetSecurityConfigurationCommand = async (
input: GetSecurityConfigurationCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetSecurityConfiguration",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetSecurityConfigurationRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetSecurityConfigurationsCommand = async (
input: GetSecurityConfigurationsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetSecurityConfigurations",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetSecurityConfigurationsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetTableCommand = async (
input: GetTableCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetTable",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetTableRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetTablesCommand = async (
input: GetTablesCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetTables",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetTablesRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetTableVersionCommand = async (
input: GetTableVersionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetTableVersion",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetTableVersionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetTableVersionsCommand = async (
input: GetTableVersionsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetTableVersions",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetTableVersionsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetTagsCommand = async (
input: GetTagsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetTags",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetTagsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetTriggerCommand = async (
input: GetTriggerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetTrigger",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetTriggerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetTriggersCommand = async (
input: GetTriggersCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetTriggers",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetTriggersRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetUserDefinedFunctionCommand = async (
input: GetUserDefinedFunctionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetUserDefinedFunction",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetUserDefinedFunctionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetUserDefinedFunctionsCommand = async (
input: GetUserDefinedFunctionsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetUserDefinedFunctions",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetUserDefinedFunctionsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetWorkflowCommand = async (
input: GetWorkflowCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetWorkflow",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetWorkflowRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetWorkflowRunCommand = async (
input: GetWorkflowRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetWorkflowRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetWorkflowRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetWorkflowRunPropertiesCommand = async (
input: GetWorkflowRunPropertiesCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetWorkflowRunProperties",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetWorkflowRunPropertiesRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1GetWorkflowRunsCommand = async (
input: GetWorkflowRunsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.GetWorkflowRuns",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1GetWorkflowRunsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ImportCatalogToGlueCommand = async (
input: ImportCatalogToGlueCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ImportCatalogToGlue",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ImportCatalogToGlueRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListBlueprintsCommand = async (
input: ListBlueprintsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListBlueprints",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListBlueprintsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListCrawlersCommand = async (
input: ListCrawlersCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListCrawlers",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListCrawlersRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListDevEndpointsCommand = async (
input: ListDevEndpointsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListDevEndpoints",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListDevEndpointsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListJobsCommand = async (
input: ListJobsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListJobs",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListJobsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListMLTransformsCommand = async (
input: ListMLTransformsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListMLTransforms",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListMLTransformsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListRegistriesCommand = async (
input: ListRegistriesCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListRegistries",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListRegistriesInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListSchemasCommand = async (
input: ListSchemasCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListSchemas",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListSchemasInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListSchemaVersionsCommand = async (
input: ListSchemaVersionsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListSchemaVersions",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListSchemaVersionsInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListTriggersCommand = async (
input: ListTriggersCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListTriggers",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListTriggersRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ListWorkflowsCommand = async (
input: ListWorkflowsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ListWorkflows",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ListWorkflowsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1PutDataCatalogEncryptionSettingsCommand = async (
input: PutDataCatalogEncryptionSettingsCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.PutDataCatalogEncryptionSettings",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1PutDataCatalogEncryptionSettingsRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1PutResourcePolicyCommand = async (
input: PutResourcePolicyCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.PutResourcePolicy",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1PutResourcePolicyRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1PutSchemaVersionMetadataCommand = async (
input: PutSchemaVersionMetadataCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.PutSchemaVersionMetadata",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1PutSchemaVersionMetadataInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1PutWorkflowRunPropertiesCommand = async (
input: PutWorkflowRunPropertiesCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.PutWorkflowRunProperties",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1PutWorkflowRunPropertiesRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1QuerySchemaVersionMetadataCommand = async (
input: QuerySchemaVersionMetadataCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.QuerySchemaVersionMetadata",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1QuerySchemaVersionMetadataInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1RegisterSchemaVersionCommand = async (
input: RegisterSchemaVersionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.RegisterSchemaVersion",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1RegisterSchemaVersionInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1RemoveSchemaVersionMetadataCommand = async (
input: RemoveSchemaVersionMetadataCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.RemoveSchemaVersionMetadata",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1RemoveSchemaVersionMetadataInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ResetJobBookmarkCommand = async (
input: ResetJobBookmarkCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ResetJobBookmark",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ResetJobBookmarkRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1ResumeWorkflowRunCommand = async (
input: ResumeWorkflowRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.ResumeWorkflowRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1ResumeWorkflowRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1SearchTablesCommand = async (
input: SearchTablesCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.SearchTables",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1SearchTablesRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartBlueprintRunCommand = async (
input: StartBlueprintRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartBlueprintRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartBlueprintRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartCrawlerCommand = async (
input: StartCrawlerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartCrawler",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartCrawlerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartCrawlerScheduleCommand = async (
input: StartCrawlerScheduleCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartCrawlerSchedule",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartCrawlerScheduleRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartExportLabelsTaskRunCommand = async (
input: StartExportLabelsTaskRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartExportLabelsTaskRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartExportLabelsTaskRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartImportLabelsTaskRunCommand = async (
input: StartImportLabelsTaskRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartImportLabelsTaskRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartImportLabelsTaskRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartJobRunCommand = async (
input: StartJobRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartJobRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartJobRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartMLEvaluationTaskRunCommand = async (
input: StartMLEvaluationTaskRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartMLEvaluationTaskRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartMLEvaluationTaskRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartMLLabelingSetGenerationTaskRunCommand = async (
input: StartMLLabelingSetGenerationTaskRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartMLLabelingSetGenerationTaskRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartMLLabelingSetGenerationTaskRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartTriggerCommand = async (
input: StartTriggerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartTrigger",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartTriggerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StartWorkflowRunCommand = async (
input: StartWorkflowRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StartWorkflowRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StartWorkflowRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StopCrawlerCommand = async (
input: StopCrawlerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StopCrawler",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StopCrawlerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StopCrawlerScheduleCommand = async (
input: StopCrawlerScheduleCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StopCrawlerSchedule",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StopCrawlerScheduleRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StopTriggerCommand = async (
input: StopTriggerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StopTrigger",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StopTriggerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1StopWorkflowRunCommand = async (
input: StopWorkflowRunCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.StopWorkflowRun",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1StopWorkflowRunRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1TagResourceCommand = async (
input: TagResourceCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.TagResource",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1TagResourceRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UntagResourceCommand = async (
input: UntagResourceCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UntagResource",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UntagResourceRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateBlueprintCommand = async (
input: UpdateBlueprintCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateBlueprint",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateBlueprintRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateClassifierCommand = async (
input: UpdateClassifierCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateClassifier",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateClassifierRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateColumnStatisticsForPartitionCommand = async (
input: UpdateColumnStatisticsForPartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateColumnStatisticsForPartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateColumnStatisticsForPartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateColumnStatisticsForTableCommand = async (
input: UpdateColumnStatisticsForTableCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateColumnStatisticsForTable",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateColumnStatisticsForTableRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateConnectionCommand = async (
input: UpdateConnectionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateConnection",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateConnectionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateCrawlerCommand = async (
input: UpdateCrawlerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateCrawler",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateCrawlerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateCrawlerScheduleCommand = async (
input: UpdateCrawlerScheduleCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateCrawlerSchedule",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateCrawlerScheduleRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateDatabaseCommand = async (
input: UpdateDatabaseCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateDatabase",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateDatabaseRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateDevEndpointCommand = async (
input: UpdateDevEndpointCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateDevEndpoint",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateDevEndpointRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateJobCommand = async (
input: UpdateJobCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateJob",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateJobRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateMLTransformCommand = async (
input: UpdateMLTransformCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateMLTransform",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateMLTransformRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdatePartitionCommand = async (
input: UpdatePartitionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdatePartition",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdatePartitionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateRegistryCommand = async (
input: UpdateRegistryCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateRegistry",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateRegistryInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateSchemaCommand = async (
input: UpdateSchemaCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateSchema",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateSchemaInput(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateTableCommand = async (
input: UpdateTableCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateTable",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateTableRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateTriggerCommand = async (
input: UpdateTriggerCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateTrigger",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateTriggerRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateUserDefinedFunctionCommand = async (
input: UpdateUserDefinedFunctionCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateUserDefinedFunction",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateUserDefinedFunctionRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
export const serializeAws_json1_1UpdateWorkflowCommand = async (
input: UpdateWorkflowCommandInput,
context: __SerdeContext
): Promise<__HttpRequest> => {
const headers: __HeaderBag = {
"content-type": "application/x-amz-json-1.1",
"x-amz-target": "AWSGlue.UpdateWorkflow",
};
let body: any;
body = JSON.stringify(serializeAws_json1_1UpdateWorkflowRequest(input, context));
return buildHttpRpcRequest(context, headers, "/", undefined, body);
};
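// The deserializers below mirror the serializers. Each exported
// deserialize*Command handles the success path: any status >= 300 is handed
// to a private deserialize*CommandError companion, otherwise the JSON body is
// parsed into the operation's response shape and merged with the response
// $metadata. Note the spread order: the deserialized members are spread after
// $metadata, so the modeled shape wins on any (unlikely) key collision.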
export const deserializeAws_json1_1BatchCreatePartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchCreatePartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchCreatePartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchCreatePartitionResponse(data, context);
const response: BatchCreatePartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
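// Error deserializers resolve an error code via loadRestJsonErrorCode (which
// typically inspects the "x-amzn-errortype" header and well-known body
// fields), then switch on both the bare code ("AlreadyExistsException") and
// the fully qualified Smithy shape ID
// ("com.amazonaws.glue#AlreadyExistsException"). Unrecognized codes fall
// through to the default case, which synthesizes a generic client-fault error
// from whichever code/Code and message/Message casing the service returned,
// before the normalized Error is rejected.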
const deserializeAws_json1_1BatchCreatePartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchCreatePartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
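// A minimal usage sketch, assuming the standard @aws-sdk/client-glue entry
// points (illustrative only; this snippet is not part of the generated
// module):
//
//   import { GlueClient, BatchCreatePartitionCommand } from "@aws-sdk/client-glue";
//
//   const client = new GlueClient({ region: "us-east-1" });
//   try {
//     const out = await client.send(
//       new BatchCreatePartitionCommand({
//         DatabaseName: "db",
//         TableName: "tbl",
//         PartitionInputList: [],
//       })
//     );
//     console.log(out.Errors); // per-partition failures, if any
//   } catch (err) {
//     // err.name carries the errorCode resolved above, e.g.
//     // "EntityNotFoundException" when the database or table is missing.
//   }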
export const deserializeAws_json1_1BatchDeleteConnectionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchDeleteConnectionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchDeleteConnectionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchDeleteConnectionResponse(data, context);
const response: BatchDeleteConnectionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchDeleteConnectionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchDeleteConnectionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchDeletePartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchDeletePartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchDeletePartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchDeletePartitionResponse(data, context);
const response: BatchDeletePartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchDeletePartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchDeletePartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchDeleteTableCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchDeleteTableCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchDeleteTableCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchDeleteTableResponse(data, context);
const response: BatchDeleteTableCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchDeleteTableCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchDeleteTableCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchDeleteTableVersionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchDeleteTableVersionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchDeleteTableVersionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchDeleteTableVersionResponse(data, context);
const response: BatchDeleteTableVersionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchDeleteTableVersionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchDeleteTableVersionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchGetBlueprintsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetBlueprintsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchGetBlueprintsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchGetBlueprintsResponse(data, context);
const response: BatchGetBlueprintsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchGetBlueprintsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetBlueprintsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchGetCrawlersCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetCrawlersCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchGetCrawlersCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchGetCrawlersResponse(data, context);
const response: BatchGetCrawlersCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchGetCrawlersCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetCrawlersCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchGetDevEndpointsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetDevEndpointsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchGetDevEndpointsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchGetDevEndpointsResponse(data, context);
const response: BatchGetDevEndpointsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchGetDevEndpointsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetDevEndpointsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchGetJobsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetJobsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchGetJobsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchGetJobsResponse(data, context);
const response: BatchGetJobsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchGetJobsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetJobsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchGetPartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetPartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchGetPartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchGetPartitionResponse(data, context);
const response: BatchGetPartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchGetPartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetPartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchGetTriggersCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetTriggersCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchGetTriggersCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchGetTriggersResponse(data, context);
const response: BatchGetTriggersCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchGetTriggersCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetTriggersCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchGetWorkflowsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetWorkflowsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchGetWorkflowsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchGetWorkflowsResponse(data, context);
const response: BatchGetWorkflowsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchGetWorkflowsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchGetWorkflowsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchStopJobRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchStopJobRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchStopJobRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchStopJobRunResponse(data, context);
const response: BatchStopJobRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchStopJobRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchStopJobRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1BatchUpdatePartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchUpdatePartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1BatchUpdatePartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1BatchUpdatePartitionResponse(data, context);
const response: BatchUpdatePartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1BatchUpdatePartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<BatchUpdatePartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CancelMLTaskRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CancelMLTaskRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CancelMLTaskRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1CancelMLTaskRunResponse(data, context);
const response: CancelMLTaskRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CancelMLTaskRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CancelMLTaskRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CheckSchemaVersionValidityCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CheckSchemaVersionValidityCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CheckSchemaVersionValidityCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1CheckSchemaVersionValidityResponse(data, context);
const response: CheckSchemaVersionValidityCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CheckSchemaVersionValidityCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CheckSchemaVersionValidityCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateBlueprintCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateBlueprintCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateBlueprintCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1CreateBlueprintResponse(data, context);
const response: CreateBlueprintCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateBlueprintCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateBlueprintCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateClassifierCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateClassifierCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateClassifierCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1CreateClassifierResponse(data, context);
const response: CreateClassifierCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateClassifierCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateClassifierCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateConnectionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateConnectionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateConnectionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1CreateConnectionResponse(data, context);
const response: CreateConnectionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateConnectionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateConnectionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateCrawlerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateCrawlerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateCrawlerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1CreateCrawlerResponse(data, context);
const response: CreateCrawlerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateCrawlerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateCrawlerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateDatabaseCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateDatabaseCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateDatabaseCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1CreateDatabaseResponse(data, context);
const response: CreateDatabaseCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateDatabaseCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateDatabaseCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateDevEndpointCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateDevEndpointCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateDevEndpointCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1CreateDevEndpointResponse(data, context);
const response: CreateDevEndpointCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateDevEndpointCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateDevEndpointCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "IdempotentParameterMismatchException":
case "com.amazonaws.glue#IdempotentParameterMismatchException":
response = {
...(await deserializeAws_json1_1IdempotentParameterMismatchExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ValidationException":
case "com.amazonaws.glue#ValidationException":
response = {
...(await deserializeAws_json1_1ValidationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
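// CreateDevEndpoint maps the widest error set in this stretch of the file
// (AccessDeniedException, AlreadyExistsException,
// IdempotentParameterMismatchException, InternalServiceException,
// InvalidInputException, OperationTimeoutException,
// ResourceNumberLimitExceededException, and ValidationException), but the
// dispatch mechanics are identical to every other error deserializer above.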
export const deserializeAws_json1_1CreateJobCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateJobCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateJobCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateJobResponse(data, context);
const response: CreateJobCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateJobCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateJobCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "IdempotentParameterMismatchException":
case "com.amazonaws.glue#IdempotentParameterMismatchException":
response = {
...(await deserializeAws_json1_1IdempotentParameterMismatchExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateMLTransformCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateMLTransformCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateMLTransformCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateMLTransformResponse(data, context);
const response: CreateMLTransformCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateMLTransformCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateMLTransformCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "IdempotentParameterMismatchException":
case "com.amazonaws.glue#IdempotentParameterMismatchException":
response = {
...(await deserializeAws_json1_1IdempotentParameterMismatchExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreatePartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreatePartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreatePartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreatePartitionResponse(data, context);
const response: CreatePartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreatePartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreatePartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreatePartitionIndexCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreatePartitionIndexCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreatePartitionIndexCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreatePartitionIndexResponse(data, context);
const response: CreatePartitionIndexCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreatePartitionIndexCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreatePartitionIndexCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateRegistryCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateRegistryCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateRegistryCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateRegistryResponse(data, context);
const response: CreateRegistryCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateRegistryCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateRegistryCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateSchemaCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateSchemaCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateSchemaCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateSchemaResponse(data, context);
const response: CreateSchemaCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateSchemaCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateSchemaCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateScriptCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateScriptCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateScriptCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateScriptResponse(data, context);
const response: CreateScriptCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateScriptCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateScriptCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateSecurityConfigurationCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateSecurityConfigurationCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateSecurityConfigurationCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateSecurityConfigurationResponse(data, context);
const response: CreateSecurityConfigurationCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateSecurityConfigurationCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateSecurityConfigurationCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateTableCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateTableCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateTableCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateTableResponse(data, context);
const response: CreateTableCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateTableCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateTableCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateTriggerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateTriggerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateTriggerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateTriggerResponse(data, context);
const response: CreateTriggerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateTriggerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateTriggerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "IdempotentParameterMismatchException":
case "com.amazonaws.glue#IdempotentParameterMismatchException":
response = {
...(await deserializeAws_json1_1IdempotentParameterMismatchExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateUserDefinedFunctionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateUserDefinedFunctionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateUserDefinedFunctionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateUserDefinedFunctionResponse(data, context);
const response: CreateUserDefinedFunctionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateUserDefinedFunctionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateUserDefinedFunctionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1CreateWorkflowCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateWorkflowCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1CreateWorkflowCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1CreateWorkflowResponse(data, context);
const response: CreateWorkflowCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1CreateWorkflowCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<CreateWorkflowCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
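/**
 * The Delete* deserializers below follow the same template as the Create*
 * handlers above; only the modeled error set varies per operation (for
 * example, DeleteCrawler also maps CrawlerRunningException and
 * SchedulerTransitioningException).
 */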
export const deserializeAws_json1_1DeleteBlueprintCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteBlueprintCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteBlueprintCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1DeleteBlueprintResponse(data, context);
const response: DeleteBlueprintCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteBlueprintCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteBlueprintCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteClassifierCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteClassifierCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteClassifierCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1DeleteClassifierResponse(data, context);
const response: DeleteClassifierCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteClassifierCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteClassifierCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteColumnStatisticsForPartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteColumnStatisticsForPartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteColumnStatisticsForPartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1DeleteColumnStatisticsForPartitionResponse(data, context);
const response: DeleteColumnStatisticsForPartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteColumnStatisticsForPartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteColumnStatisticsForPartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteColumnStatisticsForTableCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteColumnStatisticsForTableCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteColumnStatisticsForTableCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1DeleteColumnStatisticsForTableResponse(data, context);
const response: DeleteColumnStatisticsForTableCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteColumnStatisticsForTableCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteColumnStatisticsForTableCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteConnectionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteConnectionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteConnectionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1DeleteConnectionResponse(data, context);
const response: DeleteConnectionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteConnectionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteConnectionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteCrawlerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteCrawlerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteCrawlerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1DeleteCrawlerResponse(data, context);
const response: DeleteCrawlerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteCrawlerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteCrawlerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "CrawlerRunningException":
case "com.amazonaws.glue#CrawlerRunningException":
response = {
...(await deserializeAws_json1_1CrawlerRunningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "SchedulerTransitioningException":
case "com.amazonaws.glue#SchedulerTransitioningException":
response = {
...(await deserializeAws_json1_1SchedulerTransitioningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteDatabaseCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteDatabaseCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteDatabaseCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1DeleteDatabaseResponse(data, context);
const response: DeleteDatabaseCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteDatabaseCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteDatabaseCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteDevEndpointCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteDevEndpointCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteDevEndpointCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
  const contents: any = deserializeAws_json1_1DeleteDevEndpointResponse(data, context);
const response: DeleteDevEndpointCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteDevEndpointCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteDevEndpointCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
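// For illustration only (hypothetical caller code, not part of this
// module): modeled errors rejected above surface to SDK users as Error
// instances whose `name` matches the exception, so given a configured
// GlueClient instance `client`, a caller might write:
//
//   try {
//     await client.send(new DeleteDevEndpointCommand({ EndpointName: "my-endpoint" }));
//   } catch (err) {
//     if (err.name === "EntityNotFoundException") {
//       // the endpoint is already gone; treat the delete as a no-op
//     } else {
//       throw err;
//     }
//   }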
export const deserializeAws_json1_1DeleteJobCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteJobCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteJobCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteJobResponse(data, context);
const response: DeleteJobCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteJobCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteJobCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteMLTransformCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteMLTransformCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteMLTransformCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteMLTransformResponse(data, context);
const response: DeleteMLTransformCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteMLTransformCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteMLTransformCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeletePartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeletePartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeletePartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeletePartitionResponse(data, context);
const response: DeletePartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeletePartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeletePartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeletePartitionIndexCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeletePartitionIndexCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeletePartitionIndexCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeletePartitionIndexResponse(data, context);
const response: DeletePartitionIndexCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeletePartitionIndexCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeletePartitionIndexCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConflictException":
case "com.amazonaws.glue#ConflictException":
response = {
...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteRegistryCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteRegistryCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteRegistryCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteRegistryResponse(data, context);
const response: DeleteRegistryCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteRegistryCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteRegistryCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
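// The shared tail of every error handler above normalizes message casing
// before rejecting: some responses populate `Message` rather than
// `message`, so the code prefers `message`, falls back to `Message`, then
// to the error code itself, and deletes the uppercase field so only one
// copy survives. The rejection value is a real Error (preserving stack
// and instanceof semantics) with the deserialized fields and $metadata
// assigned onto it. Error codes that reach the `default:` branch were not
// modeled for the operation and are passed through with
// `$fault: "client"` as a conservative default.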
export const deserializeAws_json1_1DeleteResourcePolicyCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteResourcePolicyCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteResourcePolicyCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteResourcePolicyResponse(data, context);
const response: DeleteResourcePolicyCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteResourcePolicyCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteResourcePolicyCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConditionCheckFailureException":
case "com.amazonaws.glue#ConditionCheckFailureException":
response = {
...(await deserializeAws_json1_1ConditionCheckFailureExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteSchemaCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteSchemaCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteSchemaCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteSchemaResponse(data, context);
const response: DeleteSchemaCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteSchemaCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteSchemaCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteSchemaVersionsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteSchemaVersionsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteSchemaVersionsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteSchemaVersionsResponse(data, context);
const response: DeleteSchemaVersionsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteSchemaVersionsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteSchemaVersionsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteSecurityConfigurationCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteSecurityConfigurationCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteSecurityConfigurationCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteSecurityConfigurationResponse(data, context);
const response: DeleteSecurityConfigurationCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteSecurityConfigurationCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteSecurityConfigurationCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteTableCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteTableCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteTableCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteTableResponse(data, context);
const response: DeleteTableCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteTableCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteTableCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteTableVersionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteTableVersionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteTableVersionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteTableVersionResponse(data, context);
const response: DeleteTableVersionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteTableVersionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteTableVersionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteTriggerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteTriggerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteTriggerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteTriggerResponse(data, context);
const response: DeleteTriggerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteTriggerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteTriggerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteUserDefinedFunctionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteUserDefinedFunctionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteUserDefinedFunctionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteUserDefinedFunctionResponse(data, context);
const response: DeleteUserDefinedFunctionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteUserDefinedFunctionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteUserDefinedFunctionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1DeleteWorkflowCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteWorkflowCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1DeleteWorkflowCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1DeleteWorkflowResponse(data, context);
const response: DeleteWorkflowCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1DeleteWorkflowCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<DeleteWorkflowCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
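// The Get* deserializers below follow the same template as the Delete*
// handlers above, differing only in the response shape they decode and in
// which modeled exceptions appear in each switch (for example,
// GetBlueprintRun omits InvalidInputException, and GetClassifiers models
// only OperationTimeoutException).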
export const deserializeAws_json1_1GetBlueprintCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetBlueprintCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetBlueprintCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetBlueprintResponse(data, context);
const response: GetBlueprintCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetBlueprintCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetBlueprintCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetBlueprintRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetBlueprintRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetBlueprintRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetBlueprintRunResponse(data, context);
const response: GetBlueprintRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetBlueprintRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetBlueprintRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetBlueprintRunsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetBlueprintRunsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetBlueprintRunsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetBlueprintRunsResponse(data, context);
const response: GetBlueprintRunsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetBlueprintRunsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetBlueprintRunsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetCatalogImportStatusCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetCatalogImportStatusCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetCatalogImportStatusCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetCatalogImportStatusResponse(data, context);
const response: GetCatalogImportStatusCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetCatalogImportStatusCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetCatalogImportStatusCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetClassifierCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetClassifierCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetClassifierCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetClassifierResponse(data, context);
const response: GetClassifierCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetClassifierCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetClassifierCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetClassifiersCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetClassifiersCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetClassifiersCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetClassifiersResponse(data, context);
const response: GetClassifiersCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetClassifiersCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetClassifiersCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetColumnStatisticsForPartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetColumnStatisticsForPartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetColumnStatisticsForPartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetColumnStatisticsForPartitionResponse(data, context);
const response: GetColumnStatisticsForPartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetColumnStatisticsForPartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetColumnStatisticsForPartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetColumnStatisticsForTableCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetColumnStatisticsForTableCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetColumnStatisticsForTableCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetColumnStatisticsForTableResponse(data, context);
const response: GetColumnStatisticsForTableCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetColumnStatisticsForTableCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetColumnStatisticsForTableCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
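// GetColumnStatisticsForPartition/Table above and the connection getters
// below additionally model GlueEncryptionException, the error Glue
// returns when an encryption operation on catalog data fails (commonly a
// KMS permission or key issue when Data Catalog encryption is enabled).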
export const deserializeAws_json1_1GetConnectionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetConnectionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetConnectionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetConnectionResponse(data, context);
const response: GetConnectionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetConnectionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetConnectionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
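// Illustrative sketch (not part of this module): callers never invoke these
// deserializers directly; errors mapped here surface as rejected promises
// from GlueClient.send(). The connection name and region are arbitrary
// example values:
//
//   import { GlueClient, GetConnectionCommand } from "@aws-sdk/client-glue";
//
//   const client = new GlueClient({ region: "us-east-1" });
//   try {
//     const { Connection } = await client.send(
//       new GetConnectionCommand({ Name: "my-connection" })
//     );
//   } catch (err) {
//     if (err.name === "EntityNotFoundException") {
//       // mapped by deserializeAws_json1_1GetConnectionCommandError above
//     }
//   }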
export const deserializeAws_json1_1GetConnectionsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetConnectionsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetConnectionsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetConnectionsResponse(data, context);
const response: GetConnectionsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetConnectionsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetConnectionsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetCrawlerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetCrawlerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetCrawlerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetCrawlerResponse(data, context);
const response: GetCrawlerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetCrawlerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetCrawlerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetCrawlerMetricsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetCrawlerMetricsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetCrawlerMetricsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetCrawlerMetricsResponse(data, context);
const response: GetCrawlerMetricsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetCrawlerMetricsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetCrawlerMetricsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetCrawlersCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetCrawlersCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetCrawlersCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetCrawlersResponse(data, context);
const response: GetCrawlersCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetCrawlersCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetCrawlersCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetDatabaseCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDatabaseCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetDatabaseCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetDatabaseResponse(data, context);
const response: GetDatabaseCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetDatabaseCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDatabaseCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetDatabasesCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDatabasesCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetDatabasesCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetDatabasesResponse(data, context);
const response: GetDatabasesCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetDatabasesCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDatabasesCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetDataCatalogEncryptionSettingsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDataCatalogEncryptionSettingsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetDataCatalogEncryptionSettingsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetDataCatalogEncryptionSettingsResponse(data, context);
const response: GetDataCatalogEncryptionSettingsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetDataCatalogEncryptionSettingsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDataCatalogEncryptionSettingsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetDataflowGraphCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDataflowGraphCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetDataflowGraphCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetDataflowGraphResponse(data, context);
const response: GetDataflowGraphCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetDataflowGraphCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDataflowGraphCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
}; | case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetDevEndpointCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDevEndpointCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetDevEndpointCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetDevEndpointResponse(data, context);
const response: GetDevEndpointCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetDevEndpointCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDevEndpointCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetDevEndpointsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDevEndpointsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetDevEndpointsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetDevEndpointsResponse(data, context);
const response: GetDevEndpointsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetDevEndpointsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetDevEndpointsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetJobCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetJobCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetJobResponse(data, context);
const response: GetJobCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetJobCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetJobBookmarkCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobBookmarkCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetJobBookmarkCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetJobBookmarkResponse(data, context);
const response: GetJobBookmarkCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
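// GetJobBookmark additionally models ValidationException, so its switch
// carries one extra case relative to the other Get* handlers here.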
const deserializeAws_json1_1GetJobBookmarkCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobBookmarkCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ValidationException":
case "com.amazonaws.glue#ValidationException":
response = {
...(await deserializeAws_json1_1ValidationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetJobRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetJobRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetJobRunResponse(data, context);
const response: GetJobRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetJobRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetJobRunsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobRunsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetJobRunsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetJobRunsResponse(data, context);
const response: GetJobRunsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetJobRunsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobRunsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetJobsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetJobsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetJobsResponse(data, context);
const response: GetJobsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetJobsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetJobsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetMappingCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMappingCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetMappingCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetMappingResponse(data, context);
const response: GetMappingCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetMappingCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMappingCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetMLTaskRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMLTaskRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetMLTaskRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetMLTaskRunResponse(data, context);
const response: GetMLTaskRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetMLTaskRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMLTaskRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetMLTaskRunsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMLTaskRunsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetMLTaskRunsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetMLTaskRunsResponse(data, context);
const response: GetMLTaskRunsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetMLTaskRunsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMLTaskRunsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetMLTransformCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMLTransformCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetMLTransformCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetMLTransformResponse(data, context);
const response: GetMLTransformCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetMLTransformCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMLTransformCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetMLTransformsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMLTransformsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetMLTransformsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetMLTransformsResponse(data, context);
const response: GetMLTransformsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetMLTransformsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetMLTransformsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetPartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetPartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetPartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetPartitionResponse(data, context);
const response: GetPartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetPartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetPartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetPartitionIndexesCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetPartitionIndexesCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetPartitionIndexesCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetPartitionIndexesResponse(data, context);
const response: GetPartitionIndexesCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
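// GetPartitionIndexes is the only operation among the Get* handlers here
// that models ConflictException.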
const deserializeAws_json1_1GetPartitionIndexesCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetPartitionIndexesCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "ConflictException":
case "com.amazonaws.glue#ConflictException":
response = {
...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetPartitionsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetPartitionsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetPartitionsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetPartitionsResponse(data, context);
const response: GetPartitionsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetPartitionsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetPartitionsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) || "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
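// Hedged usage sketch (illustrative only, not part of the generated protocol
// code; the database/table names and region below are hypothetical): callers
// never invoke these deserializers directly. They are wired into each command's
// middleware stack, so a failure surfaces as a rejected Error carrying the
// modeled exception fields set above (name, message, $metadata), e.g.:
//
//   import { GlueClient, GetPartitionsCommand } from "@aws-sdk/client-glue";
//
//   const client = new GlueClient({ region: "us-east-1" });
//   try {
//     const out = await client.send(
//       new GetPartitionsCommand({ DatabaseName: "db", TableName: "tbl" })
//     );
//     console.log(out.Partitions?.length);
//   } catch (err) {
//     if ((err as Error).name === "EntityNotFoundException") {
//       // database or table does not exist
//     }
//   }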
export const deserializeAws_json1_1GetPlanCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetPlanCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetPlanCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetPlanResponse(data, context);
const response: GetPlanCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetPlanCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetPlanCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
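// Note: the schema-registry operations that follow (GetRegistry, GetSchema,
// GetSchemaByDefinition, GetSchemaVersion, GetSchemaVersionsDiff) additionally
// model AccessDeniedException, which the catalog operations above do not.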
export const deserializeAws_json1_1GetRegistryCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetRegistryCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetRegistryCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetRegistryResponse(data, context);
const response: GetRegistryCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetRegistryCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetRegistryCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetResourcePoliciesCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetResourcePoliciesCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetResourcePoliciesCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetResourcePoliciesResponse(data, context);
const response: GetResourcePoliciesCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetResourcePoliciesCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetResourcePoliciesCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetResourcePolicyCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetResourcePolicyCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetResourcePolicyCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetResourcePolicyResponse(data, context);
const response: GetResourcePolicyCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetResourcePolicyCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetResourcePolicyCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetSchemaCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSchemaCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetSchemaCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetSchemaResponse(data, context);
const response: GetSchemaCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetSchemaCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSchemaCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetSchemaByDefinitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSchemaByDefinitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetSchemaByDefinitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetSchemaByDefinitionResponse(data, context);
const response: GetSchemaByDefinitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetSchemaByDefinitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSchemaByDefinitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetSchemaVersionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSchemaVersionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetSchemaVersionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetSchemaVersionResponse(data, context);
const response: GetSchemaVersionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetSchemaVersionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSchemaVersionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetSchemaVersionsDiffCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSchemaVersionsDiffCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetSchemaVersionsDiffCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetSchemaVersionsDiffResponse(data, context);
const response: GetSchemaVersionsDiffCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetSchemaVersionsDiffCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSchemaVersionsDiffCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetSecurityConfigurationCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSecurityConfigurationCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetSecurityConfigurationCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetSecurityConfigurationResponse(data, context);
const response: GetSecurityConfigurationCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetSecurityConfigurationCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSecurityConfigurationCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetSecurityConfigurationsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSecurityConfigurationsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetSecurityConfigurationsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetSecurityConfigurationsResponse(data, context);
const response: GetSecurityConfigurationsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetSecurityConfigurationsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetSecurityConfigurationsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
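// Note: among the operations below, the catalog reads that can return
// encrypted metadata (GetTable, GetTables, GetTableVersion, GetTableVersions,
// GetUserDefinedFunction, GetUserDefinedFunctions) additionally model
// GlueEncryptionException, presumably raised when a catalog
// encryption/decryption operation fails; the remaining operations share the
// common EntityNotFound / InternalService / InvalidInput / OperationTimeout set.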
export const deserializeAws_json1_1GetTableCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTableCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetTableCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetTableResponse(data, context);
const response: GetTableCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetTableCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTableCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetTablesCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTablesCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetTablesCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetTablesResponse(data, context);
const response: GetTablesCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetTablesCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTablesCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetTableVersionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTableVersionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetTableVersionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetTableVersionResponse(data, context);
const response: GetTableVersionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetTableVersionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTableVersionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetTableVersionsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTableVersionsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetTableVersionsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetTableVersionsResponse(data, context);
const response: GetTableVersionsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetTableVersionsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTableVersionsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetTagsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTagsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetTagsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetTagsResponse(data, context);
const response: GetTagsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetTagsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTagsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetTriggerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTriggerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetTriggerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetTriggerResponse(data, context);
const response: GetTriggerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetTriggerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTriggerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetTriggersCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTriggersCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetTriggersCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetTriggersResponse(data, context);
const response: GetTriggersCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetTriggersCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetTriggersCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetUserDefinedFunctionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetUserDefinedFunctionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetUserDefinedFunctionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetUserDefinedFunctionResponse(data, context);
const response: GetUserDefinedFunctionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetUserDefinedFunctionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetUserDefinedFunctionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetUserDefinedFunctionsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetUserDefinedFunctionsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetUserDefinedFunctionsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetUserDefinedFunctionsResponse(data, context);
const response: GetUserDefinedFunctionsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetUserDefinedFunctionsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetUserDefinedFunctionsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetWorkflowCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetWorkflowCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetWorkflowCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetWorkflowResponse(data, context);
const response: GetWorkflowCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetWorkflowCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetWorkflowCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetWorkflowRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetWorkflowRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetWorkflowRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetWorkflowRunResponse(data, context);
const response: GetWorkflowRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetWorkflowRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetWorkflowRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetWorkflowRunPropertiesCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetWorkflowRunPropertiesCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetWorkflowRunPropertiesCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetWorkflowRunPropertiesResponse(data, context);
const response: GetWorkflowRunPropertiesCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetWorkflowRunPropertiesCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetWorkflowRunPropertiesCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1GetWorkflowRunsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetWorkflowRunsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1GetWorkflowRunsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1GetWorkflowRunsResponse(data, context);
const response: GetWorkflowRunsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1GetWorkflowRunsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<GetWorkflowRunsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ImportCatalogToGlueCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ImportCatalogToGlueCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ImportCatalogToGlueCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ImportCatalogToGlueResponse(data, context);
const response: ImportCatalogToGlueCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ImportCatalogToGlueCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ImportCatalogToGlueCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ListBlueprintsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListBlueprintsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListBlueprintsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListBlueprintsResponse(data, context);
const response: ListBlueprintsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListBlueprintsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListBlueprintsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
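/**
 * Note: `ListCrawlers` models only `OperationTimeoutException`; any other
 * service error surfaces through the default (unmodeled) branch of its
 * error handler.
 */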
export const deserializeAws_json1_1ListCrawlersCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListCrawlersCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListCrawlersCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListCrawlersResponse(data, context);
const response: ListCrawlersCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListCrawlersCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListCrawlersCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ListDevEndpointsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListDevEndpointsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListDevEndpointsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListDevEndpointsResponse(data, context);
const response: ListDevEndpointsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListDevEndpointsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListDevEndpointsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ListJobsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListJobsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListJobsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListJobsResponse(data, context);
const response: ListJobsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListJobsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListJobsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ListMLTransformsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListMLTransformsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListMLTransformsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListMLTransformsResponse(data, context);
const response: ListMLTransformsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListMLTransformsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListMLTransformsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
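/**
 * The Schema Registry operations that follow (`ListRegistries`, `ListSchemas`,
 * `ListSchemaVersions`) add `AccessDeniedException` to their modeled error
 * sets.
 */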
export const deserializeAws_json1_1ListRegistriesCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListRegistriesCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListRegistriesCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListRegistriesResponse(data, context);
const response: ListRegistriesCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListRegistriesCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListRegistriesCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ListSchemasCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListSchemasCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListSchemasCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListSchemasResponse(data, context);
const response: ListSchemasCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListSchemasCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListSchemasCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ListSchemaVersionsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListSchemaVersionsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListSchemaVersionsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListSchemaVersionsResponse(data, context);
const response: ListSchemaVersionsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListSchemaVersionsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListSchemaVersionsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ListTriggersCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListTriggersCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListTriggersCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListTriggersResponse(data, context);
const response: ListTriggersCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListTriggersCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListTriggersCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ListWorkflowsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListWorkflowsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ListWorkflowsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ListWorkflowsResponse(data, context);
const response: ListWorkflowsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ListWorkflowsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ListWorkflowsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1PutDataCatalogEncryptionSettingsCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<PutDataCatalogEncryptionSettingsCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1PutDataCatalogEncryptionSettingsCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1PutDataCatalogEncryptionSettingsResponse(data, context);
const response: PutDataCatalogEncryptionSettingsCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1PutDataCatalogEncryptionSettingsCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<PutDataCatalogEncryptionSettingsCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
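/**
 * `PutResourcePolicy` additionally models `ConditionCheckFailureException`,
 * presumably a failed precondition on the policy update (e.g. a policy-hash
 * mismatch); the exact semantics are assumed, as they are not visible from
 * this file.
 */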
export const deserializeAws_json1_1PutResourcePolicyCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<PutResourcePolicyCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1PutResourcePolicyCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1PutResourcePolicyResponse(data, context);
const response: PutResourcePolicyCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1PutResourcePolicyCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<PutResourcePolicyCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConditionCheckFailureException":
case "com.amazonaws.glue#ConditionCheckFailureException":
response = {
...(await deserializeAws_json1_1ConditionCheckFailureExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1PutSchemaVersionMetadataCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<PutSchemaVersionMetadataCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1PutSchemaVersionMetadataCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1PutSchemaVersionMetadataResponse(data, context);
const response: PutSchemaVersionMetadataCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1PutSchemaVersionMetadataCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<PutSchemaVersionMetadataCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1PutWorkflowRunPropertiesCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<PutWorkflowRunPropertiesCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1PutWorkflowRunPropertiesCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1PutWorkflowRunPropertiesResponse(data, context);
const response: PutWorkflowRunPropertiesCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1PutWorkflowRunPropertiesCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<PutWorkflowRunPropertiesCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AlreadyExistsException":
case "com.amazonaws.glue#AlreadyExistsException":
response = {
...(await deserializeAws_json1_1AlreadyExistsExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1QuerySchemaVersionMetadataCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<QuerySchemaVersionMetadataCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1QuerySchemaVersionMetadataCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1QuerySchemaVersionMetadataResponse(data, context);
const response: QuerySchemaVersionMetadataCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1QuerySchemaVersionMetadataCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<QuerySchemaVersionMetadataCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1RegisterSchemaVersionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<RegisterSchemaVersionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1RegisterSchemaVersionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1RegisterSchemaVersionResponse(data, context);
const response: RegisterSchemaVersionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1RegisterSchemaVersionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<RegisterSchemaVersionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1RemoveSchemaVersionMetadataCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<RemoveSchemaVersionMetadataCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1RemoveSchemaVersionMetadataCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1RemoveSchemaVersionMetadataResponse(data, context);
const response: RemoveSchemaVersionMetadataCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1RemoveSchemaVersionMetadataCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<RemoveSchemaVersionMetadataCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1ResetJobBookmarkCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ResetJobBookmarkCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ResetJobBookmarkCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ResetJobBookmarkResponse(data, context);
const response: ResetJobBookmarkCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ResetJobBookmarkCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ResetJobBookmarkCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
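/**
 * `ResumeWorkflowRun` models the workflow-specific faults
 * `ConcurrentRunsExceededException` and `IllegalWorkflowStateException` in
 * addition to the common Glue error set.
 */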
export const deserializeAws_json1_1ResumeWorkflowRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ResumeWorkflowRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1ResumeWorkflowRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1ResumeWorkflowRunResponse(data, context);
const response: ResumeWorkflowRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1ResumeWorkflowRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<ResumeWorkflowRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentRunsExceededException":
case "com.amazonaws.glue#ConcurrentRunsExceededException":
response = {
...(await deserializeAws_json1_1ConcurrentRunsExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "IllegalWorkflowStateException":
case "com.amazonaws.glue#IllegalWorkflowStateException":
response = {
...(await deserializeAws_json1_1IllegalWorkflowStateExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1SearchTablesCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<SearchTablesCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1SearchTablesCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1SearchTablesResponse(data, context);
const response: SearchTablesCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1SearchTablesCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<SearchTablesCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
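/**
 * `StartBlueprintRun` models `IllegalBlueprintStateException` and
 * `ResourceNumberLimitExceededException` alongside the common error set.
 */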
export const deserializeAws_json1_1StartBlueprintRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartBlueprintRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartBlueprintRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartBlueprintRunResponse(data, context);
const response: StartBlueprintRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartBlueprintRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartBlueprintRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "IllegalBlueprintStateException":
case "com.amazonaws.glue#IllegalBlueprintStateException":
response = {
...(await deserializeAws_json1_1IllegalBlueprintStateExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
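// A minimal usage sketch (editorial, not part of the generated code):
// applications never call these deserializers directly. They are wired into
// each command's middleware stack, so the `Promise.reject(...)` above
// surfaces as the error thrown by `client.send(...)`, and the `name` set in
// the switch is what callers branch on:
//
//   import { GlueClient, StartBlueprintRunCommand } from "@aws-sdk/client-glue";
//
//   async function runBlueprint(): Promise<void> {
//     const client = new GlueClient({ region: "us-east-1" });
//     try {
//       const { RunId } = await client.send(
//         new StartBlueprintRunCommand({
//           BlueprintName: "my-blueprint", // illustrative values
//           RoleArn: "arn:aws:iam::123456789012:role/GlueBlueprintRole",
//         })
//       );
//       console.log("started blueprint run", RunId);
//     } catch (err: any) {
//       if (err.name === "EntityNotFoundException") {
//         // matches the EntityNotFoundException case handled above
//       } else {
//         throw err;
//       }
//     }
//   }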
export const deserializeAws_json1_1StartCrawlerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartCrawlerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartCrawlerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartCrawlerResponse(data, context);
const response: StartCrawlerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartCrawlerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartCrawlerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "CrawlerRunningException":
case "com.amazonaws.glue#CrawlerRunningException":
response = {
...(await deserializeAws_json1_1CrawlerRunningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StartCrawlerScheduleCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartCrawlerScheduleCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartCrawlerScheduleCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartCrawlerScheduleResponse(data, context);
const response: StartCrawlerScheduleCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartCrawlerScheduleCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartCrawlerScheduleCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "NoScheduleException":
case "com.amazonaws.glue#NoScheduleException":
response = {
...(await deserializeAws_json1_1NoScheduleExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "SchedulerRunningException":
case "com.amazonaws.glue#SchedulerRunningException":
response = {
...(await deserializeAws_json1_1SchedulerRunningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "SchedulerTransitioningException":
case "com.amazonaws.glue#SchedulerTransitioningException":
response = {
...(await deserializeAws_json1_1SchedulerTransitioningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
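// The scheduler errors handled above (NoScheduleException,
// SchedulerRunningException, SchedulerTransitioningException) mirror the
// crawler-schedule state machine: starting a schedule fails when none is
// defined, when it is already running, or while it is mid-transition. The
// StopCrawlerSchedule deserializer later in this file covers the inverse
// states (SchedulerNotRunningException, SchedulerTransitioningException).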
export const deserializeAws_json1_1StartExportLabelsTaskRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartExportLabelsTaskRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartExportLabelsTaskRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartExportLabelsTaskRunResponse(data, context);
const response: StartExportLabelsTaskRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartExportLabelsTaskRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartExportLabelsTaskRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StartImportLabelsTaskRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartImportLabelsTaskRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartImportLabelsTaskRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartImportLabelsTaskRunResponse(data, context);
const response: StartImportLabelsTaskRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartImportLabelsTaskRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartImportLabelsTaskRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StartJobRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartJobRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartJobRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartJobRunResponse(data, context);
const response: StartJobRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartJobRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartJobRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentRunsExceededException":
case "com.amazonaws.glue#ConcurrentRunsExceededException":
response = {
...(await deserializeAws_json1_1ConcurrentRunsExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
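// ConcurrentRunsExceededException (handled above) signals that too many runs
// of the job are already executing rather than a malformed request, so it is
// a reasonable candidate for a bounded retry. A hedged sketch; the helper
// name and backoff policy are illustrative, not part of this module:
//
//   import { GlueClient, StartJobRunCommand } from "@aws-sdk/client-glue";
//
//   async function startJobRunWithRetry(client: GlueClient, jobName: string, attempts = 3): Promise<string | undefined> {
//     for (let i = 0; i < attempts; i++) {
//       try {
//         const { JobRunId } = await client.send(new StartJobRunCommand({ JobName: jobName }));
//         return JobRunId;
//       } catch (err: any) {
//         if (err.name !== "ConcurrentRunsExceededException" || i === attempts - 1) throw err;
//         await new Promise((resolve) => setTimeout(resolve, 2 ** i * 1000)); // simple exponential backoff
//       }
//     }
//   }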
export const deserializeAws_json1_1StartMLEvaluationTaskRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartMLEvaluationTaskRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartMLEvaluationTaskRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartMLEvaluationTaskRunResponse(data, context);
const response: StartMLEvaluationTaskRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartMLEvaluationTaskRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartMLEvaluationTaskRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentRunsExceededException":
case "com.amazonaws.glue#ConcurrentRunsExceededException":
response = {
...(await deserializeAws_json1_1ConcurrentRunsExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "MLTransformNotReadyException":
case "com.amazonaws.glue#MLTransformNotReadyException":
response = {
...(await deserializeAws_json1_1MLTransformNotReadyExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StartMLLabelingSetGenerationTaskRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartMLLabelingSetGenerationTaskRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartMLLabelingSetGenerationTaskRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartMLLabelingSetGenerationTaskRunResponse(data, context);
const response: StartMLLabelingSetGenerationTaskRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartMLLabelingSetGenerationTaskRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartMLLabelingSetGenerationTaskRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentRunsExceededException":
case "com.amazonaws.glue#ConcurrentRunsExceededException":
response = {
...(await deserializeAws_json1_1ConcurrentRunsExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StartTriggerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartTriggerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartTriggerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartTriggerResponse(data, context);
const response: StartTriggerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartTriggerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartTriggerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentRunsExceededException":
case "com.amazonaws.glue#ConcurrentRunsExceededException":
response = {
...(await deserializeAws_json1_1ConcurrentRunsExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StartWorkflowRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartWorkflowRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StartWorkflowRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StartWorkflowRunResponse(data, context);
const response: StartWorkflowRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StartWorkflowRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StartWorkflowRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentRunsExceededException":
case "com.amazonaws.glue#ConcurrentRunsExceededException":
response = {
...(await deserializeAws_json1_1ConcurrentRunsExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StopCrawlerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StopCrawlerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StopCrawlerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StopCrawlerResponse(data, context);
const response: StopCrawlerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StopCrawlerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StopCrawlerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "CrawlerNotRunningException":
case "com.amazonaws.glue#CrawlerNotRunningException":
response = {
...(await deserializeAws_json1_1CrawlerNotRunningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "CrawlerStoppingException":
case "com.amazonaws.glue#CrawlerStoppingException":
response = {
...(await deserializeAws_json1_1CrawlerStoppingExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StopCrawlerScheduleCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StopCrawlerScheduleCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StopCrawlerScheduleCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StopCrawlerScheduleResponse(data, context);
const response: StopCrawlerScheduleCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StopCrawlerScheduleCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StopCrawlerScheduleCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "SchedulerNotRunningException":
case "com.amazonaws.glue#SchedulerNotRunningException":
response = {
...(await deserializeAws_json1_1SchedulerNotRunningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "SchedulerTransitioningException":
case "com.amazonaws.glue#SchedulerTransitioningException":
response = {
...(await deserializeAws_json1_1SchedulerTransitioningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StopTriggerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StopTriggerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StopTriggerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StopTriggerResponse(data, context);
const response: StopTriggerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StopTriggerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StopTriggerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1StopWorkflowRunCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StopWorkflowRunCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1StopWorkflowRunCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1StopWorkflowRunResponse(data, context);
const response: StopWorkflowRunCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1StopWorkflowRunCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<StopWorkflowRunCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "IllegalWorkflowStateException":
case "com.amazonaws.glue#IllegalWorkflowStateException":
response = {
...(await deserializeAws_json1_1IllegalWorkflowStateExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1TagResourceCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<TagResourceCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1TagResourceCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1TagResourceResponse(data, context);
const response: TagResourceCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1TagResourceCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<TagResourceCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
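// Errors that reach the `default:` branch carry no modeled type: `name` is
// taken from the raw body's `code`/`Code` field (or stays "UnknownError") and
// `$fault` is set to "client" regardless of the HTTP status, a quirk of the
// generated fallback. Callers who need to tell such unmodeled failures apart
// from the modeled cases can inspect those fields together with the status
// recorded by `deserializeMetadata`:
//
//   try {
//     await client.send(new TagResourceCommand({ ResourceArn: resourceArn, TagsToAdd: tags })); // illustrative inputs
//   } catch (err: any) {
//     console.error(err.name, err.$fault, err.$metadata?.httpStatusCode);
//   }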
export const deserializeAws_json1_1UntagResourceCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UntagResourceCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UntagResourceCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UntagResourceResponse(data, context);
const response: UntagResourceCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UntagResourceCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UntagResourceCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateBlueprintCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateBlueprintCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateBlueprintCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateBlueprintResponse(data, context);
const response: UpdateBlueprintCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateBlueprintCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateBlueprintCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "IllegalBlueprintStateException":
case "com.amazonaws.glue#IllegalBlueprintStateException":
response = {
...(await deserializeAws_json1_1IllegalBlueprintStateExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateClassifierCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateClassifierCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateClassifierCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateClassifierResponse(data, context);
const response: UpdateClassifierCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateClassifierCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateClassifierCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "VersionMismatchException":
case "com.amazonaws.glue#VersionMismatchException":
response = {
...(await deserializeAws_json1_1VersionMismatchExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateColumnStatisticsForPartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateColumnStatisticsForPartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateColumnStatisticsForPartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateColumnStatisticsForPartitionResponse(data, context);
const response: UpdateColumnStatisticsForPartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateColumnStatisticsForPartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateColumnStatisticsForPartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateColumnStatisticsForTableCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateColumnStatisticsForTableCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateColumnStatisticsForTableCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateColumnStatisticsForTableResponse(data, context);
const response: UpdateColumnStatisticsForTableCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateColumnStatisticsForTableCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateColumnStatisticsForTableCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateConnectionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateConnectionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateConnectionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateConnectionResponse(data, context);
const response: UpdateConnectionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateConnectionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateConnectionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode: string = "UnknownError";
errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateCrawlerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateCrawlerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateCrawlerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateCrawlerResponse(data, context);
const response: UpdateCrawlerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateCrawlerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateCrawlerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "CrawlerRunningException":
case "com.amazonaws.glue#CrawlerRunningException":
response = {
...(await deserializeAws_json1_1CrawlerRunningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "VersionMismatchException":
case "com.amazonaws.glue#VersionMismatchException":
response = {
...(await deserializeAws_json1_1VersionMismatchExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateCrawlerScheduleCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateCrawlerScheduleCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateCrawlerScheduleCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateCrawlerScheduleResponse(data, context);
const response: UpdateCrawlerScheduleCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateCrawlerScheduleCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateCrawlerScheduleCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "SchedulerTransitioningException":
case "com.amazonaws.glue#SchedulerTransitioningException":
response = {
...(await deserializeAws_json1_1SchedulerTransitioningExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "VersionMismatchException":
case "com.amazonaws.glue#VersionMismatchException":
response = {
...(await deserializeAws_json1_1VersionMismatchExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateDatabaseCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateDatabaseCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateDatabaseCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateDatabaseResponse(data, context);
const response: UpdateDatabaseCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateDatabaseCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateDatabaseCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateDevEndpointCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateDevEndpointCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateDevEndpointCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateDevEndpointResponse(data, context);
const response: UpdateDevEndpointCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateDevEndpointCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateDevEndpointCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ValidationException":
case "com.amazonaws.glue#ValidationException":
response = {
...(await deserializeAws_json1_1ValidationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateJobCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateJobCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateJobCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateJobResponse(data, context);
const response: UpdateJobCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateJobCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateJobCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateMLTransformCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateMLTransformCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateMLTransformCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateMLTransformResponse(data, context);
const response: UpdateMLTransformCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateMLTransformCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateMLTransformCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdatePartitionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdatePartitionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdatePartitionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdatePartitionResponse(data, context);
const response: UpdatePartitionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdatePartitionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdatePartitionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateRegistryCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateRegistryCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateRegistryCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateRegistryResponse(data, context);
const response: UpdateRegistryCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateRegistryCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateRegistryCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateSchemaCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateSchemaCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateSchemaCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateSchemaResponse(data, context);
const response: UpdateSchemaCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateSchemaCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateSchemaCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "AccessDeniedException":
case "com.amazonaws.glue#AccessDeniedException":
response = {
...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateTableCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateTableCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateTableCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateTableResponse(data, context);
const response: UpdateTableCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateTableCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateTableCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "ResourceNumberLimitExceededException":
case "com.amazonaws.glue#ResourceNumberLimitExceededException":
response = {
...(await deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateTriggerCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateTriggerCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateTriggerCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateTriggerResponse(data, context);
const response: UpdateTriggerCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateTriggerCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateTriggerCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateUserDefinedFunctionCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateUserDefinedFunctionCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateUserDefinedFunctionCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateUserDefinedFunctionResponse(data, context);
const response: UpdateUserDefinedFunctionCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateUserDefinedFunctionCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateUserDefinedFunctionCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "GlueEncryptionException":
case "com.amazonaws.glue#GlueEncryptionException":
response = {
...(await deserializeAws_json1_1GlueEncryptionExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
export const deserializeAws_json1_1UpdateWorkflowCommand = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateWorkflowCommandOutput> => {
if (output.statusCode >= 300) {
return deserializeAws_json1_1UpdateWorkflowCommandError(output, context);
}
const data: any = await parseBody(output.body, context);
let contents: any = {};
contents = deserializeAws_json1_1UpdateWorkflowResponse(data, context);
const response: UpdateWorkflowCommandOutput = {
$metadata: deserializeMetadata(output),
...contents,
};
return Promise.resolve(response);
};
const deserializeAws_json1_1UpdateWorkflowCommandError = async (
output: __HttpResponse,
context: __SerdeContext
): Promise<UpdateWorkflowCommandOutput> => {
const parsedOutput: any = {
...output,
body: await parseBody(output.body, context),
};
let response: __SmithyException & __MetadataBearer & { [key: string]: any };
let errorCode = loadRestJsonErrorCode(output, parsedOutput.body) ?? "UnknownError";
switch (errorCode) {
case "ConcurrentModificationException":
case "com.amazonaws.glue#ConcurrentModificationException":
response = {
...(await deserializeAws_json1_1ConcurrentModificationExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "EntityNotFoundException":
case "com.amazonaws.glue#EntityNotFoundException":
response = {
...(await deserializeAws_json1_1EntityNotFoundExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InternalServiceException":
case "com.amazonaws.glue#InternalServiceException":
response = {
...(await deserializeAws_json1_1InternalServiceExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "InvalidInputException":
case "com.amazonaws.glue#InvalidInputException":
response = {
...(await deserializeAws_json1_1InvalidInputExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
case "OperationTimeoutException":
case "com.amazonaws.glue#OperationTimeoutException":
response = {
...(await deserializeAws_json1_1OperationTimeoutExceptionResponse(parsedOutput, context)),
name: errorCode,
$metadata: deserializeMetadata(output),
};
break;
default:
const parsedBody = parsedOutput.body;
errorCode = parsedBody.code || parsedBody.Code || errorCode;
response = {
...parsedBody,
name: `${errorCode}`,
message: parsedBody.message || parsedBody.Message || errorCode,
$fault: "client",
$metadata: deserializeMetadata(output),
} as any;
}
const message = response.message || response.Message || errorCode;
response.message = message;
delete response.Message;
return Promise.reject(Object.assign(new Error(message), response));
};
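// Modeled exception deserializers. Each helper parses the error body into the
// corresponding exception shape and stamps it with its name, its fault
// classification, and the response metadata.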
const deserializeAws_json1_1AccessDeniedExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<AccessDeniedException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1AccessDeniedException(body, context);
const contents: AccessDeniedException = {
name: "AccessDeniedException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1AlreadyExistsExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<AlreadyExistsException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1AlreadyExistsException(body, context);
const contents: AlreadyExistsException = {
name: "AlreadyExistsException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1ConcurrentModificationExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<ConcurrentModificationException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1ConcurrentModificationException(body, context);
const contents: ConcurrentModificationException = {
name: "ConcurrentModificationException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1ConcurrentRunsExceededExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<ConcurrentRunsExceededException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1ConcurrentRunsExceededException(body, context);
const contents: ConcurrentRunsExceededException = {
name: "ConcurrentRunsExceededException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1ConditionCheckFailureExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<ConditionCheckFailureException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1ConditionCheckFailureException(body, context);
const contents: ConditionCheckFailureException = {
name: "ConditionCheckFailureException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1ConflictExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<ConflictException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1ConflictException(body, context);
const contents: ConflictException = {
name: "ConflictException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1CrawlerNotRunningExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<CrawlerNotRunningException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1CrawlerNotRunningException(body, context);
const contents: CrawlerNotRunningException = {
name: "CrawlerNotRunningException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1CrawlerRunningExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<CrawlerRunningException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1CrawlerRunningException(body, context);
const contents: CrawlerRunningException = {
name: "CrawlerRunningException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1CrawlerStoppingExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<CrawlerStoppingException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1CrawlerStoppingException(body, context);
const contents: CrawlerStoppingException = {
name: "CrawlerStoppingException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1EntityNotFoundExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<EntityNotFoundException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1EntityNotFoundException(body, context);
const contents: EntityNotFoundException = {
name: "EntityNotFoundException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1GlueEncryptionExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<GlueEncryptionException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1GlueEncryptionException(body, context);
const contents: GlueEncryptionException = {
name: "GlueEncryptionException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1IdempotentParameterMismatchExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<IdempotentParameterMismatchException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1IdempotentParameterMismatchException(body, context);
const contents: IdempotentParameterMismatchException = {
name: "IdempotentParameterMismatchException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1IllegalBlueprintStateExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<IllegalBlueprintStateException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1IllegalBlueprintStateException(body, context);
const contents: IllegalBlueprintStateException = {
name: "IllegalBlueprintStateException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1IllegalWorkflowStateExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<IllegalWorkflowStateException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1IllegalWorkflowStateException(body, context);
const contents: IllegalWorkflowStateException = {
name: "IllegalWorkflowStateException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1InternalServiceExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<InternalServiceException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1InternalServiceException(body, context);
const contents: InternalServiceException = {
name: "InternalServiceException",
$fault: "server",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
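// Note: among the handlers in this section, InternalServiceException is the
// only error classified as a server fault ($fault: "server"); every other
// Glue exception here is a client fault.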
const deserializeAws_json1_1InvalidInputExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<InvalidInputException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1InvalidInputException(body, context);
const contents: InvalidInputException = {
name: "InvalidInputException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1MLTransformNotReadyExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<MLTransformNotReadyException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1MLTransformNotReadyException(body, context);
const contents: MLTransformNotReadyException = {
name: "MLTransformNotReadyException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1NoScheduleExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<NoScheduleException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1NoScheduleException(body, context);
const contents: NoScheduleException = {
name: "NoScheduleException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1OperationTimeoutExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<OperationTimeoutException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1OperationTimeoutException(body, context);
const contents: OperationTimeoutException = {
name: "OperationTimeoutException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1ResourceNumberLimitExceededExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<ResourceNumberLimitExceededException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1ResourceNumberLimitExceededException(body, context);
const contents: ResourceNumberLimitExceededException = {
name: "ResourceNumberLimitExceededException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1SchedulerNotRunningExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<SchedulerNotRunningException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1SchedulerNotRunningException(body, context);
const contents: SchedulerNotRunningException = {
name: "SchedulerNotRunningException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1SchedulerRunningExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<SchedulerRunningException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1SchedulerRunningException(body, context);
const contents: SchedulerRunningException = {
name: "SchedulerRunningException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1SchedulerTransitioningExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<SchedulerTransitioningException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1SchedulerTransitioningException(body, context);
const contents: SchedulerTransitioningException = {
name: "SchedulerTransitioningException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1ValidationExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<ValidationException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1ValidationException(body, context);
const contents: ValidationException = {
name: "ValidationException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
const deserializeAws_json1_1VersionMismatchExceptionResponse = async (
parsedOutput: any,
context: __SerdeContext
): Promise<VersionMismatchException> => {
const body = parsedOutput.body;
const deserialized: any = deserializeAws_json1_1VersionMismatchException(body, context);
const contents: VersionMismatchException = {
name: "VersionMismatchException",
$fault: "client",
$metadata: deserializeMetadata(parsedOutput),
...deserialized,
};
return contents;
};
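// Request shape serializers. Each serializer spreads a member into the result
// only when it is neither undefined nor null, so optional fields are omitted
// from the wire JSON instead of being sent as null.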
const serializeAws_json1_1Action = (input: Action, context: __SerdeContext): any => {
return {
...(input.Arguments !== undefined &&
input.Arguments !== null && { Arguments: serializeAws_json1_1GenericMap(input.Arguments, context) }),
...(input.CrawlerName !== undefined && input.CrawlerName !== null && { CrawlerName: input.CrawlerName }),
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.NotificationProperty !== undefined &&
input.NotificationProperty !== null && {
NotificationProperty: serializeAws_json1_1NotificationProperty(input.NotificationProperty, context),
}),
...(input.SecurityConfiguration !== undefined &&
input.SecurityConfiguration !== null && { SecurityConfiguration: input.SecurityConfiguration }),
...(input.Timeout !== undefined && input.Timeout !== null && { Timeout: input.Timeout }),
};
};
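// Illustrative call (hypothetical values, not from the model):
//   serializeAws_json1_1Action({ JobName: "nightly-etl", Timeout: 30 }, context)
// yields { JobName: "nightly-etl", Timeout: 30 }; absent members such as
// Arguments are dropped entirely.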
const serializeAws_json1_1ActionList = (input: Action[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1Action(entry, context);
});
};
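// List serializers share this shape: nullish entries are filtered out first,
// and each surviving entry is serialized (or passed through, for primitive
// lists), so input arrays never emit null elements on the wire.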
const serializeAws_json1_1AdditionalPlanOptionsMap = (
input: { [key: string]: string },
context: __SerdeContext
): any => {
return Object.entries(input).reduce((acc: { [key: string]: any }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: value,
};
}, {});
};
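// Map serializers fold string-keyed input into a plain object with reduce,
// skipping null values; this is the map-shaped counterpart of the list
// pattern above.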
const serializeAws_json1_1BatchCreatePartitionRequest = (
input: BatchCreatePartitionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionInputList !== undefined &&
input.PartitionInputList !== null && {
PartitionInputList: serializeAws_json1_1PartitionInputList(input.PartitionInputList, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1BatchDeleteConnectionRequest = (
input: BatchDeleteConnectionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ConnectionNameList !== undefined &&
input.ConnectionNameList !== null && {
ConnectionNameList: serializeAws_json1_1DeleteConnectionNameList(input.ConnectionNameList, context),
}),
};
};
const serializeAws_json1_1BatchDeletePartitionRequest = (
input: BatchDeletePartitionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionsToDelete !== undefined &&
input.PartitionsToDelete !== null && {
PartitionsToDelete: serializeAws_json1_1BatchDeletePartitionValueList(input.PartitionsToDelete, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1BatchDeletePartitionValueList = (
input: PartitionValueList[],
context: __SerdeContext
): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1PartitionValueList(entry, context);
});
};
const serializeAws_json1_1BatchDeleteTableNameList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1BatchDeleteTableRequest = (input: BatchDeleteTableRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TablesToDelete !== undefined &&
input.TablesToDelete !== null && {
TablesToDelete: serializeAws_json1_1BatchDeleteTableNameList(input.TablesToDelete, context),
}),
};
};
const serializeAws_json1_1BatchDeleteTableVersionList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1BatchDeleteTableVersionRequest = (
input: BatchDeleteTableVersionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
...(input.VersionIds !== undefined &&
input.VersionIds !== null && {
VersionIds: serializeAws_json1_1BatchDeleteTableVersionList(input.VersionIds, context),
}),
};
};
const serializeAws_json1_1BatchGetBlueprintNames = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1BatchGetBlueprintsRequest = (
input: BatchGetBlueprintsRequest,
context: __SerdeContext
): any => {
return {
...(input.IncludeBlueprint !== undefined &&
input.IncludeBlueprint !== null && { IncludeBlueprint: input.IncludeBlueprint }),
...(input.IncludeParameterSpec !== undefined &&
input.IncludeParameterSpec !== null && { IncludeParameterSpec: input.IncludeParameterSpec }),
...(input.Names !== undefined &&
input.Names !== null && { Names: serializeAws_json1_1BatchGetBlueprintNames(input.Names, context) }),
};
};
const serializeAws_json1_1BatchGetCrawlersRequest = (input: BatchGetCrawlersRequest, context: __SerdeContext): any => {
return {
...(input.CrawlerNames !== undefined &&
input.CrawlerNames !== null && {
CrawlerNames: serializeAws_json1_1CrawlerNameList(input.CrawlerNames, context),
}),
};
};
const serializeAws_json1_1BatchGetDevEndpointsRequest = (
input: BatchGetDevEndpointsRequest,
context: __SerdeContext
): any => {
return {
...(input.DevEndpointNames !== undefined &&
input.DevEndpointNames !== null && {
DevEndpointNames: serializeAws_json1_1DevEndpointNames(input.DevEndpointNames, context),
}),
};
};
const serializeAws_json1_1BatchGetJobsRequest = (input: BatchGetJobsRequest, context: __SerdeContext): any => {
return {
...(input.JobNames !== undefined &&
input.JobNames !== null && { JobNames: serializeAws_json1_1JobNameList(input.JobNames, context) }),
};
};
const serializeAws_json1_1BatchGetPartitionRequest = (
input: BatchGetPartitionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionsToGet !== undefined &&
input.PartitionsToGet !== null && {
PartitionsToGet: serializeAws_json1_1BatchGetPartitionValueList(input.PartitionsToGet, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1BatchGetPartitionValueList = (input: PartitionValueList[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1PartitionValueList(entry, context);
});
};
const serializeAws_json1_1BatchGetTriggersRequest = (input: BatchGetTriggersRequest, context: __SerdeContext): any => {
return {
...(input.TriggerNames !== undefined &&
input.TriggerNames !== null && {
TriggerNames: serializeAws_json1_1TriggerNameList(input.TriggerNames, context),
}),
};
};
const serializeAws_json1_1BatchGetWorkflowsRequest = (
input: BatchGetWorkflowsRequest,
context: __SerdeContext
): any => {
return {
...(input.IncludeGraph !== undefined && input.IncludeGraph !== null && { IncludeGraph: input.IncludeGraph }),
...(input.Names !== undefined &&
input.Names !== null && { Names: serializeAws_json1_1WorkflowNames(input.Names, context) }),
};
};
const serializeAws_json1_1BatchStopJobRunJobRunIdList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1BatchStopJobRunRequest = (input: BatchStopJobRunRequest, context: __SerdeContext): any => {
return {
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.JobRunIds !== undefined &&
input.JobRunIds !== null && {
JobRunIds: serializeAws_json1_1BatchStopJobRunJobRunIdList(input.JobRunIds, context),
}),
};
};
const serializeAws_json1_1BatchUpdatePartitionRequest = (
input: BatchUpdatePartitionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.Entries !== undefined &&
input.Entries !== null && {
Entries: serializeAws_json1_1BatchUpdatePartitionRequestEntryList(input.Entries, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1BatchUpdatePartitionRequestEntry = (
input: BatchUpdatePartitionRequestEntry,
context: __SerdeContext
): any => {
return {
...(input.PartitionInput !== undefined &&
input.PartitionInput !== null && {
PartitionInput: serializeAws_json1_1PartitionInput(input.PartitionInput, context),
}),
...(input.PartitionValueList !== undefined &&
input.PartitionValueList !== null && {
PartitionValueList: serializeAws_json1_1BoundedPartitionValueList(input.PartitionValueList, context),
}),
};
};
const serializeAws_json1_1BatchUpdatePartitionRequestEntryList = (
input: BatchUpdatePartitionRequestEntry[],
context: __SerdeContext
): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1BatchUpdatePartitionRequestEntry(entry, context);
});
};
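// `__serializeFloat` (from the shared smithy-client helpers) guards non-finite
// doubles: finite numbers pass through as-is, while NaN and +/-Infinity are,
// per the helper's contract, encoded as their string forms ("NaN", "Infinity",
// "-Infinity") so they survive JSON serialization.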
const serializeAws_json1_1BinaryColumnStatisticsData = (
input: BinaryColumnStatisticsData,
context: __SerdeContext
): any => {
return {
...(input.AverageLength !== undefined &&
input.AverageLength !== null && { AverageLength: __serializeFloat(input.AverageLength) }),
...(input.MaximumLength !== undefined && input.MaximumLength !== null && { MaximumLength: input.MaximumLength }),
...(input.NumberOfNulls !== undefined && input.NumberOfNulls !== null && { NumberOfNulls: input.NumberOfNulls }),
};
};
const serializeAws_json1_1BooleanColumnStatisticsData = (
input: BooleanColumnStatisticsData,
context: __SerdeContext
): any => {
return {
...(input.NumberOfFalses !== undefined &&
input.NumberOfFalses !== null && { NumberOfFalses: input.NumberOfFalses }),
...(input.NumberOfNulls !== undefined && input.NumberOfNulls !== null && { NumberOfNulls: input.NumberOfNulls }),
...(input.NumberOfTrues !== undefined && input.NumberOfTrues !== null && { NumberOfTrues: input.NumberOfTrues }),
};
};
const serializeAws_json1_1BoundedPartitionValueList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1CancelMLTaskRunRequest = (input: CancelMLTaskRunRequest, context: __SerdeContext): any => {
return {
...(input.TaskRunId !== undefined && input.TaskRunId !== null && { TaskRunId: input.TaskRunId }),
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1CatalogEntries = (input: CatalogEntry[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1CatalogEntry(entry, context);
});
};
const serializeAws_json1_1CatalogEntry = (input: CatalogEntry, context: __SerdeContext): any => {
return {
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1CatalogTablesList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1CatalogTarget = (input: CatalogTarget, context: __SerdeContext): any => {
return {
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.Tables !== undefined &&
input.Tables !== null && { Tables: serializeAws_json1_1CatalogTablesList(input.Tables, context) }),
};
};
const serializeAws_json1_1CatalogTargetList = (input: CatalogTarget[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1CatalogTarget(entry, context);
});
};
const serializeAws_json1_1CheckSchemaVersionValidityInput = (
input: CheckSchemaVersionValidityInput,
context: __SerdeContext
): any => {
return {
...(input.DataFormat !== undefined && input.DataFormat !== null && { DataFormat: input.DataFormat }),
...(input.SchemaDefinition !== undefined &&
input.SchemaDefinition !== null && { SchemaDefinition: input.SchemaDefinition }),
};
};
const serializeAws_json1_1ClassifierNameList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1CloudWatchEncryption = (input: CloudWatchEncryption, context: __SerdeContext): any => {
return {
...(input.CloudWatchEncryptionMode !== undefined &&
input.CloudWatchEncryptionMode !== null && { CloudWatchEncryptionMode: input.CloudWatchEncryptionMode }),
...(input.KmsKeyArn !== undefined && input.KmsKeyArn !== null && { KmsKeyArn: input.KmsKeyArn }),
};
};
const serializeAws_json1_1CodeGenEdge = (input: CodeGenEdge, context: __SerdeContext): any => {
return {
...(input.Source !== undefined && input.Source !== null && { Source: input.Source }),
...(input.Target !== undefined && input.Target !== null && { Target: input.Target }),
...(input.TargetParameter !== undefined &&
input.TargetParameter !== null && { TargetParameter: input.TargetParameter }),
};
};
const serializeAws_json1_1CodeGenNode = (input: CodeGenNode, context: __SerdeContext): any => {
return {
...(input.Args !== undefined &&
input.Args !== null && { Args: serializeAws_json1_1CodeGenNodeArgs(input.Args, context) }),
...(input.Id !== undefined && input.Id !== null && { Id: input.Id }),
...(input.LineNumber !== undefined && input.LineNumber !== null && { LineNumber: input.LineNumber }),
...(input.NodeType !== undefined && input.NodeType !== null && { NodeType: input.NodeType }),
};
};
const serializeAws_json1_1CodeGenNodeArg = (input: CodeGenNodeArg, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Param !== undefined && input.Param !== null && { Param: input.Param }),
...(input.Value !== undefined && input.Value !== null && { Value: input.Value }),
};
};
const serializeAws_json1_1CodeGenNodeArgs = (input: CodeGenNodeArg[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1CodeGenNodeArg(entry, context);
});
};
const serializeAws_json1_1Column = (input: Column, context: __SerdeContext): any => {
return {
...(input.Comment !== undefined && input.Comment !== null && { Comment: input.Comment }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Parameters !== undefined &&
input.Parameters !== null && { Parameters: serializeAws_json1_1ParametersMap(input.Parameters, context) }),
...(input.Type !== undefined && input.Type !== null && { Type: input.Type }),
};
};
const serializeAws_json1_1ColumnList = (input: Column[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1Column(entry, context);
});
};
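// Timestamp members such as AnalyzedTime are JS Date objects on the input
// shape; the awsJson1_1 protocol sends them as epoch seconds, hence the
// Math.round(date.getTime() / 1000) conversion below.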
const serializeAws_json1_1ColumnStatistics = (input: ColumnStatistics, context: __SerdeContext): any => {
return {
...(input.AnalyzedTime !== undefined &&
input.AnalyzedTime !== null && { AnalyzedTime: Math.round(input.AnalyzedTime.getTime() / 1000) }),
...(input.ColumnName !== undefined && input.ColumnName !== null && { ColumnName: input.ColumnName }),
...(input.ColumnType !== undefined && input.ColumnType !== null && { ColumnType: input.ColumnType }),
...(input.StatisticsData !== undefined &&
input.StatisticsData !== null && {
StatisticsData: serializeAws_json1_1ColumnStatisticsData(input.StatisticsData, context),
}),
};
};
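// ColumnStatisticsData behaves like a tagged union: Type names the statistics
// kind, and exactly one of the *ColumnStatisticsData members is expected to be
// populated to match it. The serializer itself does not enforce that pairing;
// it simply forwards whichever members are set.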
const serializeAws_json1_1ColumnStatisticsData = (input: ColumnStatisticsData, context: __SerdeContext): any => {
return {
...(input.BinaryColumnStatisticsData !== undefined &&
input.BinaryColumnStatisticsData !== null && {
BinaryColumnStatisticsData: serializeAws_json1_1BinaryColumnStatisticsData(
input.BinaryColumnStatisticsData,
context
),
}),
...(input.BooleanColumnStatisticsData !== undefined &&
input.BooleanColumnStatisticsData !== null && {
BooleanColumnStatisticsData: serializeAws_json1_1BooleanColumnStatisticsData(
input.BooleanColumnStatisticsData,
context
),
}),
...(input.DateColumnStatisticsData !== undefined &&
input.DateColumnStatisticsData !== null && {
DateColumnStatisticsData: serializeAws_json1_1DateColumnStatisticsData(input.DateColumnStatisticsData, context),
}),
...(input.DecimalColumnStatisticsData !== undefined &&
input.DecimalColumnStatisticsData !== null && {
DecimalColumnStatisticsData: serializeAws_json1_1DecimalColumnStatisticsData(
input.DecimalColumnStatisticsData,
context
),
}),
...(input.DoubleColumnStatisticsData !== undefined &&
input.DoubleColumnStatisticsData !== null && {
DoubleColumnStatisticsData: serializeAws_json1_1DoubleColumnStatisticsData(
input.DoubleColumnStatisticsData,
context
),
}),
...(input.LongColumnStatisticsData !== undefined &&
input.LongColumnStatisticsData !== null && {
LongColumnStatisticsData: serializeAws_json1_1LongColumnStatisticsData(input.LongColumnStatisticsData, context),
}),
...(input.StringColumnStatisticsData !== undefined &&
input.StringColumnStatisticsData !== null && {
StringColumnStatisticsData: serializeAws_json1_1StringColumnStatisticsData(
input.StringColumnStatisticsData,
context
),
}),
...(input.Type !== undefined && input.Type !== null && { Type: input.Type }),
};
};
const serializeAws_json1_1ColumnValueStringList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1Condition = (input: Condition, context: __SerdeContext): any => {
return {
...(input.CrawlState !== undefined && input.CrawlState !== null && { CrawlState: input.CrawlState }),
...(input.CrawlerName !== undefined && input.CrawlerName !== null && { CrawlerName: input.CrawlerName }),
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.LogicalOperator !== undefined &&
input.LogicalOperator !== null && { LogicalOperator: input.LogicalOperator }),
...(input.State !== undefined && input.State !== null && { State: input.State }),
};
};
const serializeAws_json1_1ConditionList = (input: Condition[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1Condition(entry, context);
});
};
const serializeAws_json1_1ConnectionInput = (input: ConnectionInput, context: __SerdeContext): any => {
return {
...(input.ConnectionProperties !== undefined &&
input.ConnectionProperties !== null && {
ConnectionProperties: serializeAws_json1_1ConnectionProperties(input.ConnectionProperties, context),
}),
...(input.ConnectionType !== undefined &&
input.ConnectionType !== null && { ConnectionType: input.ConnectionType }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.MatchCriteria !== undefined &&
input.MatchCriteria !== null && {
MatchCriteria: serializeAws_json1_1MatchCriteria(input.MatchCriteria, context),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.PhysicalConnectionRequirements !== undefined &&
input.PhysicalConnectionRequirements !== null && {
PhysicalConnectionRequirements: serializeAws_json1_1PhysicalConnectionRequirements(
input.PhysicalConnectionRequirements,
context
),
}),
};
};
const serializeAws_json1_1ConnectionPasswordEncryption = (
input: ConnectionPasswordEncryption,
context: __SerdeContext
): any => {
return {
...(input.AwsKmsKeyId !== undefined && input.AwsKmsKeyId !== null && { AwsKmsKeyId: input.AwsKmsKeyId }),
...(input.ReturnConnectionPasswordEncrypted !== undefined &&
input.ReturnConnectionPasswordEncrypted !== null && {
ReturnConnectionPasswordEncrypted: input.ReturnConnectionPasswordEncrypted,
}),
};
};
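// Map serializers reduce Object.entries into a fresh object, dropping entries
// whose value is null. Keys here are typed `ConnectionPropertyKey | string`,
// so property keys newer than this client's enum still pass through unchanged.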
const serializeAws_json1_1ConnectionProperties = (input: { [key: string]: string }, context: __SerdeContext): any => {
return Object.entries(input).reduce(
(acc: { [key: string]: any }, [key, value]: [ConnectionPropertyKey | string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: value,
};
},
{}
);
};
const serializeAws_json1_1ConnectionsList = (input: ConnectionsList, context: __SerdeContext): any => {
return {
...(input.Connections !== undefined &&
input.Connections !== null && {
Connections: serializeAws_json1_1OrchestrationStringList(input.Connections, context),
}),
};
};
const serializeAws_json1_1CrawlerNameList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1CrawlerTargets = (input: CrawlerTargets, context: __SerdeContext): any => {
return {
...(input.CatalogTargets !== undefined &&
input.CatalogTargets !== null && {
CatalogTargets: serializeAws_json1_1CatalogTargetList(input.CatalogTargets, context),
}),
...(input.DynamoDBTargets !== undefined &&
input.DynamoDBTargets !== null && {
DynamoDBTargets: serializeAws_json1_1DynamoDBTargetList(input.DynamoDBTargets, context),
}),
...(input.JdbcTargets !== undefined &&
input.JdbcTargets !== null && { JdbcTargets: serializeAws_json1_1JdbcTargetList(input.JdbcTargets, context) }),
...(input.MongoDBTargets !== undefined &&
input.MongoDBTargets !== null && {
MongoDBTargets: serializeAws_json1_1MongoDBTargetList(input.MongoDBTargets, context),
}),
...(input.S3Targets !== undefined &&
input.S3Targets !== null && { S3Targets: serializeAws_json1_1S3TargetList(input.S3Targets, context) }),
};
};
const serializeAws_json1_1CreateBlueprintRequest = (input: CreateBlueprintRequest, context: __SerdeContext): any => {
return {
...(input.BlueprintLocation !== undefined &&
input.BlueprintLocation !== null && { BlueprintLocation: input.BlueprintLocation }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1CreateClassifierRequest = (input: CreateClassifierRequest, context: __SerdeContext): any => {
return {
...(input.CsvClassifier !== undefined &&
input.CsvClassifier !== null && {
CsvClassifier: serializeAws_json1_1CreateCsvClassifierRequest(input.CsvClassifier, context),
}),
...(input.GrokClassifier !== undefined &&
input.GrokClassifier !== null && {
GrokClassifier: serializeAws_json1_1CreateGrokClassifierRequest(input.GrokClassifier, context),
}),
...(input.JsonClassifier !== undefined &&
input.JsonClassifier !== null && {
JsonClassifier: serializeAws_json1_1CreateJsonClassifierRequest(input.JsonClassifier, context),
}),
...(input.XMLClassifier !== undefined &&
input.XMLClassifier !== null && {
XMLClassifier: serializeAws_json1_1CreateXMLClassifierRequest(input.XMLClassifier, context),
}),
};
};
const serializeAws_json1_1CreateConnectionRequest = (input: CreateConnectionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ConnectionInput !== undefined &&
input.ConnectionInput !== null && {
ConnectionInput: serializeAws_json1_1ConnectionInput(input.ConnectionInput, context),
}),
};
};
const serializeAws_json1_1CreateCrawlerRequest = (input: CreateCrawlerRequest, context: __SerdeContext): any => {
return {
...(input.Classifiers !== undefined &&
input.Classifiers !== null && {
Classifiers: serializeAws_json1_1ClassifierNameList(input.Classifiers, context),
}),
...(input.Configuration !== undefined && input.Configuration !== null && { Configuration: input.Configuration }),
...(input.CrawlerSecurityConfiguration !== undefined &&
input.CrawlerSecurityConfiguration !== null && {
CrawlerSecurityConfiguration: input.CrawlerSecurityConfiguration,
}),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.LineageConfiguration !== undefined &&
input.LineageConfiguration !== null && {
LineageConfiguration: serializeAws_json1_1LineageConfiguration(input.LineageConfiguration, context),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.RecrawlPolicy !== undefined &&
input.RecrawlPolicy !== null && {
RecrawlPolicy: serializeAws_json1_1RecrawlPolicy(input.RecrawlPolicy, context),
}),
...(input.Role !== undefined && input.Role !== null && { Role: input.Role }),
...(input.Schedule !== undefined && input.Schedule !== null && { Schedule: input.Schedule }),
...(input.SchemaChangePolicy !== undefined &&
input.SchemaChangePolicy !== null && {
SchemaChangePolicy: serializeAws_json1_1SchemaChangePolicy(input.SchemaChangePolicy, context),
}),
...(input.TablePrefix !== undefined && input.TablePrefix !== null && { TablePrefix: input.TablePrefix }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
...(input.Targets !== undefined &&
input.Targets !== null && { Targets: serializeAws_json1_1CrawlerTargets(input.Targets, context) }),
};
};
const serializeAws_json1_1CreateCsvClassifierRequest = (
input: CreateCsvClassifierRequest,
context: __SerdeContext
): any => {
return {
...(input.AllowSingleColumn !== undefined &&
input.AllowSingleColumn !== null && { AllowSingleColumn: input.AllowSingleColumn }),
...(input.ContainsHeader !== undefined &&
input.ContainsHeader !== null && { ContainsHeader: input.ContainsHeader }),
...(input.Delimiter !== undefined && input.Delimiter !== null && { Delimiter: input.Delimiter }),
...(input.DisableValueTrimming !== undefined &&
input.DisableValueTrimming !== null && { DisableValueTrimming: input.DisableValueTrimming }),
...(input.Header !== undefined &&
input.Header !== null && { Header: serializeAws_json1_1CsvHeader(input.Header, context) }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.QuoteSymbol !== undefined && input.QuoteSymbol !== null && { QuoteSymbol: input.QuoteSymbol }),
};
};
const serializeAws_json1_1CreateDatabaseRequest = (input: CreateDatabaseRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseInput !== undefined &&
input.DatabaseInput !== null && {
DatabaseInput: serializeAws_json1_1DatabaseInput(input.DatabaseInput, context),
}),
};
};
const serializeAws_json1_1CreateDevEndpointRequest = (
input: CreateDevEndpointRequest,
context: __SerdeContext
): any => {
return {
...(input.Arguments !== undefined &&
input.Arguments !== null && { Arguments: serializeAws_json1_1MapValue(input.Arguments, context) }),
...(input.EndpointName !== undefined && input.EndpointName !== null && { EndpointName: input.EndpointName }),
...(input.ExtraJarsS3Path !== undefined &&
input.ExtraJarsS3Path !== null && { ExtraJarsS3Path: input.ExtraJarsS3Path }),
...(input.ExtraPythonLibsS3Path !== undefined &&
input.ExtraPythonLibsS3Path !== null && { ExtraPythonLibsS3Path: input.ExtraPythonLibsS3Path }),
...(input.GlueVersion !== undefined && input.GlueVersion !== null && { GlueVersion: input.GlueVersion }),
...(input.NumberOfNodes !== undefined && input.NumberOfNodes !== null && { NumberOfNodes: input.NumberOfNodes }),
...(input.NumberOfWorkers !== undefined &&
input.NumberOfWorkers !== null && { NumberOfWorkers: input.NumberOfWorkers }),
...(input.PublicKey !== undefined && input.PublicKey !== null && { PublicKey: input.PublicKey }),
...(input.PublicKeys !== undefined &&
input.PublicKeys !== null && { PublicKeys: serializeAws_json1_1PublicKeysList(input.PublicKeys, context) }),
...(input.RoleArn !== undefined && input.RoleArn !== null && { RoleArn: input.RoleArn }),
...(input.SecurityConfiguration !== undefined &&
input.SecurityConfiguration !== null && { SecurityConfiguration: input.SecurityConfiguration }),
...(input.SecurityGroupIds !== undefined &&
input.SecurityGroupIds !== null && {
SecurityGroupIds: serializeAws_json1_1StringList(input.SecurityGroupIds, context),
}),
...(input.SubnetId !== undefined && input.SubnetId !== null && { SubnetId: input.SubnetId }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
...(input.WorkerType !== undefined && input.WorkerType !== null && { WorkerType: input.WorkerType }),
};
};
const serializeAws_json1_1CreateGrokClassifierRequest = (
input: CreateGrokClassifierRequest,
context: __SerdeContext
): any => {
return {
...(input.Classification !== undefined &&
input.Classification !== null && { Classification: input.Classification }),
...(input.CustomPatterns !== undefined &&
input.CustomPatterns !== null && { CustomPatterns: input.CustomPatterns }),
...(input.GrokPattern !== undefined && input.GrokPattern !== null && { GrokPattern: input.GrokPattern }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1CreateJobRequest = (input: CreateJobRequest, context: __SerdeContext): any => {
return {
...(input.AllocatedCapacity !== undefined &&
input.AllocatedCapacity !== null && { AllocatedCapacity: input.AllocatedCapacity }),
...(input.Command !== undefined &&
input.Command !== null && { Command: serializeAws_json1_1JobCommand(input.Command, context) }),
...(input.Connections !== undefined &&
input.Connections !== null && { Connections: serializeAws_json1_1ConnectionsList(input.Connections, context) }),
...(input.DefaultArguments !== undefined &&
input.DefaultArguments !== null && {
DefaultArguments: serializeAws_json1_1GenericMap(input.DefaultArguments, context),
}),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.ExecutionProperty !== undefined &&
input.ExecutionProperty !== null && {
ExecutionProperty: serializeAws_json1_1ExecutionProperty(input.ExecutionProperty, context),
}),
...(input.GlueVersion !== undefined && input.GlueVersion !== null && { GlueVersion: input.GlueVersion }),
...(input.LogUri !== undefined && input.LogUri !== null && { LogUri: input.LogUri }),
...(input.MaxCapacity !== undefined &&
input.MaxCapacity !== null && { MaxCapacity: __serializeFloat(input.MaxCapacity) }),
...(input.MaxRetries !== undefined && input.MaxRetries !== null && { MaxRetries: input.MaxRetries }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.NonOverridableArguments !== undefined &&
input.NonOverridableArguments !== null && {
NonOverridableArguments: serializeAws_json1_1GenericMap(input.NonOverridableArguments, context),
}),
...(input.NotificationProperty !== undefined &&
input.NotificationProperty !== null && {
NotificationProperty: serializeAws_json1_1NotificationProperty(input.NotificationProperty, context),
}),
...(input.NumberOfWorkers !== undefined &&
input.NumberOfWorkers !== null && { NumberOfWorkers: input.NumberOfWorkers }),
...(input.Role !== undefined && input.Role !== null && { Role: input.Role }),
...(input.SecurityConfiguration !== undefined &&
input.SecurityConfiguration !== null && { SecurityConfiguration: input.SecurityConfiguration }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
...(input.Timeout !== undefined && input.Timeout !== null && { Timeout: input.Timeout }),
...(input.WorkerType !== undefined && input.WorkerType !== null && { WorkerType: input.WorkerType }),
};
};
const serializeAws_json1_1CreateJsonClassifierRequest = (
input: CreateJsonClassifierRequest,
context: __SerdeContext
): any => {
return {
...(input.JsonPath !== undefined && input.JsonPath !== null && { JsonPath: input.JsonPath }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1CreateMLTransformRequest = (
input: CreateMLTransformRequest,
context: __SerdeContext
): any => {
return {
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.GlueVersion !== undefined && input.GlueVersion !== null && { GlueVersion: input.GlueVersion }),
...(input.InputRecordTables !== undefined &&
input.InputRecordTables !== null && {
InputRecordTables: serializeAws_json1_1GlueTables(input.InputRecordTables, context),
}),
...(input.MaxCapacity !== undefined &&
input.MaxCapacity !== null && { MaxCapacity: __serializeFloat(input.MaxCapacity) }),
...(input.MaxRetries !== undefined && input.MaxRetries !== null && { MaxRetries: input.MaxRetries }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.NumberOfWorkers !== undefined &&
input.NumberOfWorkers !== null && { NumberOfWorkers: input.NumberOfWorkers }),
...(input.Parameters !== undefined &&
input.Parameters !== null && { Parameters: serializeAws_json1_1TransformParameters(input.Parameters, context) }),
...(input.Role !== undefined && input.Role !== null && { Role: input.Role }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
...(input.Timeout !== undefined && input.Timeout !== null && { Timeout: input.Timeout }),
...(input.TransformEncryption !== undefined &&
input.TransformEncryption !== null && {
TransformEncryption: serializeAws_json1_1TransformEncryption(input.TransformEncryption, context),
}),
...(input.WorkerType !== undefined && input.WorkerType !== null && { WorkerType: input.WorkerType }),
};
};
const serializeAws_json1_1CreatePartitionIndexRequest = (
input: CreatePartitionIndexRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionIndex !== undefined &&
input.PartitionIndex !== null && {
PartitionIndex: serializeAws_json1_1PartitionIndex(input.PartitionIndex, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1CreatePartitionRequest = (input: CreatePartitionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionInput !== undefined &&
input.PartitionInput !== null && {
PartitionInput: serializeAws_json1_1PartitionInput(input.PartitionInput, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1CreateRegistryInput = (input: CreateRegistryInput, context: __SerdeContext): any => {
return {
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.RegistryName !== undefined && input.RegistryName !== null && { RegistryName: input.RegistryName }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1CreateSchemaInput = (input: CreateSchemaInput, context: __SerdeContext): any => {
return {
...(input.Compatibility !== undefined && input.Compatibility !== null && { Compatibility: input.Compatibility }),
...(input.DataFormat !== undefined && input.DataFormat !== null && { DataFormat: input.DataFormat }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.RegistryId !== undefined &&
input.RegistryId !== null && { RegistryId: serializeAws_json1_1RegistryId(input.RegistryId, context) }),
...(input.SchemaDefinition !== undefined &&
input.SchemaDefinition !== null && { SchemaDefinition: input.SchemaDefinition }),
...(input.SchemaName !== undefined && input.SchemaName !== null && { SchemaName: input.SchemaName }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1CreateScriptRequest = (input: CreateScriptRequest, context: __SerdeContext): any => {
return {
...(input.DagEdges !== undefined &&
input.DagEdges !== null && { DagEdges: serializeAws_json1_1DagEdges(input.DagEdges, context) }),
...(input.DagNodes !== undefined &&
input.DagNodes !== null && { DagNodes: serializeAws_json1_1DagNodes(input.DagNodes, context) }),
...(input.Language !== undefined && input.Language !== null && { Language: input.Language }),
};
};
const serializeAws_json1_1CreateSecurityConfigurationRequest = (
input: CreateSecurityConfigurationRequest,
context: __SerdeContext
): any => {
return {
...(input.EncryptionConfiguration !== undefined &&
input.EncryptionConfiguration !== null && {
EncryptionConfiguration: serializeAws_json1_1EncryptionConfiguration(input.EncryptionConfiguration, context),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1CreateTableRequest = (input: CreateTableRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionIndexes !== undefined &&
input.PartitionIndexes !== null && {
PartitionIndexes: serializeAws_json1_1PartitionIndexList(input.PartitionIndexes, context),
}),
...(input.TableInput !== undefined &&
input.TableInput !== null && { TableInput: serializeAws_json1_1TableInput(input.TableInput, context) }),
};
};
const serializeAws_json1_1CreateTriggerRequest = (input: CreateTriggerRequest, context: __SerdeContext): any => {
return {
...(input.Actions !== undefined &&
input.Actions !== null && { Actions: serializeAws_json1_1ActionList(input.Actions, context) }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.EventBatchingCondition !== undefined &&
input.EventBatchingCondition !== null && {
EventBatchingCondition: serializeAws_json1_1EventBatchingCondition(input.EventBatchingCondition, context),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Predicate !== undefined &&
input.Predicate !== null && { Predicate: serializeAws_json1_1Predicate(input.Predicate, context) }),
...(input.Schedule !== undefined && input.Schedule !== null && { Schedule: input.Schedule }),
...(input.StartOnCreation !== undefined &&
input.StartOnCreation !== null && { StartOnCreation: input.StartOnCreation }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
...(input.Type !== undefined && input.Type !== null && { Type: input.Type }),
...(input.WorkflowName !== undefined && input.WorkflowName !== null && { WorkflowName: input.WorkflowName }),
};
};
const serializeAws_json1_1CreateUserDefinedFunctionRequest = (
input: CreateUserDefinedFunctionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.FunctionInput !== undefined &&
input.FunctionInput !== null && {
FunctionInput: serializeAws_json1_1UserDefinedFunctionInput(input.FunctionInput, context),
}),
};
};
const serializeAws_json1_1CreateWorkflowRequest = (input: CreateWorkflowRequest, context: __SerdeContext): any => {
return {
...(input.DefaultRunProperties !== undefined &&
input.DefaultRunProperties !== null && {
DefaultRunProperties: serializeAws_json1_1WorkflowRunProperties(input.DefaultRunProperties, context),
}),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.MaxConcurrentRuns !== undefined &&
input.MaxConcurrentRuns !== null && { MaxConcurrentRuns: input.MaxConcurrentRuns }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1CreateXMLClassifierRequest = (
input: CreateXMLClassifierRequest,
context: __SerdeContext
): any => {
return {
...(input.Classification !== undefined &&
input.Classification !== null && { Classification: input.Classification }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.RowTag !== undefined && input.RowTag !== null && { RowTag: input.RowTag }),
};
};
const serializeAws_json1_1CsvHeader = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1DagEdges = (input: CodeGenEdge[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1CodeGenEdge(entry, context);
});
};
const serializeAws_json1_1DagNodes = (input: CodeGenNode[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1CodeGenNode(entry, context);
});
};
const serializeAws_json1_1DatabaseIdentifier = (input: DatabaseIdentifier, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
};
};
const serializeAws_json1_1DatabaseInput = (input: DatabaseInput, context: __SerdeContext): any => {
return {
...(input.CreateTableDefaultPermissions !== undefined &&
input.CreateTableDefaultPermissions !== null && {
CreateTableDefaultPermissions: serializeAws_json1_1PrincipalPermissionsList(
input.CreateTableDefaultPermissions,
context
),
}),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.LocationUri !== undefined && input.LocationUri !== null && { LocationUri: input.LocationUri }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Parameters !== undefined &&
input.Parameters !== null && { Parameters: serializeAws_json1_1ParametersMap(input.Parameters, context) }),
...(input.TargetDatabase !== undefined &&
input.TargetDatabase !== null && {
TargetDatabase: serializeAws_json1_1DatabaseIdentifier(input.TargetDatabase, context),
}),
};
};
const serializeAws_json1_1DataCatalogEncryptionSettings = (
input: DataCatalogEncryptionSettings,
context: __SerdeContext
): any => {
return {
...(input.ConnectionPasswordEncryption !== undefined &&
input.ConnectionPasswordEncryption !== null && {
ConnectionPasswordEncryption: serializeAws_json1_1ConnectionPasswordEncryption(
input.ConnectionPasswordEncryption,
context
),
}),
...(input.EncryptionAtRest !== undefined &&
input.EncryptionAtRest !== null && {
EncryptionAtRest: serializeAws_json1_1EncryptionAtRest(input.EncryptionAtRest, context),
}),
};
};
const serializeAws_json1_1DataLakePrincipal = (input: DataLakePrincipal, context: __SerdeContext): any => {
return {
...(input.DataLakePrincipalIdentifier !== undefined &&
input.DataLakePrincipalIdentifier !== null && { DataLakePrincipalIdentifier: input.DataLakePrincipalIdentifier }),
};
};
const serializeAws_json1_1DateColumnStatisticsData = (
input: DateColumnStatisticsData,
context: __SerdeContext
): any => {
return {
...(input.MaximumValue !== undefined &&
input.MaximumValue !== null && { MaximumValue: Math.round(input.MaximumValue.getTime() / 1000) }),
...(input.MinimumValue !== undefined &&
input.MinimumValue !== null && { MinimumValue: Math.round(input.MinimumValue.getTime() / 1000) }),
...(input.NumberOfDistinctValues !== undefined &&
input.NumberOfDistinctValues !== null && { NumberOfDistinctValues: input.NumberOfDistinctValues }),
...(input.NumberOfNulls !== undefined && input.NumberOfNulls !== null && { NumberOfNulls: input.NumberOfNulls }),
};
};
const serializeAws_json1_1DecimalColumnStatisticsData = (
input: DecimalColumnStatisticsData,
context: __SerdeContext
): any => {
return {
...(input.MaximumValue !== undefined &&
input.MaximumValue !== null && { MaximumValue: serializeAws_json1_1DecimalNumber(input.MaximumValue, context) }),
...(input.MinimumValue !== undefined &&
input.MinimumValue !== null && { MinimumValue: serializeAws_json1_1DecimalNumber(input.MinimumValue, context) }),
...(input.NumberOfDistinctValues !== undefined &&
input.NumberOfDistinctValues !== null && { NumberOfDistinctValues: input.NumberOfDistinctValues }),
...(input.NumberOfNulls !== undefined && input.NumberOfNulls !== null && { NumberOfNulls: input.NumberOfNulls }),
};
};
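// DecimalNumber carries an arbitrary-precision value as a Scale plus an
// UnscaledValue byte buffer; the buffer is base64-encoded for the JSON wire
// format via the context's base64Encoder.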
const serializeAws_json1_1DecimalNumber = (input: DecimalNumber, context: __SerdeContext): any => {
return {
...(input.Scale !== undefined && input.Scale !== null && { Scale: input.Scale }),
...(input.UnscaledValue !== undefined &&
input.UnscaledValue !== null && { UnscaledValue: context.base64Encoder(input.UnscaledValue) }),
};
};
const serializeAws_json1_1DeleteBlueprintRequest = (input: DeleteBlueprintRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1DeleteClassifierRequest = (input: DeleteClassifierRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1DeleteColumnStatisticsForPartitionRequest = (
input: DeleteColumnStatisticsForPartitionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ColumnName !== undefined && input.ColumnName !== null && { ColumnName: input.ColumnName }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionValues !== undefined &&
input.PartitionValues !== null && {
PartitionValues: serializeAws_json1_1ValueStringList(input.PartitionValues, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1DeleteColumnStatisticsForTableRequest = (
input: DeleteColumnStatisticsForTableRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ColumnName !== undefined && input.ColumnName !== null && { ColumnName: input.ColumnName }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1DeleteConnectionNameList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1DeleteConnectionRequest = (input: DeleteConnectionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ConnectionName !== undefined &&
input.ConnectionName !== null && { ConnectionName: input.ConnectionName }),
};
};
const serializeAws_json1_1DeleteCrawlerRequest = (input: DeleteCrawlerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1DeleteDatabaseRequest = (input: DeleteDatabaseRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1DeleteDevEndpointRequest = (
input: DeleteDevEndpointRequest,
context: __SerdeContext
): any => {
return {
...(input.EndpointName !== undefined && input.EndpointName !== null && { EndpointName: input.EndpointName }),
};
};
const serializeAws_json1_1DeleteJobRequest = (input: DeleteJobRequest, context: __SerdeContext): any => {
return {
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
};
};
const serializeAws_json1_1DeleteMLTransformRequest = (
input: DeleteMLTransformRequest,
context: __SerdeContext
): any => {
return {
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1DeletePartitionIndexRequest = (
input: DeletePartitionIndexRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.IndexName !== undefined && input.IndexName !== null && { IndexName: input.IndexName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1DeletePartitionRequest = (input: DeletePartitionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionValues !== undefined &&
input.PartitionValues !== null && {
PartitionValues: serializeAws_json1_1ValueStringList(input.PartitionValues, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1DeleteRegistryInput = (input: DeleteRegistryInput, context: __SerdeContext): any => {
return {
...(input.RegistryId !== undefined &&
input.RegistryId !== null && { RegistryId: serializeAws_json1_1RegistryId(input.RegistryId, context) }),
};
};
const serializeAws_json1_1DeleteResourcePolicyRequest = (
input: DeleteResourcePolicyRequest,
context: __SerdeContext
): any => {
return {
...(input.PolicyHashCondition !== undefined &&
input.PolicyHashCondition !== null && { PolicyHashCondition: input.PolicyHashCondition }),
...(input.ResourceArn !== undefined && input.ResourceArn !== null && { ResourceArn: input.ResourceArn }),
};
};
const serializeAws_json1_1DeleteSchemaInput = (input: DeleteSchemaInput, context: __SerdeContext): any => {
return {
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
};
};
const serializeAws_json1_1DeleteSchemaVersionsInput = (
input: DeleteSchemaVersionsInput,
context: __SerdeContext
): any => {
return {
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
...(input.Versions !== undefined && input.Versions !== null && { Versions: input.Versions }),
};
};
const serializeAws_json1_1DeleteSecurityConfigurationRequest = (
input: DeleteSecurityConfigurationRequest,
context: __SerdeContext
): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1DeleteTableRequest = (input: DeleteTableRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1DeleteTableVersionRequest = (
input: DeleteTableVersionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
...(input.VersionId !== undefined && input.VersionId !== null && { VersionId: input.VersionId }),
};
};
const serializeAws_json1_1DeleteTriggerRequest = (input: DeleteTriggerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1DeleteUserDefinedFunctionRequest = (
input: DeleteUserDefinedFunctionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.FunctionName !== undefined && input.FunctionName !== null && { FunctionName: input.FunctionName }),
};
};
const serializeAws_json1_1DeleteWorkflowRequest = (input: DeleteWorkflowRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1DevEndpointCustomLibraries = (
input: DevEndpointCustomLibraries,
context: __SerdeContext
): any => {
return {
...(input.ExtraJarsS3Path !== undefined &&
input.ExtraJarsS3Path !== null && { ExtraJarsS3Path: input.ExtraJarsS3Path }),
...(input.ExtraPythonLibsS3Path !== undefined &&
input.ExtraPythonLibsS3Path !== null && { ExtraPythonLibsS3Path: input.ExtraPythonLibsS3Path }),
};
};
const serializeAws_json1_1DevEndpointNames = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1DoubleColumnStatisticsData = (
input: DoubleColumnStatisticsData,
context: __SerdeContext
): any => {
return {
...(input.MaximumValue !== undefined &&
input.MaximumValue !== null && { MaximumValue: __serializeFloat(input.MaximumValue) }),
...(input.MinimumValue !== undefined &&
input.MinimumValue !== null && { MinimumValue: __serializeFloat(input.MinimumValue) }),
...(input.NumberOfDistinctValues !== undefined &&
input.NumberOfDistinctValues !== null && { NumberOfDistinctValues: input.NumberOfDistinctValues }),
...(input.NumberOfNulls !== undefined && input.NumberOfNulls !== null && { NumberOfNulls: input.NumberOfNulls }),
};
};
const serializeAws_json1_1DynamoDBTarget = (input: DynamoDBTarget, context: __SerdeContext): any => {
return {
...(input.Path !== undefined && input.Path !== null && { Path: input.Path }),
...(input.scanAll !== undefined && input.scanAll !== null && { scanAll: input.scanAll }),
...(input.scanRate !== undefined && input.scanRate !== null && { scanRate: __serializeFloat(input.scanRate) }),
};
};
const serializeAws_json1_1DynamoDBTargetList = (input: DynamoDBTarget[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1DynamoDBTarget(entry, context);
});
};
const serializeAws_json1_1EncryptionAtRest = (input: EncryptionAtRest, context: __SerdeContext): any => {
return {
...(input.CatalogEncryptionMode !== undefined &&
input.CatalogEncryptionMode !== null && { CatalogEncryptionMode: input.CatalogEncryptionMode }),
...(input.SseAwsKmsKeyId !== undefined &&
input.SseAwsKmsKeyId !== null && { SseAwsKmsKeyId: input.SseAwsKmsKeyId }),
};
};
const serializeAws_json1_1EncryptionConfiguration = (input: EncryptionConfiguration, context: __SerdeContext): any => {
return {
...(input.CloudWatchEncryption !== undefined &&
input.CloudWatchEncryption !== null && {
CloudWatchEncryption: serializeAws_json1_1CloudWatchEncryption(input.CloudWatchEncryption, context),
}),
...(input.JobBookmarksEncryption !== undefined &&
input.JobBookmarksEncryption !== null && {
JobBookmarksEncryption: serializeAws_json1_1JobBookmarksEncryption(input.JobBookmarksEncryption, context),
}),
...(input.S3Encryption !== undefined &&
input.S3Encryption !== null && {
S3Encryption: serializeAws_json1_1S3EncryptionList(input.S3Encryption, context),
}),
};
};
const serializeAws_json1_1EventBatchingCondition = (input: EventBatchingCondition, context: __SerdeContext): any => {
return {
...(input.BatchSize !== undefined && input.BatchSize !== null && { BatchSize: input.BatchSize }),
...(input.BatchWindow !== undefined && input.BatchWindow !== null && { BatchWindow: input.BatchWindow }),
};
};
const serializeAws_json1_1ExecutionProperty = (input: ExecutionProperty, context: __SerdeContext): any => {
return {
...(input.MaxConcurrentRuns !== undefined &&
input.MaxConcurrentRuns !== null && { MaxConcurrentRuns: input.MaxConcurrentRuns }),
};
};
const serializeAws_json1_1FindMatchesParameters = (input: FindMatchesParameters, context: __SerdeContext): any => {
return {
...(input.AccuracyCostTradeoff !== undefined &&
input.AccuracyCostTradeoff !== null && { AccuracyCostTradeoff: __serializeFloat(input.AccuracyCostTradeoff) }),
...(input.EnforceProvidedLabels !== undefined &&
input.EnforceProvidedLabels !== null && { EnforceProvidedLabels: input.EnforceProvidedLabels }),
...(input.PrecisionRecallTradeoff !== undefined &&
input.PrecisionRecallTradeoff !== null && {
PrecisionRecallTradeoff: __serializeFloat(input.PrecisionRecallTradeoff),
}),
...(input.PrimaryKeyColumnName !== undefined &&
input.PrimaryKeyColumnName !== null && { PrimaryKeyColumnName: input.PrimaryKeyColumnName }),
};
};
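// GenericMap is the plain string-to-string variant of the map pattern, used by
// members such as DefaultArguments and NonOverridableArguments above.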
const serializeAws_json1_1GenericMap = (input: { [key: string]: string }, context: __SerdeContext): any => {
return Object.entries(input).reduce((acc: { [key: string]: any }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: value,
};
}, {});
};
const serializeAws_json1_1GetBlueprintRequest = (input: GetBlueprintRequest, context: __SerdeContext): any => {
return {
...(input.IncludeBlueprint !== undefined &&
input.IncludeBlueprint !== null && { IncludeBlueprint: input.IncludeBlueprint }),
...(input.IncludeParameterSpec !== undefined &&
input.IncludeParameterSpec !== null && { IncludeParameterSpec: input.IncludeParameterSpec }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetBlueprintRunRequest = (input: GetBlueprintRunRequest, context: __SerdeContext): any => {
return {
...(input.BlueprintName !== undefined && input.BlueprintName !== null && { BlueprintName: input.BlueprintName }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
};
};
const serializeAws_json1_1GetBlueprintRunsRequest = (input: GetBlueprintRunsRequest, context: __SerdeContext): any => {
return {
...(input.BlueprintName !== undefined && input.BlueprintName !== null && { BlueprintName: input.BlueprintName }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetCatalogImportStatusRequest = (
input: GetCatalogImportStatusRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
};
};
const serializeAws_json1_1GetClassifierRequest = (input: GetClassifierRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetClassifiersRequest = (input: GetClassifiersRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetColumnNamesList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1GetColumnStatisticsForPartitionRequest = (
input: GetColumnStatisticsForPartitionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ColumnNames !== undefined &&
input.ColumnNames !== null && {
ColumnNames: serializeAws_json1_1GetColumnNamesList(input.ColumnNames, context),
}),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionValues !== undefined &&
input.PartitionValues !== null && {
PartitionValues: serializeAws_json1_1ValueStringList(input.PartitionValues, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1GetColumnStatisticsForTableRequest = (
input: GetColumnStatisticsForTableRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ColumnNames !== undefined &&
input.ColumnNames !== null && {
ColumnNames: serializeAws_json1_1GetColumnNamesList(input.ColumnNames, context),
}),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1GetConnectionRequest = (input: GetConnectionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.HidePassword !== undefined && input.HidePassword !== null && { HidePassword: input.HidePassword }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetConnectionsFilter = (input: GetConnectionsFilter, context: __SerdeContext): any => {
return {
...(input.ConnectionType !== undefined &&
input.ConnectionType !== null && { ConnectionType: input.ConnectionType }),
...(input.MatchCriteria !== undefined &&
input.MatchCriteria !== null && {
MatchCriteria: serializeAws_json1_1MatchCriteria(input.MatchCriteria, context),
}),
};
};
const serializeAws_json1_1GetConnectionsRequest = (input: GetConnectionsRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.Filter !== undefined &&
input.Filter !== null && { Filter: serializeAws_json1_1GetConnectionsFilter(input.Filter, context) }),
...(input.HidePassword !== undefined && input.HidePassword !== null && { HidePassword: input.HidePassword }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetCrawlerMetricsRequest = (
input: GetCrawlerMetricsRequest,
context: __SerdeContext
): any => {
return {
...(input.CrawlerNameList !== undefined &&
input.CrawlerNameList !== null && {
CrawlerNameList: serializeAws_json1_1CrawlerNameList(input.CrawlerNameList, context),
}),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetCrawlerRequest = (input: GetCrawlerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetCrawlersRequest = (input: GetCrawlersRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetDatabaseRequest = (input: GetDatabaseRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetDatabasesRequest = (input: GetDatabasesRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.ResourceShareType !== undefined &&
input.ResourceShareType !== null && { ResourceShareType: input.ResourceShareType }),
};
};
const serializeAws_json1_1GetDataCatalogEncryptionSettingsRequest = (
input: GetDataCatalogEncryptionSettingsRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
};
};
const serializeAws_json1_1GetDataflowGraphRequest = (input: GetDataflowGraphRequest, context: __SerdeContext): any => {
return {
...(input.PythonScript !== undefined && input.PythonScript !== null && { PythonScript: input.PythonScript }),
};
};
const serializeAws_json1_1GetDevEndpointRequest = (input: GetDevEndpointRequest, context: __SerdeContext): any => {
return {
...(input.EndpointName !== undefined && input.EndpointName !== null && { EndpointName: input.EndpointName }),
};
};
const serializeAws_json1_1GetDevEndpointsRequest = (input: GetDevEndpointsRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetJobBookmarkRequest = (input: GetJobBookmarkRequest, context: __SerdeContext): any => {
return {
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
};
};
const serializeAws_json1_1GetJobRequest = (input: GetJobRequest, context: __SerdeContext): any => {
return {
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
};
};
const serializeAws_json1_1GetJobRunRequest = (input: GetJobRunRequest, context: __SerdeContext): any => {
return {
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.PredecessorsIncluded !== undefined &&
input.PredecessorsIncluded !== null && { PredecessorsIncluded: input.PredecessorsIncluded }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
};
};
const serializeAws_json1_1GetJobRunsRequest = (input: GetJobRunsRequest, context: __SerdeContext): any => {
return {
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetJobsRequest = (input: GetJobsRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetMappingRequest = (input: GetMappingRequest, context: __SerdeContext): any => {
return {
...(input.Location !== undefined &&
input.Location !== null && { Location: serializeAws_json1_1Location(input.Location, context) }),
...(input.Sinks !== undefined &&
input.Sinks !== null && { Sinks: serializeAws_json1_1CatalogEntries(input.Sinks, context) }),
...(input.Source !== undefined &&
input.Source !== null && { Source: serializeAws_json1_1CatalogEntry(input.Source, context) }),
};
};
const serializeAws_json1_1GetMLTaskRunRequest = (input: GetMLTaskRunRequest, context: __SerdeContext): any => {
return {
...(input.TaskRunId !== undefined && input.TaskRunId !== null && { TaskRunId: input.TaskRunId }),
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1GetMLTaskRunsRequest = (input: GetMLTaskRunsRequest, context: __SerdeContext): any => {
return {
...(input.Filter !== undefined &&
input.Filter !== null && { Filter: serializeAws_json1_1TaskRunFilterCriteria(input.Filter, context) }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Sort !== undefined &&
input.Sort !== null && { Sort: serializeAws_json1_1TaskRunSortCriteria(input.Sort, context) }),
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1GetMLTransformRequest = (input: GetMLTransformRequest, context: __SerdeContext): any => {
return {
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1GetMLTransformsRequest = (input: GetMLTransformsRequest, context: __SerdeContext): any => {
return {
...(input.Filter !== undefined &&
input.Filter !== null && { Filter: serializeAws_json1_1TransformFilterCriteria(input.Filter, context) }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Sort !== undefined &&
input.Sort !== null && { Sort: serializeAws_json1_1TransformSortCriteria(input.Sort, context) }),
};
};
const serializeAws_json1_1GetPartitionIndexesRequest = (
input: GetPartitionIndexesRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1GetPartitionRequest = (input: GetPartitionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionValues !== undefined &&
input.PartitionValues !== null && {
PartitionValues: serializeAws_json1_1ValueStringList(input.PartitionValues, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1GetPartitionsRequest = (input: GetPartitionsRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.ExcludeColumnSchema !== undefined &&
input.ExcludeColumnSchema !== null && { ExcludeColumnSchema: input.ExcludeColumnSchema }),
...(input.Expression !== undefined && input.Expression !== null && { Expression: input.Expression }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Segment !== undefined &&
input.Segment !== null && { Segment: serializeAws_json1_1Segment(input.Segment, context) }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1GetPlanRequest = (input: GetPlanRequest, context: __SerdeContext): any => {
return {
...(input.AdditionalPlanOptionsMap !== undefined &&
input.AdditionalPlanOptionsMap !== null && {
AdditionalPlanOptionsMap: serializeAws_json1_1AdditionalPlanOptionsMap(input.AdditionalPlanOptionsMap, context),
}),
...(input.Language !== undefined && input.Language !== null && { Language: input.Language }),
...(input.Location !== undefined &&
input.Location !== null && { Location: serializeAws_json1_1Location(input.Location, context) }),
...(input.Mapping !== undefined &&
input.Mapping !== null && { Mapping: serializeAws_json1_1MappingList(input.Mapping, context) }),
...(input.Sinks !== undefined &&
input.Sinks !== null && { Sinks: serializeAws_json1_1CatalogEntries(input.Sinks, context) }),
...(input.Source !== undefined &&
input.Source !== null && { Source: serializeAws_json1_1CatalogEntry(input.Source, context) }),
};
};
const serializeAws_json1_1GetRegistryInput = (input: GetRegistryInput, context: __SerdeContext): any => {
return {
...(input.RegistryId !== undefined &&
input.RegistryId !== null && { RegistryId: serializeAws_json1_1RegistryId(input.RegistryId, context) }),
};
};
const serializeAws_json1_1GetResourcePoliciesRequest = (
input: GetResourcePoliciesRequest,
context: __SerdeContext
): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetResourcePolicyRequest = (
input: GetResourcePolicyRequest,
context: __SerdeContext
): any => {
return {
...(input.ResourceArn !== undefined && input.ResourceArn !== null && { ResourceArn: input.ResourceArn }),
};
};
const serializeAws_json1_1GetSchemaByDefinitionInput = (
input: GetSchemaByDefinitionInput,
context: __SerdeContext
): any => {
return {
...(input.SchemaDefinition !== undefined &&
input.SchemaDefinition !== null && { SchemaDefinition: input.SchemaDefinition }),
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
};
};
const serializeAws_json1_1GetSchemaInput = (input: GetSchemaInput, context: __SerdeContext): any => {
return {
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
};
};
const serializeAws_json1_1GetSchemaVersionInput = (input: GetSchemaVersionInput, context: __SerdeContext): any => {
return {
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
...(input.SchemaVersionId !== undefined &&
input.SchemaVersionId !== null && { SchemaVersionId: input.SchemaVersionId }),
...(input.SchemaVersionNumber !== undefined &&
input.SchemaVersionNumber !== null && {
SchemaVersionNumber: serializeAws_json1_1SchemaVersionNumber(input.SchemaVersionNumber, context),
}),
};
};
const serializeAws_json1_1GetSchemaVersionsDiffInput = (
input: GetSchemaVersionsDiffInput,
context: __SerdeContext
): any => {
return {
...(input.FirstSchemaVersionNumber !== undefined &&
input.FirstSchemaVersionNumber !== null && {
FirstSchemaVersionNumber: serializeAws_json1_1SchemaVersionNumber(input.FirstSchemaVersionNumber, context),
}),
...(input.SchemaDiffType !== undefined &&
input.SchemaDiffType !== null && { SchemaDiffType: input.SchemaDiffType }),
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
...(input.SecondSchemaVersionNumber !== undefined &&
input.SecondSchemaVersionNumber !== null && {
SecondSchemaVersionNumber: serializeAws_json1_1SchemaVersionNumber(input.SecondSchemaVersionNumber, context),
}),
};
};
const serializeAws_json1_1GetSecurityConfigurationRequest = (
input: GetSecurityConfigurationRequest,
context: __SerdeContext
): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetSecurityConfigurationsRequest = (
input: GetSecurityConfigurationsRequest,
context: __SerdeContext
): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetTableRequest = (input: GetTableRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetTablesRequest = (input: GetTablesRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.Expression !== undefined && input.Expression !== null && { Expression: input.Expression }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetTableVersionRequest = (input: GetTableVersionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
...(input.VersionId !== undefined && input.VersionId !== null && { VersionId: input.VersionId }),
};
};
const serializeAws_json1_1GetTableVersionsRequest = (input: GetTableVersionsRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1GetTagsRequest = (input: GetTagsRequest, context: __SerdeContext): any => {
return {
...(input.ResourceArn !== undefined && input.ResourceArn !== null && { ResourceArn: input.ResourceArn }),
};
};
const serializeAws_json1_1GetTriggerRequest = (input: GetTriggerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetTriggersRequest = (input: GetTriggersRequest, context: __SerdeContext): any => {
return {
...(input.DependentJobName !== undefined &&
input.DependentJobName !== null && { DependentJobName: input.DependentJobName }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GetUserDefinedFunctionRequest = (
input: GetUserDefinedFunctionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.FunctionName !== undefined && input.FunctionName !== null && { FunctionName: input.FunctionName }),
};
};
const serializeAws_json1_1GetUserDefinedFunctionsRequest = (
input: GetUserDefinedFunctionsRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Pattern !== undefined && input.Pattern !== null && { Pattern: input.Pattern }),
};
};
const serializeAws_json1_1GetWorkflowRequest = (input: GetWorkflowRequest, context: __SerdeContext): any => {
return {
...(input.IncludeGraph !== undefined && input.IncludeGraph !== null && { IncludeGraph: input.IncludeGraph }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1GetWorkflowRunPropertiesRequest = (
input: GetWorkflowRunPropertiesRequest,
context: __SerdeContext
): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
};
};
const serializeAws_json1_1GetWorkflowRunRequest = (input: GetWorkflowRunRequest, context: __SerdeContext): any => {
return {
...(input.IncludeGraph !== undefined && input.IncludeGraph !== null && { IncludeGraph: input.IncludeGraph }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
};
};
const serializeAws_json1_1GetWorkflowRunsRequest = (input: GetWorkflowRunsRequest, context: __SerdeContext): any => {
return {
...(input.IncludeGraph !== undefined && input.IncludeGraph !== null && { IncludeGraph: input.IncludeGraph }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1GlueTable = (input: GlueTable, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ConnectionName !== undefined &&
input.ConnectionName !== null && { ConnectionName: input.ConnectionName }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
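/*
 * List serializers share one shape: drop null/undefined entries with a
 * loose-equality filter, then map each survivor through the member
 * serializer. The inner `entry === null` branch can never be reached after
 * that filter; it is defensive boilerplate emitted uniformly by the code
 * generator and is left intact here for consistency with the rest of the
 * file.
 */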
const serializeAws_json1_1GlueTables = (input: GlueTable[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1GlueTable(entry, context);
});
};
const serializeAws_json1_1ImportCatalogToGlueRequest = (
input: ImportCatalogToGlueRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
};
};
const serializeAws_json1_1JdbcTarget = (input: JdbcTarget, context: __SerdeContext): any => {
return {
...(input.ConnectionName !== undefined &&
input.ConnectionName !== null && { ConnectionName: input.ConnectionName }),
...(input.Exclusions !== undefined &&
input.Exclusions !== null && { Exclusions: serializeAws_json1_1PathList(input.Exclusions, context) }),
...(input.Path !== undefined && input.Path !== null && { Path: input.Path }),
};
};
const serializeAws_json1_1JdbcTargetList = (input: JdbcTarget[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1JdbcTarget(entry, context);
});
};
const serializeAws_json1_1JobBookmarksEncryption = (input: JobBookmarksEncryption, context: __SerdeContext): any => {
return {
...(input.JobBookmarksEncryptionMode !== undefined &&
input.JobBookmarksEncryptionMode !== null && { JobBookmarksEncryptionMode: input.JobBookmarksEncryptionMode }),
...(input.KmsKeyArn !== undefined && input.KmsKeyArn !== null && { KmsKeyArn: input.KmsKeyArn }),
};
};
const serializeAws_json1_1JobCommand = (input: JobCommand, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.PythonVersion !== undefined && input.PythonVersion !== null && { PythonVersion: input.PythonVersion }),
...(input.ScriptLocation !== undefined &&
input.ScriptLocation !== null && { ScriptLocation: input.ScriptLocation }),
};
};
const serializeAws_json1_1JobNameList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
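/*
 * Note: MaxCapacity below is routed through __serializeFloat rather than the
 * plain guard used for other numerics. In the smithy runtime this helper
 * serializes non-finite doubles (NaN, Infinity, -Infinity) as their string
 * names, since plain JSON cannot represent them; finite values pass through
 * unchanged.
 */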
const serializeAws_json1_1JobUpdate = (input: JobUpdate, context: __SerdeContext): any => {
return {
...(input.AllocatedCapacity !== undefined &&
input.AllocatedCapacity !== null && { AllocatedCapacity: input.AllocatedCapacity }),
...(input.Command !== undefined &&
input.Command !== null && { Command: serializeAws_json1_1JobCommand(input.Command, context) }),
...(input.Connections !== undefined &&
input.Connections !== null && { Connections: serializeAws_json1_1ConnectionsList(input.Connections, context) }),
...(input.DefaultArguments !== undefined &&
input.DefaultArguments !== null && {
DefaultArguments: serializeAws_json1_1GenericMap(input.DefaultArguments, context),
}),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.ExecutionProperty !== undefined &&
input.ExecutionProperty !== null && {
ExecutionProperty: serializeAws_json1_1ExecutionProperty(input.ExecutionProperty, context),
}),
...(input.GlueVersion !== undefined && input.GlueVersion !== null && { GlueVersion: input.GlueVersion }),
...(input.LogUri !== undefined && input.LogUri !== null && { LogUri: input.LogUri }),
...(input.MaxCapacity !== undefined &&
input.MaxCapacity !== null && { MaxCapacity: __serializeFloat(input.MaxCapacity) }),
...(input.MaxRetries !== undefined && input.MaxRetries !== null && { MaxRetries: input.MaxRetries }),
...(input.NonOverridableArguments !== undefined &&
input.NonOverridableArguments !== null && {
NonOverridableArguments: serializeAws_json1_1GenericMap(input.NonOverridableArguments, context),
}),
...(input.NotificationProperty !== undefined &&
input.NotificationProperty !== null && {
NotificationProperty: serializeAws_json1_1NotificationProperty(input.NotificationProperty, context),
}),
...(input.NumberOfWorkers !== undefined &&
input.NumberOfWorkers !== null && { NumberOfWorkers: input.NumberOfWorkers }),
...(input.Role !== undefined && input.Role !== null && { Role: input.Role }),
...(input.SecurityConfiguration !== undefined &&
input.SecurityConfiguration !== null && { SecurityConfiguration: input.SecurityConfiguration }),
...(input.Timeout !== undefined && input.Timeout !== null && { Timeout: input.Timeout }),
...(input.WorkerType !== undefined && input.WorkerType !== null && { WorkerType: input.WorkerType }),
};
};
const serializeAws_json1_1KeyList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1LineageConfiguration = (input: LineageConfiguration, context: __SerdeContext): any => {
return {
...(input.CrawlerLineageSettings !== undefined &&
input.CrawlerLineageSettings !== null && { CrawlerLineageSettings: input.CrawlerLineageSettings }),
};
};
const serializeAws_json1_1ListBlueprintsRequest = (input: ListBlueprintsRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1ListCrawlersRequest = (input: ListCrawlersRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1ListDevEndpointsRequest = (input: ListDevEndpointsRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1ListJobsRequest = (input: ListJobsRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1ListMLTransformsRequest = (input: ListMLTransformsRequest, context: __SerdeContext): any => {
return {
...(input.Filter !== undefined &&
input.Filter !== null && { Filter: serializeAws_json1_1TransformFilterCriteria(input.Filter, context) }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Sort !== undefined &&
input.Sort !== null && { Sort: serializeAws_json1_1TransformSortCriteria(input.Sort, context) }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1ListRegistriesInput = (input: ListRegistriesInput, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1ListSchemasInput = (input: ListSchemasInput, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.RegistryId !== undefined &&
input.RegistryId !== null && { RegistryId: serializeAws_json1_1RegistryId(input.RegistryId, context) }),
};
};
const serializeAws_json1_1ListSchemaVersionsInput = (input: ListSchemaVersionsInput, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
};
};
const serializeAws_json1_1ListTriggersRequest = (input: ListTriggersRequest, context: __SerdeContext): any => {
return {
...(input.DependentJobName !== undefined &&
input.DependentJobName !== null && { DependentJobName: input.DependentJobName }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1TagsMap(input.Tags, context) }),
};
};
const serializeAws_json1_1ListWorkflowsRequest = (input: ListWorkflowsRequest, context: __SerdeContext): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
};
};
const serializeAws_json1_1Location = (input: Location, context: __SerdeContext): any => {
return {
...(input.DynamoDB !== undefined &&
input.DynamoDB !== null && { DynamoDB: serializeAws_json1_1CodeGenNodeArgs(input.DynamoDB, context) }),
...(input.Jdbc !== undefined &&
input.Jdbc !== null && { Jdbc: serializeAws_json1_1CodeGenNodeArgs(input.Jdbc, context) }),
...(input.S3 !== undefined && input.S3 !== null && { S3: serializeAws_json1_1CodeGenNodeArgs(input.S3, context) }),
};
};
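/*
 * Map serializers reduce over Object.entries, skipping keys whose value is
 * null. Keys with undefined values are technically copied through, but they
 * disappear again when the document is JSON-stringified, so the wire shape
 * is the same either way.
 */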
const serializeAws_json1_1LocationMap = (input: { [key: string]: string }, context: __SerdeContext): any => {
return Object.entries(input).reduce((acc: { [key: string]: any }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: value,
};
}, {});
};
const serializeAws_json1_1LongColumnStatisticsData = (
input: LongColumnStatisticsData,
context: __SerdeContext
): any => {
return {
...(input.MaximumValue !== undefined && input.MaximumValue !== null && { MaximumValue: input.MaximumValue }),
...(input.MinimumValue !== undefined && input.MinimumValue !== null && { MinimumValue: input.MinimumValue }),
...(input.NumberOfDistinctValues !== undefined &&
input.NumberOfDistinctValues !== null && { NumberOfDistinctValues: input.NumberOfDistinctValues }),
...(input.NumberOfNulls !== undefined && input.NumberOfNulls !== null && { NumberOfNulls: input.NumberOfNulls }),
};
};
const serializeAws_json1_1MappingEntry = (input: MappingEntry, context: __SerdeContext): any => {
return {
...(input.SourcePath !== undefined && input.SourcePath !== null && { SourcePath: input.SourcePath }),
...(input.SourceTable !== undefined && input.SourceTable !== null && { SourceTable: input.SourceTable }),
...(input.SourceType !== undefined && input.SourceType !== null && { SourceType: input.SourceType }),
...(input.TargetPath !== undefined && input.TargetPath !== null && { TargetPath: input.TargetPath }),
...(input.TargetTable !== undefined && input.TargetTable !== null && { TargetTable: input.TargetTable }),
...(input.TargetType !== undefined && input.TargetType !== null && { TargetType: input.TargetType }),
};
};
const serializeAws_json1_1MappingList = (input: MappingEntry[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1MappingEntry(entry, context);
});
};
const serializeAws_json1_1MapValue = (input: { [key: string]: string }, context: __SerdeContext): any => {
return Object.entries(input).reduce((acc: { [key: string]: any }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: value,
};
}, {});
};
const serializeAws_json1_1MatchCriteria = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1MetadataKeyValuePair = (input: MetadataKeyValuePair, context: __SerdeContext): any => {
return {
...(input.MetadataKey !== undefined && input.MetadataKey !== null && { MetadataKey: input.MetadataKey }),
...(input.MetadataValue !== undefined && input.MetadataValue !== null && { MetadataValue: input.MetadataValue }),
};
};
const serializeAws_json1_1MetadataList = (input: MetadataKeyValuePair[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1MetadataKeyValuePair(entry, context);
});
};
const serializeAws_json1_1MLUserDataEncryption = (input: MLUserDataEncryption, context: __SerdeContext): any => {
return {
...(input.KmsKeyId !== undefined && input.KmsKeyId !== null && { KmsKeyId: input.KmsKeyId }),
...(input.MlUserDataEncryptionMode !== undefined &&
input.MlUserDataEncryptionMode !== null && { MlUserDataEncryptionMode: input.MlUserDataEncryptionMode }),
};
};
const serializeAws_json1_1MongoDBTarget = (input: MongoDBTarget, context: __SerdeContext): any => {
return {
...(input.ConnectionName !== undefined &&
input.ConnectionName !== null && { ConnectionName: input.ConnectionName }),
...(input.Path !== undefined && input.Path !== null && { Path: input.Path }),
...(input.ScanAll !== undefined && input.ScanAll !== null && { ScanAll: input.ScanAll }),
};
};
const serializeAws_json1_1MongoDBTargetList = (input: MongoDBTarget[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1MongoDBTarget(entry, context);
});
};
const serializeAws_json1_1NameStringList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1NodeIdList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1NotificationProperty = (input: NotificationProperty, context: __SerdeContext): any => {
return {
...(input.NotifyDelayAfter !== undefined &&
input.NotifyDelayAfter !== null && { NotifyDelayAfter: input.NotifyDelayAfter }),
};
};
const serializeAws_json1_1OrchestrationStringList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1Order = (input: Order, context: __SerdeContext): any => {
return {
...(input.Column !== undefined && input.Column !== null && { Column: input.Column }),
...(input.SortOrder !== undefined && input.SortOrder !== null && { SortOrder: input.SortOrder }),
};
};
const serializeAws_json1_1OrderList = (input: Order[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1Order(entry, context);
});
};
const serializeAws_json1_1ParametersMap = (input: { [key: string]: string }, context: __SerdeContext): any => {
return Object.entries(input).reduce((acc: { [key: string]: any }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: value,
};
}, {});
};
const serializeAws_json1_1PartitionIndex = (input: PartitionIndex, context: __SerdeContext): any => {
return {
...(input.IndexName !== undefined && input.IndexName !== null && { IndexName: input.IndexName }),
...(input.Keys !== undefined && input.Keys !== null && { Keys: serializeAws_json1_1KeyList(input.Keys, context) }),
};
};
const serializeAws_json1_1PartitionIndexList = (input: PartitionIndex[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1PartitionIndex(entry, context);
});
};
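/*
 * Date members such as LastAccessTime are serialized as epoch seconds
 * (Date.getTime() returns milliseconds, hence the divide-by-1000 and round),
 * the default timestamp encoding for the awsJson1_1 protocol.
 */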
const serializeAws_json1_1PartitionInput = (input: PartitionInput, context: __SerdeContext): any => {
return {
...(input.LastAccessTime !== undefined &&
input.LastAccessTime !== null && { LastAccessTime: Math.round(input.LastAccessTime.getTime() / 1000) }),
...(input.LastAnalyzedTime !== undefined &&
input.LastAnalyzedTime !== null && { LastAnalyzedTime: Math.round(input.LastAnalyzedTime.getTime() / 1000) }),
...(input.Parameters !== undefined &&
input.Parameters !== null && { Parameters: serializeAws_json1_1ParametersMap(input.Parameters, context) }),
...(input.StorageDescriptor !== undefined &&
input.StorageDescriptor !== null && {
StorageDescriptor: serializeAws_json1_1StorageDescriptor(input.StorageDescriptor, context),
}),
...(input.Values !== undefined &&
input.Values !== null && { Values: serializeAws_json1_1ValueStringList(input.Values, context) }),
};
};
const serializeAws_json1_1PartitionInputList = (input: PartitionInput[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1PartitionInput(entry, context);
});
};
const serializeAws_json1_1PartitionValueList = (input: PartitionValueList, context: __SerdeContext): any => {
return {
...(input.Values !== undefined &&
input.Values !== null && { Values: serializeAws_json1_1ValueStringList(input.Values, context) }),
};
};
const serializeAws_json1_1PathList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1PermissionList = (input: (Permission | string)[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1PhysicalConnectionRequirements = (
input: PhysicalConnectionRequirements,
context: __SerdeContext
): any => {
return {
...(input.AvailabilityZone !== undefined &&
input.AvailabilityZone !== null && { AvailabilityZone: input.AvailabilityZone }),
...(input.SecurityGroupIdList !== undefined &&
input.SecurityGroupIdList !== null && {
SecurityGroupIdList: serializeAws_json1_1SecurityGroupIdList(input.SecurityGroupIdList, context),
}),
...(input.SubnetId !== undefined && input.SubnetId !== null && { SubnetId: input.SubnetId }),
};
};
const serializeAws_json1_1Predicate = (input: Predicate, context: __SerdeContext): any => {
return {
...(input.Conditions !== undefined &&
input.Conditions !== null && { Conditions: serializeAws_json1_1ConditionList(input.Conditions, context) }),
...(input.Logical !== undefined && input.Logical !== null && { Logical: input.Logical }),
};
};
const serializeAws_json1_1PrincipalPermissions = (input: PrincipalPermissions, context: __SerdeContext): any => {
return {
...(input.Permissions !== undefined &&
input.Permissions !== null && { Permissions: serializeAws_json1_1PermissionList(input.Permissions, context) }),
...(input.Principal !== undefined &&
input.Principal !== null && { Principal: serializeAws_json1_1DataLakePrincipal(input.Principal, context) }),
};
};
const serializeAws_json1_1PrincipalPermissionsList = (input: PrincipalPermissions[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1PrincipalPermissions(entry, context);
});
};
const serializeAws_json1_1PropertyPredicate = (input: PropertyPredicate, context: __SerdeContext): any => {
return {
...(input.Comparator !== undefined && input.Comparator !== null && { Comparator: input.Comparator }),
...(input.Key !== undefined && input.Key !== null && { Key: input.Key }),
...(input.Value !== undefined && input.Value !== null && { Value: input.Value }),
};
};
const serializeAws_json1_1PublicKeysList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1PutDataCatalogEncryptionSettingsRequest = (
input: PutDataCatalogEncryptionSettingsRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DataCatalogEncryptionSettings !== undefined &&
input.DataCatalogEncryptionSettings !== null && {
DataCatalogEncryptionSettings: serializeAws_json1_1DataCatalogEncryptionSettings(
input.DataCatalogEncryptionSettings,
context
),
}),
};
};
const serializeAws_json1_1PutResourcePolicyRequest = (
input: PutResourcePolicyRequest,
context: __SerdeContext
): any => {
return {
...(input.EnableHybrid !== undefined && input.EnableHybrid !== null && { EnableHybrid: input.EnableHybrid }),
...(input.PolicyExistsCondition !== undefined &&
input.PolicyExistsCondition !== null && { PolicyExistsCondition: input.PolicyExistsCondition }),
...(input.PolicyHashCondition !== undefined &&
input.PolicyHashCondition !== null && { PolicyHashCondition: input.PolicyHashCondition }),
...(input.PolicyInJson !== undefined && input.PolicyInJson !== null && { PolicyInJson: input.PolicyInJson }),
...(input.ResourceArn !== undefined && input.ResourceArn !== null && { ResourceArn: input.ResourceArn }),
};
};
const serializeAws_json1_1PutSchemaVersionMetadataInput = (
input: PutSchemaVersionMetadataInput,
context: __SerdeContext
): any => {
return {
...(input.MetadataKeyValue !== undefined &&
input.MetadataKeyValue !== null && {
MetadataKeyValue: serializeAws_json1_1MetadataKeyValuePair(input.MetadataKeyValue, context),
}),
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
...(input.SchemaVersionId !== undefined &&
input.SchemaVersionId !== null && { SchemaVersionId: input.SchemaVersionId }),
...(input.SchemaVersionNumber !== undefined &&
input.SchemaVersionNumber !== null && {
SchemaVersionNumber: serializeAws_json1_1SchemaVersionNumber(input.SchemaVersionNumber, context),
}),
};
};
const serializeAws_json1_1PutWorkflowRunPropertiesRequest = (
input: PutWorkflowRunPropertiesRequest,
context: __SerdeContext
): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
...(input.RunProperties !== undefined &&
input.RunProperties !== null && {
RunProperties: serializeAws_json1_1WorkflowRunProperties(input.RunProperties, context),
}),
};
};
const serializeAws_json1_1QuerySchemaVersionMetadataInput = (
input: QuerySchemaVersionMetadataInput,
context: __SerdeContext
): any => {
return {
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.MetadataList !== undefined &&
input.MetadataList !== null && { MetadataList: serializeAws_json1_1MetadataList(input.MetadataList, context) }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
...(input.SchemaVersionId !== undefined &&
input.SchemaVersionId !== null && { SchemaVersionId: input.SchemaVersionId }),
...(input.SchemaVersionNumber !== undefined &&
input.SchemaVersionNumber !== null && {
SchemaVersionNumber: serializeAws_json1_1SchemaVersionNumber(input.SchemaVersionNumber, context),
}),
};
};
const serializeAws_json1_1RecrawlPolicy = (input: RecrawlPolicy, context: __SerdeContext): any => {
return {
...(input.RecrawlBehavior !== undefined &&
input.RecrawlBehavior !== null && { RecrawlBehavior: input.RecrawlBehavior }),
};
};
const serializeAws_json1_1RegisterSchemaVersionInput = (
input: RegisterSchemaVersionInput,
context: __SerdeContext
): any => {
return {
...(input.SchemaDefinition !== undefined &&
input.SchemaDefinition !== null && { SchemaDefinition: input.SchemaDefinition }),
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
};
};
const serializeAws_json1_1RegistryId = (input: RegistryId, context: __SerdeContext): any => {
return {
...(input.RegistryArn !== undefined && input.RegistryArn !== null && { RegistryArn: input.RegistryArn }),
...(input.RegistryName !== undefined && input.RegistryName !== null && { RegistryName: input.RegistryName }),
};
};
const serializeAws_json1_1RemoveSchemaVersionMetadataInput = (
input: RemoveSchemaVersionMetadataInput,
context: __SerdeContext
): any => {
return {
...(input.MetadataKeyValue !== undefined &&
input.MetadataKeyValue !== null && {
MetadataKeyValue: serializeAws_json1_1MetadataKeyValuePair(input.MetadataKeyValue, context),
}),
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
...(input.SchemaVersionId !== undefined &&
input.SchemaVersionId !== null && { SchemaVersionId: input.SchemaVersionId }),
...(input.SchemaVersionNumber !== undefined &&
input.SchemaVersionNumber !== null && {
SchemaVersionNumber: serializeAws_json1_1SchemaVersionNumber(input.SchemaVersionNumber, context),
}),
};
};
const serializeAws_json1_1ResetJobBookmarkRequest = (input: ResetJobBookmarkRequest, context: __SerdeContext): any => {
return {
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
};
};
const serializeAws_json1_1ResourceUri = (input: ResourceUri, context: __SerdeContext): any => {
return {
...(input.ResourceType !== undefined && input.ResourceType !== null && { ResourceType: input.ResourceType }),
...(input.Uri !== undefined && input.Uri !== null && { Uri: input.Uri }),
};
};
const serializeAws_json1_1ResourceUriList = (input: ResourceUri[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1ResourceUri(entry, context);
});
};
const serializeAws_json1_1ResumeWorkflowRunRequest = (
input: ResumeWorkflowRunRequest,
context: __SerdeContext
): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.NodeIds !== undefined &&
input.NodeIds !== null && { NodeIds: serializeAws_json1_1NodeIdList(input.NodeIds, context) }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
};
};
const serializeAws_json1_1S3Encryption = (input: S3Encryption, context: __SerdeContext): any => {
return {
...(input.KmsKeyArn !== undefined && input.KmsKeyArn !== null && { KmsKeyArn: input.KmsKeyArn }),
...(input.S3EncryptionMode !== undefined &&
input.S3EncryptionMode !== null && { S3EncryptionMode: input.S3EncryptionMode }),
};
};
const serializeAws_json1_1S3EncryptionList = (input: S3Encryption[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1S3Encryption(entry, context);
});
};
const serializeAws_json1_1S3Target = (input: S3Target, context: __SerdeContext): any => {
return {
...(input.ConnectionName !== undefined &&
input.ConnectionName !== null && { ConnectionName: input.ConnectionName }),
...(input.Exclusions !== undefined &&
input.Exclusions !== null && { Exclusions: serializeAws_json1_1PathList(input.Exclusions, context) }),
...(input.Path !== undefined && input.Path !== null && { Path: input.Path }),
...(input.SampleSize !== undefined && input.SampleSize !== null && { SampleSize: input.SampleSize }),
};
};
const serializeAws_json1_1S3TargetList = (input: S3Target[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1S3Target(entry, context);
});
};
const serializeAws_json1_1SchemaChangePolicy = (input: SchemaChangePolicy, context: __SerdeContext): any => {
return {
...(input.DeleteBehavior !== undefined &&
input.DeleteBehavior !== null && { DeleteBehavior: input.DeleteBehavior }),
...(input.UpdateBehavior !== undefined &&
input.UpdateBehavior !== null && { UpdateBehavior: input.UpdateBehavior }),
};
};
const serializeAws_json1_1SchemaColumn = (input: SchemaColumn, context: __SerdeContext): any => {
return {
...(input.DataType !== undefined && input.DataType !== null && { DataType: input.DataType }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
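/*
 * SchemaId is a lookup key rather than a full resource: per the Glue API it
 * should carry either SchemaArn, or SchemaName together with RegistryName.
 * The serializer below simply passes through whichever of those fields the
 * caller populated; it does not enforce the either/or constraint itself.
 */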
const serializeAws_json1_1SchemaId = (input: SchemaId, context: __SerdeContext): any => {
return {
...(input.RegistryName !== undefined && input.RegistryName !== null && { RegistryName: input.RegistryName }),
...(input.SchemaArn !== undefined && input.SchemaArn !== null && { SchemaArn: input.SchemaArn }),
...(input.SchemaName !== undefined && input.SchemaName !== null && { SchemaName: input.SchemaName }),
};
};
const serializeAws_json1_1SchemaReference = (input: SchemaReference, context: __SerdeContext): any => {
return {
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
...(input.SchemaVersionId !== undefined &&
input.SchemaVersionId !== null && { SchemaVersionId: input.SchemaVersionId }),
...(input.SchemaVersionNumber !== undefined &&
input.SchemaVersionNumber !== null && { SchemaVersionNumber: input.SchemaVersionNumber }),
};
};
const serializeAws_json1_1SchemaVersionNumber = (input: SchemaVersionNumber, context: __SerdeContext): any => {
return {
...(input.LatestVersion !== undefined && input.LatestVersion !== null && { LatestVersion: input.LatestVersion }),
...(input.VersionNumber !== undefined && input.VersionNumber !== null && { VersionNumber: input.VersionNumber }),
};
};
const serializeAws_json1_1SearchPropertyPredicates = (input: PropertyPredicate[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1PropertyPredicate(entry, context);
});
};
const serializeAws_json1_1SearchTablesRequest = (input: SearchTablesRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.Filters !== undefined &&
input.Filters !== null && { Filters: serializeAws_json1_1SearchPropertyPredicates(input.Filters, context) }),
...(input.MaxResults !== undefined && input.MaxResults !== null && { MaxResults: input.MaxResults }),
...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }),
...(input.ResourceShareType !== undefined &&
input.ResourceShareType !== null && { ResourceShareType: input.ResourceShareType }),
...(input.SearchText !== undefined && input.SearchText !== null && { SearchText: input.SearchText }),
...(input.SortCriteria !== undefined &&
input.SortCriteria !== null && { SortCriteria: serializeAws_json1_1SortCriteria(input.SortCriteria, context) }),
};
};
const serializeAws_json1_1SecurityGroupIdList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1Segment = (input: Segment, context: __SerdeContext): any => {
return {
...(input.SegmentNumber !== undefined && input.SegmentNumber !== null && { SegmentNumber: input.SegmentNumber }),
...(input.TotalSegments !== undefined && input.TotalSegments !== null && { TotalSegments: input.TotalSegments }),
};
};
const serializeAws_json1_1SerDeInfo = (input: SerDeInfo, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Parameters !== undefined &&
input.Parameters !== null && { Parameters: serializeAws_json1_1ParametersMap(input.Parameters, context) }),
...(input.SerializationLibrary !== undefined &&
input.SerializationLibrary !== null && { SerializationLibrary: input.SerializationLibrary }),
};
};
const serializeAws_json1_1SkewedInfo = (input: SkewedInfo, context: __SerdeContext): any => {
return {
...(input.SkewedColumnNames !== undefined &&
input.SkewedColumnNames !== null && {
SkewedColumnNames: serializeAws_json1_1NameStringList(input.SkewedColumnNames, context),
}),
...(input.SkewedColumnValueLocationMaps !== undefined &&
input.SkewedColumnValueLocationMaps !== null && {
SkewedColumnValueLocationMaps: serializeAws_json1_1LocationMap(input.SkewedColumnValueLocationMaps, context),
}),
...(input.SkewedColumnValues !== undefined &&
input.SkewedColumnValues !== null && {
SkewedColumnValues: serializeAws_json1_1ColumnValueStringList(input.SkewedColumnValues, context),
}),
};
};
const serializeAws_json1_1SortCriteria = (input: SortCriterion[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1SortCriterion(entry, context);
});
};
const serializeAws_json1_1SortCriterion = (input: SortCriterion, context: __SerdeContext): any => {
return {
...(input.FieldName !== undefined && input.FieldName !== null && { FieldName: input.FieldName }),
...(input.Sort !== undefined && input.Sort !== null && { Sort: input.Sort }),
};
};
const serializeAws_json1_1StartBlueprintRunRequest = (
input: StartBlueprintRunRequest,
context: __SerdeContext
): any => {
return {
...(input.BlueprintName !== undefined && input.BlueprintName !== null && { BlueprintName: input.BlueprintName }),
...(input.Parameters !== undefined && input.Parameters !== null && { Parameters: input.Parameters }),
...(input.RoleArn !== undefined && input.RoleArn !== null && { RoleArn: input.RoleArn }),
};
};
const serializeAws_json1_1StartCrawlerRequest = (input: StartCrawlerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1StartCrawlerScheduleRequest = (
input: StartCrawlerScheduleRequest,
context: __SerdeContext
): any => {
return {
...(input.CrawlerName !== undefined && input.CrawlerName !== null && { CrawlerName: input.CrawlerName }),
};
};
const serializeAws_json1_1StartExportLabelsTaskRunRequest = (
input: StartExportLabelsTaskRunRequest,
context: __SerdeContext
): any => {
return {
...(input.OutputS3Path !== undefined && input.OutputS3Path !== null && { OutputS3Path: input.OutputS3Path }),
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1StartImportLabelsTaskRunRequest = (
input: StartImportLabelsTaskRunRequest,
context: __SerdeContext
): any => {
return {
...(input.InputS3Path !== undefined && input.InputS3Path !== null && { InputS3Path: input.InputS3Path }),
...(input.ReplaceAllLabels !== undefined &&
input.ReplaceAllLabels !== null && { ReplaceAllLabels: input.ReplaceAllLabels }),
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1StartJobRunRequest = (input: StartJobRunRequest, context: __SerdeContext): any => {
return {
...(input.AllocatedCapacity !== undefined &&
input.AllocatedCapacity !== null && { AllocatedCapacity: input.AllocatedCapacity }),
...(input.Arguments !== undefined &&
input.Arguments !== null && { Arguments: serializeAws_json1_1GenericMap(input.Arguments, context) }),
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.JobRunId !== undefined && input.JobRunId !== null && { JobRunId: input.JobRunId }),
...(input.MaxCapacity !== undefined &&
input.MaxCapacity !== null && { MaxCapacity: __serializeFloat(input.MaxCapacity) }),
...(input.NotificationProperty !== undefined &&
input.NotificationProperty !== null && {
NotificationProperty: serializeAws_json1_1NotificationProperty(input.NotificationProperty, context),
}),
...(input.NumberOfWorkers !== undefined &&
input.NumberOfWorkers !== null && { NumberOfWorkers: input.NumberOfWorkers }),
...(input.SecurityConfiguration !== undefined &&
input.SecurityConfiguration !== null && { SecurityConfiguration: input.SecurityConfiguration }),
...(input.Timeout !== undefined && input.Timeout !== null && { Timeout: input.Timeout }),
...(input.WorkerType !== undefined && input.WorkerType !== null && { WorkerType: input.WorkerType }),
};
};
const serializeAws_json1_1StartMLEvaluationTaskRunRequest = (
input: StartMLEvaluationTaskRunRequest,
context: __SerdeContext
): any => {
return {
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1StartMLLabelingSetGenerationTaskRunRequest = (
input: StartMLLabelingSetGenerationTaskRunRequest,
context: __SerdeContext
): any => {
return {
...(input.OutputS3Path !== undefined && input.OutputS3Path !== null && { OutputS3Path: input.OutputS3Path }),
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
};
};
const serializeAws_json1_1StartTriggerRequest = (input: StartTriggerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1StartWorkflowRunRequest = (input: StartWorkflowRunRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1StopCrawlerRequest = (input: StopCrawlerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1StopCrawlerScheduleRequest = (
input: StopCrawlerScheduleRequest,
context: __SerdeContext
): any => {
return {
...(input.CrawlerName !== undefined && input.CrawlerName !== null && { CrawlerName: input.CrawlerName }),
};
};
const serializeAws_json1_1StopTriggerRequest = (input: StopTriggerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1StopWorkflowRunRequest = (input: StopWorkflowRunRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.RunId !== undefined && input.RunId !== null && { RunId: input.RunId }),
};
};
const serializeAws_json1_1StorageDescriptor = (input: StorageDescriptor, context: __SerdeContext): any => {
return {
...(input.BucketColumns !== undefined &&
input.BucketColumns !== null && {
BucketColumns: serializeAws_json1_1NameStringList(input.BucketColumns, context),
}),
...(input.Columns !== undefined &&
input.Columns !== null && { Columns: serializeAws_json1_1ColumnList(input.Columns, context) }),
...(input.Compressed !== undefined && input.Compressed !== null && { Compressed: input.Compressed }),
...(input.InputFormat !== undefined && input.InputFormat !== null && { InputFormat: input.InputFormat }),
...(input.Location !== undefined && input.Location !== null && { Location: input.Location }),
...(input.NumberOfBuckets !== undefined &&
input.NumberOfBuckets !== null && { NumberOfBuckets: input.NumberOfBuckets }),
...(input.OutputFormat !== undefined && input.OutputFormat !== null && { OutputFormat: input.OutputFormat }),
...(input.Parameters !== undefined &&
input.Parameters !== null && { Parameters: serializeAws_json1_1ParametersMap(input.Parameters, context) }),
...(input.SchemaReference !== undefined &&
input.SchemaReference !== null && {
SchemaReference: serializeAws_json1_1SchemaReference(input.SchemaReference, context),
}),
...(input.SerdeInfo !== undefined &&
input.SerdeInfo !== null && { SerdeInfo: serializeAws_json1_1SerDeInfo(input.SerdeInfo, context) }),
...(input.SkewedInfo !== undefined &&
input.SkewedInfo !== null && { SkewedInfo: serializeAws_json1_1SkewedInfo(input.SkewedInfo, context) }),
...(input.SortColumns !== undefined &&
input.SortColumns !== null && { SortColumns: serializeAws_json1_1OrderList(input.SortColumns, context) }),
...(input.StoredAsSubDirectories !== undefined &&
input.StoredAsSubDirectories !== null && { StoredAsSubDirectories: input.StoredAsSubDirectories }),
};
};
const serializeAws_json1_1StringColumnStatisticsData = (
input: StringColumnStatisticsData,
context: __SerdeContext
): any => {
return {
...(input.AverageLength !== undefined &&
input.AverageLength !== null && { AverageLength: __serializeFloat(input.AverageLength) }),
...(input.MaximumLength !== undefined && input.MaximumLength !== null && { MaximumLength: input.MaximumLength }),
...(input.NumberOfDistinctValues !== undefined &&
input.NumberOfDistinctValues !== null && { NumberOfDistinctValues: input.NumberOfDistinctValues }),
...(input.NumberOfNulls !== undefined && input.NumberOfNulls !== null && { NumberOfNulls: input.NumberOfNulls }),
};
};
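// List serializers filter out null/undefined entries up front; the inner
// null check is a generator-emitted guard that is effectively unreachable
// after the filter.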
const serializeAws_json1_1StringList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1TableIdentifier = (input: TableIdentifier, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1TableInput = (input: TableInput, context: __SerdeContext): any => {
return {
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.LastAccessTime !== undefined &&
input.LastAccessTime !== null && { LastAccessTime: Math.round(input.LastAccessTime.getTime() / 1000) }),
...(input.LastAnalyzedTime !== undefined &&
input.LastAnalyzedTime !== null && { LastAnalyzedTime: Math.round(input.LastAnalyzedTime.getTime() / 1000) }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Owner !== undefined && input.Owner !== null && { Owner: input.Owner }),
...(input.Parameters !== undefined &&
input.Parameters !== null && { Parameters: serializeAws_json1_1ParametersMap(input.Parameters, context) }),
...(input.PartitionKeys !== undefined &&
input.PartitionKeys !== null && { PartitionKeys: serializeAws_json1_1ColumnList(input.PartitionKeys, context) }),
...(input.Retention !== undefined && input.Retention !== null && { Retention: input.Retention }),
...(input.StorageDescriptor !== undefined &&
input.StorageDescriptor !== null && {
StorageDescriptor: serializeAws_json1_1StorageDescriptor(input.StorageDescriptor, context),
}),
...(input.TableType !== undefined && input.TableType !== null && { TableType: input.TableType }),
...(input.TargetTable !== undefined &&
input.TargetTable !== null && { TargetTable: serializeAws_json1_1TableIdentifier(input.TargetTable, context) }),
...(input.ViewExpandedText !== undefined &&
input.ViewExpandedText !== null && { ViewExpandedText: input.ViewExpandedText }),
...(input.ViewOriginalText !== undefined &&
input.ViewOriginalText !== null && { ViewOriginalText: input.ViewOriginalText }),
};
};
const serializeAws_json1_1TagKeysList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1TagResourceRequest = (input: TagResourceRequest, context: __SerdeContext): any => {
return {
...(input.ResourceArn !== undefined && input.ResourceArn !== null && { ResourceArn: input.ResourceArn }),
...(input.TagsToAdd !== undefined &&
input.TagsToAdd !== null && { TagsToAdd: serializeAws_json1_1TagsMap(input.TagsToAdd, context) }),
};
};
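// Map serializers reduce over Object.entries, dropping entries whose value
// is null so they never reach the wire.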
const serializeAws_json1_1TagsMap = (input: { [key: string]: string }, context: __SerdeContext): any => {
return Object.entries(input).reduce((acc: { [key: string]: any }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: value,
};
}, {});
};
const serializeAws_json1_1TaskRunFilterCriteria = (input: TaskRunFilterCriteria, context: __SerdeContext): any => {
return {
...(input.StartedAfter !== undefined &&
input.StartedAfter !== null && { StartedAfter: Math.round(input.StartedAfter.getTime() / 1000) }),
...(input.StartedBefore !== undefined &&
input.StartedBefore !== null && { StartedBefore: Math.round(input.StartedBefore.getTime() / 1000) }),
...(input.Status !== undefined && input.Status !== null && { Status: input.Status }),
...(input.TaskRunType !== undefined && input.TaskRunType !== null && { TaskRunType: input.TaskRunType }),
};
};
const serializeAws_json1_1TaskRunSortCriteria = (input: TaskRunSortCriteria, context: __SerdeContext): any => {
return {
...(input.Column !== undefined && input.Column !== null && { Column: input.Column }),
...(input.SortDirection !== undefined && input.SortDirection !== null && { SortDirection: input.SortDirection }),
};
};
const serializeAws_json1_1TransformEncryption = (input: TransformEncryption, context: __SerdeContext): any => {
return {
...(input.MlUserDataEncryption !== undefined &&
input.MlUserDataEncryption !== null && {
MlUserDataEncryption: serializeAws_json1_1MLUserDataEncryption(input.MlUserDataEncryption, context),
}),
...(input.TaskRunSecurityConfigurationName !== undefined &&
input.TaskRunSecurityConfigurationName !== null && {
TaskRunSecurityConfigurationName: input.TaskRunSecurityConfigurationName,
}),
};
};
const serializeAws_json1_1TransformFilterCriteria = (input: TransformFilterCriteria, context: __SerdeContext): any => {
return {
...(input.CreatedAfter !== undefined &&
input.CreatedAfter !== null && { CreatedAfter: Math.round(input.CreatedAfter.getTime() / 1000) }),
...(input.CreatedBefore !== undefined &&
input.CreatedBefore !== null && { CreatedBefore: Math.round(input.CreatedBefore.getTime() / 1000) }),
...(input.GlueVersion !== undefined && input.GlueVersion !== null && { GlueVersion: input.GlueVersion }),
...(input.LastModifiedAfter !== undefined &&
input.LastModifiedAfter !== null && { LastModifiedAfter: Math.round(input.LastModifiedAfter.getTime() / 1000) }),
...(input.LastModifiedBefore !== undefined &&
input.LastModifiedBefore !== null && {
LastModifiedBefore: Math.round(input.LastModifiedBefore.getTime() / 1000),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Schema !== undefined &&
input.Schema !== null && { Schema: serializeAws_json1_1TransformSchema(input.Schema, context) }),
...(input.Status !== undefined && input.Status !== null && { Status: input.Status }),
...(input.TransformType !== undefined && input.TransformType !== null && { TransformType: input.TransformType }),
};
};
const serializeAws_json1_1TransformParameters = (input: TransformParameters, context: __SerdeContext): any => {
return {
...(input.FindMatchesParameters !== undefined &&
input.FindMatchesParameters !== null && {
FindMatchesParameters: serializeAws_json1_1FindMatchesParameters(input.FindMatchesParameters, context),
}),
...(input.TransformType !== undefined && input.TransformType !== null && { TransformType: input.TransformType }),
};
};
const serializeAws_json1_1TransformSchema = (input: SchemaColumn[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1SchemaColumn(entry, context);
});
};
const serializeAws_json1_1TransformSortCriteria = (input: TransformSortCriteria, context: __SerdeContext): any => {
return {
...(input.Column !== undefined && input.Column !== null && { Column: input.Column }),
...(input.SortDirection !== undefined && input.SortDirection !== null && { SortDirection: input.SortDirection }),
};
};
const serializeAws_json1_1TriggerNameList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1TriggerUpdate = (input: TriggerUpdate, context: __SerdeContext): any => {
return {
...(input.Actions !== undefined &&
input.Actions !== null && { Actions: serializeAws_json1_1ActionList(input.Actions, context) }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.EventBatchingCondition !== undefined &&
input.EventBatchingCondition !== null && {
EventBatchingCondition: serializeAws_json1_1EventBatchingCondition(input.EventBatchingCondition, context),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.Predicate !== undefined &&
input.Predicate !== null && { Predicate: serializeAws_json1_1Predicate(input.Predicate, context) }),
...(input.Schedule !== undefined && input.Schedule !== null && { Schedule: input.Schedule }),
};
};
const serializeAws_json1_1UntagResourceRequest = (input: UntagResourceRequest, context: __SerdeContext): any => {
return {
...(input.ResourceArn !== undefined && input.ResourceArn !== null && { ResourceArn: input.ResourceArn }),
...(input.TagsToRemove !== undefined &&
input.TagsToRemove !== null && { TagsToRemove: serializeAws_json1_1TagKeysList(input.TagsToRemove, context) }),
};
};
const serializeAws_json1_1UpdateBlueprintRequest = (input: UpdateBlueprintRequest, context: __SerdeContext): any => {
return {
...(input.BlueprintLocation !== undefined &&
input.BlueprintLocation !== null && { BlueprintLocation: input.BlueprintLocation }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1UpdateClassifierRequest = (input: UpdateClassifierRequest, context: __SerdeContext): any => {
return {
...(input.CsvClassifier !== undefined &&
input.CsvClassifier !== null && {
CsvClassifier: serializeAws_json1_1UpdateCsvClassifierRequest(input.CsvClassifier, context),
}),
...(input.GrokClassifier !== undefined &&
input.GrokClassifier !== null && {
GrokClassifier: serializeAws_json1_1UpdateGrokClassifierRequest(input.GrokClassifier, context),
}),
...(input.JsonClassifier !== undefined &&
input.JsonClassifier !== null && {
JsonClassifier: serializeAws_json1_1UpdateJsonClassifierRequest(input.JsonClassifier, context),
}),
...(input.XMLClassifier !== undefined &&
input.XMLClassifier !== null && {
XMLClassifier: serializeAws_json1_1UpdateXMLClassifierRequest(input.XMLClassifier, context),
}),
};
};
const serializeAws_json1_1UpdateColumnStatisticsForPartitionRequest = (
input: UpdateColumnStatisticsForPartitionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ColumnStatisticsList !== undefined &&
input.ColumnStatisticsList !== null && {
ColumnStatisticsList: serializeAws_json1_1UpdateColumnStatisticsList(input.ColumnStatisticsList, context),
}),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionValues !== undefined &&
input.PartitionValues !== null && {
PartitionValues: serializeAws_json1_1ValueStringList(input.PartitionValues, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1UpdateColumnStatisticsForTableRequest = (
input: UpdateColumnStatisticsForTableRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ColumnStatisticsList !== undefined &&
input.ColumnStatisticsList !== null && {
ColumnStatisticsList: serializeAws_json1_1UpdateColumnStatisticsList(input.ColumnStatisticsList, context),
}),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1UpdateColumnStatisticsList = (input: ColumnStatistics[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return serializeAws_json1_1ColumnStatistics(entry, context);
});
};
const serializeAws_json1_1UpdateConnectionRequest = (input: UpdateConnectionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.ConnectionInput !== undefined &&
input.ConnectionInput !== null && {
ConnectionInput: serializeAws_json1_1ConnectionInput(input.ConnectionInput, context),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1UpdateCrawlerRequest = (input: UpdateCrawlerRequest, context: __SerdeContext): any => {
return {
...(input.Classifiers !== undefined &&
input.Classifiers !== null && {
Classifiers: serializeAws_json1_1ClassifierNameList(input.Classifiers, context),
}),
...(input.Configuration !== undefined && input.Configuration !== null && { Configuration: input.Configuration }),
...(input.CrawlerSecurityConfiguration !== undefined &&
input.CrawlerSecurityConfiguration !== null && {
CrawlerSecurityConfiguration: input.CrawlerSecurityConfiguration,
}),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.LineageConfiguration !== undefined &&
input.LineageConfiguration !== null && {
LineageConfiguration: serializeAws_json1_1LineageConfiguration(input.LineageConfiguration, context),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.RecrawlPolicy !== undefined &&
input.RecrawlPolicy !== null && {
RecrawlPolicy: serializeAws_json1_1RecrawlPolicy(input.RecrawlPolicy, context),
}),
...(input.Role !== undefined && input.Role !== null && { Role: input.Role }),
...(input.Schedule !== undefined && input.Schedule !== null && { Schedule: input.Schedule }),
...(input.SchemaChangePolicy !== undefined &&
input.SchemaChangePolicy !== null && {
SchemaChangePolicy: serializeAws_json1_1SchemaChangePolicy(input.SchemaChangePolicy, context),
}),
...(input.TablePrefix !== undefined && input.TablePrefix !== null && { TablePrefix: input.TablePrefix }),
...(input.Targets !== undefined &&
input.Targets !== null && { Targets: serializeAws_json1_1CrawlerTargets(input.Targets, context) }),
};
};
const serializeAws_json1_1UpdateCrawlerScheduleRequest = (
input: UpdateCrawlerScheduleRequest,
context: __SerdeContext
): any => {
return {
...(input.CrawlerName !== undefined && input.CrawlerName !== null && { CrawlerName: input.CrawlerName }),
...(input.Schedule !== undefined && input.Schedule !== null && { Schedule: input.Schedule }),
};
};
const serializeAws_json1_1UpdateCsvClassifierRequest = (
input: UpdateCsvClassifierRequest,
context: __SerdeContext
): any => {
return {
...(input.AllowSingleColumn !== undefined &&
input.AllowSingleColumn !== null && { AllowSingleColumn: input.AllowSingleColumn }),
...(input.ContainsHeader !== undefined &&
input.ContainsHeader !== null && { ContainsHeader: input.ContainsHeader }),
...(input.Delimiter !== undefined && input.Delimiter !== null && { Delimiter: input.Delimiter }),
...(input.DisableValueTrimming !== undefined &&
input.DisableValueTrimming !== null && { DisableValueTrimming: input.DisableValueTrimming }),
...(input.Header !== undefined &&
input.Header !== null && { Header: serializeAws_json1_1CsvHeader(input.Header, context) }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.QuoteSymbol !== undefined && input.QuoteSymbol !== null && { QuoteSymbol: input.QuoteSymbol }),
};
};
const serializeAws_json1_1UpdateDatabaseRequest = (input: UpdateDatabaseRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseInput !== undefined &&
input.DatabaseInput !== null && {
DatabaseInput: serializeAws_json1_1DatabaseInput(input.DatabaseInput, context),
}),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1UpdateDevEndpointRequest = (
input: UpdateDevEndpointRequest,
context: __SerdeContext
): any => {
return {
...(input.AddArguments !== undefined &&
input.AddArguments !== null && { AddArguments: serializeAws_json1_1MapValue(input.AddArguments, context) }),
...(input.AddPublicKeys !== undefined &&
input.AddPublicKeys !== null && {
AddPublicKeys: serializeAws_json1_1PublicKeysList(input.AddPublicKeys, context),
}),
...(input.CustomLibraries !== undefined &&
input.CustomLibraries !== null && {
CustomLibraries: serializeAws_json1_1DevEndpointCustomLibraries(input.CustomLibraries, context),
}),
...(input.DeleteArguments !== undefined &&
input.DeleteArguments !== null && {
DeleteArguments: serializeAws_json1_1StringList(input.DeleteArguments, context),
}),
...(input.DeletePublicKeys !== undefined &&
input.DeletePublicKeys !== null && {
DeletePublicKeys: serializeAws_json1_1PublicKeysList(input.DeletePublicKeys, context),
}),
...(input.EndpointName !== undefined && input.EndpointName !== null && { EndpointName: input.EndpointName }),
...(input.PublicKey !== undefined && input.PublicKey !== null && { PublicKey: input.PublicKey }),
...(input.UpdateEtlLibraries !== undefined &&
input.UpdateEtlLibraries !== null && { UpdateEtlLibraries: input.UpdateEtlLibraries }),
};
};
const serializeAws_json1_1UpdateGrokClassifierRequest = (
input: UpdateGrokClassifierRequest,
context: __SerdeContext
): any => {
return {
...(input.Classification !== undefined &&
input.Classification !== null && { Classification: input.Classification }),
...(input.CustomPatterns !== undefined &&
input.CustomPatterns !== null && { CustomPatterns: input.CustomPatterns }),
...(input.GrokPattern !== undefined && input.GrokPattern !== null && { GrokPattern: input.GrokPattern }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1UpdateJobRequest = (input: UpdateJobRequest, context: __SerdeContext): any => {
return {
...(input.JobName !== undefined && input.JobName !== null && { JobName: input.JobName }),
...(input.JobUpdate !== undefined &&
input.JobUpdate !== null && { JobUpdate: serializeAws_json1_1JobUpdate(input.JobUpdate, context) }),
};
};
const serializeAws_json1_1UpdateJsonClassifierRequest = (
input: UpdateJsonClassifierRequest,
context: __SerdeContext
): any => {
return {
...(input.JsonPath !== undefined && input.JsonPath !== null && { JsonPath: input.JsonPath }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1UpdateMLTransformRequest = (
input: UpdateMLTransformRequest,
context: __SerdeContext
): any => {
return {
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.GlueVersion !== undefined && input.GlueVersion !== null && { GlueVersion: input.GlueVersion }),
...(input.MaxCapacity !== undefined &&
input.MaxCapacity !== null && { MaxCapacity: __serializeFloat(input.MaxCapacity) }),
...(input.MaxRetries !== undefined && input.MaxRetries !== null && { MaxRetries: input.MaxRetries }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.NumberOfWorkers !== undefined &&
input.NumberOfWorkers !== null && { NumberOfWorkers: input.NumberOfWorkers }),
...(input.Parameters !== undefined &&
input.Parameters !== null && { Parameters: serializeAws_json1_1TransformParameters(input.Parameters, context) }),
...(input.Role !== undefined && input.Role !== null && { Role: input.Role }),
...(input.Timeout !== undefined && input.Timeout !== null && { Timeout: input.Timeout }),
...(input.TransformId !== undefined && input.TransformId !== null && { TransformId: input.TransformId }),
...(input.WorkerType !== undefined && input.WorkerType !== null && { WorkerType: input.WorkerType }),
};
};
const serializeAws_json1_1UpdatePartitionRequest = (input: UpdatePartitionRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.PartitionInput !== undefined &&
input.PartitionInput !== null && {
PartitionInput: serializeAws_json1_1PartitionInput(input.PartitionInput, context),
}),
...(input.PartitionValueList !== undefined &&
input.PartitionValueList !== null && {
PartitionValueList: serializeAws_json1_1BoundedPartitionValueList(input.PartitionValueList, context),
}),
...(input.TableName !== undefined && input.TableName !== null && { TableName: input.TableName }),
};
};
const serializeAws_json1_1UpdateRegistryInput = (input: UpdateRegistryInput, context: __SerdeContext): any => {
return {
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.RegistryId !== undefined &&
input.RegistryId !== null && { RegistryId: serializeAws_json1_1RegistryId(input.RegistryId, context) }),
};
};
const serializeAws_json1_1UpdateSchemaInput = (input: UpdateSchemaInput, context: __SerdeContext): any => {
return {
...(input.Compatibility !== undefined && input.Compatibility !== null && { Compatibility: input.Compatibility }),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.SchemaId !== undefined &&
input.SchemaId !== null && { SchemaId: serializeAws_json1_1SchemaId(input.SchemaId, context) }),
...(input.SchemaVersionNumber !== undefined &&
input.SchemaVersionNumber !== null && {
SchemaVersionNumber: serializeAws_json1_1SchemaVersionNumber(input.SchemaVersionNumber, context),
}),
};
};
const serializeAws_json1_1UpdateTableRequest = (input: UpdateTableRequest, context: __SerdeContext): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.SkipArchive !== undefined && input.SkipArchive !== null && { SkipArchive: input.SkipArchive }),
...(input.TableInput !== undefined &&
input.TableInput !== null && { TableInput: serializeAws_json1_1TableInput(input.TableInput, context) }),
};
};
const serializeAws_json1_1UpdateTriggerRequest = (input: UpdateTriggerRequest, context: __SerdeContext): any => {
return {
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.TriggerUpdate !== undefined &&
input.TriggerUpdate !== null && {
TriggerUpdate: serializeAws_json1_1TriggerUpdate(input.TriggerUpdate, context),
}),
};
};
const serializeAws_json1_1UpdateUserDefinedFunctionRequest = (
input: UpdateUserDefinedFunctionRequest,
context: __SerdeContext
): any => {
return {
...(input.CatalogId !== undefined && input.CatalogId !== null && { CatalogId: input.CatalogId }),
...(input.DatabaseName !== undefined && input.DatabaseName !== null && { DatabaseName: input.DatabaseName }),
...(input.FunctionInput !== undefined &&
input.FunctionInput !== null && {
FunctionInput: serializeAws_json1_1UserDefinedFunctionInput(input.FunctionInput, context),
}),
...(input.FunctionName !== undefined && input.FunctionName !== null && { FunctionName: input.FunctionName }),
};
};
const serializeAws_json1_1UpdateWorkflowRequest = (input: UpdateWorkflowRequest, context: __SerdeContext): any => {
return {
...(input.DefaultRunProperties !== undefined &&
input.DefaultRunProperties !== null && {
DefaultRunProperties: serializeAws_json1_1WorkflowRunProperties(input.DefaultRunProperties, context),
}),
...(input.Description !== undefined && input.Description !== null && { Description: input.Description }),
...(input.MaxConcurrentRuns !== undefined &&
input.MaxConcurrentRuns !== null && { MaxConcurrentRuns: input.MaxConcurrentRuns }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
};
};
const serializeAws_json1_1UpdateXMLClassifierRequest = (
input: UpdateXMLClassifierRequest,
context: __SerdeContext
): any => {
return {
...(input.Classification !== undefined &&
input.Classification !== null && { Classification: input.Classification }),
...(input.Name !== undefined && input.Name !== null && { Name: input.Name }),
...(input.RowTag !== undefined && input.RowTag !== null && { RowTag: input.RowTag }),
};
};
const serializeAws_json1_1UserDefinedFunctionInput = (
input: UserDefinedFunctionInput,
context: __SerdeContext
): any => {
return {
...(input.ClassName !== undefined && input.ClassName !== null && { ClassName: input.ClassName }),
...(input.FunctionName !== undefined && input.FunctionName !== null && { FunctionName: input.FunctionName }),
...(input.OwnerName !== undefined && input.OwnerName !== null && { OwnerName: input.OwnerName }),
...(input.OwnerType !== undefined && input.OwnerType !== null && { OwnerType: input.OwnerType }),
...(input.ResourceUris !== undefined &&
input.ResourceUris !== null && {
ResourceUris: serializeAws_json1_1ResourceUriList(input.ResourceUris, context),
}),
};
};
const serializeAws_json1_1ValueStringList = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1WorkflowNames = (input: string[], context: __SerdeContext): any => {
return input
.filter((e: any) => e != null)
.map((entry) => {
if (entry === null) {
return null as any;
}
return entry;
});
};
const serializeAws_json1_1WorkflowRunProperties = (input: { [key: string]: string }, context: __SerdeContext): any => {
return Object.entries(input).reduce((acc: { [key: string]: any }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: value,
};
}, {});
};
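// Deserializers: everything below maps the parsed JSON response back onto
// the modeled types. Primitive members are coerced with the __expect*
// helpers (which return undefined when the member is absent), epoch-seconds
// timestamps are revived as Dates via __parseEpochTimestamp, and nested
// shapes, lists, and maps delegate to their own deserializers.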
const deserializeAws_json1_1AccessDeniedException = (output: any, context: __SerdeContext): AccessDeniedException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1Action = (output: any, context: __SerdeContext): Action => {
return {
Arguments:
output.Arguments !== undefined && output.Arguments !== null
? deserializeAws_json1_1GenericMap(output.Arguments, context)
: undefined,
CrawlerName: __expectString(output.CrawlerName),
JobName: __expectString(output.JobName),
NotificationProperty:
output.NotificationProperty !== undefined && output.NotificationProperty !== null
? deserializeAws_json1_1NotificationProperty(output.NotificationProperty, context)
: undefined,
SecurityConfiguration: __expectString(output.SecurityConfiguration),
Timeout: __expectInt32(output.Timeout),
} as any;
};
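// Illustrative: because __expectString / __expectInt32 return undefined for
// absent members, a response of { JobName: "nightly-etl" } deserializes to
// an Action whose other members are all undefined.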
const deserializeAws_json1_1ActionList = (output: any, context: __SerdeContext): Action[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Action(entry, context);
});
};
const deserializeAws_json1_1AlreadyExistsException = (output: any, context: __SerdeContext): AlreadyExistsException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1BackfillError = (output: any, context: __SerdeContext): BackfillError => {
return {
Code: __expectString(output.Code),
Partitions:
output.Partitions !== undefined && output.Partitions !== null
? deserializeAws_json1_1BackfillErroredPartitionsList(output.Partitions, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BackfillErroredPartitionsList = (
output: any,
context: __SerdeContext
): PartitionValueList[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1PartitionValueList(entry, context);
});
};
const deserializeAws_json1_1BackfillErrors = (output: any, context: __SerdeContext): BackfillError[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1BackfillError(entry, context);
});
};
const deserializeAws_json1_1BatchCreatePartitionResponse = (
output: any,
context: __SerdeContext
): BatchCreatePartitionResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1PartitionErrors(output.Errors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchDeleteConnectionResponse = (
output: any,
context: __SerdeContext
): BatchDeleteConnectionResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1ErrorByName(output.Errors, context)
: undefined,
Succeeded:
output.Succeeded !== undefined && output.Succeeded !== null
? deserializeAws_json1_1NameStringList(output.Succeeded, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchDeletePartitionResponse = (
output: any,
context: __SerdeContext
): BatchDeletePartitionResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1PartitionErrors(output.Errors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchDeleteTableResponse = (
output: any,
context: __SerdeContext
): BatchDeleteTableResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1TableErrors(output.Errors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchDeleteTableVersionResponse = (
output: any,
context: __SerdeContext
): BatchDeleteTableVersionResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1TableVersionErrors(output.Errors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchGetBlueprintsResponse = (
output: any,
context: __SerdeContext
): BatchGetBlueprintsResponse => {
return {
Blueprints:
output.Blueprints !== undefined && output.Blueprints !== null
? deserializeAws_json1_1Blueprints(output.Blueprints, context)
: undefined,
MissingBlueprints:
output.MissingBlueprints !== undefined && output.MissingBlueprints !== null
? deserializeAws_json1_1BlueprintNames(output.MissingBlueprints, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchGetCrawlersResponse = (
output: any,
context: __SerdeContext
): BatchGetCrawlersResponse => {
return {
Crawlers:
output.Crawlers !== undefined && output.Crawlers !== null
? deserializeAws_json1_1CrawlerList(output.Crawlers, context)
: undefined,
CrawlersNotFound:
output.CrawlersNotFound !== undefined && output.CrawlersNotFound !== null
? deserializeAws_json1_1CrawlerNameList(output.CrawlersNotFound, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchGetDevEndpointsResponse = (
output: any,
context: __SerdeContext
): BatchGetDevEndpointsResponse => {
return {
DevEndpoints:
output.DevEndpoints !== undefined && output.DevEndpoints !== null
? deserializeAws_json1_1DevEndpointList(output.DevEndpoints, context)
: undefined,
DevEndpointsNotFound:
output.DevEndpointsNotFound !== undefined && output.DevEndpointsNotFound !== null
? deserializeAws_json1_1DevEndpointNames(output.DevEndpointsNotFound, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchGetJobsResponse = (output: any, context: __SerdeContext): BatchGetJobsResponse => {
return {
Jobs:
output.Jobs !== undefined && output.Jobs !== null
? deserializeAws_json1_1JobList(output.Jobs, context)
: undefined,
JobsNotFound:
output.JobsNotFound !== undefined && output.JobsNotFound !== null
? deserializeAws_json1_1JobNameList(output.JobsNotFound, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchGetPartitionResponse = (
output: any,
context: __SerdeContext
): BatchGetPartitionResponse => {
return {
Partitions:
output.Partitions !== undefined && output.Partitions !== null
? deserializeAws_json1_1PartitionList(output.Partitions, context)
: undefined,
UnprocessedKeys:
output.UnprocessedKeys !== undefined && output.UnprocessedKeys !== null
? deserializeAws_json1_1BatchGetPartitionValueList(output.UnprocessedKeys, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchGetPartitionValueList = (
output: any,
context: __SerdeContext
): PartitionValueList[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1PartitionValueList(entry, context);
});
};
const deserializeAws_json1_1BatchGetTriggersResponse = (
output: any,
context: __SerdeContext
): BatchGetTriggersResponse => {
return {
Triggers:
output.Triggers !== undefined && output.Triggers !== null
? deserializeAws_json1_1TriggerList(output.Triggers, context)
: undefined,
TriggersNotFound:
output.TriggersNotFound !== undefined && output.TriggersNotFound !== null
? deserializeAws_json1_1TriggerNameList(output.TriggersNotFound, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchGetWorkflowsResponse = (
output: any,
context: __SerdeContext
): BatchGetWorkflowsResponse => {
return {
MissingWorkflows:
output.MissingWorkflows !== undefined && output.MissingWorkflows !== null
? deserializeAws_json1_1WorkflowNames(output.MissingWorkflows, context)
: undefined,
Workflows:
output.Workflows !== undefined && output.Workflows !== null
? deserializeAws_json1_1Workflows(output.Workflows, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchStopJobRunError = (output: any, context: __SerdeContext): BatchStopJobRunError => {
return {
ErrorDetail:
output.ErrorDetail !== undefined && output.ErrorDetail !== null
? deserializeAws_json1_1ErrorDetail(output.ErrorDetail, context)
: undefined,
JobName: __expectString(output.JobName),
JobRunId: __expectString(output.JobRunId),
} as any;
};
const deserializeAws_json1_1BatchStopJobRunErrorList = (
output: any,
context: __SerdeContext
): BatchStopJobRunError[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1BatchStopJobRunError(entry, context);
});
};
const deserializeAws_json1_1BatchStopJobRunResponse = (
output: any,
context: __SerdeContext
): BatchStopJobRunResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1BatchStopJobRunErrorList(output.Errors, context)
: undefined,
SuccessfulSubmissions:
output.SuccessfulSubmissions !== undefined && output.SuccessfulSubmissions !== null
? deserializeAws_json1_1BatchStopJobRunSuccessfulSubmissionList(output.SuccessfulSubmissions, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchStopJobRunSuccessfulSubmission = (
output: any,
context: __SerdeContext
): BatchStopJobRunSuccessfulSubmission => {
return {
JobName: __expectString(output.JobName),
JobRunId: __expectString(output.JobRunId),
} as any;
};
const deserializeAws_json1_1BatchStopJobRunSuccessfulSubmissionList = (
output: any,
context: __SerdeContext
): BatchStopJobRunSuccessfulSubmission[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1BatchStopJobRunSuccessfulSubmission(entry, context);
});
};
const deserializeAws_json1_1BatchUpdatePartitionFailureEntry = (
output: any,
context: __SerdeContext
): BatchUpdatePartitionFailureEntry => {
return {
ErrorDetail:
output.ErrorDetail !== undefined && output.ErrorDetail !== null
? deserializeAws_json1_1ErrorDetail(output.ErrorDetail, context)
: undefined,
PartitionValueList:
output.PartitionValueList !== undefined && output.PartitionValueList !== null
? deserializeAws_json1_1BoundedPartitionValueList(output.PartitionValueList, context)
: undefined,
} as any;
};
const deserializeAws_json1_1BatchUpdatePartitionFailureList = (
output: any,
context: __SerdeContext
): BatchUpdatePartitionFailureEntry[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1BatchUpdatePartitionFailureEntry(entry, context);
});
};
const deserializeAws_json1_1BatchUpdatePartitionResponse = (
output: any,
context: __SerdeContext
): BatchUpdatePartitionResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1BatchUpdatePartitionFailureList(output.Errors, context)
: undefined,
} as any;
};
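// __limitedParseDouble is the inverse of __serializeFloat: it accepts plain
// numbers as well as the string encodings "NaN", "Infinity", and
// "-Infinity" used for non-finite doubles.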
const deserializeAws_json1_1BinaryColumnStatisticsData = (
output: any,
context: __SerdeContext
): BinaryColumnStatisticsData => {
return {
AverageLength: __limitedParseDouble(output.AverageLength),
MaximumLength: __expectLong(output.MaximumLength),
NumberOfNulls: __expectLong(output.NumberOfNulls),
} as any;
};
const deserializeAws_json1_1Blueprint = (output: any, context: __SerdeContext): Blueprint => {
return {
BlueprintLocation: __expectString(output.BlueprintLocation),
BlueprintServiceLocation: __expectString(output.BlueprintServiceLocation),
CreatedOn:
output.CreatedOn !== undefined && output.CreatedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedOn)))
: undefined,
Description: __expectString(output.Description),
ErrorMessage: __expectString(output.ErrorMessage),
LastActiveDefinition:
output.LastActiveDefinition !== undefined && output.LastActiveDefinition !== null
? deserializeAws_json1_1LastActiveDefinition(output.LastActiveDefinition, context)
: undefined,
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
Name: __expectString(output.Name),
ParameterSpec: __expectString(output.ParameterSpec),
Status: __expectString(output.Status),
} as any;
};
const deserializeAws_json1_1BlueprintDetails = (output: any, context: __SerdeContext): BlueprintDetails => {
return {
BlueprintName: __expectString(output.BlueprintName),
RunId: __expectString(output.RunId),
} as any;
};
const deserializeAws_json1_1BlueprintNames = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1BlueprintRun = (output: any, context: __SerdeContext): BlueprintRun => {
return {
BlueprintName: __expectString(output.BlueprintName),
CompletedOn:
output.CompletedOn !== undefined && output.CompletedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CompletedOn)))
: undefined,
ErrorMessage: __expectString(output.ErrorMessage),
Parameters: __expectString(output.Parameters),
RoleArn: __expectString(output.RoleArn),
RollbackErrorMessage: __expectString(output.RollbackErrorMessage),
RunId: __expectString(output.RunId),
StartedOn:
output.StartedOn !== undefined && output.StartedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.StartedOn)))
: undefined,
State: __expectString(output.State),
WorkflowName: __expectString(output.WorkflowName),
} as any;
};
const deserializeAws_json1_1BlueprintRuns = (output: any, context: __SerdeContext): BlueprintRun[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1BlueprintRun(entry, context);
});
};
const deserializeAws_json1_1Blueprints = (output: any, context: __SerdeContext): Blueprint[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Blueprint(entry, context);
});
};
const deserializeAws_json1_1BooleanColumnStatisticsData = (
output: any,
context: __SerdeContext
): BooleanColumnStatisticsData => {
return {
NumberOfFalses: __expectLong(output.NumberOfFalses),
NumberOfNulls: __expectLong(output.NumberOfNulls),
NumberOfTrues: __expectLong(output.NumberOfTrues),
} as any;
};
const deserializeAws_json1_1BoundedPartitionValueList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1CancelMLTaskRunResponse = (
output: any,
context: __SerdeContext
): CancelMLTaskRunResponse => {
return {
Status: __expectString(output.Status),
TaskRunId: __expectString(output.TaskRunId),
TransformId: __expectString(output.TransformId),
} as any;
};
const deserializeAws_json1_1CatalogImportStatus = (output: any, context: __SerdeContext): CatalogImportStatus => {
return {
ImportCompleted: __expectBoolean(output.ImportCompleted),
ImportTime:
output.ImportTime !== undefined && output.ImportTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.ImportTime)))
: undefined,
ImportedBy: __expectString(output.ImportedBy),
} as any;
};
const deserializeAws_json1_1CatalogTablesList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1CatalogTarget = (output: any, context: __SerdeContext): CatalogTarget => {
return {
DatabaseName: __expectString(output.DatabaseName),
Tables:
output.Tables !== undefined && output.Tables !== null
? deserializeAws_json1_1CatalogTablesList(output.Tables, context)
: undefined,
} as any;
};
const deserializeAws_json1_1CatalogTargetList = (output: any, context: __SerdeContext): CatalogTarget[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1CatalogTarget(entry, context);
});
};
const deserializeAws_json1_1CheckSchemaVersionValidityResponse = (
output: any,
context: __SerdeContext
): CheckSchemaVersionValidityResponse => {
return {
Error: __expectString(output.Error),
Valid: __expectBoolean(output.Valid),
} as any;
};
const deserializeAws_json1_1Classifier = (output: any, context: __SerdeContext): Classifier => {
return {
CsvClassifier:
output.CsvClassifier !== undefined && output.CsvClassifier !== null
? deserializeAws_json1_1CsvClassifier(output.CsvClassifier, context)
: undefined,
GrokClassifier:
output.GrokClassifier !== undefined && output.GrokClassifier !== null
? deserializeAws_json1_1GrokClassifier(output.GrokClassifier, context)
: undefined,
JsonClassifier:
output.JsonClassifier !== undefined && output.JsonClassifier !== null
? deserializeAws_json1_1JsonClassifier(output.JsonClassifier, context)
: undefined,
XMLClassifier:
output.XMLClassifier !== undefined && output.XMLClassifier !== null
? deserializeAws_json1_1XMLClassifier(output.XMLClassifier, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ClassifierList = (output: any, context: __SerdeContext): Classifier[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Classifier(entry, context);
});
};
const deserializeAws_json1_1ClassifierNameList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1CloudWatchEncryption = (output: any, context: __SerdeContext): CloudWatchEncryption => {
return {
CloudWatchEncryptionMode: __expectString(output.CloudWatchEncryptionMode),
KmsKeyArn: __expectString(output.KmsKeyArn),
} as any;
};
const deserializeAws_json1_1CodeGenEdge = (output: any, context: __SerdeContext): CodeGenEdge => {
return {
Source: __expectString(output.Source),
Target: __expectString(output.Target),
TargetParameter: __expectString(output.TargetParameter),
} as any;
};
const deserializeAws_json1_1CodeGenNode = (output: any, context: __SerdeContext): CodeGenNode => {
return {
Args:
output.Args !== undefined && output.Args !== null
? deserializeAws_json1_1CodeGenNodeArgs(output.Args, context)
: undefined,
Id: __expectString(output.Id),
LineNumber: __expectInt32(output.LineNumber),
NodeType: __expectString(output.NodeType),
} as any;
};
const deserializeAws_json1_1CodeGenNodeArg = (output: any, context: __SerdeContext): CodeGenNodeArg => {
return {
Name: __expectString(output.Name),
Param: __expectBoolean(output.Param),
Value: __expectString(output.Value),
} as any;
};
const deserializeAws_json1_1CodeGenNodeArgs = (output: any, context: __SerdeContext): CodeGenNodeArg[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1CodeGenNodeArg(entry, context);
});
};
const deserializeAws_json1_1Column = (output: any, context: __SerdeContext): Column => {
return {
Comment: __expectString(output.Comment),
Name: __expectString(output.Name),
Parameters:
output.Parameters !== undefined && output.Parameters !== null
? deserializeAws_json1_1ParametersMap(output.Parameters, context)
: undefined,
Type: __expectString(output.Type),
} as any;
};
const deserializeAws_json1_1ColumnError = (output: any, context: __SerdeContext): ColumnError => {
return {
ColumnName: __expectString(output.ColumnName),
Error:
output.Error !== undefined && output.Error !== null
? deserializeAws_json1_1ErrorDetail(output.Error, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ColumnErrors = (output: any, context: __SerdeContext): ColumnError[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1ColumnError(entry, context);
});
};
const deserializeAws_json1_1ColumnImportance = (output: any, context: __SerdeContext): ColumnImportance => {
return {
ColumnName: __expectString(output.ColumnName),
Importance: __limitedParseDouble(output.Importance),
} as any;
};
const deserializeAws_json1_1ColumnImportanceList = (output: any, context: __SerdeContext): ColumnImportance[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1ColumnImportance(entry, context);
});
};
const deserializeAws_json1_1ColumnList = (output: any, context: __SerdeContext): Column[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Column(entry, context);
});
};
const deserializeAws_json1_1ColumnStatistics = (output: any, context: __SerdeContext): ColumnStatistics => {
return {
AnalyzedTime:
output.AnalyzedTime !== undefined && output.AnalyzedTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.AnalyzedTime)))
: undefined,
ColumnName: __expectString(output.ColumnName),
ColumnType: __expectString(output.ColumnType),
StatisticsData:
output.StatisticsData !== undefined && output.StatisticsData !== null
? deserializeAws_json1_1ColumnStatisticsData(output.StatisticsData, context)
: undefined,
} as any;
};
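// ColumnStatisticsData is effectively a tagged union: Type names the
// statistics variant, and exactly one of the *ColumnStatisticsData members
// is expected to be populated.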
const deserializeAws_json1_1ColumnStatisticsData = (output: any, context: __SerdeContext): ColumnStatisticsData => {
return {
BinaryColumnStatisticsData:
output.BinaryColumnStatisticsData !== undefined && output.BinaryColumnStatisticsData !== null
? deserializeAws_json1_1BinaryColumnStatisticsData(output.BinaryColumnStatisticsData, context)
: undefined,
BooleanColumnStatisticsData:
output.BooleanColumnStatisticsData !== undefined && output.BooleanColumnStatisticsData !== null
? deserializeAws_json1_1BooleanColumnStatisticsData(output.BooleanColumnStatisticsData, context)
: undefined,
DateColumnStatisticsData:
output.DateColumnStatisticsData !== undefined && output.DateColumnStatisticsData !== null
? deserializeAws_json1_1DateColumnStatisticsData(output.DateColumnStatisticsData, context)
: undefined,
DecimalColumnStatisticsData:
output.DecimalColumnStatisticsData !== undefined && output.DecimalColumnStatisticsData !== null
? deserializeAws_json1_1DecimalColumnStatisticsData(output.DecimalColumnStatisticsData, context)
: undefined,
DoubleColumnStatisticsData:
output.DoubleColumnStatisticsData !== undefined && output.DoubleColumnStatisticsData !== null
? deserializeAws_json1_1DoubleColumnStatisticsData(output.DoubleColumnStatisticsData, context)
: undefined,
LongColumnStatisticsData:
output.LongColumnStatisticsData !== undefined && output.LongColumnStatisticsData !== null
? deserializeAws_json1_1LongColumnStatisticsData(output.LongColumnStatisticsData, context)
: undefined,
StringColumnStatisticsData:
output.StringColumnStatisticsData !== undefined && output.StringColumnStatisticsData !== null
? deserializeAws_json1_1StringColumnStatisticsData(output.StringColumnStatisticsData, context)
: undefined,
Type: __expectString(output.Type),
} as any;
};
const deserializeAws_json1_1ColumnStatisticsError = (output: any, context: __SerdeContext): ColumnStatisticsError => {
return {
ColumnStatistics:
output.ColumnStatistics !== undefined && output.ColumnStatistics !== null
? deserializeAws_json1_1ColumnStatistics(output.ColumnStatistics, context)
: undefined,
Error:
output.Error !== undefined && output.Error !== null
? deserializeAws_json1_1ErrorDetail(output.Error, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ColumnStatisticsErrors = (
output: any,
context: __SerdeContext
): ColumnStatisticsError[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1ColumnStatisticsError(entry, context);
});
};
const deserializeAws_json1_1ColumnStatisticsList = (output: any, context: __SerdeContext): ColumnStatistics[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1ColumnStatistics(entry, context);
});
};
const deserializeAws_json1_1ColumnValueStringList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1ConcurrentModificationException = (
output: any,
context: __SerdeContext
): ConcurrentModificationException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1ConcurrentRunsExceededException = (
output: any,
context: __SerdeContext
): ConcurrentRunsExceededException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1Condition = (output: any, context: __SerdeContext): Condition => {
return {
CrawlState: __expectString(output.CrawlState),
CrawlerName: __expectString(output.CrawlerName),
JobName: __expectString(output.JobName),
LogicalOperator: __expectString(output.LogicalOperator),
State: __expectString(output.State),
} as any;
};
const deserializeAws_json1_1ConditionCheckFailureException = (
output: any,
context: __SerdeContext
): ConditionCheckFailureException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1ConditionList = (output: any, context: __SerdeContext): Condition[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Condition(entry, context);
});
};
const deserializeAws_json1_1ConflictException = (output: any, context: __SerdeContext): ConflictException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1ConfusionMatrix = (output: any, context: __SerdeContext): ConfusionMatrix => {
return {
NumFalseNegatives: __expectLong(output.NumFalseNegatives),
NumFalsePositives: __expectLong(output.NumFalsePositives),
NumTrueNegatives: __expectLong(output.NumTrueNegatives),
NumTruePositives: __expectLong(output.NumTruePositives),
} as any;
};
const deserializeAws_json1_1Connection = (output: any, context: __SerdeContext): Connection => {
return {
ConnectionProperties:
output.ConnectionProperties !== undefined && output.ConnectionProperties !== null
? deserializeAws_json1_1ConnectionProperties(output.ConnectionProperties, context)
: undefined,
ConnectionType: __expectString(output.ConnectionType),
CreationTime:
output.CreationTime !== undefined && output.CreationTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreationTime)))
: undefined,
Description: __expectString(output.Description),
LastUpdatedBy: __expectString(output.LastUpdatedBy),
LastUpdatedTime:
output.LastUpdatedTime !== undefined && output.LastUpdatedTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastUpdatedTime)))
: undefined,
MatchCriteria:
output.MatchCriteria !== undefined && output.MatchCriteria !== null
? deserializeAws_json1_1MatchCriteria(output.MatchCriteria, context)
: undefined,
Name: __expectString(output.Name),
PhysicalConnectionRequirements:
output.PhysicalConnectionRequirements !== undefined && output.PhysicalConnectionRequirements !== null
? deserializeAws_json1_1PhysicalConnectionRequirements(output.PhysicalConnectionRequirements, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ConnectionList = (output: any, context: __SerdeContext): Connection[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Connection(entry, context);
});
};
const deserializeAws_json1_1ConnectionPasswordEncryption = (
output: any,
context: __SerdeContext
): ConnectionPasswordEncryption => {
return {
AwsKmsKeyId: __expectString(output.AwsKmsKeyId),
ReturnConnectionPasswordEncrypted: __expectBoolean(output.ReturnConnectionPasswordEncrypted),
} as any;
};
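// Map-valued shapes such as `ConnectionProperties` are rebuilt with
// `Object.entries(...).reduce(...)`, skipping null values and validating each
// entry. As an illustrative (not real-response) example, an input of
//   { "USERNAME": "admin", "JDBC_CONNECTION_URL": "jdbc:..." }
// deserializes to the equivalent `{ [key: string]: string }` record.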
const deserializeAws_json1_1ConnectionProperties = (
output: any,
context: __SerdeContext
): { [key: string]: string } => {
return Object.entries(output).reduce(
(acc: { [key: string]: string }, [key, value]: [ConnectionPropertyKey | string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: __expectString(value) as any,
};
},
{}
);
};
const deserializeAws_json1_1ConnectionsList = (output: any, context: __SerdeContext): ConnectionsList => {
return {
Connections:
output.Connections !== undefined && output.Connections !== null
? deserializeAws_json1_1OrchestrationStringList(output.Connections, context)
: undefined,
} as any;
};
const deserializeAws_json1_1Crawl = (output: any, context: __SerdeContext): Crawl => {
return {
CompletedOn:
output.CompletedOn !== undefined && output.CompletedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CompletedOn)))
: undefined,
ErrorMessage: __expectString(output.ErrorMessage),
LogGroup: __expectString(output.LogGroup),
LogStream: __expectString(output.LogStream),
StartedOn:
output.StartedOn !== undefined && output.StartedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.StartedOn)))
: undefined,
State: __expectString(output.State),
} as any;
};
const deserializeAws_json1_1Crawler = (output: any, context: __SerdeContext): Crawler => {
return {
Classifiers:
output.Classifiers !== undefined && output.Classifiers !== null
? deserializeAws_json1_1ClassifierNameList(output.Classifiers, context)
: undefined,
Configuration: __expectString(output.Configuration),
CrawlElapsedTime: __expectLong(output.CrawlElapsedTime),
CrawlerSecurityConfiguration: __expectString(output.CrawlerSecurityConfiguration),
CreationTime:
output.CreationTime !== undefined && output.CreationTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreationTime)))
: undefined,
DatabaseName: __expectString(output.DatabaseName),
Description: __expectString(output.Description),
LastCrawl:
output.LastCrawl !== undefined && output.LastCrawl !== null
? deserializeAws_json1_1LastCrawlInfo(output.LastCrawl, context)
: undefined,
LastUpdated:
output.LastUpdated !== undefined && output.LastUpdated !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastUpdated)))
: undefined,
LineageConfiguration:
output.LineageConfiguration !== undefined && output.LineageConfiguration !== null
? deserializeAws_json1_1LineageConfiguration(output.LineageConfiguration, context)
: undefined,
Name: __expectString(output.Name),
RecrawlPolicy:
output.RecrawlPolicy !== undefined && output.RecrawlPolicy !== null
? deserializeAws_json1_1RecrawlPolicy(output.RecrawlPolicy, context)
: undefined,
Role: __expectString(output.Role),
Schedule:
output.Schedule !== undefined && output.Schedule !== null
? deserializeAws_json1_1Schedule(output.Schedule, context)
: undefined,
SchemaChangePolicy:
output.SchemaChangePolicy !== undefined && output.SchemaChangePolicy !== null
? deserializeAws_json1_1SchemaChangePolicy(output.SchemaChangePolicy, context)
: undefined,
State: __expectString(output.State),
TablePrefix: __expectString(output.TablePrefix),
Targets:
output.Targets !== undefined && output.Targets !== null
? deserializeAws_json1_1CrawlerTargets(output.Targets, context)
: undefined,
Version: __expectLong(output.Version),
} as any;
};
const deserializeAws_json1_1CrawlerList = (output: any, context: __SerdeContext): Crawler[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Crawler(entry, context);
});
};
const deserializeAws_json1_1CrawlerMetrics = (output: any, context: __SerdeContext): CrawlerMetrics => {
return {
CrawlerName: __expectString(output.CrawlerName),
LastRuntimeSeconds: __limitedParseDouble(output.LastRuntimeSeconds),
MedianRuntimeSeconds: __limitedParseDouble(output.MedianRuntimeSeconds),
StillEstimating: __expectBoolean(output.StillEstimating),
TablesCreated: __expectInt32(output.TablesCreated),
TablesDeleted: __expectInt32(output.TablesDeleted),
TablesUpdated: __expectInt32(output.TablesUpdated),
TimeLeftSeconds: __limitedParseDouble(output.TimeLeftSeconds),
} as any;
};
const deserializeAws_json1_1CrawlerMetricsList = (output: any, context: __SerdeContext): CrawlerMetrics[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1CrawlerMetrics(entry, context);
});
};
const deserializeAws_json1_1CrawlerNameList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1CrawlerNodeDetails = (output: any, context: __SerdeContext): CrawlerNodeDetails => {
return {
Crawls:
output.Crawls !== undefined && output.Crawls !== null
? deserializeAws_json1_1CrawlList(output.Crawls, context)
: undefined,
} as any;
};
const deserializeAws_json1_1CrawlerNotRunningException = (
output: any,
context: __SerdeContext
): CrawlerNotRunningException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1CrawlerRunningException = (
output: any,
context: __SerdeContext
): CrawlerRunningException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1CrawlerStoppingException = (
output: any,
context: __SerdeContext
): CrawlerStoppingException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1CrawlerTargets = (output: any, context: __SerdeContext): CrawlerTargets => {
return {
CatalogTargets:
output.CatalogTargets !== undefined && output.CatalogTargets !== null
? deserializeAws_json1_1CatalogTargetList(output.CatalogTargets, context)
: undefined,
DynamoDBTargets:
output.DynamoDBTargets !== undefined && output.DynamoDBTargets !== null
? deserializeAws_json1_1DynamoDBTargetList(output.DynamoDBTargets, context)
: undefined,
JdbcTargets:
output.JdbcTargets !== undefined && output.JdbcTargets !== null
? deserializeAws_json1_1JdbcTargetList(output.JdbcTargets, context)
: undefined,
MongoDBTargets:
output.MongoDBTargets !== undefined && output.MongoDBTargets !== null
? deserializeAws_json1_1MongoDBTargetList(output.MongoDBTargets, context)
: undefined,
S3Targets:
output.S3Targets !== undefined && output.S3Targets !== null
? deserializeAws_json1_1S3TargetList(output.S3Targets, context)
: undefined,
} as any;
};
const deserializeAws_json1_1CrawlList = (output: any, context: __SerdeContext): Crawl[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Crawl(entry, context);
});
};
const deserializeAws_json1_1CreateBlueprintResponse = (
output: any,
context: __SerdeContext
): CreateBlueprintResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
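// Several Create*/Delete* operations model no response fields; their
// deserializers simply produce an empty object, as below.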
const deserializeAws_json1_1CreateClassifierResponse = (
output: any,
context: __SerdeContext
): CreateClassifierResponse => {
return {} as any;
};
const deserializeAws_json1_1CreateConnectionResponse = (
output: any,
context: __SerdeContext
): CreateConnectionResponse => {
return {} as any;
};
const deserializeAws_json1_1CreateCrawlerResponse = (output: any, context: __SerdeContext): CreateCrawlerResponse => {
return {} as any;
};
const deserializeAws_json1_1CreateDatabaseResponse = (output: any, context: __SerdeContext): CreateDatabaseResponse => {
return {} as any;
};
const deserializeAws_json1_1CreateDevEndpointResponse = (
output: any,
context: __SerdeContext
): CreateDevEndpointResponse => {
return {
Arguments:
output.Arguments !== undefined && output.Arguments !== null
? deserializeAws_json1_1MapValue(output.Arguments, context)
: undefined,
AvailabilityZone: __expectString(output.AvailabilityZone),
CreatedTimestamp:
output.CreatedTimestamp !== undefined && output.CreatedTimestamp !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedTimestamp)))
: undefined,
EndpointName: __expectString(output.EndpointName),
ExtraJarsS3Path: __expectString(output.ExtraJarsS3Path),
ExtraPythonLibsS3Path: __expectString(output.ExtraPythonLibsS3Path),
FailureReason: __expectString(output.FailureReason),
GlueVersion: __expectString(output.GlueVersion),
NumberOfNodes: __expectInt32(output.NumberOfNodes),
NumberOfWorkers: __expectInt32(output.NumberOfWorkers),
RoleArn: __expectString(output.RoleArn),
SecurityConfiguration: __expectString(output.SecurityConfiguration),
SecurityGroupIds:
output.SecurityGroupIds !== undefined && output.SecurityGroupIds !== null
? deserializeAws_json1_1StringList(output.SecurityGroupIds, context)
: undefined,
Status: __expectString(output.Status),
SubnetId: __expectString(output.SubnetId),
VpcId: __expectString(output.VpcId),
WorkerType: __expectString(output.WorkerType),
YarnEndpointAddress: __expectString(output.YarnEndpointAddress),
ZeppelinRemoteSparkInterpreterPort: __expectInt32(output.ZeppelinRemoteSparkInterpreterPort),
} as any;
};
const deserializeAws_json1_1CreateJobResponse = (output: any, context: __SerdeContext): CreateJobResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1CreateMLTransformResponse = (
output: any,
context: __SerdeContext
): CreateMLTransformResponse => {
return {
TransformId: __expectString(output.TransformId),
} as any;
};
const deserializeAws_json1_1CreatePartitionIndexResponse = (
output: any,
context: __SerdeContext
): CreatePartitionIndexResponse => {
return {} as any;
};
const deserializeAws_json1_1CreatePartitionResponse = (
output: any,
context: __SerdeContext
): CreatePartitionResponse => {
return {} as any;
};
const deserializeAws_json1_1CreateRegistryResponse = (output: any, context: __SerdeContext): CreateRegistryResponse => {
return {
Description: __expectString(output.Description),
RegistryArn: __expectString(output.RegistryArn),
RegistryName: __expectString(output.RegistryName),
Tags:
output.Tags !== undefined && output.Tags !== null
? deserializeAws_json1_1TagsMap(output.Tags, context)
: undefined,
} as any;
};
const deserializeAws_json1_1CreateSchemaResponse = (output: any, context: __SerdeContext): CreateSchemaResponse => {
return {
Compatibility: __expectString(output.Compatibility),
DataFormat: __expectString(output.DataFormat),
Description: __expectString(output.Description),
LatestSchemaVersion: __expectLong(output.LatestSchemaVersion),
NextSchemaVersion: __expectLong(output.NextSchemaVersion),
RegistryArn: __expectString(output.RegistryArn),
RegistryName: __expectString(output.RegistryName),
SchemaArn: __expectString(output.SchemaArn),
SchemaCheckpoint: __expectLong(output.SchemaCheckpoint),
SchemaName: __expectString(output.SchemaName),
SchemaStatus: __expectString(output.SchemaStatus),
SchemaVersionId: __expectString(output.SchemaVersionId),
SchemaVersionStatus: __expectString(output.SchemaVersionStatus),
Tags:
output.Tags !== undefined && output.Tags !== null
? deserializeAws_json1_1TagsMap(output.Tags, context)
: undefined,
} as any;
};
const deserializeAws_json1_1CreateScriptResponse = (output: any, context: __SerdeContext): CreateScriptResponse => {
return {
PythonScript: __expectString(output.PythonScript),
ScalaCode: __expectString(output.ScalaCode),
} as any;
};
const deserializeAws_json1_1CreateSecurityConfigurationResponse = (
output: any,
context: __SerdeContext
): CreateSecurityConfigurationResponse => {
return {
CreatedTimestamp:
output.CreatedTimestamp !== undefined && output.CreatedTimestamp !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedTimestamp)))
: undefined,
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1CreateTableResponse = (output: any, context: __SerdeContext): CreateTableResponse => {
return {} as any;
};
const deserializeAws_json1_1CreateTriggerResponse = (output: any, context: __SerdeContext): CreateTriggerResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1CreateUserDefinedFunctionResponse = (
output: any,
context: __SerdeContext
): CreateUserDefinedFunctionResponse => {
return {} as any;
};
const deserializeAws_json1_1CreateWorkflowResponse = (output: any, context: __SerdeContext): CreateWorkflowResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1CsvClassifier = (output: any, context: __SerdeContext): CsvClassifier => {
return {
AllowSingleColumn: __expectBoolean(output.AllowSingleColumn),
ContainsHeader: __expectString(output.ContainsHeader),
CreationTime:
output.CreationTime !== undefined && output.CreationTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreationTime)))
: undefined,
Delimiter: __expectString(output.Delimiter),
DisableValueTrimming: __expectBoolean(output.DisableValueTrimming),
Header:
output.Header !== undefined && output.Header !== null
? deserializeAws_json1_1CsvHeader(output.Header, context)
: undefined,
LastUpdated:
output.LastUpdated !== undefined && output.LastUpdated !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastUpdated)))
: undefined,
Name: __expectString(output.Name),
QuoteSymbol: __expectString(output.QuoteSymbol),
Version: __expectLong(output.Version),
} as any;
};
const deserializeAws_json1_1CsvHeader = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1DagEdges = (output: any, context: __SerdeContext): CodeGenEdge[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1CodeGenEdge(entry, context);
});
};
const deserializeAws_json1_1DagNodes = (output: any, context: __SerdeContext): CodeGenNode[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1CodeGenNode(entry, context);
});
};
const deserializeAws_json1_1Database = (output: any, context: __SerdeContext): Database => {
return {
CatalogId: __expectString(output.CatalogId),
CreateTableDefaultPermissions:
output.CreateTableDefaultPermissions !== undefined && output.CreateTableDefaultPermissions !== null
? deserializeAws_json1_1PrincipalPermissionsList(output.CreateTableDefaultPermissions, context)
: undefined,
CreateTime:
output.CreateTime !== undefined && output.CreateTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreateTime)))
: undefined,
Description: __expectString(output.Description),
LocationUri: __expectString(output.LocationUri),
Name: __expectString(output.Name),
Parameters:
output.Parameters !== undefined && output.Parameters !== null
? deserializeAws_json1_1ParametersMap(output.Parameters, context)
: undefined,
TargetDatabase:
output.TargetDatabase !== undefined && output.TargetDatabase !== null
? deserializeAws_json1_1DatabaseIdentifier(output.TargetDatabase, context)
: undefined,
} as any;
};
const deserializeAws_json1_1DatabaseIdentifier = (output: any, context: __SerdeContext): DatabaseIdentifier => {
return {
CatalogId: __expectString(output.CatalogId),
DatabaseName: __expectString(output.DatabaseName),
} as any;
};
const deserializeAws_json1_1DatabaseList = (output: any, context: __SerdeContext): Database[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Database(entry, context);
});
};
const deserializeAws_json1_1DataCatalogEncryptionSettings = (
output: any,
context: __SerdeContext
): DataCatalogEncryptionSettings => {
return {
ConnectionPasswordEncryption:
output.ConnectionPasswordEncryption !== undefined && output.ConnectionPasswordEncryption !== null
? deserializeAws_json1_1ConnectionPasswordEncryption(output.ConnectionPasswordEncryption, context)
: undefined,
EncryptionAtRest:
output.EncryptionAtRest !== undefined && output.EncryptionAtRest !== null
? deserializeAws_json1_1EncryptionAtRest(output.EncryptionAtRest, context)
: undefined,
} as any;
};
const deserializeAws_json1_1DataLakePrincipal = (output: any, context: __SerdeContext): DataLakePrincipal => {
return {
DataLakePrincipalIdentifier: __expectString(output.DataLakePrincipalIdentifier),
} as any;
};
const deserializeAws_json1_1DateColumnStatisticsData = (
output: any,
context: __SerdeContext
): DateColumnStatisticsData => {
return {
MaximumValue:
output.MaximumValue !== undefined && output.MaximumValue !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.MaximumValue)))
: undefined,
MinimumValue:
output.MinimumValue !== undefined && output.MinimumValue !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.MinimumValue)))
: undefined,
NumberOfDistinctValues: __expectLong(output.NumberOfDistinctValues),
NumberOfNulls: __expectLong(output.NumberOfNulls),
} as any;
};
const deserializeAws_json1_1DecimalColumnStatisticsData = (
output: any,
context: __SerdeContext
): DecimalColumnStatisticsData => {
return {
MaximumValue:
output.MaximumValue !== undefined && output.MaximumValue !== null
? deserializeAws_json1_1DecimalNumber(output.MaximumValue, context)
: undefined,
MinimumValue:
output.MinimumValue !== undefined && output.MinimumValue !== null
? deserializeAws_json1_1DecimalNumber(output.MinimumValue, context)
: undefined,
NumberOfDistinctValues: __expectLong(output.NumberOfDistinctValues),
NumberOfNulls: __expectLong(output.NumberOfNulls),
} as any;
};
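// `DecimalNumber` is the one shape in this section that uses the runtime's
// `context.base64Decoder`: the wire value of `UnscaledValue` is a
// base64-encoded blob, decoded here to bytes (a `Uint8Array` per the
// `__SerdeContext` contract).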
const deserializeAws_json1_1DecimalNumber = (output: any, context: __SerdeContext): DecimalNumber => {
return {
Scale: __expectInt32(output.Scale),
UnscaledValue:
output.UnscaledValue !== undefined && output.UnscaledValue !== null
? context.base64Decoder(output.UnscaledValue)
: undefined,
} as any;
};
const deserializeAws_json1_1DeleteBlueprintResponse = (
output: any,
context: __SerdeContext
): DeleteBlueprintResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1DeleteClassifierResponse = (
output: any,
context: __SerdeContext
): DeleteClassifierResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteColumnStatisticsForPartitionResponse = (
output: any,
context: __SerdeContext
): DeleteColumnStatisticsForPartitionResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteColumnStatisticsForTableResponse = (
output: any,
context: __SerdeContext
): DeleteColumnStatisticsForTableResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteConnectionResponse = (
output: any,
context: __SerdeContext
): DeleteConnectionResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteCrawlerResponse = (output: any, context: __SerdeContext): DeleteCrawlerResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteDatabaseResponse = (output: any, context: __SerdeContext): DeleteDatabaseResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteDevEndpointResponse = (
output: any,
context: __SerdeContext
): DeleteDevEndpointResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteJobResponse = (output: any, context: __SerdeContext): DeleteJobResponse => {
return {
JobName: __expectString(output.JobName),
} as any;
};
const deserializeAws_json1_1DeleteMLTransformResponse = (
output: any,
context: __SerdeContext
): DeleteMLTransformResponse => {
return {
TransformId: __expectString(output.TransformId),
} as any;
};
const deserializeAws_json1_1DeletePartitionIndexResponse = (
output: any,
context: __SerdeContext
): DeletePartitionIndexResponse => {
return {} as any;
};
const deserializeAws_json1_1DeletePartitionResponse = (
output: any,
context: __SerdeContext
): DeletePartitionResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteRegistryResponse = (output: any, context: __SerdeContext): DeleteRegistryResponse => {
return {
RegistryArn: __expectString(output.RegistryArn),
RegistryName: __expectString(output.RegistryName),
Status: __expectString(output.Status),
} as any;
};
const deserializeAws_json1_1DeleteResourcePolicyResponse = (
output: any,
context: __SerdeContext
): DeleteResourcePolicyResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteSchemaResponse = (output: any, context: __SerdeContext): DeleteSchemaResponse => {
return {
SchemaArn: __expectString(output.SchemaArn),
SchemaName: __expectString(output.SchemaName),
Status: __expectString(output.Status),
} as any;
};
const deserializeAws_json1_1DeleteSchemaVersionsResponse = (
output: any,
context: __SerdeContext
): DeleteSchemaVersionsResponse => {
return {
SchemaVersionErrors:
output.SchemaVersionErrors !== undefined && output.SchemaVersionErrors !== null
? deserializeAws_json1_1SchemaVersionErrorList(output.SchemaVersionErrors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1DeleteSecurityConfigurationResponse = (
output: any,
context: __SerdeContext
): DeleteSecurityConfigurationResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteTableResponse = (output: any, context: __SerdeContext): DeleteTableResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteTableVersionResponse = (
output: any,
context: __SerdeContext
): DeleteTableVersionResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteTriggerResponse = (output: any, context: __SerdeContext): DeleteTriggerResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1DeleteUserDefinedFunctionResponse = (
output: any,
context: __SerdeContext
): DeleteUserDefinedFunctionResponse => {
return {} as any;
};
const deserializeAws_json1_1DeleteWorkflowResponse = (output: any, context: __SerdeContext): DeleteWorkflowResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1DevEndpoint = (output: any, context: __SerdeContext): DevEndpoint => {
return {
Arguments:
output.Arguments !== undefined && output.Arguments !== null
? deserializeAws_json1_1MapValue(output.Arguments, context)
: undefined,
AvailabilityZone: __expectString(output.AvailabilityZone),
CreatedTimestamp:
output.CreatedTimestamp !== undefined && output.CreatedTimestamp !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedTimestamp)))
: undefined,
EndpointName: __expectString(output.EndpointName),
ExtraJarsS3Path: __expectString(output.ExtraJarsS3Path),
ExtraPythonLibsS3Path: __expectString(output.ExtraPythonLibsS3Path),
FailureReason: __expectString(output.FailureReason),
GlueVersion: __expectString(output.GlueVersion),
LastModifiedTimestamp:
output.LastModifiedTimestamp !== undefined && output.LastModifiedTimestamp !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedTimestamp)))
: undefined,
LastUpdateStatus: __expectString(output.LastUpdateStatus),
NumberOfNodes: __expectInt32(output.NumberOfNodes),
NumberOfWorkers: __expectInt32(output.NumberOfWorkers),
PrivateAddress: __expectString(output.PrivateAddress),
PublicAddress: __expectString(output.PublicAddress),
PublicKey: __expectString(output.PublicKey),
PublicKeys:
output.PublicKeys !== undefined && output.PublicKeys !== null
? deserializeAws_json1_1PublicKeysList(output.PublicKeys, context)
: undefined,
RoleArn: __expectString(output.RoleArn),
SecurityConfiguration: __expectString(output.SecurityConfiguration),
SecurityGroupIds:
output.SecurityGroupIds !== undefined && output.SecurityGroupIds !== null
? deserializeAws_json1_1StringList(output.SecurityGroupIds, context)
: undefined,
Status: __expectString(output.Status),
SubnetId: __expectString(output.SubnetId),
VpcId: __expectString(output.VpcId),
WorkerType: __expectString(output.WorkerType),
YarnEndpointAddress: __expectString(output.YarnEndpointAddress),
ZeppelinRemoteSparkInterpreterPort: __expectInt32(output.ZeppelinRemoteSparkInterpreterPort),
} as any;
};
const deserializeAws_json1_1DevEndpointList = (output: any, context: __SerdeContext): DevEndpoint[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1DevEndpoint(entry, context);
});
};
const deserializeAws_json1_1DevEndpointNameList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1DevEndpointNames = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1DoubleColumnStatisticsData = (
output: any,
context: __SerdeContext
): DoubleColumnStatisticsData => {
return {
MaximumValue: __limitedParseDouble(output.MaximumValue),
MinimumValue: __limitedParseDouble(output.MinimumValue),
NumberOfDistinctValues: __expectLong(output.NumberOfDistinctValues),
NumberOfNulls: __expectLong(output.NumberOfNulls),
} as any;
};
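// The lower-cased member names below (`scanAll`, `scanRate`) mirror the Glue
// service model rather than the PascalCase convention used elsewhere; they
// are intentional, not typos.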
const deserializeAws_json1_1DynamoDBTarget = (output: any, context: __SerdeContext): DynamoDBTarget => {
return {
Path: __expectString(output.Path),
scanAll: __expectBoolean(output.scanAll),
scanRate: __limitedParseDouble(output.scanRate),
} as any;
};
const deserializeAws_json1_1DynamoDBTargetList = (output: any, context: __SerdeContext): DynamoDBTarget[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1DynamoDBTarget(entry, context);
});
};
const deserializeAws_json1_1Edge = (output: any, context: __SerdeContext): Edge => {
return {
DestinationId: __expectString(output.DestinationId),
SourceId: __expectString(output.SourceId),
} as any;
};
const deserializeAws_json1_1EdgeList = (output: any, context: __SerdeContext): Edge[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Edge(entry, context);
});
};
const deserializeAws_json1_1EncryptionAtRest = (output: any, context: __SerdeContext): EncryptionAtRest => {
return {
CatalogEncryptionMode: __expectString(output.CatalogEncryptionMode),
SseAwsKmsKeyId: __expectString(output.SseAwsKmsKeyId),
} as any;
};
const deserializeAws_json1_1EncryptionConfiguration = (
output: any,
context: __SerdeContext
): EncryptionConfiguration => {
return {
CloudWatchEncryption:
output.CloudWatchEncryption !== undefined && output.CloudWatchEncryption !== null
? deserializeAws_json1_1CloudWatchEncryption(output.CloudWatchEncryption, context)
: undefined,
JobBookmarksEncryption:
output.JobBookmarksEncryption !== undefined && output.JobBookmarksEncryption !== null
? deserializeAws_json1_1JobBookmarksEncryption(output.JobBookmarksEncryption, context)
: undefined,
S3Encryption:
output.S3Encryption !== undefined && output.S3Encryption !== null
? deserializeAws_json1_1S3EncryptionList(output.S3Encryption, context)
: undefined,
} as any;
};
const deserializeAws_json1_1EntityNotFoundException = (
output: any,
context: __SerdeContext
): EntityNotFoundException => {
return {
Message: __expectString(output.Message),
} as any;
};
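// `ErrorByName` demonstrates the structure-valued map pattern: the same
// `Object.entries(...).reduce(...)` walk as the string maps above, but each
// value is delegated to `deserializeAws_json1_1ErrorDetail`.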
const deserializeAws_json1_1ErrorByName = (output: any, context: __SerdeContext): { [key: string]: ErrorDetail } => {
return Object.entries(output).reduce((acc: { [key: string]: ErrorDetail }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: deserializeAws_json1_1ErrorDetail(value, context),
};
}, {});
};
const deserializeAws_json1_1ErrorDetail = (output: any, context: __SerdeContext): ErrorDetail => {
return {
ErrorCode: __expectString(output.ErrorCode),
ErrorMessage: __expectString(output.ErrorMessage),
} as any;
};
const deserializeAws_json1_1ErrorDetails = (output: any, context: __SerdeContext): ErrorDetails => {
return {
ErrorCode: __expectString(output.ErrorCode),
ErrorMessage: __expectString(output.ErrorMessage),
} as any;
};
const deserializeAws_json1_1EvaluationMetrics = (output: any, context: __SerdeContext): EvaluationMetrics => {
return {
FindMatchesMetrics:
output.FindMatchesMetrics !== undefined && output.FindMatchesMetrics !== null
? deserializeAws_json1_1FindMatchesMetrics(output.FindMatchesMetrics, context)
: undefined,
TransformType: __expectString(output.TransformType),
} as any;
};
const deserializeAws_json1_1EventBatchingCondition = (output: any, context: __SerdeContext): EventBatchingCondition => {
return {
BatchSize: __expectInt32(output.BatchSize),
BatchWindow: __expectInt32(output.BatchWindow),
} as any;
};
const deserializeAws_json1_1ExecutionProperty = (output: any, context: __SerdeContext): ExecutionProperty => {
return {
MaxConcurrentRuns: __expectInt32(output.MaxConcurrentRuns),
} as any;
};
const deserializeAws_json1_1ExportLabelsTaskRunProperties = (
output: any,
context: __SerdeContext
): ExportLabelsTaskRunProperties => {
return {
OutputS3Path: __expectString(output.OutputS3Path),
} as any;
};
const deserializeAws_json1_1FindMatchesMetrics = (output: any, context: __SerdeContext): FindMatchesMetrics => {
return {
AreaUnderPRCurve: __limitedParseDouble(output.AreaUnderPRCurve),
ColumnImportances:
output.ColumnImportances !== undefined && output.ColumnImportances !== null
? deserializeAws_json1_1ColumnImportanceList(output.ColumnImportances, context)
: undefined,
ConfusionMatrix:
output.ConfusionMatrix !== undefined && output.ConfusionMatrix !== null
? deserializeAws_json1_1ConfusionMatrix(output.ConfusionMatrix, context)
: undefined,
F1: __limitedParseDouble(output.F1),
Precision: __limitedParseDouble(output.Precision),
Recall: __limitedParseDouble(output.Recall),
} as any;
};
const deserializeAws_json1_1FindMatchesParameters = (output: any, context: __SerdeContext): FindMatchesParameters => {
return {
AccuracyCostTradeoff: __limitedParseDouble(output.AccuracyCostTradeoff),
EnforceProvidedLabels: __expectBoolean(output.EnforceProvidedLabels),
PrecisionRecallTradeoff: __limitedParseDouble(output.PrecisionRecallTradeoff),
PrimaryKeyColumnName: __expectString(output.PrimaryKeyColumnName),
} as any;
};
const deserializeAws_json1_1FindMatchesTaskRunProperties = (
output: any,
context: __SerdeContext
): FindMatchesTaskRunProperties => {
return {
JobId: __expectString(output.JobId),
JobName: __expectString(output.JobName),
JobRunId: __expectString(output.JobRunId),
} as any;
};
const deserializeAws_json1_1GenericMap = (output: any, context: __SerdeContext): { [key: string]: string } => {
return Object.entries(output).reduce((acc: { [key: string]: string }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: __expectString(value) as any,
};
}, {});
};
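// The Get*Response deserializers that follow are thin wrappers: each checks
// the modeled top-level members for presence, delegates nested shapes to the
// deserializers defined in this file, and reads paging tokens such as
// `NextToken` with `__expectString`.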
const deserializeAws_json1_1GetBlueprintResponse = (output: any, context: __SerdeContext): GetBlueprintResponse => {
return {
Blueprint:
output.Blueprint !== undefined && output.Blueprint !== null
? deserializeAws_json1_1Blueprint(output.Blueprint, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetBlueprintRunResponse = (
output: any,
context: __SerdeContext
): GetBlueprintRunResponse => {
return {
BlueprintRun:
output.BlueprintRun !== undefined && output.BlueprintRun !== null
? deserializeAws_json1_1BlueprintRun(output.BlueprintRun, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetBlueprintRunsResponse = (
output: any,
context: __SerdeContext
): GetBlueprintRunsResponse => {
return {
BlueprintRuns:
output.BlueprintRuns !== undefined && output.BlueprintRuns !== null
? deserializeAws_json1_1BlueprintRuns(output.BlueprintRuns, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetCatalogImportStatusResponse = (
output: any,
context: __SerdeContext
): GetCatalogImportStatusResponse => {
return {
ImportStatus:
output.ImportStatus !== undefined && output.ImportStatus !== null
? deserializeAws_json1_1CatalogImportStatus(output.ImportStatus, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetClassifierResponse = (output: any, context: __SerdeContext): GetClassifierResponse => {
return {
Classifier:
output.Classifier !== undefined && output.Classifier !== null
? deserializeAws_json1_1Classifier(output.Classifier, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetClassifiersResponse = (output: any, context: __SerdeContext): GetClassifiersResponse => {
return {
Classifiers:
output.Classifiers !== undefined && output.Classifiers !== null
? deserializeAws_json1_1ClassifierList(output.Classifiers, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetColumnStatisticsForPartitionResponse = (
output: any,
context: __SerdeContext
): GetColumnStatisticsForPartitionResponse => {
return {
ColumnStatisticsList:
output.ColumnStatisticsList !== undefined && output.ColumnStatisticsList !== null
? deserializeAws_json1_1ColumnStatisticsList(output.ColumnStatisticsList, context)
: undefined,
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1ColumnErrors(output.Errors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetColumnStatisticsForTableResponse = (
output: any,
context: __SerdeContext
): GetColumnStatisticsForTableResponse => {
return {
ColumnStatisticsList:
output.ColumnStatisticsList !== undefined && output.ColumnStatisticsList !== null
? deserializeAws_json1_1ColumnStatisticsList(output.ColumnStatisticsList, context)
: undefined,
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1ColumnErrors(output.Errors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetConnectionResponse = (output: any, context: __SerdeContext): GetConnectionResponse => {
return {
Connection:
output.Connection !== undefined && output.Connection !== null
? deserializeAws_json1_1Connection(output.Connection, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetConnectionsResponse = (output: any, context: __SerdeContext): GetConnectionsResponse => {
return {
ConnectionList:
output.ConnectionList !== undefined && output.ConnectionList !== null
? deserializeAws_json1_1ConnectionList(output.ConnectionList, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetCrawlerMetricsResponse = (
output: any,
context: __SerdeContext
): GetCrawlerMetricsResponse => {
return {
CrawlerMetricsList:
output.CrawlerMetricsList !== undefined && output.CrawlerMetricsList !== null
? deserializeAws_json1_1CrawlerMetricsList(output.CrawlerMetricsList, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetCrawlerResponse = (output: any, context: __SerdeContext): GetCrawlerResponse => {
return {
Crawler:
output.Crawler !== undefined && output.Crawler !== null
? deserializeAws_json1_1Crawler(output.Crawler, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetCrawlersResponse = (output: any, context: __SerdeContext): GetCrawlersResponse => {
return {
Crawlers:
output.Crawlers !== undefined && output.Crawlers !== null
? deserializeAws_json1_1CrawlerList(output.Crawlers, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetDatabaseResponse = (output: any, context: __SerdeContext): GetDatabaseResponse => {
return {
Database:
output.Database !== undefined && output.Database !== null
? deserializeAws_json1_1Database(output.Database, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetDatabasesResponse = (output: any, context: __SerdeContext): GetDatabasesResponse => {
return {
DatabaseList:
output.DatabaseList !== undefined && output.DatabaseList !== null
? deserializeAws_json1_1DatabaseList(output.DatabaseList, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetDataCatalogEncryptionSettingsResponse = (
output: any,
context: __SerdeContext
): GetDataCatalogEncryptionSettingsResponse => {
return {
DataCatalogEncryptionSettings:
output.DataCatalogEncryptionSettings !== undefined && output.DataCatalogEncryptionSettings !== null
? deserializeAws_json1_1DataCatalogEncryptionSettings(output.DataCatalogEncryptionSettings, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetDataflowGraphResponse = (
output: any,
context: __SerdeContext
): GetDataflowGraphResponse => {
return {
DagEdges:
output.DagEdges !== undefined && output.DagEdges !== null
? deserializeAws_json1_1DagEdges(output.DagEdges, context)
: undefined,
DagNodes:
output.DagNodes !== undefined && output.DagNodes !== null
? deserializeAws_json1_1DagNodes(output.DagNodes, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetDevEndpointResponse = (output: any, context: __SerdeContext): GetDevEndpointResponse => {
return {
DevEndpoint:
output.DevEndpoint !== undefined && output.DevEndpoint !== null
? deserializeAws_json1_1DevEndpoint(output.DevEndpoint, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetDevEndpointsResponse = (
output: any,
context: __SerdeContext
): GetDevEndpointsResponse => {
return {
DevEndpoints:
output.DevEndpoints !== undefined && output.DevEndpoints !== null
? deserializeAws_json1_1DevEndpointList(output.DevEndpoints, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetJobBookmarkResponse = (output: any, context: __SerdeContext): GetJobBookmarkResponse => {
return {
JobBookmarkEntry:
output.JobBookmarkEntry !== undefined && output.JobBookmarkEntry !== null
? deserializeAws_json1_1JobBookmarkEntry(output.JobBookmarkEntry, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetJobResponse = (output: any, context: __SerdeContext): GetJobResponse => {
return {
Job: output.Job !== undefined && output.Job !== null ? deserializeAws_json1_1Job(output.Job, context) : undefined,
} as any;
};
const deserializeAws_json1_1GetJobRunResponse = (output: any, context: __SerdeContext): GetJobRunResponse => {
return {
JobRun:
output.JobRun !== undefined && output.JobRun !== null
? deserializeAws_json1_1JobRun(output.JobRun, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetJobRunsResponse = (output: any, context: __SerdeContext): GetJobRunsResponse => {
return {
JobRuns:
output.JobRuns !== undefined && output.JobRuns !== null
? deserializeAws_json1_1JobRunList(output.JobRuns, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetJobsResponse = (output: any, context: __SerdeContext): GetJobsResponse => {
return {
Jobs:
output.Jobs !== undefined && output.Jobs !== null
? deserializeAws_json1_1JobList(output.Jobs, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetMappingResponse = (output: any, context: __SerdeContext): GetMappingResponse => {
return {
Mapping:
output.Mapping !== undefined && output.Mapping !== null
? deserializeAws_json1_1MappingList(output.Mapping, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetMLTaskRunResponse = (output: any, context: __SerdeContext): GetMLTaskRunResponse => {
return {
CompletedOn:
output.CompletedOn !== undefined && output.CompletedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CompletedOn)))
: undefined,
ErrorString: __expectString(output.ErrorString),
ExecutionTime: __expectInt32(output.ExecutionTime),
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
LogGroupName: __expectString(output.LogGroupName),
Properties:
output.Properties !== undefined && output.Properties !== null
? deserializeAws_json1_1TaskRunProperties(output.Properties, context)
: undefined,
StartedOn:
output.StartedOn !== undefined && output.StartedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.StartedOn)))
: undefined,
Status: __expectString(output.Status),
TaskRunId: __expectString(output.TaskRunId),
TransformId: __expectString(output.TransformId),
} as any;
};
const deserializeAws_json1_1GetMLTaskRunsResponse = (output: any, context: __SerdeContext): GetMLTaskRunsResponse => {
return {
NextToken: __expectString(output.NextToken),
TaskRuns:
output.TaskRuns !== undefined && output.TaskRuns !== null
? deserializeAws_json1_1TaskRunList(output.TaskRuns, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetMLTransformResponse = (output: any, context: __SerdeContext): GetMLTransformResponse => {
return {
CreatedOn:
output.CreatedOn !== undefined && output.CreatedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedOn)))
: undefined,
Description: __expectString(output.Description),
EvaluationMetrics:
output.EvaluationMetrics !== undefined && output.EvaluationMetrics !== null
? deserializeAws_json1_1EvaluationMetrics(output.EvaluationMetrics, context)
: undefined,
GlueVersion: __expectString(output.GlueVersion),
InputRecordTables:
output.InputRecordTables !== undefined && output.InputRecordTables !== null
? deserializeAws_json1_1GlueTables(output.InputRecordTables, context)
: undefined,
LabelCount: __expectInt32(output.LabelCount),
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
MaxCapacity: __limitedParseDouble(output.MaxCapacity),
MaxRetries: __expectInt32(output.MaxRetries),
Name: __expectString(output.Name),
NumberOfWorkers: __expectInt32(output.NumberOfWorkers),
Parameters:
output.Parameters !== undefined && output.Parameters !== null
? deserializeAws_json1_1TransformParameters(output.Parameters, context)
: undefined,
Role: __expectString(output.Role),
Schema:
output.Schema !== undefined && output.Schema !== null
? deserializeAws_json1_1TransformSchema(output.Schema, context)
: undefined,
Status: __expectString(output.Status),
Timeout: __expectInt32(output.Timeout),
TransformEncryption:
output.TransformEncryption !== undefined && output.TransformEncryption !== null
? deserializeAws_json1_1TransformEncryption(output.TransformEncryption, context)
: undefined,
TransformId: __expectString(output.TransformId),
WorkerType: __expectString(output.WorkerType),
} as any;
};
const deserializeAws_json1_1GetMLTransformsResponse = (
output: any,
context: __SerdeContext
): GetMLTransformsResponse => {
return {
NextToken: __expectString(output.NextToken),
Transforms:
output.Transforms !== undefined && output.Transforms !== null
? deserializeAws_json1_1TransformList(output.Transforms, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetPartitionIndexesResponse = (
output: any,
context: __SerdeContext
): GetPartitionIndexesResponse => {
return {
NextToken: __expectString(output.NextToken),
PartitionIndexDescriptorList:
output.PartitionIndexDescriptorList !== undefined && output.PartitionIndexDescriptorList !== null
? deserializeAws_json1_1PartitionIndexDescriptorList(output.PartitionIndexDescriptorList, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetPartitionResponse = (output: any, context: __SerdeContext): GetPartitionResponse => {
return {
Partition:
output.Partition !== undefined && output.Partition !== null
? deserializeAws_json1_1Partition(output.Partition, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetPartitionsResponse = (output: any, context: __SerdeContext): GetPartitionsResponse => {
return {
NextToken: __expectString(output.NextToken),
Partitions:
output.Partitions !== undefined && output.Partitions !== null
? deserializeAws_json1_1PartitionList(output.Partitions, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetPlanResponse = (output: any, context: __SerdeContext): GetPlanResponse => {
return {
PythonScript: __expectString(output.PythonScript),
ScalaCode: __expectString(output.ScalaCode),
} as any;
};
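// Unlike the epoch timestamps above, the schema-registry responses model
// their created/updated times as plain strings, hence `__expectString` rather
// than `__parseEpochTimestamp` for `CreatedTime` and `UpdatedTime`.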
const deserializeAws_json1_1GetRegistryResponse = (output: any, context: __SerdeContext): GetRegistryResponse => {
return {
CreatedTime: __expectString(output.CreatedTime),
Description: __expectString(output.Description),
RegistryArn: __expectString(output.RegistryArn),
RegistryName: __expectString(output.RegistryName),
Status: __expectString(output.Status),
UpdatedTime: __expectString(output.UpdatedTime),
} as any;
};
const deserializeAws_json1_1GetResourcePoliciesResponse = (
output: any,
context: __SerdeContext
): GetResourcePoliciesResponse => {
return {
GetResourcePoliciesResponseList:
output.GetResourcePoliciesResponseList !== undefined && output.GetResourcePoliciesResponseList !== null
? deserializeAws_json1_1GetResourcePoliciesResponseList(output.GetResourcePoliciesResponseList, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1GetResourcePoliciesResponseList = (output: any, context: __SerdeContext): GluePolicy[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1GluePolicy(entry, context);
});
};
const deserializeAws_json1_1GetResourcePolicyResponse = (
output: any,
context: __SerdeContext
): GetResourcePolicyResponse => {
return {
CreateTime:
output.CreateTime !== undefined && output.CreateTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreateTime)))
: undefined,
PolicyHash: __expectString(output.PolicyHash),
PolicyInJson: __expectString(output.PolicyInJson),
UpdateTime:
output.UpdateTime !== undefined && output.UpdateTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.UpdateTime)))
: undefined,
} as any;
};
const deserializeAws_json1_1GetSchemaByDefinitionResponse = (
output: any,
context: __SerdeContext
): GetSchemaByDefinitionResponse => {
return {
CreatedTime: __expectString(output.CreatedTime),
DataFormat: __expectString(output.DataFormat),
SchemaArn: __expectString(output.SchemaArn),
SchemaVersionId: __expectString(output.SchemaVersionId),
Status: __expectString(output.Status),
} as any;
};
const deserializeAws_json1_1GetSchemaResponse = (output: any, context: __SerdeContext): GetSchemaResponse => {
return {
Compatibility: __expectString(output.Compatibility),
CreatedTime: __expectString(output.CreatedTime),
DataFormat: __expectString(output.DataFormat),
Description: __expectString(output.Description),
LatestSchemaVersion: __expectLong(output.LatestSchemaVersion),
NextSchemaVersion: __expectLong(output.NextSchemaVersion),
RegistryArn: __expectString(output.RegistryArn),
RegistryName: __expectString(output.RegistryName),
SchemaArn: __expectString(output.SchemaArn),
SchemaCheckpoint: __expectLong(output.SchemaCheckpoint),
SchemaName: __expectString(output.SchemaName),
SchemaStatus: __expectString(output.SchemaStatus),
UpdatedTime: __expectString(output.UpdatedTime),
} as any;
};
const deserializeAws_json1_1GetSchemaVersionResponse = (
output: any,
context: __SerdeContext
): GetSchemaVersionResponse => {
return {
CreatedTime: __expectString(output.CreatedTime),
DataFormat: __expectString(output.DataFormat),
SchemaArn: __expectString(output.SchemaArn),
SchemaDefinition: __expectString(output.SchemaDefinition),
SchemaVersionId: __expectString(output.SchemaVersionId),
Status: __expectString(output.Status),
VersionNumber: __expectLong(output.VersionNumber),
} as any;
};
const deserializeAws_json1_1GetSchemaVersionsDiffResponse = (
output: any,
context: __SerdeContext
): GetSchemaVersionsDiffResponse => {
return {
Diff: __expectString(output.Diff),
} as any;
};
const deserializeAws_json1_1GetSecurityConfigurationResponse = (
output: any,
context: __SerdeContext
): GetSecurityConfigurationResponse => {
return {
SecurityConfiguration:
output.SecurityConfiguration !== undefined && output.SecurityConfiguration !== null
? deserializeAws_json1_1SecurityConfiguration(output.SecurityConfiguration, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetSecurityConfigurationsResponse = (
output: any,
context: __SerdeContext
): GetSecurityConfigurationsResponse => {
return {
NextToken: __expectString(output.NextToken),
SecurityConfigurations:
output.SecurityConfigurations !== undefined && output.SecurityConfigurations !== null
? deserializeAws_json1_1SecurityConfigurationList(output.SecurityConfigurations, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetTableResponse = (output: any, context: __SerdeContext): GetTableResponse => {
return {
Table:
output.Table !== undefined && output.Table !== null
? deserializeAws_json1_1Table(output.Table, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetTablesResponse = (output: any, context: __SerdeContext): GetTablesResponse => {
return {
NextToken: __expectString(output.NextToken),
TableList:
output.TableList !== undefined && output.TableList !== null
? deserializeAws_json1_1TableList(output.TableList, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetTableVersionResponse = (
output: any,
context: __SerdeContext
): GetTableVersionResponse => {
return {
TableVersion:
output.TableVersion !== undefined && output.TableVersion !== null
? deserializeAws_json1_1TableVersion(output.TableVersion, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetTableVersionsList = (output: any, context: __SerdeContext): TableVersion[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1TableVersion(entry, context);
});
};
const deserializeAws_json1_1GetTableVersionsResponse = (
output: any,
context: __SerdeContext
): GetTableVersionsResponse => {
return {
NextToken: __expectString(output.NextToken),
TableVersions:
output.TableVersions !== undefined && output.TableVersions !== null
? deserializeAws_json1_1GetTableVersionsList(output.TableVersions, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetTagsResponse = (output: any, context: __SerdeContext): GetTagsResponse => {
return {
Tags:
output.Tags !== undefined && output.Tags !== null
? deserializeAws_json1_1TagsMap(output.Tags, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetTriggerResponse = (output: any, context: __SerdeContext): GetTriggerResponse => {
return {
Trigger:
output.Trigger !== undefined && output.Trigger !== null
? deserializeAws_json1_1Trigger(output.Trigger, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetTriggersResponse = (output: any, context: __SerdeContext): GetTriggersResponse => {
return {
NextToken: __expectString(output.NextToken),
Triggers:
output.Triggers !== undefined && output.Triggers !== null
? deserializeAws_json1_1TriggerList(output.Triggers, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetUserDefinedFunctionResponse = (
output: any,
context: __SerdeContext
): GetUserDefinedFunctionResponse => {
return {
UserDefinedFunction:
output.UserDefinedFunction !== undefined && output.UserDefinedFunction !== null
? deserializeAws_json1_1UserDefinedFunction(output.UserDefinedFunction, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetUserDefinedFunctionsResponse = (
output: any,
context: __SerdeContext
): GetUserDefinedFunctionsResponse => {
return {
NextToken: __expectString(output.NextToken),
UserDefinedFunctions:
output.UserDefinedFunctions !== undefined && output.UserDefinedFunctions !== null
? deserializeAws_json1_1UserDefinedFunctionList(output.UserDefinedFunctions, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetWorkflowResponse = (output: any, context: __SerdeContext): GetWorkflowResponse => {
return {
Workflow:
output.Workflow !== undefined && output.Workflow !== null
? deserializeAws_json1_1Workflow(output.Workflow, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetWorkflowRunPropertiesResponse = (
output: any,
context: __SerdeContext
): GetWorkflowRunPropertiesResponse => {
return {
RunProperties:
output.RunProperties !== undefined && output.RunProperties !== null
? deserializeAws_json1_1WorkflowRunProperties(output.RunProperties, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetWorkflowRunResponse = (output: any, context: __SerdeContext): GetWorkflowRunResponse => {
return {
Run:
output.Run !== undefined && output.Run !== null
? deserializeAws_json1_1WorkflowRun(output.Run, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GetWorkflowRunsResponse = (
output: any,
context: __SerdeContext
): GetWorkflowRunsResponse => {
return {
NextToken: __expectString(output.NextToken),
Runs:
output.Runs !== undefined && output.Runs !== null
? deserializeAws_json1_1WorkflowRuns(output.Runs, context)
: undefined,
} as any;
};
const deserializeAws_json1_1GlueEncryptionException = (
output: any,
context: __SerdeContext
): GlueEncryptionException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1GluePolicy = (output: any, context: __SerdeContext): GluePolicy => {
return {
CreateTime:
output.CreateTime !== undefined && output.CreateTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreateTime)))
: undefined,
PolicyHash: __expectString(output.PolicyHash),
PolicyInJson: __expectString(output.PolicyInJson),
UpdateTime:
output.UpdateTime !== undefined && output.UpdateTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.UpdateTime)))
: undefined,
} as any;
};
const deserializeAws_json1_1GlueTable = (output: any, context: __SerdeContext): GlueTable => {
return {
CatalogId: __expectString(output.CatalogId),
ConnectionName: __expectString(output.ConnectionName),
DatabaseName: __expectString(output.DatabaseName),
TableName: __expectString(output.TableName),
} as any;
};
const deserializeAws_json1_1GlueTables = (output: any, context: __SerdeContext): GlueTable[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1GlueTable(entry, context);
});
};
const deserializeAws_json1_1GrokClassifier = (output: any, context: __SerdeContext): GrokClassifier => {
return {
Classification: __expectString(output.Classification),
CreationTime:
output.CreationTime !== undefined && output.CreationTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreationTime)))
: undefined,
CustomPatterns: __expectString(output.CustomPatterns),
GrokPattern: __expectString(output.GrokPattern),
LastUpdated:
output.LastUpdated !== undefined && output.LastUpdated !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastUpdated)))
: undefined,
Name: __expectString(output.Name),
Version: __expectLong(output.Version),
} as any;
};
const deserializeAws_json1_1IdempotentParameterMismatchException = (
output: any,
context: __SerdeContext
): IdempotentParameterMismatchException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1IllegalBlueprintStateException = (
output: any,
context: __SerdeContext
): IllegalBlueprintStateException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1IllegalWorkflowStateException = (
output: any,
context: __SerdeContext
): IllegalWorkflowStateException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1ImportCatalogToGlueResponse = (
output: any,
context: __SerdeContext
): ImportCatalogToGlueResponse => {
return {} as any;
};
const deserializeAws_json1_1ImportLabelsTaskRunProperties = (
output: any,
context: __SerdeContext
): ImportLabelsTaskRunProperties => {
return {
InputS3Path: __expectString(output.InputS3Path),
Replace: __expectBoolean(output.Replace),
} as any;
};
const deserializeAws_json1_1InternalServiceException = (
output: any,
context: __SerdeContext
): InternalServiceException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1InvalidInputException = (output: any, context: __SerdeContext): InvalidInputException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1JdbcTarget = (output: any, context: __SerdeContext): JdbcTarget => {
return {
ConnectionName: __expectString(output.ConnectionName),
Exclusions:
output.Exclusions !== undefined && output.Exclusions !== null
? deserializeAws_json1_1PathList(output.Exclusions, context)
: undefined,
Path: __expectString(output.Path),
} as any;
};
const deserializeAws_json1_1JdbcTargetList = (output: any, context: __SerdeContext): JdbcTarget[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1JdbcTarget(entry, context);
});
};
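// Job is one of the larger shapes here. Note MaxCapacity: double members go
// through __limitedParseDouble, which accepts ordinary numbers as well as the
// serialized strings "NaN", "Infinity", and "-Infinity" that the service may
// emit for non-finite values.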
const deserializeAws_json1_1Job = (output: any, context: __SerdeContext): Job => {
return {
AllocatedCapacity: __expectInt32(output.AllocatedCapacity),
Command:
output.Command !== undefined && output.Command !== null
? deserializeAws_json1_1JobCommand(output.Command, context)
: undefined,
Connections:
output.Connections !== undefined && output.Connections !== null
? deserializeAws_json1_1ConnectionsList(output.Connections, context)
: undefined,
CreatedOn:
output.CreatedOn !== undefined && output.CreatedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedOn)))
: undefined,
DefaultArguments:
output.DefaultArguments !== undefined && output.DefaultArguments !== null
? deserializeAws_json1_1GenericMap(output.DefaultArguments, context)
: undefined,
Description: __expectString(output.Description),
ExecutionProperty:
output.ExecutionProperty !== undefined && output.ExecutionProperty !== null
? deserializeAws_json1_1ExecutionProperty(output.ExecutionProperty, context)
: undefined,
GlueVersion: __expectString(output.GlueVersion),
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
LogUri: __expectString(output.LogUri),
MaxCapacity: __limitedParseDouble(output.MaxCapacity),
MaxRetries: __expectInt32(output.MaxRetries),
Name: __expectString(output.Name),
NonOverridableArguments:
output.NonOverridableArguments !== undefined && output.NonOverridableArguments !== null
? deserializeAws_json1_1GenericMap(output.NonOverridableArguments, context)
: undefined,
NotificationProperty:
output.NotificationProperty !== undefined && output.NotificationProperty !== null
? deserializeAws_json1_1NotificationProperty(output.NotificationProperty, context)
: undefined,
NumberOfWorkers: __expectInt32(output.NumberOfWorkers),
Role: __expectString(output.Role),
SecurityConfiguration: __expectString(output.SecurityConfiguration),
Timeout: __expectInt32(output.Timeout),
WorkerType: __expectString(output.WorkerType),
} as any;
};
const deserializeAws_json1_1JobBookmarkEntry = (output: any, context: __SerdeContext): JobBookmarkEntry => {
return {
Attempt: __expectInt32(output.Attempt),
JobBookmark: __expectString(output.JobBookmark),
JobName: __expectString(output.JobName),
PreviousRunId: __expectString(output.PreviousRunId),
Run: __expectInt32(output.Run),
RunId: __expectString(output.RunId),
Version: __expectInt32(output.Version),
} as any;
};
const deserializeAws_json1_1JobBookmarksEncryption = (output: any, context: __SerdeContext): JobBookmarksEncryption => {
return {
JobBookmarksEncryptionMode: __expectString(output.JobBookmarksEncryptionMode),
KmsKeyArn: __expectString(output.KmsKeyArn),
} as any;
};
const deserializeAws_json1_1JobCommand = (output: any, context: __SerdeContext): JobCommand => {
return {
Name: __expectString(output.Name),
PythonVersion: __expectString(output.PythonVersion),
ScriptLocation: __expectString(output.ScriptLocation),
} as any;
};
const deserializeAws_json1_1JobList = (output: any, context: __SerdeContext): Job[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Job(entry, context);
});
};
const deserializeAws_json1_1JobNameList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1JobNodeDetails = (output: any, context: __SerdeContext): JobNodeDetails => {
return {
JobRuns:
output.JobRuns !== undefined && output.JobRuns !== null
? deserializeAws_json1_1JobRunList(output.JobRuns, context)
: undefined,
} as any;
};
const deserializeAws_json1_1JobRun = (output: any, context: __SerdeContext): JobRun => {
return {
AllocatedCapacity: __expectInt32(output.AllocatedCapacity),
Arguments:
output.Arguments !== undefined && output.Arguments !== null
? deserializeAws_json1_1GenericMap(output.Arguments, context)
: undefined,
Attempt: __expectInt32(output.Attempt),
CompletedOn:
output.CompletedOn !== undefined && output.CompletedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CompletedOn)))
: undefined,
ErrorMessage: __expectString(output.ErrorMessage),
ExecutionTime: __expectInt32(output.ExecutionTime),
GlueVersion: __expectString(output.GlueVersion),
Id: __expectString(output.Id),
JobName: __expectString(output.JobName),
JobRunState: __expectString(output.JobRunState),
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
LogGroupName: __expectString(output.LogGroupName),
MaxCapacity: __limitedParseDouble(output.MaxCapacity),
NotificationProperty:
output.NotificationProperty !== undefined && output.NotificationProperty !== null
? deserializeAws_json1_1NotificationProperty(output.NotificationProperty, context)
: undefined,
NumberOfWorkers: __expectInt32(output.NumberOfWorkers),
PredecessorRuns:
output.PredecessorRuns !== undefined && output.PredecessorRuns !== null
? deserializeAws_json1_1PredecessorList(output.PredecessorRuns, context)
: undefined,
PreviousRunId: __expectString(output.PreviousRunId),
SecurityConfiguration: __expectString(output.SecurityConfiguration),
StartedOn:
output.StartedOn !== undefined && output.StartedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.StartedOn)))
: undefined,
Timeout: __expectInt32(output.Timeout),
TriggerName: __expectString(output.TriggerName),
WorkerType: __expectString(output.WorkerType),
} as any;
};
const deserializeAws_json1_1JobRunList = (output: any, context: __SerdeContext): JobRun[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1JobRun(entry, context);
});
};
const deserializeAws_json1_1JsonClassifier = (output: any, context: __SerdeContext): JsonClassifier => {
return {
CreationTime:
output.CreationTime !== undefined && output.CreationTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreationTime)))
: undefined,
JsonPath: __expectString(output.JsonPath),
LastUpdated:
output.LastUpdated !== undefined && output.LastUpdated !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastUpdated)))
: undefined,
Name: __expectString(output.Name),
Version: __expectLong(output.Version),
} as any;
};
const deserializeAws_json1_1KeySchemaElement = (output: any, context: __SerdeContext): KeySchemaElement => {
return {
Name: __expectString(output.Name),
Type: __expectString(output.Type),
} as any;
};
const deserializeAws_json1_1KeySchemaElementList = (output: any, context: __SerdeContext): KeySchemaElement[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1KeySchemaElement(entry, context);
});
};
const deserializeAws_json1_1LabelingSetGenerationTaskRunProperties = (
output: any,
context: __SerdeContext
): LabelingSetGenerationTaskRunProperties => {
return {
OutputS3Path: __expectString(output.OutputS3Path),
} as any;
};
const deserializeAws_json1_1LastActiveDefinition = (output: any, context: __SerdeContext): LastActiveDefinition => {
return {
BlueprintLocation: __expectString(output.BlueprintLocation),
BlueprintServiceLocation: __expectString(output.BlueprintServiceLocation),
Description: __expectString(output.Description),
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
ParameterSpec: __expectString(output.ParameterSpec),
} as any;
};
const deserializeAws_json1_1LastCrawlInfo = (output: any, context: __SerdeContext): LastCrawlInfo => {
return {
ErrorMessage: __expectString(output.ErrorMessage),
LogGroup: __expectString(output.LogGroup),
LogStream: __expectString(output.LogStream),
MessagePrefix: __expectString(output.MessagePrefix),
StartTime:
output.StartTime !== undefined && output.StartTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.StartTime)))
: undefined,
Status: __expectString(output.Status),
} as any;
};
const deserializeAws_json1_1LineageConfiguration = (output: any, context: __SerdeContext): LineageConfiguration => {
return {
CrawlerLineageSettings: __expectString(output.CrawlerLineageSettings),
} as any;
};
const deserializeAws_json1_1ListBlueprintsResponse = (output: any, context: __SerdeContext): ListBlueprintsResponse => {
return {
Blueprints:
output.Blueprints !== undefined && output.Blueprints !== null
? deserializeAws_json1_1BlueprintNames(output.Blueprints, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1ListCrawlersResponse = (output: any, context: __SerdeContext): ListCrawlersResponse => {
return {
CrawlerNames:
output.CrawlerNames !== undefined && output.CrawlerNames !== null
? deserializeAws_json1_1CrawlerNameList(output.CrawlerNames, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1ListDevEndpointsResponse = (
output: any,
context: __SerdeContext
): ListDevEndpointsResponse => {
return {
DevEndpointNames:
output.DevEndpointNames !== undefined && output.DevEndpointNames !== null
? deserializeAws_json1_1DevEndpointNameList(output.DevEndpointNames, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1ListJobsResponse = (output: any, context: __SerdeContext): ListJobsResponse => {
return {
JobNames:
output.JobNames !== undefined && output.JobNames !== null
? deserializeAws_json1_1JobNameList(output.JobNames, context)
: undefined,
NextToken: __expectString(output.NextToken),
} as any;
};
const deserializeAws_json1_1ListMLTransformsResponse = (
output: any,
context: __SerdeContext
): ListMLTransformsResponse => {
return {
NextToken: __expectString(output.NextToken),
TransformIds:
output.TransformIds !== undefined && output.TransformIds !== null
? deserializeAws_json1_1TransformIdList(output.TransformIds, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ListRegistriesResponse = (output: any, context: __SerdeContext): ListRegistriesResponse => {
return {
NextToken: __expectString(output.NextToken),
Registries:
output.Registries !== undefined && output.Registries !== null
? deserializeAws_json1_1RegistryListDefinition(output.Registries, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ListSchemasResponse = (output: any, context: __SerdeContext): ListSchemasResponse => {
return {
NextToken: __expectString(output.NextToken),
Schemas:
output.Schemas !== undefined && output.Schemas !== null
? deserializeAws_json1_1SchemaListDefinition(output.Schemas, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ListSchemaVersionsResponse = (
output: any,
context: __SerdeContext
): ListSchemaVersionsResponse => {
return {
NextToken: __expectString(output.NextToken),
Schemas:
output.Schemas !== undefined && output.Schemas !== null
? deserializeAws_json1_1SchemaVersionList(output.Schemas, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ListTriggersResponse = (output: any, context: __SerdeContext): ListTriggersResponse => {
return {
NextToken: __expectString(output.NextToken),
TriggerNames:
output.TriggerNames !== undefined && output.TriggerNames !== null
? deserializeAws_json1_1TriggerNameList(output.TriggerNames, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ListWorkflowsResponse = (output: any, context: __SerdeContext): ListWorkflowsResponse => {
return {
NextToken: __expectString(output.NextToken),
Workflows:
output.Workflows !== undefined && output.Workflows !== null
? deserializeAws_json1_1WorkflowNames(output.Workflows, context)
: undefined,
} as any;
};
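// Map deserializers: string-to-string maps are rebuilt with
// Object.entries(...).reduce(...), skipping entries whose value is null so
// that explicit nulls on the wire never surface as map members.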
const deserializeAws_json1_1LocationMap = (output: any, context: __SerdeContext): { [key: string]: string } => {
return Object.entries(output).reduce((acc: { [key: string]: string }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: __expectString(value) as any,
};
}, {});
};
const deserializeAws_json1_1LongColumnStatisticsData = (
output: any,
context: __SerdeContext
): LongColumnStatisticsData => {
return {
MaximumValue: __expectLong(output.MaximumValue),
MinimumValue: __expectLong(output.MinimumValue),
NumberOfDistinctValues: __expectLong(output.NumberOfDistinctValues),
NumberOfNulls: __expectLong(output.NumberOfNulls),
} as any;
};
const deserializeAws_json1_1MappingEntry = (output: any, context: __SerdeContext): MappingEntry => {
return {
SourcePath: __expectString(output.SourcePath),
SourceTable: __expectString(output.SourceTable),
SourceType: __expectString(output.SourceType),
TargetPath: __expectString(output.TargetPath),
TargetTable: __expectString(output.TargetTable),
TargetType: __expectString(output.TargetType),
} as any;
};
const deserializeAws_json1_1MappingList = (output: any, context: __SerdeContext): MappingEntry[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1MappingEntry(entry, context);
});
};
const deserializeAws_json1_1MapValue = (output: any, context: __SerdeContext): { [key: string]: string } => {
return Object.entries(output).reduce((acc: { [key: string]: string }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: __expectString(value) as any,
};
}, {});
};
const deserializeAws_json1_1MatchCriteria = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1MetadataInfo = (output: any, context: __SerdeContext): MetadataInfo => {
return {
CreatedTime: __expectString(output.CreatedTime),
MetadataValue: __expectString(output.MetadataValue),
OtherMetadataValueList:
output.OtherMetadataValueList !== undefined && output.OtherMetadataValueList !== null
? deserializeAws_json1_1OtherMetadataValueList(output.OtherMetadataValueList, context)
: undefined,
} as any;
};
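// Maps with structured values follow the same reduce pattern as the string
// maps, delegating each value to the matching structure deserializer (here
// MetadataInfo, including its nested OtherMetadataValueList).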
const deserializeAws_json1_1MetadataInfoMap = (
output: any,
context: __SerdeContext
): { [key: string]: MetadataInfo } => {
return Object.entries(output).reduce((acc: { [key: string]: MetadataInfo }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: deserializeAws_json1_1MetadataInfo(value, context),
};
}, {});
};
const deserializeAws_json1_1MLTransform = (output: any, context: __SerdeContext): MLTransform => {
return {
CreatedOn:
output.CreatedOn !== undefined && output.CreatedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedOn)))
: undefined,
Description: __expectString(output.Description),
EvaluationMetrics:
output.EvaluationMetrics !== undefined && output.EvaluationMetrics !== null
? deserializeAws_json1_1EvaluationMetrics(output.EvaluationMetrics, context)
: undefined,
GlueVersion: __expectString(output.GlueVersion),
InputRecordTables:
output.InputRecordTables !== undefined && output.InputRecordTables !== null
? deserializeAws_json1_1GlueTables(output.InputRecordTables, context)
: undefined,
LabelCount: __expectInt32(output.LabelCount),
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
MaxCapacity: __limitedParseDouble(output.MaxCapacity),
MaxRetries: __expectInt32(output.MaxRetries),
Name: __expectString(output.Name),
NumberOfWorkers: __expectInt32(output.NumberOfWorkers),
Parameters:
output.Parameters !== undefined && output.Parameters !== null
? deserializeAws_json1_1TransformParameters(output.Parameters, context)
: undefined,
Role: __expectString(output.Role),
Schema:
output.Schema !== undefined && output.Schema !== null
? deserializeAws_json1_1TransformSchema(output.Schema, context)
: undefined,
Status: __expectString(output.Status),
Timeout: __expectInt32(output.Timeout),
TransformEncryption:
output.TransformEncryption !== undefined && output.TransformEncryption !== null
? deserializeAws_json1_1TransformEncryption(output.TransformEncryption, context)
: undefined,
TransformId: __expectString(output.TransformId),
WorkerType: __expectString(output.WorkerType),
} as any;
};
const deserializeAws_json1_1MLTransformNotReadyException = (
output: any,
context: __SerdeContext
): MLTransformNotReadyException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1MLUserDataEncryption = (output: any, context: __SerdeContext): MLUserDataEncryption => {
return {
KmsKeyId: __expectString(output.KmsKeyId),
MlUserDataEncryptionMode: __expectString(output.MlUserDataEncryptionMode),
} as any;
};
const deserializeAws_json1_1MongoDBTarget = (output: any, context: __SerdeContext): MongoDBTarget => {
return {
ConnectionName: __expectString(output.ConnectionName),
Path: __expectString(output.Path),
ScanAll: __expectBoolean(output.ScanAll),
} as any;
};
const deserializeAws_json1_1MongoDBTargetList = (output: any, context: __SerdeContext): MongoDBTarget[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1MongoDBTarget(entry, context);
});
};
const deserializeAws_json1_1NameStringList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
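// A workflow Node uses Type as a discriminator (CRAWLER, JOB, or TRIGGER in
// the Glue model) alongside CrawlerDetails, JobDetails, and TriggerDetails;
// only the detail block present on the wire is populated, so consumers should
// switch on Type before reading the matching details.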
const deserializeAws_json1_1Node = (output: any, context: __SerdeContext): Node => {
return {
CrawlerDetails:
output.CrawlerDetails !== undefined && output.CrawlerDetails !== null
? deserializeAws_json1_1CrawlerNodeDetails(output.CrawlerDetails, context)
: undefined,
JobDetails:
output.JobDetails !== undefined && output.JobDetails !== null
? deserializeAws_json1_1JobNodeDetails(output.JobDetails, context)
: undefined,
Name: __expectString(output.Name),
TriggerDetails:
output.TriggerDetails !== undefined && output.TriggerDetails !== null
? deserializeAws_json1_1TriggerNodeDetails(output.TriggerDetails, context)
: undefined,
Type: __expectString(output.Type),
UniqueId: __expectString(output.UniqueId),
} as any;
};
const deserializeAws_json1_1NodeIdList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1NodeList = (output: any, context: __SerdeContext): Node[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Node(entry, context);
});
};
const deserializeAws_json1_1NoScheduleException = (output: any, context: __SerdeContext): NoScheduleException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1NotificationProperty = (output: any, context: __SerdeContext): NotificationProperty => {
return {
NotifyDelayAfter: __expectInt32(output.NotifyDelayAfter),
} as any;
};
const deserializeAws_json1_1OperationTimeoutException = (
output: any,
context: __SerdeContext
): OperationTimeoutException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1OrchestrationStringList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1Order = (output: any, context: __SerdeContext): Order => {
return {
Column: __expectString(output.Column),
SortOrder: __expectInt32(output.SortOrder),
} as any;
};
const deserializeAws_json1_1OrderList = (output: any, context: __SerdeContext): Order[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Order(entry, context);
});
};
const deserializeAws_json1_1OtherMetadataValueList = (
output: any,
context: __SerdeContext
): OtherMetadataValueListItem[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1OtherMetadataValueListItem(entry, context);
});
};
const deserializeAws_json1_1OtherMetadataValueListItem = (
output: any,
context: __SerdeContext
): OtherMetadataValueListItem => {
return {
CreatedTime: __expectString(output.CreatedTime),
MetadataValue: __expectString(output.MetadataValue),
} as any;
};
const deserializeAws_json1_1ParametersMap = (output: any, context: __SerdeContext): { [key: string]: string } => {
return Object.entries(output).reduce((acc: { [key: string]: string }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: __expectString(value) as any,
};
}, {});
};
const deserializeAws_json1_1Partition = (output: any, context: __SerdeContext): Partition => {
return {
CatalogId: __expectString(output.CatalogId),
CreationTime:
output.CreationTime !== undefined && output.CreationTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreationTime)))
: undefined,
DatabaseName: __expectString(output.DatabaseName),
LastAccessTime:
output.LastAccessTime !== undefined && output.LastAccessTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastAccessTime)))
: undefined,
LastAnalyzedTime:
output.LastAnalyzedTime !== undefined && output.LastAnalyzedTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastAnalyzedTime)))
: undefined,
Parameters:
output.Parameters !== undefined && output.Parameters !== null
? deserializeAws_json1_1ParametersMap(output.Parameters, context)
: undefined,
StorageDescriptor:
output.StorageDescriptor !== undefined && output.StorageDescriptor !== null
? deserializeAws_json1_1StorageDescriptor(output.StorageDescriptor, context)
: undefined,
TableName: __expectString(output.TableName),
Values:
output.Values !== undefined && output.Values !== null
? deserializeAws_json1_1ValueStringList(output.Values, context)
: undefined,
} as any;
};
const deserializeAws_json1_1PartitionError = (output: any, context: __SerdeContext): PartitionError => {
return {
ErrorDetail:
output.ErrorDetail !== undefined && output.ErrorDetail !== null
? deserializeAws_json1_1ErrorDetail(output.ErrorDetail, context)
: undefined,
PartitionValues:
output.PartitionValues !== undefined && output.PartitionValues !== null
? deserializeAws_json1_1ValueStringList(output.PartitionValues, context)
: undefined,
} as any;
};
const deserializeAws_json1_1PartitionErrors = (output: any, context: __SerdeContext): PartitionError[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1PartitionError(entry, context);
});
};
const deserializeAws_json1_1PartitionIndexDescriptor = (
output: any,
context: __SerdeContext
): PartitionIndexDescriptor => {
return {
BackfillErrors:
output.BackfillErrors !== undefined && output.BackfillErrors !== null
? deserializeAws_json1_1BackfillErrors(output.BackfillErrors, context)
: undefined,
IndexName: __expectString(output.IndexName),
IndexStatus: __expectString(output.IndexStatus),
Keys:
output.Keys !== undefined && output.Keys !== null
? deserializeAws_json1_1KeySchemaElementList(output.Keys, context)
: undefined,
} as any;
};
const deserializeAws_json1_1PartitionIndexDescriptorList = (
output: any,
context: __SerdeContext
): PartitionIndexDescriptor[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1PartitionIndexDescriptor(entry, context);
});
};
const deserializeAws_json1_1PartitionList = (output: any, context: __SerdeContext): Partition[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Partition(entry, context);
});
};
const deserializeAws_json1_1PartitionValueList = (output: any, context: __SerdeContext): PartitionValueList => {
return {
Values:
output.Values !== undefined && output.Values !== null
? deserializeAws_json1_1ValueStringList(output.Values, context)
: undefined,
} as any;
};
const deserializeAws_json1_1PathList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
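// Enum-valued lists such as PermissionList are deserialized as plain strings;
// the (Permission | string)[] return type keeps generated clients forward
// compatible with enum values the service adds after this SDK was generated.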
const deserializeAws_json1_1PermissionList = (output: any, context: __SerdeContext): (Permission | string)[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1PhysicalConnectionRequirements = (
output: any,
context: __SerdeContext
): PhysicalConnectionRequirements => {
return {
AvailabilityZone: __expectString(output.AvailabilityZone),
SecurityGroupIdList:
output.SecurityGroupIdList !== undefined && output.SecurityGroupIdList !== null
? deserializeAws_json1_1SecurityGroupIdList(output.SecurityGroupIdList, context)
: undefined,
SubnetId: __expectString(output.SubnetId),
} as any;
};
const deserializeAws_json1_1Predecessor = (output: any, context: __SerdeContext): Predecessor => {
return {
JobName: __expectString(output.JobName),
RunId: __expectString(output.RunId),
} as any;
};
const deserializeAws_json1_1PredecessorList = (output: any, context: __SerdeContext): Predecessor[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Predecessor(entry, context);
});
};
const deserializeAws_json1_1Predicate = (output: any, context: __SerdeContext): Predicate => {
return {
Conditions:
output.Conditions !== undefined && output.Conditions !== null
? deserializeAws_json1_1ConditionList(output.Conditions, context)
: undefined,
Logical: __expectString(output.Logical),
} as any;
};
const deserializeAws_json1_1PrincipalPermissions = (output: any, context: __SerdeContext): PrincipalPermissions => {
return {
Permissions:
output.Permissions !== undefined && output.Permissions !== null
? deserializeAws_json1_1PermissionList(output.Permissions, context)
: undefined,
Principal:
output.Principal !== undefined && output.Principal !== null
? deserializeAws_json1_1DataLakePrincipal(output.Principal, context)
: undefined,
} as any;
};
const deserializeAws_json1_1PrincipalPermissionsList = (
output: any,
context: __SerdeContext
): PrincipalPermissions[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1PrincipalPermissions(entry, context);
});
};
const deserializeAws_json1_1PublicKeysList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1PutDataCatalogEncryptionSettingsResponse = (
output: any,
context: __SerdeContext
): PutDataCatalogEncryptionSettingsResponse => {
return {} as any;
};
const deserializeAws_json1_1PutResourcePolicyResponse = (
output: any,
context: __SerdeContext
): PutResourcePolicyResponse => {
return {
PolicyHash: __expectString(output.PolicyHash),
} as any;
};
const deserializeAws_json1_1PutSchemaVersionMetadataResponse = (
output: any,
context: __SerdeContext
): PutSchemaVersionMetadataResponse => {
return {
LatestVersion: __expectBoolean(output.LatestVersion),
MetadataKey: __expectString(output.MetadataKey),
MetadataValue: __expectString(output.MetadataValue),
RegistryName: __expectString(output.RegistryName),
SchemaArn: __expectString(output.SchemaArn),
SchemaName: __expectString(output.SchemaName),
SchemaVersionId: __expectString(output.SchemaVersionId),
VersionNumber: __expectLong(output.VersionNumber),
} as any;
};
const deserializeAws_json1_1PutWorkflowRunPropertiesResponse = (
output: any,
context: __SerdeContext
): PutWorkflowRunPropertiesResponse => {
return {} as any;
};
const deserializeAws_json1_1QuerySchemaVersionMetadataResponse = (
output: any,
context: __SerdeContext
): QuerySchemaVersionMetadataResponse => {
return {
MetadataInfoMap:
output.MetadataInfoMap !== undefined && output.MetadataInfoMap !== null
? deserializeAws_json1_1MetadataInfoMap(output.MetadataInfoMap, context)
: undefined,
NextToken: __expectString(output.NextToken),
SchemaVersionId: __expectString(output.SchemaVersionId),
} as any;
};
const deserializeAws_json1_1RecrawlPolicy = (output: any, context: __SerdeContext): RecrawlPolicy => {
return {
RecrawlBehavior: __expectString(output.RecrawlBehavior),
} as any;
};
const deserializeAws_json1_1RegisterSchemaVersionResponse = (
output: any,
context: __SerdeContext
): RegisterSchemaVersionResponse => {
return {
SchemaVersionId: __expectString(output.SchemaVersionId),
Status: __expectString(output.Status),
VersionNumber: __expectLong(output.VersionNumber),
} as any;
};
const deserializeAws_json1_1RegistryListDefinition = (output: any, context: __SerdeContext): RegistryListItem[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1RegistryListItem(entry, context);
});
};
const deserializeAws_json1_1RegistryListItem = (output: any, context: __SerdeContext): RegistryListItem => {
return {
CreatedTime: __expectString(output.CreatedTime),
Description: __expectString(output.Description),
RegistryArn: __expectString(output.RegistryArn),
RegistryName: __expectString(output.RegistryName),
Status: __expectString(output.Status),
UpdatedTime: __expectString(output.UpdatedTime),
} as any;
};
const deserializeAws_json1_1RemoveSchemaVersionMetadataResponse = (
output: any,
context: __SerdeContext
): RemoveSchemaVersionMetadataResponse => {
return {
LatestVersion: __expectBoolean(output.LatestVersion),
MetadataKey: __expectString(output.MetadataKey),
MetadataValue: __expectString(output.MetadataValue),
RegistryName: __expectString(output.RegistryName),
SchemaArn: __expectString(output.SchemaArn),
SchemaName: __expectString(output.SchemaName),
SchemaVersionId: __expectString(output.SchemaVersionId),
VersionNumber: __expectLong(output.VersionNumber),
} as any;
};
const deserializeAws_json1_1ResetJobBookmarkResponse = (
output: any,
context: __SerdeContext
): ResetJobBookmarkResponse => {
return {
JobBookmarkEntry:
output.JobBookmarkEntry !== undefined && output.JobBookmarkEntry !== null
? deserializeAws_json1_1JobBookmarkEntry(output.JobBookmarkEntry, context)
: undefined,
} as any;
};
const deserializeAws_json1_1ResourceNumberLimitExceededException = (
output: any,
context: __SerdeContext
): ResourceNumberLimitExceededException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1ResourceUri = (output: any, context: __SerdeContext): ResourceUri => {
return {
ResourceType: __expectString(output.ResourceType),
Uri: __expectString(output.Uri),
} as any;
};
const deserializeAws_json1_1ResourceUriList = (output: any, context: __SerdeContext): ResourceUri[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1ResourceUri(entry, context);
});
};
const deserializeAws_json1_1ResumeWorkflowRunResponse = (
output: any,
context: __SerdeContext
): ResumeWorkflowRunResponse => {
return {
NodeIds:
output.NodeIds !== undefined && output.NodeIds !== null
? deserializeAws_json1_1NodeIdList(output.NodeIds, context)
: undefined,
RunId: __expectString(output.RunId),
} as any;
};
const deserializeAws_json1_1S3Encryption = (output: any, context: __SerdeContext): S3Encryption => {
return {
KmsKeyArn: __expectString(output.KmsKeyArn),
S3EncryptionMode: __expectString(output.S3EncryptionMode),
} as any;
};
const deserializeAws_json1_1S3EncryptionList = (output: any, context: __SerdeContext): S3Encryption[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1S3Encryption(entry, context);
});
};
const deserializeAws_json1_1S3Target = (output: any, context: __SerdeContext): S3Target => {
return {
ConnectionName: __expectString(output.ConnectionName),
Exclusions:
output.Exclusions !== undefined && output.Exclusions !== null
? deserializeAws_json1_1PathList(output.Exclusions, context)
: undefined,
Path: __expectString(output.Path),
SampleSize: __expectInt32(output.SampleSize),
} as any;
};
const deserializeAws_json1_1S3TargetList = (output: any, context: __SerdeContext): S3Target[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1S3Target(entry, context);
});
};
const deserializeAws_json1_1Schedule = (output: any, context: __SerdeContext): Schedule => {
return {
ScheduleExpression: __expectString(output.ScheduleExpression),
State: __expectString(output.State),
} as any;
};
const deserializeAws_json1_1SchedulerNotRunningException = (
output: any,
context: __SerdeContext
): SchedulerNotRunningException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1SchedulerRunningException = (
output: any,
context: __SerdeContext
): SchedulerRunningException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1SchedulerTransitioningException = (
output: any,
context: __SerdeContext
): SchedulerTransitioningException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1SchemaChangePolicy = (output: any, context: __SerdeContext): SchemaChangePolicy => {
return {
DeleteBehavior: __expectString(output.DeleteBehavior),
UpdateBehavior: __expectString(output.UpdateBehavior),
} as any;
};
const deserializeAws_json1_1SchemaColumn = (output: any, context: __SerdeContext): SchemaColumn => {
return {
DataType: __expectString(output.DataType),
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1SchemaId = (output: any, context: __SerdeContext): SchemaId => {
return {
RegistryName: __expectString(output.RegistryName),
SchemaArn: __expectString(output.SchemaArn),
SchemaName: __expectString(output.SchemaName),
} as any;
};
const deserializeAws_json1_1SchemaListDefinition = (output: any, context: __SerdeContext): SchemaListItem[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1SchemaListItem(entry, context);
});
};
const deserializeAws_json1_1SchemaListItem = (output: any, context: __SerdeContext): SchemaListItem => {
return {
CreatedTime: __expectString(output.CreatedTime),
Description: __expectString(output.Description),
RegistryName: __expectString(output.RegistryName),
SchemaArn: __expectString(output.SchemaArn),
SchemaName: __expectString(output.SchemaName),
SchemaStatus: __expectString(output.SchemaStatus),
UpdatedTime: __expectString(output.UpdatedTime),
} as any;
};
const deserializeAws_json1_1SchemaReference = (output: any, context: __SerdeContext): SchemaReference => {
return {
SchemaId:
output.SchemaId !== undefined && output.SchemaId !== null
? deserializeAws_json1_1SchemaId(output.SchemaId, context)
: undefined,
SchemaVersionId: __expectString(output.SchemaVersionId),
SchemaVersionNumber: __expectLong(output.SchemaVersionNumber),
} as any;
};
const deserializeAws_json1_1SchemaVersionErrorItem = (output: any, context: __SerdeContext): SchemaVersionErrorItem => {
return {
ErrorDetails:
output.ErrorDetails !== undefined && output.ErrorDetails !== null
? deserializeAws_json1_1ErrorDetails(output.ErrorDetails, context)
: undefined,
VersionNumber: __expectLong(output.VersionNumber),
} as any;
};
const deserializeAws_json1_1SchemaVersionErrorList = (
output: any,
context: __SerdeContext
): SchemaVersionErrorItem[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1SchemaVersionErrorItem(entry, context);
});
};
const deserializeAws_json1_1SchemaVersionList = (output: any, context: __SerdeContext): SchemaVersionListItem[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1SchemaVersionListItem(entry, context);
});
};
const deserializeAws_json1_1SchemaVersionListItem = (output: any, context: __SerdeContext): SchemaVersionListItem => {
return {
CreatedTime: __expectString(output.CreatedTime),
SchemaArn: __expectString(output.SchemaArn),
SchemaVersionId: __expectString(output.SchemaVersionId),
Status: __expectString(output.Status),
VersionNumber: __expectLong(output.VersionNumber),
} as any;
};
const deserializeAws_json1_1SearchTablesResponse = (output: any, context: __SerdeContext): SearchTablesResponse => {
return {
NextToken: __expectString(output.NextToken),
TableList:
output.TableList !== undefined && output.TableList !== null
? deserializeAws_json1_1TableList(output.TableList, context)
: undefined,
} as any;
};
const deserializeAws_json1_1SecurityConfiguration = (output: any, context: __SerdeContext): SecurityConfiguration => {
return {
CreatedTimeStamp:
output.CreatedTimeStamp !== undefined && output.CreatedTimeStamp !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedTimeStamp)))
: undefined,
EncryptionConfiguration:
output.EncryptionConfiguration !== undefined && output.EncryptionConfiguration !== null
? deserializeAws_json1_1EncryptionConfiguration(output.EncryptionConfiguration, context)
: undefined,
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1SecurityConfigurationList = (
output: any,
context: __SerdeContext
): SecurityConfiguration[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1SecurityConfiguration(entry, context);
});
};
const deserializeAws_json1_1SecurityGroupIdList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1SerDeInfo = (output: any, context: __SerdeContext): SerDeInfo => {
return {
Name: __expectString(output.Name),
Parameters:
output.Parameters !== undefined && output.Parameters !== null
? deserializeAws_json1_1ParametersMap(output.Parameters, context)
: undefined,
SerializationLibrary: __expectString(output.SerializationLibrary),
} as any;
};
const deserializeAws_json1_1SkewedInfo = (output: any, context: __SerdeContext): SkewedInfo => {
return {
SkewedColumnNames:
output.SkewedColumnNames !== undefined && output.SkewedColumnNames !== null
? deserializeAws_json1_1NameStringList(output.SkewedColumnNames, context)
: undefined,
SkewedColumnValueLocationMaps:
output.SkewedColumnValueLocationMaps !== undefined && output.SkewedColumnValueLocationMaps !== null
? deserializeAws_json1_1LocationMap(output.SkewedColumnValueLocationMaps, context)
: undefined,
SkewedColumnValues:
output.SkewedColumnValues !== undefined && output.SkewedColumnValues !== null
? deserializeAws_json1_1ColumnValueStringList(output.SkewedColumnValues, context)
: undefined,
} as any;
};
const deserializeAws_json1_1StartBlueprintRunResponse = (
output: any,
context: __SerdeContext
): StartBlueprintRunResponse => {
return {
RunId: __expectString(output.RunId),
} as any;
};
const deserializeAws_json1_1StartCrawlerResponse = (output: any, context: __SerdeContext): StartCrawlerResponse => {
return {} as any;
};
const deserializeAws_json1_1StartCrawlerScheduleResponse = (
output: any,
context: __SerdeContext
): StartCrawlerScheduleResponse => {
return {} as any;
};
const deserializeAws_json1_1StartExportLabelsTaskRunResponse = (
output: any,
context: __SerdeContext
): StartExportLabelsTaskRunResponse => {
return {
TaskRunId: __expectString(output.TaskRunId),
} as any;
};
const deserializeAws_json1_1StartImportLabelsTaskRunResponse = (
output: any,
context: __SerdeContext
): StartImportLabelsTaskRunResponse => {
return {
TaskRunId: __expectString(output.TaskRunId),
} as any;
};
const deserializeAws_json1_1StartingEventBatchCondition = (
output: any,
context: __SerdeContext
): StartingEventBatchCondition => {
return {
BatchSize: __expectInt32(output.BatchSize),
BatchWindow: __expectInt32(output.BatchWindow),
} as any;
};
const deserializeAws_json1_1StartJobRunResponse = (output: any, context: __SerdeContext): StartJobRunResponse => {
return {
JobRunId: __expectString(output.JobRunId),
} as any;
};
const deserializeAws_json1_1StartMLEvaluationTaskRunResponse = (
output: any,
context: __SerdeContext
): StartMLEvaluationTaskRunResponse => {
return {
TaskRunId: __expectString(output.TaskRunId),
} as any;
};
const deserializeAws_json1_1StartMLLabelingSetGenerationTaskRunResponse = (
output: any,
context: __SerdeContext
): StartMLLabelingSetGenerationTaskRunResponse => {
return {
TaskRunId: __expectString(output.TaskRunId),
} as any;
};
const deserializeAws_json1_1StartTriggerResponse = (output: any, context: __SerdeContext): StartTriggerResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1StartWorkflowRunResponse = (
output: any,
context: __SerdeContext
): StartWorkflowRunResponse => {
return {
RunId: __expectString(output.RunId),
} as any;
};
const deserializeAws_json1_1StopCrawlerResponse = (output: any, context: __SerdeContext): StopCrawlerResponse => {
return {} as any;
};
const deserializeAws_json1_1StopCrawlerScheduleResponse = (
output: any,
context: __SerdeContext
): StopCrawlerScheduleResponse => {
return {} as any;
};
const deserializeAws_json1_1StopTriggerResponse = (output: any, context: __SerdeContext): StopTriggerResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1StopWorkflowRunResponse = (
output: any,
context: __SerdeContext
): StopWorkflowRunResponse => {
return {} as any;
};
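// StorageDescriptor is among the most deeply nested shapes in this file: it
// fans out to column lists, parameter maps, a schema reference, SerDe info,
// skew info, and sort orders, each handled by its own helper defined above.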
const deserializeAws_json1_1StorageDescriptor = (output: any, context: __SerdeContext): StorageDescriptor => {
return {
BucketColumns:
output.BucketColumns !== undefined && output.BucketColumns !== null
? deserializeAws_json1_1NameStringList(output.BucketColumns, context)
: undefined,
Columns:
output.Columns !== undefined && output.Columns !== null
? deserializeAws_json1_1ColumnList(output.Columns, context)
: undefined,
Compressed: __expectBoolean(output.Compressed),
InputFormat: __expectString(output.InputFormat),
Location: __expectString(output.Location),
NumberOfBuckets: __expectInt32(output.NumberOfBuckets),
OutputFormat: __expectString(output.OutputFormat),
Parameters:
output.Parameters !== undefined && output.Parameters !== null
? deserializeAws_json1_1ParametersMap(output.Parameters, context)
: undefined,
SchemaReference:
output.SchemaReference !== undefined && output.SchemaReference !== null
? deserializeAws_json1_1SchemaReference(output.SchemaReference, context)
: undefined,
SerdeInfo:
output.SerdeInfo !== undefined && output.SerdeInfo !== null
? deserializeAws_json1_1SerDeInfo(output.SerdeInfo, context)
: undefined,
SkewedInfo:
output.SkewedInfo !== undefined && output.SkewedInfo !== null
? deserializeAws_json1_1SkewedInfo(output.SkewedInfo, context)
: undefined,
SortColumns:
output.SortColumns !== undefined && output.SortColumns !== null
? deserializeAws_json1_1OrderList(output.SortColumns, context)
: undefined,
StoredAsSubDirectories: __expectBoolean(output.StoredAsSubDirectories),
} as any;
};
const deserializeAws_json1_1StringColumnStatisticsData = (
output: any,
context: __SerdeContext
): StringColumnStatisticsData => {
return {
AverageLength: __limitedParseDouble(output.AverageLength),
MaximumLength: __expectLong(output.MaximumLength),
NumberOfDistinctValues: __expectLong(output.NumberOfDistinctValues),
NumberOfNulls: __expectLong(output.NumberOfNulls),
} as any;
};
const deserializeAws_json1_1StringList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1Table = (output: any, context: __SerdeContext): Table => {
return {
CatalogId: __expectString(output.CatalogId),
CreateTime:
output.CreateTime !== undefined && output.CreateTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreateTime)))
: undefined,
CreatedBy: __expectString(output.CreatedBy),
DatabaseName: __expectString(output.DatabaseName),
Description: __expectString(output.Description),
IsRegisteredWithLakeFormation: __expectBoolean(output.IsRegisteredWithLakeFormation),
LastAccessTime:
output.LastAccessTime !== undefined && output.LastAccessTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastAccessTime)))
: undefined,
LastAnalyzedTime:
output.LastAnalyzedTime !== undefined && output.LastAnalyzedTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastAnalyzedTime)))
: undefined,
Name: __expectString(output.Name),
Owner: __expectString(output.Owner),
Parameters:
output.Parameters !== undefined && output.Parameters !== null
? deserializeAws_json1_1ParametersMap(output.Parameters, context)
: undefined,
PartitionKeys:
output.PartitionKeys !== undefined && output.PartitionKeys !== null
? deserializeAws_json1_1ColumnList(output.PartitionKeys, context)
: undefined,
Retention: __expectInt32(output.Retention),
StorageDescriptor:
output.StorageDescriptor !== undefined && output.StorageDescriptor !== null
? deserializeAws_json1_1StorageDescriptor(output.StorageDescriptor, context)
: undefined,
TableType: __expectString(output.TableType),
TargetTable:
output.TargetTable !== undefined && output.TargetTable !== null
? deserializeAws_json1_1TableIdentifier(output.TargetTable, context)
: undefined,
UpdateTime:
output.UpdateTime !== undefined && output.UpdateTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.UpdateTime)))
: undefined,
ViewExpandedText: __expectString(output.ViewExpandedText),
ViewOriginalText: __expectString(output.ViewOriginalText),
} as any;
};
const deserializeAws_json1_1TableError = (output: any, context: __SerdeContext): TableError => {
return {
ErrorDetail:
output.ErrorDetail !== undefined && output.ErrorDetail !== null
? deserializeAws_json1_1ErrorDetail(output.ErrorDetail, context)
: undefined,
TableName: __expectString(output.TableName),
} as any;
};
const deserializeAws_json1_1TableErrors = (output: any, context: __SerdeContext): TableError[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1TableError(entry, context);
});
};
const deserializeAws_json1_1TableIdentifier = (output: any, context: __SerdeContext): TableIdentifier => {
return {
CatalogId: __expectString(output.CatalogId),
DatabaseName: __expectString(output.DatabaseName),
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1TableList = (output: any, context: __SerdeContext): Table[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Table(entry, context);
});
};
const deserializeAws_json1_1TableVersion = (output: any, context: __SerdeContext): TableVersion => {
return {
Table:
output.Table !== undefined && output.Table !== null
? deserializeAws_json1_1Table(output.Table, context)
: undefined,
VersionId: __expectString(output.VersionId),
} as any;
};
const deserializeAws_json1_1TableVersionError = (output: any, context: __SerdeContext): TableVersionError => {
return {
ErrorDetail:
output.ErrorDetail !== undefined && output.ErrorDetail !== null
? deserializeAws_json1_1ErrorDetail(output.ErrorDetail, context)
: undefined,
TableName: __expectString(output.TableName),
VersionId: __expectString(output.VersionId),
} as any;
};
const deserializeAws_json1_1TableVersionErrors = (output: any, context: __SerdeContext): TableVersionError[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1TableVersionError(entry, context);
});
};
const deserializeAws_json1_1TagResourceResponse = (output: any, context: __SerdeContext): TagResourceResponse => {
return {} as any;
};
const deserializeAws_json1_1TagsMap = (output: any, context: __SerdeContext): { [key: string]: string } => {
return Object.entries(output).reduce((acc: { [key: string]: string }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: __expectString(value) as any,
};
}, {});
};
const deserializeAws_json1_1TaskRun = (output: any, context: __SerdeContext): TaskRun => {
return {
CompletedOn:
output.CompletedOn !== undefined && output.CompletedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CompletedOn)))
: undefined,
ErrorString: __expectString(output.ErrorString),
ExecutionTime: __expectInt32(output.ExecutionTime),
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
LogGroupName: __expectString(output.LogGroupName),
Properties:
output.Properties !== undefined && output.Properties !== null
? deserializeAws_json1_1TaskRunProperties(output.Properties, context)
: undefined,
StartedOn:
output.StartedOn !== undefined && output.StartedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.StartedOn)))
: undefined,
Status: __expectString(output.Status),
TaskRunId: __expectString(output.TaskRunId),
TransformId: __expectString(output.TransformId),
} as any;
};
const deserializeAws_json1_1TaskRunList = (output: any, context: __SerdeContext): TaskRun[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1TaskRun(entry, context);
});
};
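// TaskRunProperties carries one optional property group per ML task type
// (export labels, find matches, import labels, labeling-set generation);
// in practice only the group matching TaskType is expected to be present.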
const deserializeAws_json1_1TaskRunProperties = (output: any, context: __SerdeContext): TaskRunProperties => {
return {
ExportLabelsTaskRunProperties:
output.ExportLabelsTaskRunProperties !== undefined && output.ExportLabelsTaskRunProperties !== null
? deserializeAws_json1_1ExportLabelsTaskRunProperties(output.ExportLabelsTaskRunProperties, context)
: undefined,
FindMatchesTaskRunProperties:
output.FindMatchesTaskRunProperties !== undefined && output.FindMatchesTaskRunProperties !== null
? deserializeAws_json1_1FindMatchesTaskRunProperties(output.FindMatchesTaskRunProperties, context)
: undefined,
ImportLabelsTaskRunProperties:
output.ImportLabelsTaskRunProperties !== undefined && output.ImportLabelsTaskRunProperties !== null
? deserializeAws_json1_1ImportLabelsTaskRunProperties(output.ImportLabelsTaskRunProperties, context)
: undefined,
LabelingSetGenerationTaskRunProperties:
output.LabelingSetGenerationTaskRunProperties !== undefined &&
output.LabelingSetGenerationTaskRunProperties !== null
? deserializeAws_json1_1LabelingSetGenerationTaskRunProperties(
output.LabelingSetGenerationTaskRunProperties,
context
)
: undefined,
TaskType: __expectString(output.TaskType),
} as any;
};
const deserializeAws_json1_1TransformEncryption = (output: any, context: __SerdeContext): TransformEncryption => {
return {
MlUserDataEncryption:
output.MlUserDataEncryption !== undefined && output.MlUserDataEncryption !== null
? deserializeAws_json1_1MLUserDataEncryption(output.MlUserDataEncryption, context)
: undefined,
TaskRunSecurityConfigurationName: __expectString(output.TaskRunSecurityConfigurationName),
} as any;
};
const deserializeAws_json1_1TransformIdList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1TransformList = (output: any, context: __SerdeContext): MLTransform[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1MLTransform(entry, context);
});
};
const deserializeAws_json1_1TransformParameters = (output: any, context: __SerdeContext): TransformParameters => {
return {
FindMatchesParameters:
output.FindMatchesParameters !== undefined && output.FindMatchesParameters !== null
? deserializeAws_json1_1FindMatchesParameters(output.FindMatchesParameters, context)
: undefined,
TransformType: __expectString(output.TransformType),
} as any;
};
const deserializeAws_json1_1TransformSchema = (output: any, context: __SerdeContext): SchemaColumn[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1SchemaColumn(entry, context);
});
};
const deserializeAws_json1_1Trigger = (output: any, context: __SerdeContext): Trigger => {
return {
Actions:
output.Actions !== undefined && output.Actions !== null
? deserializeAws_json1_1ActionList(output.Actions, context)
: undefined,
Description: __expectString(output.Description),
EventBatchingCondition:
output.EventBatchingCondition !== undefined && output.EventBatchingCondition !== null
? deserializeAws_json1_1EventBatchingCondition(output.EventBatchingCondition, context)
: undefined,
Id: __expectString(output.Id),
Name: __expectString(output.Name),
Predicate:
output.Predicate !== undefined && output.Predicate !== null
? deserializeAws_json1_1Predicate(output.Predicate, context)
: undefined,
Schedule: __expectString(output.Schedule),
State: __expectString(output.State),
Type: __expectString(output.Type),
WorkflowName: __expectString(output.WorkflowName),
} as any;
};
const deserializeAws_json1_1TriggerList = (output: any, context: __SerdeContext): Trigger[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Trigger(entry, context);
});
};
const deserializeAws_json1_1TriggerNameList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1TriggerNodeDetails = (output: any, context: __SerdeContext): TriggerNodeDetails => {
return {
Trigger:
output.Trigger !== undefined && output.Trigger !== null
? deserializeAws_json1_1Trigger(output.Trigger, context)
: undefined,
} as any;
};
const deserializeAws_json1_1UntagResourceResponse = (output: any, context: __SerdeContext): UntagResourceResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateBlueprintResponse = (
output: any,
context: __SerdeContext
): UpdateBlueprintResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1UpdateClassifierResponse = (
output: any,
context: __SerdeContext
): UpdateClassifierResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateColumnStatisticsForPartitionResponse = (
output: any,
context: __SerdeContext
): UpdateColumnStatisticsForPartitionResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1ColumnStatisticsErrors(output.Errors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1UpdateColumnStatisticsForTableResponse = (
output: any,
context: __SerdeContext
): UpdateColumnStatisticsForTableResponse => {
return {
Errors:
output.Errors !== undefined && output.Errors !== null
? deserializeAws_json1_1ColumnStatisticsErrors(output.Errors, context)
: undefined,
} as any;
};
const deserializeAws_json1_1UpdateConnectionResponse = (
output: any,
context: __SerdeContext
): UpdateConnectionResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateCrawlerResponse = (output: any, context: __SerdeContext): UpdateCrawlerResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateCrawlerScheduleResponse = (
output: any,
context: __SerdeContext
): UpdateCrawlerScheduleResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateDatabaseResponse = (output: any, context: __SerdeContext): UpdateDatabaseResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateDevEndpointResponse = (
output: any,
context: __SerdeContext
): UpdateDevEndpointResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateJobResponse = (output: any, context: __SerdeContext): UpdateJobResponse => {
return {
JobName: __expectString(output.JobName),
} as any;
};
const deserializeAws_json1_1UpdateMLTransformResponse = (
output: any,
context: __SerdeContext
): UpdateMLTransformResponse => {
return {
TransformId: __expectString(output.TransformId),
} as any;
};
const deserializeAws_json1_1UpdatePartitionResponse = (
output: any,
context: __SerdeContext
): UpdatePartitionResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateRegistryResponse = (output: any, context: __SerdeContext): UpdateRegistryResponse => {
return {
RegistryArn: __expectString(output.RegistryArn),
RegistryName: __expectString(output.RegistryName),
} as any;
};
const deserializeAws_json1_1UpdateSchemaResponse = (output: any, context: __SerdeContext): UpdateSchemaResponse => {
return {
RegistryName: __expectString(output.RegistryName),
SchemaArn: __expectString(output.SchemaArn),
SchemaName: __expectString(output.SchemaName),
} as any;
};
const deserializeAws_json1_1UpdateTableResponse = (output: any, context: __SerdeContext): UpdateTableResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateTriggerResponse = (output: any, context: __SerdeContext): UpdateTriggerResponse => {
return {
Trigger:
output.Trigger !== undefined && output.Trigger !== null
? deserializeAws_json1_1Trigger(output.Trigger, context)
: undefined,
} as any;
};
const deserializeAws_json1_1UpdateUserDefinedFunctionResponse = (
output: any,
context: __SerdeContext
): UpdateUserDefinedFunctionResponse => {
return {} as any;
};
const deserializeAws_json1_1UpdateWorkflowResponse = (output: any, context: __SerdeContext): UpdateWorkflowResponse => {
return {
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1UserDefinedFunction = (output: any, context: __SerdeContext): UserDefinedFunction => {
return {
CatalogId: __expectString(output.CatalogId),
ClassName: __expectString(output.ClassName),
CreateTime:
output.CreateTime !== undefined && output.CreateTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreateTime)))
: undefined,
DatabaseName: __expectString(output.DatabaseName),
FunctionName: __expectString(output.FunctionName),
OwnerName: __expectString(output.OwnerName),
OwnerType: __expectString(output.OwnerType),
ResourceUris:
output.ResourceUris !== undefined && output.ResourceUris !== null
? deserializeAws_json1_1ResourceUriList(output.ResourceUris, context)
: undefined,
} as any;
};
const deserializeAws_json1_1UserDefinedFunctionList = (output: any, context: __SerdeContext): UserDefinedFunction[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1UserDefinedFunction(entry, context);
});
};
const deserializeAws_json1_1ValidationException = (output: any, context: __SerdeContext): ValidationException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1ValueStringList = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1VersionMismatchException = (
output: any,
context: __SerdeContext
): VersionMismatchException => {
return {
Message: __expectString(output.Message),
} as any;
};
const deserializeAws_json1_1Workflow = (output: any, context: __SerdeContext): Workflow => {
return {
BlueprintDetails:
output.BlueprintDetails !== undefined && output.BlueprintDetails !== null
? deserializeAws_json1_1BlueprintDetails(output.BlueprintDetails, context)
: undefined,
CreatedOn:
output.CreatedOn !== undefined && output.CreatedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedOn)))
: undefined,
DefaultRunProperties:
output.DefaultRunProperties !== undefined && output.DefaultRunProperties !== null
? deserializeAws_json1_1WorkflowRunProperties(output.DefaultRunProperties, context)
: undefined,
Description: __expectString(output.Description),
Graph:
output.Graph !== undefined && output.Graph !== null
? deserializeAws_json1_1WorkflowGraph(output.Graph, context)
: undefined,
LastModifiedOn:
output.LastModifiedOn !== undefined && output.LastModifiedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastModifiedOn)))
: undefined,
LastRun:
output.LastRun !== undefined && output.LastRun !== null
? deserializeAws_json1_1WorkflowRun(output.LastRun, context)
: undefined,
MaxConcurrentRuns: __expectInt32(output.MaxConcurrentRuns),
Name: __expectString(output.Name),
} as any;
};
const deserializeAws_json1_1WorkflowGraph = (output: any, context: __SerdeContext): WorkflowGraph => {
return {
Edges:
output.Edges !== undefined && output.Edges !== null
? deserializeAws_json1_1EdgeList(output.Edges, context)
: undefined,
Nodes:
output.Nodes !== undefined && output.Nodes !== null
? deserializeAws_json1_1NodeList(output.Nodes, context)
: undefined,
} as any;
};
const deserializeAws_json1_1WorkflowNames = (output: any, context: __SerdeContext): string[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return __expectString(entry) as any;
});
};
const deserializeAws_json1_1WorkflowRun = (output: any, context: __SerdeContext): WorkflowRun => {
return {
CompletedOn:
output.CompletedOn !== undefined && output.CompletedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CompletedOn)))
: undefined,
ErrorMessage: __expectString(output.ErrorMessage),
Graph:
output.Graph !== undefined && output.Graph !== null
? deserializeAws_json1_1WorkflowGraph(output.Graph, context)
: undefined,
Name: __expectString(output.Name),
PreviousRunId: __expectString(output.PreviousRunId),
StartedOn:
output.StartedOn !== undefined && output.StartedOn !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.StartedOn)))
: undefined,
StartingEventBatchCondition:
output.StartingEventBatchCondition !== undefined && output.StartingEventBatchCondition !== null
? deserializeAws_json1_1StartingEventBatchCondition(output.StartingEventBatchCondition, context)
: undefined,
Statistics:
output.Statistics !== undefined && output.Statistics !== null
? deserializeAws_json1_1WorkflowRunStatistics(output.Statistics, context)
: undefined,
Status: __expectString(output.Status),
WorkflowRunId: __expectString(output.WorkflowRunId),
WorkflowRunProperties:
output.WorkflowRunProperties !== undefined && output.WorkflowRunProperties !== null
? deserializeAws_json1_1WorkflowRunProperties(output.WorkflowRunProperties, context)
: undefined,
} as any;
};
const deserializeAws_json1_1WorkflowRunProperties = (
output: any,
context: __SerdeContext
): { [key: string]: string } => {
return Object.entries(output).reduce((acc: { [key: string]: string }, [key, value]: [string, any]) => {
if (value === null) {
return acc;
}
return {
...acc,
[key]: __expectString(value) as any,
};
}, {});
};
const deserializeAws_json1_1WorkflowRuns = (output: any, context: __SerdeContext): WorkflowRun[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1WorkflowRun(entry, context);
});
};
const deserializeAws_json1_1WorkflowRunStatistics = (output: any, context: __SerdeContext): WorkflowRunStatistics => {
return {
FailedActions: __expectInt32(output.FailedActions),
RunningActions: __expectInt32(output.RunningActions),
StoppedActions: __expectInt32(output.StoppedActions),
SucceededActions: __expectInt32(output.SucceededActions),
TimeoutActions: __expectInt32(output.TimeoutActions),
TotalActions: __expectInt32(output.TotalActions),
} as any;
};
const deserializeAws_json1_1Workflows = (output: any, context: __SerdeContext): Workflow[] => {
return (output || [])
.filter((e: any) => e != null)
.map((entry: any) => {
if (entry === null) {
return null as any;
}
return deserializeAws_json1_1Workflow(entry, context);
});
};
const deserializeAws_json1_1XMLClassifier = (output: any, context: __SerdeContext): XMLClassifier => {
return {
Classification: __expectString(output.Classification),
CreationTime:
output.CreationTime !== undefined && output.CreationTime !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreationTime)))
: undefined,
LastUpdated:
output.LastUpdated !== undefined && output.LastUpdated !== null
? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastUpdated)))
: undefined,
Name: __expectString(output.Name),
RowTag: __expectString(output.RowTag),
Version: __expectLong(output.Version),
} as any;
};
const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({
httpStatusCode: output.statusCode,
requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"],
extendedRequestId: output.headers["x-amz-id-2"],
cfId: output.headers["x-amz-cf-id"],
});
// Collect low-level response body stream to Uint8Array.
const collectBody = (streamBody: any = new Uint8Array(), context: __SerdeContext): Promise<Uint8Array> => {
if (streamBody instanceof Uint8Array) {
return Promise.resolve(streamBody);
}
return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array());
};
// Encode Uint8Array data into string with utf-8.
const collectBodyString = (streamBody: any, context: __SerdeContext): Promise<string> =>
collectBody(streamBody, context).then((body) => context.utf8Encoder(body));
const buildHttpRpcRequest = async (
context: __SerdeContext,
headers: __HeaderBag,
path: string,
resolvedHostname: string | undefined,
body: any
): Promise<__HttpRequest> => {
const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
const contents: any = {
protocol,
hostname,
port,
method: "POST",
path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path,
headers,
};
if (resolvedHostname !== undefined) {
contents.hostname = resolvedHostname;
}
if (body !== undefined) {
contents.body = body;
}
return new __HttpRequest(contents);
};
const parseBody = (streamBody: any, context: __SerdeContext): any =>
collectBodyString(streamBody, context).then((encoded) => {
if (encoded.length) {
return JSON.parse(encoded);
}
return {};
});
/**
* Load an error code for the aws.rest-json-1.1 protocol.
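 * For example, a value like "com.amazonaws.glue#EntityNotFoundException:Sender"
 * sanitizes to "EntityNotFoundException" (the text after ":" is dropped first,
 * then the namespace before "#").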
*/
const loadRestJsonErrorCode = (output: __HttpResponse, data: any): string => {
const findKey = (object: any, key: string) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase());
const sanitizeErrorCode = (rawValue: string): string => {
let cleanValue = rawValue;
if (cleanValue.indexOf(":") >= 0) {
cleanValue = cleanValue.split(":")[0];
}
if (cleanValue.indexOf("#") >= 0) {
cleanValue = cleanValue.split("#")[1];
}
return cleanValue;
};
const headerKey = findKey(output.headers, "x-amzn-errortype");
if (headerKey !== undefined) {
return sanitizeErrorCode(output.headers[headerKey]);
}
if (data.code !== undefined) {
return sanitizeErrorCode(data.code);
}
if (data["__type"] !== undefined) {
return sanitizeErrorCode(data["__type"]);
}
return "";
}; | break; |
index.ts | /**
* @fileOverview entry file
* the animation cfg description
* @param {object} cfg - animate config
* @property {object} cfg.element - G.Element
* @property {object} cfg.item - G6.Item
* @property {object} cfg.startKeyFrame - start key frame
* @property {object} cfg.endKeyFrame - end key frame
* @property {object} cfg.startCache - start key frames cache
* @property {object} cfg.endCache - end key frames cache
 * @property {function} cfg.done - callback executed when the animation finishes
* @author [email protected]
*/
import Global = require('../global');
/**
* scale in animate
* @param {object} item - G.Element
 * @param {function} callback callback invoked when the animation finishes
*/
function scaleIn(item, callback) {
const group = item.getGraphicGroup();
const box = item.getBBox();
const x = (box.minX + box.maxX) / 2;
const y = (box.minY + box.maxY) / 2;
const m = group.getMatrix();
const s = m[0];
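  // Shrink the group to 1% around its bbox center, then animate back to the
  // original scale s (relative factors: 0.01 / s down, 100 * s back up).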
group.transform([
[ 't', -x, -y ],
[ 's', 0.01 / s, 0.01 / s ],
[ 't', x, y ]
]);
group.animate({
transform: [
[ 't', -x, -y ],
[ 's', 100 * s, 100 * s ],
[ 't', x, y ]
]
}, Global.enterDuration, Global.enterEasing, callback);
}
/**
* scale out animate
* @param {object} item - G.Element
 * @param {function} callback callback invoked when the animation finishes
*/
function scaleOut(item, callback) {
const group = item.getGraphicGroup();
const box = item.getBBox();
const x = (box.minX + box.maxX) / 2;
const y = (box.minY + box.maxY) / 2;
const m = group.getMatrix();
const s = m[0];
group.animate({
transform: [
[ 't', -x, -y ],
[ 's', 0.01 / s, 0.01 / s ],
[ 't', x, y ]
]
}, Global.leaveDuration, Global.leaveEasing, callback);
}
/**
* fade in animate
* @param {object} group - G.Group item.getGraphicGroup()
 * @param {function} callback callback invoked when the animation finishes
*/
function fadeIn(group, callback) {
group.deepEach(element => {
if (element.isShape) {
const fillOpacity = element.attr('fillOpacity');
const strokeOpacity = element.attr('strokeOpacity');
element.attr({
fillOpacity: 0,
strokeOpacity: 0
});
element.animate({
fillOpacity,
strokeOpacity
}, Global.enterDuration, Global.enterEasing, callback);
}
});
}
/**
* fade out animate
* @param {object} group - G.Group item.getGraphicGroup()
 * @param {function} callback callback invoked when the animation finishes
*/
function fadeOut(group, callback) {
group.deepEach(element => {
const fillOpacity = element.attr('fillOpacity');
const strokeOpacity = element.attr('strokeOpacity');
if (element.isShape) {
element.animate({
fillOpacity: 0,
strokeOpacity: 0
}, Global.leaveDuration, Global.leaveEasing, () => {
element.attr({
fillOpacity,
strokeOpacity
});
callback();
});
}
});
}
export = {
enterScaleIn({ item, element }) {
if (!element.isItemContainer || !item.getKeyShape()) return;
scaleIn(item);
},
showScaleIn({ item, element }) {
if (!element.isItemContainer || !item.getKeyShape()) return;
scaleIn(item);
},
leaveScaleOut({ item, element, done }) {
if (!element.isItemContainer) return;
scaleOut(item, () => {
done();
});
},
hideScaleOut({ item, element, done }) {
if (!element.isItemContainer) return;
scaleOut(item, () => {
done();
});
},
enterFadeIn({ element, item }) {
if (!element.isItemContainer || !item.getKeyShape()) return; | fadeIn(element);
},
showFadeIn({ element, item }) {
if (!element.isItemContainer || !item.getKeyShape()) return;
fadeIn(element);
},
leaveFadeOut({ element, item, done }) {
if (!element.isItemContainer || !item.getKeyShape()) return;
fadeOut(element, done);
},
hideFadeOut({ element, item, done }) {
if (!element.isItemContainer || !item.getKeyShape()) return;
fadeOut(element, done);
}
}; | |
lib.rs | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::Result,
errors::{ffx_bail, ffx_error},
ffx_core::ffx_plugin,
ffx_pdk_lib::groups::{ArtifactStore, ArtifactStoreEntry, ArtifactStoreGroup},
ffx_pdk_lib::lock::{Lock, LockArtifact, LockArtifactStore},
ffx_pdk_lib::spec::{Spec, SpecArtifactStore, SpecArtifactStoreKind},
ffx_pdk_update_args::UpdateCommand,
fuchsia_hyper::new_https_client,
fuchsia_pkg::MetaContents,
futures_lite::io::AsyncWriteExt,
hyper::body::HttpBody,
hyper::{body, StatusCode, Uri},
serde_json::{json, Map, Value},
serde_json5,
std::cmp::Ordering,
std::fs::{read_to_string, File, OpenOptions},
std::io::BufReader,
std::path::PathBuf,
};
// Outputs artifacts to a lock file based on a general specification.
//
// Updates the artifacts by matching the available artifacts in an
// artifact store against the constraints in a specification
// (artifact_spec.json).

// URL path to artifact_groups.json for the TUF artifact store.
const TUF_ARTIFACT_GROUPS_PATH: &str = "targets/artifact_groups.json";
#[ffx_plugin("ffx_pdk")]
pub async fn cmd_update(cmd: UpdateCommand) -> Result<()> {
let spec: Spec = read_to_string(cmd.spec_file.clone())
.map_err(|e| ffx_error!("Cannot open file {:?} \nerror: {:?}", cmd.spec_file, e))
.and_then(|contents| {
serde_json5::from_str(&contents)
.map_err(|e| ffx_error!("Spec json parsing errored {}", e))
})?;
process_spec(&spec, &cmd).await?;
println!("Spec file for product \"{}\" processed.", spec.product);
Ok(())
}
/// Struct to hold a JSON Pointer as specified in [RFC
/// 6901](https://tools.ietf.org/html/rfc6901) and a $min/$max boolean.
///
/// This struct is used for filtering artifact store by $min/$max.
///
struct MinMaxPointer {
pointer: String,
is_min: bool,
}
impl std::fmt::Debug for MinMaxPointer {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "({}, {})", self.pointer, if self.is_min { "$min" } else { "$max" })
}
}
/// Returns a MinMaxPointer containing a JSON Pointer and "$min" or "$max" value.
///
/// No more than one $min or $max is allowed, so check and return errors.
///
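/// A minimal sketch (not a compiled doctest; uses `serde_json::json!`):
///
/// ```ignore
/// let spec = serde_json::json!({ "creation_time": "$max" });
/// let ptr = get_min_max_pointer(spec.as_object().unwrap()).unwrap().unwrap();
/// assert_eq!(ptr.pointer, "/creation_time");
/// assert!(!ptr.is_min);
/// ```
///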
fn get_min_max_pointer(json_object: &Map<String, Value>) -> Result<Option<MinMaxPointer>> {
let mut r = collect_min_max_pointers(json_object, "".to_string());
match r.len() {
0 => Ok(None),
1 => Ok(Some(r.remove(0))),
_ => ffx_bail!("More than one $min/$max found while processing spec file! {:?}", r),
}
}
/// Recursively collect JSON Pointers for keys containing the string
/// value "$min" or "$max" in the spec attributes.
///
/// JSON Pointers are used to look up values from a Value::Object for
/// filtering artifact store entries.
///
/// Return a vec of MinMaxPointer structs and the caller checks that no
/// more than 1 struct is returned.
///
fn collect_min_max_pointers(json_object: &Map<String, Value>, path: String) -> Vec<MinMaxPointer> {
    // Collect into a Vec so the caller can detect the error of too many sort keys.
let mut result = Vec::<MinMaxPointer>::new();
for (key, value) in json_object.iter() {
match value {
Value::String(s) => {
if s == "$min" || s == "$max" {
result.push(MinMaxPointer {
pointer: format!("{}/{}", path, key),
is_min: s == "$min",
})
}
}
Value::Object(o) => {
result.append(&mut collect_min_max_pointers(o, format!("{}/{}", path, key)));
}
Value::Null | Value::Bool(_) | Value::Number(_) | Value::Array(_) => {}
}
}
result
}
/// Compare two Value::Object types using a JSON Pointer to extract the
/// comparison field.
///
/// Since this function is used by sort_by it returns Ordering. Panics
/// if either comparison field is missing or the field is not a number
/// or string.
///
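/// For example, with pointer `"/w/x"`, `{"w": {"x": 1}}` compares as
/// `Some(Ordering::Less)` against `{"w": {"x": 2}}`.
///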
fn value_object_partial_cmp(
a_object: &Value,
b_object: &Value,
pointer: &String,
) -> Option<Ordering> {
// values must be available, otherwise fatal error
let a: &Value = a_object
.pointer(pointer)
.unwrap_or_else(|| panic!("Missing field '{}' during $min/$max", pointer));
let b: &Value = b_object
.pointer(pointer)
.unwrap_or_else(|| panic!("Missing field '{}' during $min/$max", pointer));
match (a, b) {
(Value::Number(na), Value::Number(nb)) => {
na.as_f64().unwrap().partial_cmp(&nb.as_f64().unwrap())
}
(Value::String(sa), Value::String(sb)) => sa.partial_cmp(sb),
(_, _) => panic!("$min/$max field ({}) is not Number or String: {} {}", pointer, a, b),
}
}
/// Find the $min, $max and return the index.
///
fn find_min_max(
artifact_groups: &Vec<ArtifactStoreGroup>,
matches: &Vec<usize>,
attributes: &Map<String, Value>,
) -> Result<usize> {
// The next statement returns Err() when more than 1 $min/$max is present
let min_max_pointer = get_min_max_pointer(attributes)?;
match min_max_pointer {
None => {
if artifact_groups.len() > 1 {
ffx_bail!("Multiple artifact groups (probably missing $min/$max)");
}
Ok(0)
}
Some(p) => Ok(*matches
.iter()
.max_by(|&a, &b| {
let a_attributes = &artifact_groups[*a].attributes;
let b_attributes = &artifact_groups[*b].attributes;
value_object_partial_cmp(a_attributes, b_attributes, &p.pointer)
.map(|ordering| if p.is_min { ordering.reverse() } else { ordering })
.unwrap()
})
.unwrap()),
}
}
/// Returns the artifact entry with the given name from an artifact store group.
///
fn get_artifact(
artifact_store_group: &ArtifactStoreGroup,
name: &str,
) -> Option<ArtifactStoreEntry> {
    artifact_store_group.artifacts.iter().find(|&a| a.name == name).cloned()
}
/// Return artifact_groups.json for different kinds of artifact stores.
///
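/// For a TUF store, this fetches `<repo>/targets/artifact_groups.json` over
/// HTTP(S); for a local store, it reads `<artifact_root>/<path>` from disk.
///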
async fn read_artifact_groups(
store: &SpecArtifactStore,
cmd: &UpdateCommand,
) -> Result<ArtifactStore> {
match store.r#type {
SpecArtifactStoreKind::TUF => {
if store.repo.is_none() {
ffx_bail!("Missing repo field in artifact store")
}
let repo = store.repo.as_ref().unwrap();
let uri = format!("{}/{}", repo, TUF_ARTIFACT_GROUPS_PATH).parse::<Uri>()?;
let client = new_https_client();
let response = client.get(uri.clone()).await?;
if response.status() != StatusCode::OK {
ffx_bail!("http get error {} {}. \n", &uri, response.status(),);
}
let bytes = body::to_bytes(response.into_body()).await?;
let body = String::from_utf8(bytes.to_vec()).expect("response was not valid utf-8");
Ok(serde_json::from_str(&body)?)
}
SpecArtifactStoreKind::Local => {
if store.path.is_none() {
ffx_bail!("Missing path field in store kind");
}
let path_suffix = store.path.as_ref().unwrap();
if cmd.artifact_root.is_none() {
ffx_bail!("Missing --artifact-root parameter");
}
let path = format!("{}/{}", cmd.artifact_root.as_ref().unwrap(), path_suffix);
let reader = BufReader::new(File::open(path)?);
Ok(serde_json::from_reader(reader)?)
}
}
}
/// Recursively match the artifact group attributes against the specification pattern.
///
/// True if a match.
///
fn match_object(group_attributes: &Value, spec_pattern: &Map<String, Value>) -> bool {
if !group_attributes.is_object() {
panic!("match_object: not an object.");
}
for (key, spec_value) in spec_pattern.iter() {
if let Some(group_value) = group_attributes.get(key) {
// Do not compare $min/$max spec values
if *spec_value != json!("$min") && *spec_value != json!("$max") {
if group_value.is_object() && spec_value.is_object() {
// Compare Object types recursively
if !match_object(group_value, spec_value.as_object().unwrap()) {
return false;
}
} else if *group_value != *spec_value {
// Compare Bool, Number, String, Array
return false;
};
}
} else {
            // The group has no value for this key, probably a user error in the spec
println!("Missing value during match for key \"{}\"", key);
return false;
}
}
true
}
/// Match artifacts groups from the artifact store file and spec attribute pattern.
///
/// Returns the index of the matching group.
///
fn match_artifacts(
artifact_groups: &Vec<ArtifactStoreGroup>,
spec_attribute_pattern: &Map<String, Value>,
) -> Result<usize> {
let mut matches = Vec::<usize>::new();
for (index, artifact_group) in artifact_groups.iter().enumerate() {
if match_object(&artifact_group.attributes, spec_attribute_pattern) {
matches.push(index);
}
}
let index = find_min_max(&artifact_groups, &matches, &spec_attribute_pattern)?;
Ok(index)
}
/// Merge two Option<Map> and return a new map. Entries are cloned.
///
/// Note: a duplicate key in b overwrites the value from a.
///
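/// For example, merging `Some({"a": 1})` with `Some({"a": 2, "b": 3})` yields
/// `{"a": 2, "b": 3}`.
///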
fn merge(a: &Option<Map<String, Value>>, b: &Option<Map<String, Value>>) -> Map<String, Value> {
let mut result = Map::new();
if let Some(map) = a {
result.extend(map.into_iter().map(|(k, v)| (k.clone(), v.clone())));
}
if let Some(map) = b {
result.extend(map.into_iter().map(|(k, v)| (k.clone(), v.clone())));
}
result
}
async fn get_blobs(
content_address_storage: Option<String>,
hash: String,
artifact_root: Option<String>,
) -> Result<Vec<String>> {
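    // Returns the package's own meta.far hash plus every content blob hash
    // listed in its meta/contents file: the meta.far is read from the local
    // artifact root, or downloaded when a content-address storage host is set.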
let tempdir = tempfile::tempdir().unwrap();
let mut result = vec![hash.clone()];
let meta_far_path = if content_address_storage.is_none() {
PathBuf::from(artifact_root.unwrap()).join(hash.to_string())
} else {
let hostname = content_address_storage.unwrap();
let uri = format!("{}/{}", hostname, hash).parse::<Uri>()?;
let client = new_https_client();
let mut res = client.get(uri.clone()).await?;
let status = res.status();
if status != StatusCode::OK {
ffx_bail!("Cannot download meta.far. Status is {}. Uri is: {}. \n", status, &uri);
}
let meta_far_path = tempdir.path().join("meta.far");
let mut output = async_fs::File::create(&meta_far_path).await?;
while let Some(next) = res.data().await {
let chunk = next?;
output.write_all(&chunk).await?;
}
output.sync_all().await?;
meta_far_path
};
let mut archive = File::open(&meta_far_path)?;
let mut meta_far = fuchsia_archive::Reader::new(&mut archive)?;
let meta_contents = meta_far.read_file("meta/contents")?;
let meta_contents = MetaContents::deserialize(meta_contents.as_slice())?.into_contents();
result.extend(meta_contents.into_iter().map(|(_, hash)| hash.to_string()));
return Ok(result);
}
/// Main processing of a spec file
///
async fn process_spec(spec: &Spec, cmd: &UpdateCommand) -> Result<()> {
let mut lock_artifacts = Vec::<LockArtifact>::new();
for spec_artifact_group in spec.artifact_groups.iter() {
// SpecArtifactGroup has a store and list of artifacts
let spec_artifact_store = &spec_artifact_group.artifact_store;
let artifact_store_groups = read_artifact_groups(&spec_artifact_store, cmd).await?;
// find each artifact in the spec in the store
for spec_artifact in spec_artifact_group.artifacts.iter() {
let name = &spec_artifact.name;
// Merge attributes from group and spec
let attributes = merge(&spec.attributes, &spec_artifact_group.attributes);
// Select the single group that matches
let groups = &artifact_store_groups.artifact_groups;
let matching_index: usize = match_artifacts(groups, &attributes)?;
let matching_group = &groups[matching_index];
let artifact_store_group_entry =
                get_artifact(matching_group, name).expect("missing artifact");
let artifact_output = LockArtifact {
name: name.to_owned(),
r#type: artifact_store_group_entry.r#type,
artifact_store: LockArtifactStore {
name: spec_artifact_store.name.to_string(),
artifact_group_name: matching_group.name.to_string(),
r#type: spec_artifact_store.r#type.clone(),
repo: spec_artifact_store.repo.clone(),
content_address_storage: matching_group.content_address_storage.clone(),
},
attributes: matching_group.attributes.as_object().unwrap().clone(),
// todo: rename to hash
merkle: artifact_store_group_entry.hash.clone(),
blobs: get_blobs(
matching_group.content_address_storage.clone(),
artifact_store_group_entry.hash,
cmd.artifact_root.clone(),
)
.await?,
};
lock_artifacts.push(artifact_output);
}
}
let lock = Lock { artifacts: lock_artifacts };
let file = OpenOptions::new().create(true).write(true).truncate(true).open(cmd.out.clone())?;
// write file
serde_json::to_writer_pretty(&file, &lock)?;
Ok(())
}
// tests
#[cfg(test)]
mod test {
use super::*;
use fuchsia_async as fasync;
use fuchsia_pkg::MetaPackage;
use fuchsia_pkg::{build_with_file_system, CreationManifest, FileSystem};
use maplit::{btreemap, hashmap};
use pkg::repository::{RepositoryManager, RepositoryServer};
use pkg::test_utils::make_writable_empty_repository;
use serde_json::json;
use serde_json5;
use std::collections::HashMap;
use std::fs;
use std::io;
use std::io::Write;
use std::net::Ipv4Addr;
use std::path::PathBuf;
use std::sync::Arc;
/// Test artifact hash
#[test]
fn test_get_hash() {
// Test data in json5 format for cleaner look
let data = r#"
{
name: "1361ee2a-e384-4eda-9f25-694affdeb30e",
content_address_storage: "fuchsia-blobs.googleusercontent.com",
type: "tuf",
attributes: {version: "63"},
artifacts: [
{ name: "one", merkle: "hash_1", sha256: "2", type: "package" },
{ name: "two", merkle: "hash_2", sha256: "3", type: "package" },
],
}"#;
// Parse the test data
let v: ArtifactStoreGroup = serde_json5::from_str(data).unwrap();
assert_eq!(get_artifact(&v, "one").unwrap().hash, "hash_1");
}
// For testing comparisons
impl PartialEq for MinMaxPointer {
fn eq(&self, other: &MinMaxPointer) -> bool {
self.is_min == other.is_min && self.pointer == other.pointer
}
}
#[test]
fn test_get_min_max_pointer() {
let object = json!({
"name": "John",
"age": {
"human": "$max",
"dog": 49,
}
});
let ptr = get_min_max_pointer(&object.as_object().unwrap());
        // A Result containing an Option containing a MinMaxPointer
assert_eq!(
ptr.unwrap().unwrap(),
MinMaxPointer { pointer: "/age/human".to_string(), is_min: false }
)
}
// Tests the filtering of artifact store groups by $min/$max
//
#[test]
fn test_find_min_max() {
let store: ArtifactStore = serde_json::from_str(
r#"
{
"schema_version": "v1",
"artifact_groups": [
{
"artifacts": [ ],
"attributes": {
"creation_time": "2021-09-06T11:37:36.054280"
},
"name": "group_a"
}, {
"artifacts": [ ],
"attributes": {
"creation_time": "2021-09-06T11:37:36.054281"
},
"name": "group_b"
}
]
}"#,
)
.unwrap();
assert_eq!(store.artifact_groups.len(), 2);
// The spec attributes for the $min case
let json_min = json!({
"creation_time": "$min"
});
// Convert to Map<String,Value> instead of Value.
let spec_attributes_min = json_min.as_object().unwrap();
let matches: Vec<usize> = (0..store.artifact_groups.len()).collect();
assert_eq!(
find_min_max(&store.artifact_groups, &matches, &spec_attributes_min).unwrap(),
0
);
// max
let json_max = json!({
"creation_time": "$max"
});
let spec_attributes_max = json_max.as_object().unwrap();
assert_eq!(
find_min_max(&store.artifact_groups, &matches, &spec_attributes_max).unwrap(),
1
);
}
// Test match_object cases
// - ignores $min/$max fields
// - fails on top level object
// - fails on recursive object
#[test]
fn test_match_object() {
let spec_json = json!({"a": "$max", "b": 1, "c": {"d": true}});
let spec = spec_json.as_object().unwrap();
let group_1 = json!({"a": 1, "b": 1, "c": {"d": true}});
assert!(match_object(&group_1, &spec));
let group_2 = json!({"a": 1, "b": 2, "c": {"d": true}});
assert!(!match_object(&group_2, &spec));
let group_3 = json!({"a": 1, "b": 1, "c": {"d": false}});
assert!(!match_object(&group_3, &spec));
let group_4 = json!({"a": 1, "c": {"d": false}});
assert!(!match_object(&group_4, &spec));
}
#[test]
fn test_value_object_partial_cmp() {
let a = json!({"w": {"x": 1}});
let b = json!({"w": {"x": 2}});
let ordering = value_object_partial_cmp(&a, &b, &"/w/x".to_string());
assert_eq!(ordering, Some(Ordering::Less));
}
struct FakeFileSystem {
content_map: HashMap<String, Vec<u8>>,
}
impl<'a> FileSystem<'a> for FakeFileSystem {
type File = &'a [u8];
fn open(&'a self, path: &str) -> Result<Self::File, io::Error> {
Ok(self.content_map.get(path).unwrap().as_slice())
}
fn len(&self, path: &str) -> Result<u64, io::Error> {
Ok(self.content_map.get(path).unwrap().len() as u64)
}
fn read(&self, path: &str) -> Result<Vec<u8>, io::Error> {
Ok(self.content_map.get(path).unwrap().clone())
}
}
| fn create_meta_far(path: PathBuf) {
let creation_manifest = CreationManifest::from_external_and_far_contents(
btreemap! {
"lib/mylib.so".to_string() => "host/mylib.so".to_string()
},
btreemap! {
"meta/my_component.cmx".to_string() => "host/my_component.cmx".to_string(),
"meta/package".to_string() => "host/meta/package".to_string()
},
)
.unwrap();
let component_manifest_contents = "my_component.cmx contents";
let mut v = vec![];
let meta_package = MetaPackage::from_name_and_variant(
"my-package-name".parse().unwrap(),
"my-package-variant".parse().unwrap(),
);
meta_package.serialize(&mut v).unwrap();
let file_system = FakeFileSystem {
content_map: hashmap! {
"host/mylib.so".to_string() => Vec::new(),
"host/my_component.cmx".to_string() => component_manifest_contents.as_bytes().to_vec(),
"host/meta/package".to_string() => v
},
};
build_with_file_system(&creation_manifest, &path, "my-package-name", &file_system).unwrap();
}
fn write_file(path: PathBuf, body: &[u8]) {
let mut tmp = tempfile::NamedTempFile::new().unwrap();
tmp.write(body).unwrap();
tmp.persist(path).unwrap();
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_end_to_end_local() {
let tempdir = tempfile::tempdir().unwrap();
let root = tempdir.path();
let out_filename = root.join("artifact_lock.json");
// recreate the test_data directory
for (filename, data) in [
("artifact_spec.json", include_str!("../test_data/artifact_spec.json")),
("artifact_groups.json", include_str!("../test_data/artifact_groups.json")),
("artifact_groups2.json", include_str!("../test_data/artifact_groups2.json")),
] {
fs::write(root.join(filename), data).expect("Unable to write file");
}
let meta_far_path =
root.join("0000000000000000000000000000000000000000000000000000000000000000");
create_meta_far(meta_far_path);
let blob_path =
root.join("15ec7bf0b50732b49f8228e07d24365338f9e3ab994b00af08e5a3bffe55fd8b");
write_file(blob_path, "".as_bytes());
let cmd = UpdateCommand {
spec_file: PathBuf::from(root.join("artifact_spec.json")),
out: out_filename.clone(),
artifact_root: Some(root.display().to_string()),
};
let r = cmd_update(cmd).await;
assert!(r.is_ok());
let new_artifact_lock: Lock = File::open(&out_filename)
.map(BufReader::new)
.map(serde_json::from_reader)
.unwrap()
.unwrap();
let golden_artifact_lock: Lock =
serde_json::from_str(include_str!("../test_data/golden_artifact_lock.json")).unwrap();
assert_eq!(new_artifact_lock, golden_artifact_lock);
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_end_to_end_tuf() {
let manager = RepositoryManager::new();
let tempdir = tempfile::tempdir().unwrap();
let root = tempdir.path().join("artifact_store");
let repo = make_writable_empty_repository("artifact_store", root.clone()).await.unwrap();
let out_filename = tempdir.path().join("artifact_lock.json");
let meta_far_path =
root.join("0000000000000000000000000000000000000000000000000000000000000000");
create_meta_far(meta_far_path);
let blob_path =
root.join("15ec7bf0b50732b49f8228e07d24365338f9e3ab994b00af08e5a3bffe55fd8b");
write_file(blob_path, "".as_bytes());
manager.add(Arc::new(repo));
let addr = (Ipv4Addr::LOCALHOST, 0).into();
let (server_fut, _, server) =
RepositoryServer::builder(addr, Arc::clone(&manager)).start().await.unwrap();
// Run the server in the background.
let task = fasync::Task::local(server_fut);
let tuf_repo_url = server.local_url() + "/artifact_store";
// write artifact_groups.json to server.
let tuf_dir = root.join("targets/");
fs::create_dir(&tuf_dir).unwrap();
let artifact_group_path = tuf_dir.join("artifact_groups.json");
fs::write(
artifact_group_path,
include_str!("../test_data/tuf_artifact_groups.json")
.replace("tuf_repo_url", &tuf_repo_url),
)
.unwrap();
// write spec file.
let spec_file_path = tempdir.path().join("artifact_spec.json");
fs::write(
&spec_file_path,
include_str!("../test_data/tuf_artifact_spec.json")
.replace("tuf_repo_url", &tuf_repo_url),
)
.unwrap();
let cmd = UpdateCommand {
spec_file: spec_file_path,
out: out_filename.clone(),
artifact_root: None,
};
cmd_update(cmd).await.unwrap();
let new_artifact_lock: Lock = File::open(&out_filename)
.map(BufReader::new)
.map(serde_json::from_reader)
.unwrap()
.unwrap();
let golden_artifact_lock: Lock = serde_json::from_str(
include_str!("../test_data/golden_tuf_artifact_lock.json")
.replace("tuf_repo_url", &tuf_repo_url)
.as_str(),
)
.unwrap();
assert_eq!(new_artifact_lock, golden_artifact_lock);
// Signal the server to shutdown.
server.stop();
// Wait for the server to actually shut down.
task.await;
}
} | |
hyperloglogplus.rs | use std::collections::HashSet;
use std::hash::{BuildHasher, Hash, Hasher};
use std::marker::PhantomData;
use serde::{Deserialize, Serialize};
use crate::common::*;
use crate::constants;
use crate::encoding::DifIntVec;
use crate::HyperLogLog;
use crate::HyperLogLogError;
/// Implements the HyperLogLog++ algorithm for cardinality estimation.
///
/// This implementation is based on the paper:
///
/// *HyperLogLog in Practice: Algorithmic Engineering of a State of The Art
/// Cardinality Estimation Algorithm.*
///
/// - Uses 6-bit registers, packed into 32-bit unsigned integers. Thus, for
///   every five registers, 2 bits remain unused.
/// - In small cardinalities, a sparse representation is used which allows
/// for higher precision in estimations.
/// - Performs bias correction using the empirical data provided by Google
/// (can be found [here](http://goo.gl/iU8Ig)).
/// - Supports serialization/deserialization through `serde`.
///
/// # Examples
///
/// ```
/// use std::collections::hash_map::RandomState;
/// use hyperloglogplus::{HyperLogLog, HyperLogLogPlus};
///
/// let mut hllp = HyperLogLogPlus::new(16, RandomState::new()).unwrap();
///
/// hllp.add(&12345);
/// hllp.add(&23456);
///
/// assert_eq!(hllp.count().trunc() as u32, 2);
/// ```
///
/// # References
///
/// - ["HyperLogLog: the analysis of a near-optimal cardinality estimation
/// algorithm", Philippe Flajolet, Éric Fusy, Olivier Gandouet and Frédéric
/// Meunier.](http://algo.inria.fr/flajolet/Publications/FlFuGaMe07.pdf)
/// - ["HyperLogLog in Practice: Algorithmic Engineering of a State of The Art
/// Cardinality Estimation Algorithm", Stefan Heule, Marc Nunkesser and
/// Alexander Hall.](https://research.google/pubs/pub40671/)
/// - ["Appendix to HyperLogLog in Practice: Algorithmic Engineering of a State
/// of the Art Cardinality Estimation Algorithm", Stefan Heule, Marc
/// Nunkesser and Alexander Hall.](https://goo.gl/iU8Ig)
///
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct HyperLogLogPlus<H, B>
where
H: Hash + ?Sized,
B: BuildHasher,
{
builder: B,
precision: u8,
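    // counts = (dense register count for precision p, register count for the
    // sparse precision p' - 1, dense size in bytes used as the sparse threshold).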
counts: (usize, usize, usize),
tmpset: HashSet<u32>,
sparse: DifIntVec,
registers: Option<RegistersPlus>,
phantom: PhantomData<H>,
}
impl<H, B> HyperLogLogPlus<H, B>
where
H: Hash + ?Sized,
B: BuildHasher,
{
// Minimum precision allowed.
const MIN_PRECISION: u8 = 4;
// Maximum precision allowed.
const MAX_PRECISION: u8 = 18;
// Maximum precision in sparse representation.
const PRIME_PRECISION: u8 = 25;
/// Creates a new HyperLogLogPlus instance.
pub fn new(precision: u8, builder: B) -> Result<Self, HyperLogLogError> {
// Ensure the specified precision is within bounds.
if precision < Self::MIN_PRECISION || precision > Self::MAX_PRECISION {
return Err(HyperLogLogError::InvalidPrecision);
}
let count = Self::register_count(precision);
let counts = (
count,
Self::register_count(Self::PRIME_PRECISION - 1),
RegistersPlus::size_in_bytes(count),
);
Ok(HyperLogLogPlus {
builder: builder,
precision: precision,
counts: counts,
tmpset: HashSet::new(),
sparse: DifIntVec::new(),
registers: None,
phantom: PhantomData,
})
}
/// Merges the `other` HyperLogLogPlus instance into `self`.
///
/// Both sketches must have the same precision. Merge can trigger
/// the transition from sparse to normal representation.
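    ///
    /// A minimal usage sketch (marked `ignore`; assumes `RandomState` hashing):
    ///
    /// ```ignore
    /// use std::collections::hash_map::RandomState;
    /// let mut a: HyperLogLogPlus<u64, _> = HyperLogLogPlus::new(16, RandomState::new()).unwrap();
    /// let mut b: HyperLogLogPlus<u64, _> = HyperLogLogPlus::new(16, RandomState::new()).unwrap();
    /// a.add(&1);
    /// b.add(&2);
    /// a.merge(&b).unwrap();
    /// assert_eq!(a.count().trunc() as u32, 2);
    /// ```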
pub fn merge<S, T>(
&mut self,
other: &HyperLogLogPlus<S, T>,
) -> Result<(), HyperLogLogError>
where
S: Hash + ?Sized,
T: BuildHasher,
{
if self.precision != other.precision() {
return Err(HyperLogLogError::IncompatiblePrecision);
}
if other.is_sparse() {
if self.is_sparse() {
// Both sketches are in sparse representation.
//
// Insert all the hash codes of other into `tmpset`.
for hash_code in other.tmpset.iter() {
self.tmpset.insert(*hash_code);
}
for hash_code in other.sparse.into_iter() {
self.tmpset.insert(hash_code);
}
// Merge temporary set into sparse representation.
if self.tmpset.len() * 100 > self.counts.2 {
self.merge_sparse()
}
} else {
// The other sketch is in sparse representation but not self.
//
// Decode all the hash codes and update the self's
// corresponding Registers.
let registers = self.registers.as_mut().unwrap();
for hash_code in other.tmpset.iter() {
let (zeros, index) = other.decode_hash(*hash_code);
registers.set_greater(index, zeros);
}
for hash_code in other.sparse.into_iter() {
let (zeros, index) = other.decode_hash(hash_code);
registers.set_greater(index, zeros);
}
}
} else {
if self.is_sparse() {
// The other sketch is in normal representation but self
// is in sparse representation.
//
// Turn sparse into normal.
self.merge_sparse();
if self.is_sparse() {
self.sparse_to_normal();
}
}
// Merge registers from both sketches.
let registers = self.registers.as_mut().unwrap();
let other_registers_iter = other.registers_iter().unwrap();
for (i, val) in other_registers_iter.enumerate() {
registers.set_greater(i, val);
}
}
Ok(())
}
    #[inline] // Returns the precision of the HyperLogLogPlus instance.
fn precision(&self) -> u8 {
self.precision
}
#[inline] // Returns an iterator to the Registers' values.
fn registers_iter(&self) -> Option<impl Iterator<Item = u32> + '_> {
self.registers
.as_ref()
.and_then(|registers| Some(registers.iter()))
}
#[inline] // Returns true if the HyperLogLog is using the
// sparse representation.
fn is_sparse(&self) -> bool {
self.registers.is_none()
}
#[inline] // Encodes the hash value as a u32 integer.
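    // Sparse encoding (HLL++ paper): if the bits between the normal precision
    // p and the sparse precision p' = 25 are all zero, the leading-zero count
    // must be stored explicitly and the code is `index << 7 | zeros << 1 | 1`;
    // otherwise the index alone suffices and the code is `index << 1` with the
    // flag bit cleared.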
fn encode_hash(&self, mut hash: u64) -> u32 {
let index: u64 = u64::extract(hash, 64, 64 - Self::PRIME_PRECISION);
let dif: u64 =
u64::extract(hash, 64 - self.precision, 64 - Self::PRIME_PRECISION);
if dif == 0 {
// Shift left the bits of the index.
hash = (hash << Self::PRIME_PRECISION) |
(1 << Self::PRIME_PRECISION - 1);
// Count leading zeros.
let zeros: u32 = 1 + hash.leading_zeros();
return ((index as u32) << 7) | (zeros << 1) | 1;
}
(index << 1) as u32
}
#[inline] // Extracts the index from a encoded hash.
fn index(&self, hash_code: u32) -> usize {
if hash_code & 1 == 1 {
return u32::extract(hash_code, 32, 32 - self.precision) as usize;
}
u32::extract(
hash_code,
Self::PRIME_PRECISION + 1,
Self::PRIME_PRECISION - self.precision + 1,
) as usize
}
#[inline] // Decodes a hash into the number of leading zeros and
    // the index of the corresponding hash.
fn decode_hash(&self, hash_code: u32) -> (u32, usize) {
if hash_code & 1 == 1 {
return (
u32::extract(hash_code, 7, 1) +
(Self::PRIME_PRECISION - self.precision) as u32,
self.index(hash_code),
);
}
let hash =
hash_code << (32 - Self::PRIME_PRECISION + self.precision - 1);
(hash.leading_zeros() + 1, self.index(hash_code))
}
// Creates a set of Registers for the given precision and copies the
// register values from the sparse representation to the normal one.
fn sparse_to_normal(&mut self) {
let mut registers: RegistersPlus =
RegistersPlus::with_count(self.counts.0);
for hash_code in self.sparse.into_iter() {
let (zeros, index) = self.decode_hash(hash_code);
registers.set_greater(index, zeros);
}
self.registers = Some(registers);
self.tmpset.clear();
self.sparse.clear();
}
// Merges the hash codes stored in the temporary set to the sparse
// representation.
fn merge_sparse(&mut self) {
let mut set_codes: Vec<u32> = self.tmpset.iter().copied().collect();
set_codes.sort();
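        // Two-pointer merge: walk the sorted tmpset codes and the already
        // sorted sparse list in tandem, pushing the deduplicated union into a
        // fresh difference-encoded buffer.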
let mut buf = DifIntVec::with_capacity(self.sparse.len());
let (mut set_iter, mut buf_iter) =
(set_codes.iter(), self.sparse.into_iter());
let (mut set_hash_option, mut buf_hash_option) =
(set_iter.next(), buf_iter.next());
while set_hash_option.is_some() || buf_hash_option.is_some() {
if set_hash_option.is_none() {
buf.push(buf_hash_option.unwrap());
buf_hash_option = buf_iter.next();
continue;
}
if buf_hash_option.is_none() {
buf.push(*set_hash_option.unwrap());
set_hash_option = set_iter.next();
continue;
}
let (set_hash_code, buf_hash_code) =
(*set_hash_option.unwrap(), buf_hash_option.unwrap());
if set_hash_code == buf_hash_code {
buf.push(set_hash_code);
set_hash_option = set_iter.next();
buf_hash_option = buf_iter.next();
} else if set_hash_code > buf_hash_code {
buf.push(buf_hash_code);
buf_hash_option = buf_iter.next();
} else {
buf.push(set_hash_code);
set_hash_option = set_iter.next();
}
}
self.sparse = buf;
self.tmpset.clear();
if self.sparse.len() > self.counts.2 {
self.sparse_to_normal();
}
}
// Returns an estimated bias correction based on empirical data.
fn estimate_bias(&self, raw: f64) -> f64 {
// Get a reference to raw estimates/biases for precision.
let biases = &constants::BIAS_DATA
[(self.precision - Self::MIN_PRECISION) as usize];
let estimates = &constants::RAW_ESTIMATE_DATA
[(self.precision - Self::MIN_PRECISION) as usize];
// Raw estimate is first/last in estimates. Return the first/last bias.
if raw <= estimates[0] {
return biases[0];
} else if estimates[estimates.len() - 1] <= raw {
return biases[biases.len() - 1];
}
// Raw estimate is somewhere in between estimates.
// Binary search for the calculated raw estimate.
//
// Here we unwrap because neither the values in `estimates`
// nor `raw` are going to be NaN.
let res =
estimates.binary_search_by(|est| est.partial_cmp(&raw).unwrap());
let (prv, idx) = match res {
Ok(idx) => (idx - 1, idx),
Err(idx) => (idx - 1, idx),
};
        // Return linear interpolation between raw's neighboring points. | let ratio = (raw - estimates[prv]) / (estimates[idx] - estimates[prv]);
        // Calculate bias.
biases[prv] + ratio * (biases[idx] - biases[prv])
}
#[inline] // Returns an empirically determined threshold to decide on
// the use of linear counting.
fn threshold(precision: u8) -> f64 {
match precision {
4 => 10.0,
5 => 10.0,
6 => 40.0,
7 => 80.0,
8 => 220.0,
9 => 400.0,
10 => 900.0,
11 => 1800.0,
12 => 3100.0,
13 => 6500.0,
14 => 11500.0,
15 => 20000.0,
16 => 50000.0,
17 => 120000.0,
18 => 350000.0,
_ => unreachable!(),
}
}
}
impl<H, B> HyperLogLogCommon for HyperLogLogPlus<H, B>
where
H: Hash + ?Sized,
B: BuildHasher,
{
}
impl<H, B> HyperLogLog<H> for HyperLogLogPlus<H, B>
where
H: Hash + ?Sized,
B: BuildHasher,
{
/// Adds a new value to the multiset.
fn add(&mut self, value: &H) {
// Create a new hasher.
let mut hasher = self.builder.build_hasher();
// Calculate the hash.
value.hash(&mut hasher);
// Use a 64-bit hash value.
let mut hash: u64 = hasher.finish();
match &mut self.registers {
Some(registers) => {
// We use normal representation.
// Calculate the register's index.
let index: usize = (hash >> (64 - self.precision)) as usize;
// Shift left the bits of the index.
hash = (hash << self.precision) | (1 << (self.precision - 1));
// Count leading zeros.
let zeros: u32 = 1 + hash.leading_zeros();
// Update the register with the max leading zeros counts.
registers.set_greater(index, zeros);
},
None => {
// We use sparse representation.
// Encode hash value.
let hash_code = self.encode_hash(hash);
// Insert hash_code into temporary set.
self.tmpset.insert(hash_code);
// Merge temporary set into sparse representation.
if self.tmpset.len() * 100 > self.counts.2 {
self.merge_sparse()
}
},
}
}
/// Estimates the cardinality of the multiset.
fn count(&mut self) -> f64 {
// Merge tmpset into sparse representation.
if self.registers.is_none() {
self.merge_sparse();
}
match self.registers.as_mut() {
Some(registers) => {
// We use normal representation.
let zeros = registers.zeros();
if zeros != 0 {
let correction = Self::linear_count(self.counts.0, zeros);
// Use linear counting only if value below threshold.
if correction <= Self::threshold(self.precision) {
correction
} else {
// Calculate the raw estimate.
let mut raw = Self::estimate_raw_plus(
registers.iter(),
self.counts.0,
);
// Apply correction if required.
if raw <= 5.0 * self.counts.0 as f64 {
raw -= self.estimate_bias(raw);
}
raw
}
} else {
// Calculate the raw estimate.
let mut raw = Self::estimate_raw_plus(
registers.iter(),
self.counts.0,
);
// Apply correction if required.
if raw <= 5.0 * self.counts.0 as f64 {
raw -= self.estimate_bias(raw);
}
raw
}
},
None => {
// We use sparse representation.
// Calculate number of registers set to zero.
let zeros = self.counts.1 - self.sparse.count();
// Use linear counting to approximate.
Self::linear_count(self.counts.1, zeros)
},
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, Hasher};
struct PassThroughHasher(u64);
impl Hasher for PassThroughHasher {
#[inline]
fn finish(&self) -> u64 {
self.0
}
#[inline]
fn write(&mut self, _: &[u8]) {}
#[inline]
fn write_u64(&mut self, i: u64) {
self.0 = i;
}
}
#[derive(Serialize, Deserialize)]
struct PassThroughHasherBuilder;
impl BuildHasher for PassThroughHasherBuilder {
type Hasher = PassThroughHasher;
fn build_hasher(&self) -> Self::Hasher {
PassThroughHasher(0)
}
}
#[derive(Serialize, Deserialize)]
struct DefaultBuildHasher;
impl BuildHasher for DefaultBuildHasher {
type Hasher = DefaultHasher;
fn build_hasher(&self) -> Self::Hasher {
DefaultHasher::new()
}
}
#[test]
fn test_normal_add() {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
hll.sparse_to_normal();
assert!(hll.registers.is_some());
hll.add(&0x00010fffffffffff);
assert_eq!(hll.registers.as_ref().unwrap().get(1), 5);
hll.add(&0x0002ffffffffffff);
assert_eq!(hll.registers.as_ref().unwrap().get(2), 1);
hll.add(&0x0003000000000000);
assert_eq!(hll.registers.as_ref().unwrap().get(3), 49);
hll.add(&0x0003000000000001);
assert_eq!(hll.registers.as_ref().unwrap().get(3), 49);
hll.add(&0xff03700000000000);
assert_eq!(hll.registers.as_ref().unwrap().get(0xff03), 2);
hll.add(&0xff03080000000000);
assert_eq!(hll.registers.as_ref().unwrap().get(0xff03), 5);
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(4, builder).unwrap();
hll.sparse_to_normal();
hll.add(&0x1fffffffffffffff);
assert_eq!(hll.registers.as_ref().unwrap().get(1), 1);
hll.add(&0xffffffffffffffff);
assert_eq!(hll.registers.as_ref().unwrap().get(0xf), 1);
hll.add(&0x00ffffffffffffff);
assert_eq!(hll.registers.as_ref().unwrap().get(0), 5);
}
#[test]
fn test_sparse_encode_hash() {
let builder = PassThroughHasherBuilder {};
let hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(14, builder).unwrap();
// < ... 14 ... > .. 25 .. >
let index: u64 = 0b0000000000111000000000000;
let hash: u64 = 0b1101;
let hash_code = hll.encode_hash((index << 64 - 25) | hash);
assert_eq!(hash_code, (index << 7) as u32 | (35 + 1 << 1) | 1);
// < ... 14 ... > .. 25 .. >
let index: u64 = 0b0000000000111000000000010;
let hash: u64 = 0b1101;
let hash_code = hll.encode_hash((index << 64 - 25) | hash);
assert_eq!(hash_code, (index << 1) as u32);
}
#[test]
fn test_sparse_decode_hash() {
let builder = PassThroughHasherBuilder {};
let hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(8, builder).unwrap();
let (zeros, index) =
hll.decode_hash(hll.encode_hash(0xffffff8000000000));
assert_eq!((zeros, index), (1, 0xff));
let (zeros, index) =
hll.decode_hash(hll.encode_hash(0xff00000000000000));
assert_eq!((zeros, index), (57, 0xff));
let (zeros, index) =
hll.decode_hash(hll.encode_hash(0xff30000000000000));
assert_eq!((zeros, index), (3, 0xff));
let (zeros, index) =
hll.decode_hash(hll.encode_hash(0xaa10000000000000));
assert_eq!((zeros, index), (4, 0xaa));
let (zeros, index) =
hll.decode_hash(hll.encode_hash(0xaa0f000000000000));
assert_eq!((zeros, index), (5, 0xaa));
}
#[test]
fn test_sparse_merge_sparse() {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
let hashes: [u64; 3] =
[0xf000017000000000, 0x000fff8f00000000, 0x0f00017000000000];
let hash_codes: [u32; 3] = [
hll.encode_hash(hashes[0]),
hll.encode_hash(hashes[1]),
hll.encode_hash(hashes[2]),
];
// Insert a couple of hashes.
hll.add(&hashes[0]);
assert!(hll.tmpset.contains(&hash_codes[0]));
hll.add(&hashes[1]);
assert!(hll.tmpset.contains(&hash_codes[1]));
assert_eq!(hll.tmpset.len(), 2);
assert_eq!(hll.sparse.len(), 0);
// Merge and check hashes.
hll.merge_sparse();
assert_eq!(hll.sparse.count(), 2);
assert_eq!(hll.tmpset.len(), 0);
let hll_hash_codes: Vec<u32> = hll.sparse.into_iter().collect();
assert_eq!(hll_hash_codes, vec![hash_codes[1], hash_codes[0]]);
// Insert another hash.
hll.add(&hashes[2]);
assert!(hll.tmpset.contains(&hash_codes[2]));
// Merge and check hashes again.
hll.merge_sparse();
assert_eq!(hll.sparse.count(), 3);
assert_eq!(hll.tmpset.len(), 0);
let hll_hash_codes: Vec<u32> = hll.sparse.into_iter().collect();
assert_eq!(
hll_hash_codes,
vec![hash_codes[1], hash_codes[2], hash_codes[0]]
);
}
#[test]
fn test_sparse_merge_to_normal() {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(7, builder).unwrap();
// We have 5 registers every 4 bytes
// (1 << 7) * 4 / 5 = 102
for i in 0u64..102 {
hll.add(&(i << 39));
hll.count();
}
hll.add(&1);
assert!(hll.registers.is_none());
hll.count();
assert!(hll.registers.is_some());
assert_eq!(hll.tmpset.len(), 0);
assert_eq!(hll.sparse.len(), 0);
}
#[test]
fn test_sparse_trigger_sparse_to_normal() {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(4, builder).unwrap();
// We have 5 registers every 4 bytes
// (1 << 4) * 4 / 5 = 12
for i in 0u64..12 {
hll.add(&(1 << i));
}
assert!(hll.registers.is_none());
hll.add(&(1 << 13));
assert!(hll.registers.is_some());
assert_eq!(hll.tmpset.len(), 0);
assert_eq!(hll.sparse.len(), 0);
}
#[test]
fn test_sparse_sparse_to_normal() {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
hll.add(&0x00010fffffffffff);
assert_eq!(hll.count() as u64, 1);
hll.merge_sparse();
hll.sparse_to_normal();
assert_eq!(hll.count() as u64, 1);
assert!(hll.registers.is_some());
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
hll.add(&0x00010fffffffffff);
hll.add(&0x0002ffffffffffff);
hll.add(&0x0003000000000000);
hll.add(&0x0003000000000001);
hll.add(&0xff03700000000000);
hll.add(&0xff03080000000000);
hll.merge_sparse();
hll.sparse_to_normal();
assert_eq!(hll.count() as u64, 4);
assert_eq!(hll.registers.as_ref().unwrap().get(1), 5);
assert_eq!(hll.registers.as_ref().unwrap().get(2), 1);
assert_eq!(hll.registers.as_ref().unwrap().get(3), 49);
assert_eq!(hll.registers.as_ref().unwrap().get(0xff03), 5);
}
#[test]
fn test_sparse_count() {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
let hashes: [u64; 6] = [
0x00010fffffffffff,
0x00020fffffffffff,
0x00030fffffffffff,
0x00040fffffffffff,
0x00050fffffffffff,
0x00050fffffffffff,
];
for hash in &hashes {
hll.add(hash);
}
// Calls a merge_sparse().
hll.count();
let hash_codes: Vec<u32> = hll.sparse.into_iter().collect();
let expected_hash_codes: Vec<u32> =
hashes.iter().map(|hash| hll.encode_hash(*hash)).collect();
assert_eq!(hash_codes.as_slice(), &expected_hash_codes[..5]);
assert_eq!(hll.count() as u64, 5);
}
#[test]
fn test_estimate_bias() {
let builder = PassThroughHasherBuilder {};
let hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(4, builder).unwrap();
let bias = hll.estimate_bias(14.0988);
assert!((bias - 7.5988).abs() <= 1e-5);
let bias = hll.estimate_bias(10.0);
assert!((bias - 10.0).abs() < 1e-5);
let bias = hll.estimate_bias(80.0);
assert!((bias - (-1.7606)).abs() < 1e-5);
let builder = PassThroughHasherBuilder {};
let hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
let bias = hll.estimate_bias(55391.4373);
assert!((bias - 39416.9373).abs() < 1e-5);
let builder = PassThroughHasherBuilder {};
let hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(18, builder).unwrap();
let bias = hll.estimate_bias(275468.768);
assert!((bias - 118181.769).abs() <= 1e-3);
let bias = hll.estimate_bias(587532.522);
assert!((bias - 23922.523).abs() <= 1e-3);
let bias = hll.estimate_bias(1205430.993);
assert!((bias - (-434.006000000052)).abs() <= 1e-3);
let bias = hll.estimate_bias(1251260.649);
assert!((bias - (-479.351000000024)).abs() <= 1e-3);
}
#[test]
fn test_estimate_bias_count() {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(4, builder).unwrap();
hll.sparse_to_normal();
for i in 0u64..10 {
hll.add(&((i << 60) + 0xfffffffffffffff));
}
assert!((10.0 - hll.count()).abs() < 1.0);
}
#[test]
fn test_merge_error() {
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
let other: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(12, PassThroughHasherBuilder {}).unwrap();
assert_eq!(
hll.merge(&other),
Err(HyperLogLogError::IncompatiblePrecision)
);
}
#[test]
fn test_merge_both_sparse() {
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
let mut other: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
other.add(&0x00010fffffffffff);
other.add(&0x00020fffffffffff);
other.add(&0x00030fffffffffff);
other.add(&0x00040fffffffffff);
other.add(&0x00050fffffffffff);
other.add(&0x00050fffffffffff);
assert_eq!(other.count().trunc() as u64, 5);
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 5);
assert!(hll.is_sparse() && other.is_sparse());
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 5);
assert!(hll.is_sparse() && other.is_sparse());
other.add(&0x00060fffffffffff);
other.add(&0x00070fffffffffff);
other.add(&0x00080fffffffffff);
other.add(&0x00090fffffffffff);
other.add(&0x000a0fffffffffff);
assert_eq!(other.count().trunc() as u64, 10);
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 10);
assert!(hll.is_sparse() && other.is_sparse());
}
#[test]
fn test_merge_both_normal() {
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
let mut other: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
hll.sparse_to_normal();
other.sparse_to_normal();
other.add(&0x00010fffffffffff);
other.add(&0x00020fffffffffff);
other.add(&0x00030fffffffffff);
other.add(&0x00040fffffffffff);
other.add(&0x00050fffffffffff);
other.add(&0x00050fffffffffff);
assert_eq!(other.count().trunc() as u64, 5);
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 5);
assert!(!hll.is_sparse() && !other.is_sparse());
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 5);
assert!(!hll.is_sparse() && !other.is_sparse());
other.add(&0x00060fffffffffff);
other.add(&0x00070fffffffffff);
other.add(&0x00080fffffffffff);
other.add(&0x00090fffffffffff);
other.add(&0x000a0fffffffffff);
assert_eq!(other.count().trunc() as u64, 10);
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 10);
assert!(!hll.is_sparse() && !other.is_sparse());
}
#[test]
fn test_merge_sparse_to_normal() {
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
let mut other: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
hll.sparse_to_normal();
other.add(&0x00010fffffffffff);
other.add(&0x00020fffffffffff);
other.add(&0x00030fffffffffff);
other.add(&0x00040fffffffffff);
other.add(&0x00050fffffffffff);
other.add(&0x00050fffffffffff);
assert_eq!(other.count().trunc() as u64, 5);
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 5);
assert!(!hll.is_sparse() && other.is_sparse());
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 5);
assert!(!hll.is_sparse() && other.is_sparse());
other.add(&0x00060fffffffffff);
other.add(&0x00070fffffffffff);
other.add(&0x00080fffffffffff);
other.add(&0x00090fffffffffff);
other.add(&0x000a0fffffffffff);
assert_eq!(other.count().trunc() as u64, 10);
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 10);
assert!(!hll.is_sparse() && other.is_sparse());
}
#[test]
fn test_merge_normal_to_sparse() {
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
let mut other: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, PassThroughHasherBuilder {}).unwrap();
other.sparse_to_normal();
other.add(&0x00010fffffffffff);
other.add(&0x00020fffffffffff);
other.add(&0x00030fffffffffff);
other.add(&0x00040fffffffffff);
other.add(&0x00050fffffffffff);
other.add(&0x00050fffffffffff);
assert_eq!(other.count().trunc() as u64, 5);
let res = hll.merge(&other);
assert_eq!(res, Ok(()));
assert_eq!(hll.count().trunc() as u64, 5);
assert!(!hll.is_sparse() && !other.is_sparse());
}
#[test]
fn test_serialization() {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
hll.add(&0x00010fffffffffff);
hll.add(&0x00020fffffffffff);
hll.add(&0x00030fffffffffff);
hll.add(&0x00040fffffffffff);
hll.add(&0x00050fffffffffff);
hll.add(&0x00050fffffffffff);
assert_eq!(hll.count().trunc() as usize, 5);
let serialized = serde_json::to_string(&hll).unwrap();
let mut deserialized: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
serde_json::from_str(&serialized).unwrap();
assert_eq!(deserialized.count().trunc() as usize, 5);
deserialized.add(&0x00060fffffffffff);
assert_eq!(deserialized.count().trunc() as usize, 6);
hll.sparse_to_normal();
assert_eq!(hll.count().trunc() as usize, 5);
let serialized = serde_json::to_string(&hll).unwrap();
let mut deserialized: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
serde_json::from_str(&serialized).unwrap();
assert_eq!(deserialized.count().trunc() as usize, 5);
deserialized.add(&0x00060fffffffffff);
assert_eq!(deserialized.count().trunc() as usize, 6);
}
#[cfg(feature = "bench-units")]
mod benches {
extern crate test;
use super::*;
use rand::prelude::*;
use test::{black_box, Bencher};
#[bench]
fn bench_plus_add_normal(b: &mut Bencher) {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
hll.sparse_to_normal();
b.iter(|| {
for i in 0u64..1000 {
hll.add(&(u64::max_value() - i));
}
})
}
#[bench]
fn bench_add_normal_with_hash(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let workload: Vec<String> = (0..2000)
.map(|_| {
format!("- {} - {} -", rng.gen::<u64>(), rng.gen::<u64>())
})
.collect();
b.iter(|| {
let mut hll: HyperLogLogPlus<&String, DefaultBuildHasher> =
HyperLogLogPlus::new(16, DefaultBuildHasher {}).unwrap();
hll.sparse_to_normal();
for val in &workload {
hll.add(&val);
}
let val = hll.count();
black_box(val);
})
}
#[bench]
fn bench_plus_count_normal(b: &mut Bencher) {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
hll.sparse_to_normal();
b.iter(|| {
let count = hll.count();
black_box(count);
})
}
#[bench]
fn bench_plus_merge_sparse(b: &mut Bencher) {
let builder = PassThroughHasherBuilder {};
let mut hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(16, builder).unwrap();
for i in 0u64..500 {
hll.add(&(i << 39));
}
assert_eq!(hll.tmpset.len(), 500);
let set = hll.tmpset.clone();
b.iter(|| {
hll.tmpset = set.clone();
hll.merge_sparse()
});
assert!(hll.registers.is_none());
assert_eq!(hll.tmpset.len(), 0);
}
#[bench]
fn bench_estimate_bias(b: &mut Bencher) {
let builder = PassThroughHasherBuilder {};
let hll: HyperLogLogPlus<u64, PassThroughHasherBuilder> =
HyperLogLogPlus::new(18, builder).unwrap();
b.iter(|| {
let bias = hll.estimate_bias(275468.768);
black_box(bias);
let bias = hll.estimate_bias(587532.522);
black_box(bias);
let bias = hll.estimate_bias(1205430.993);
black_box(bias);
let bias = hll.estimate_bias(1251260.649);
black_box(bias);
});
}
}
}
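// A minimal sketch of the table-based bias correction exercised by
// test_estimate_bias above. This is an assumption inferred from the test
// values (e.g. estimate_bias(14.0988) == 7.5988 is exactly the linear
// interpolation between neighboring table entries), not the crate's code;
// `estimates` and `biases` stand for the precomputed per-precision tables.
fn estimate_bias_sketch(raw: f64, estimates: &[f64], biases: &[f64]) -> f64 {
    // Clamp at the table ends (matches estimate_bias(10.0) == 10.0 for p = 4).
    if raw <= estimates[0] {
        return biases[0];
    }
    if raw >= estimates[estimates.len() - 1] {
        return biases[biases.len() - 1];
    }
    // Index of the first table entry >= raw, and its predecessor.
    let idx = estimates.partition_point(|&e| e < raw);
    let prv = idx - 1;
    // Interpolation weight between the two bracketing raw estimates.
    let ratio = (raw - estimates[prv]) / (estimates[idx] - estimates[prv]);
    biases[prv] + ratio * (biases[idx] - biases[prv])
}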
|
QuickDraw.py | import torch
import numpy as np
import os
from torch.utils.data import TensorDataset, DataLoader
from .utils import collate_sequences
NORMALIZER = {  # per-class (mu, std), computed over the concatenation of both real-valued features (the binary feature is discarded)
'hot dog': (1.3554527691145501, 55.15028414343622),
'palm tree': (-0.7063322505461493, 49.02700047706162),
'moon': (0.8036297226693486, 41.345375756324735),
'envelope': (4.900210171097034, 69.4196392054246),
'dumbbell': (2.0407119932197504, 47.695996108391235),
'microwave': (2.9699868328411974, 66.93104801889736),
'onion': (1.2284401861051968, 45.21653229074296),
'nail': (1.6172277953943177, 62.21706638258232),
'paper clip': (0.6436449511025123, 33.32139677497804),
'soccer ball': (0.6708907017116656, 39.52546034271634),
'drill': (1.1185769827821401, 48.722276882610934),
'telephone': (1.3498681969396034, 40.76261400934935),
'airplane': (1.0251489388319772, 53.19602656498733),
'dishwasher': (2.2191710394266084, 61.508456849155735),
'chair': (4.016188671169509, 62.53028260788498),
'grass': (3.376122464598659, 69.31003265138725),
'rhinoceros': (1.2215264458448767, 48.80834840225656),
'octopus': (0.8146148966002359, 40.89244147955804),
'cloud': (0.35621641218733063, 29.409365110585483),
'bicycle': (0.46389958129146036, 48.99489500128756),
'swan': (1.049680167563788, 42.94216649535794),
'picture frame': (1.5065118700885085, 63.253773000519175),
'shorts': (1.9229859470161206, 52.22414095061445),
'flying saucer': (1.5281540318557478, 55.091686319872025),
'basketball': (1.6894072481767088, 57.72410547176846),
'harp': (2.906289433329914, 78.44568624724737),
'beard': (0.9775846803866044, 40.10763763299041),
'binoculars': (0.808846681416587, 47.034367710374035),
'tiger': (0.9438875155470355, 50.66921493109194),
'book': (2.5133103399080188, 65.60820901501357),
'scissors': (0.6647841622339, 40.07199951607956),
'raccoon': (0.7915126973835728, 43.36239169880799),
'peanut': (0.5509739234166906, 30.524261515788336),
'wheel': (0.7686023820927692, 47.607169012136815),
'trombone': (1.0112428613309314, 52.411164111718705),
'diamond': (2.395434500084604, 59.73484161759297),
'parachute': (2.056040072916103, 60.77205525434674),
'tractor': (0.5855071624918279, 50.5522849539403),
'windmill': (0.24800006974356498, 52.12209721342569),
'alarm clock': (0.391438978240927, 41.44493991046053),
'clarinet': (1.2795783017970905, 49.905620294236705),
'spider': (0.6505395210719399, 51.18743252881025),
'violin': (0.8724565090414226, 52.533813964768754),
'clock': (1.6654012441543409, 37.33134444355261),
'tent': (3.3092329281631137, 79.47572994387069),
'belt': (2.3132169051670886, 71.13105919924993),
'map': (2.6555302484638714, 61.11029370697819),
'toe': (0.5471757615653022, 43.27192861865762),
'bread': (1.3686935317654665, 47.6839114556787),
'kangaroo': (0.6918159454175237, 35.99155678225584),
'camera': (1.4527253130110975, 49.336211227235296),
'duck': (1.598900790833744, 41.45077993986563),
'lipstick': (0.41066758960159977, 41.786372987299615),
'snowman': (0.14670998509400804, 31.624590642386174),
'pickup truck': (1.6892820330935685, 54.644954488199524),
'radio': (1.1157698308056494, 56.49502963911298),
'truck': (1.814018865712332, 55.76992610437815),
'train': (1.470463028668502, 77.63271694640828),
'teapot': (0.9014336302825292, 37.48169241933444),
'tree': (0.13337780967798976, 42.97342154517355),
'hourglass': (2.39448903480218, 52.622226862393084),
'eyeglasses': (1.379818588483046, 52.57994649640675),
'church': (0.9630982672082059, 69.99862099910592),
'submarine': (0.9138035290673335, 56.77613283220326),
'couch': (2.283752727373058, 68.77383224311272),
'umbrella': (0.5170775226020248, 47.83678678400117),
'whale': (0.3497782843722267, 52.43513503159438),
'cooler': (2.19778540728888, 61.74770130955316),
'sword': (0.5910085971920176, 48.46440617862079),
'table': (4.542251159698462, 87.48848948789511),
'skull': (0.6570416475324352, 36.02607871443743),
'house': (2.654399100012597, 66.81448281800678),
'blackberry': (0.3455386008050086, 29.1600574174796),
'bush': (0.7558370448198207, 41.04289142315455),
'giraffe': (1.4011522715876905, 46.32335477059355),
'rainbow': (4.702348561309779, 82.07143165031478),
'yoga': (1.1423119096294918, 50.79902795898255),
'mailbox': (0.3511077577743624, 55.61495444057362),
'wristwatch': (1.0924273980760375, 49.96303380973288),
'The Eiffel Tower': (1.2008260944995623, 73.04798400687072),
'syringe': (1.6277984132013836, 56.22798342770764),
'bulldozer': (1.060340370028316, 50.385030079706375),
'door': (3.36173249421909, 66.2933191994613),
'zebra': (1.132649710524639, 52.459089632246396),
'beach': (2.263430578427388, 106.91064036288513),
'crown': (0.7879512551564102, 50.206077053610386),
'screwdriver': (1.1442268550285573, 46.07164154904856),
'bear': (0.9395651847291722, 36.986932048274426),
'sink': (0.9422909049727696, 47.54959424164138),
'teddy-bear': (0.7913359738933313, 33.36477019938705),
'square': (1.3275239412907043, 65.89863242901156),
'cruise ship': (2.15931097599974, 78.36615495337965),
'waterslide': (3.4486527614397833, 83.83125777723943),
'elbow': (4.092940205383508, 61.758770494053785),
'stereo': (1.8269654619223092, 58.475208066649714),
'sweater': (1.067301637554828, 44.59577281486724),
'bandage': (1.4032828202796717, 49.86169830574158),
'bat': (0.8121797269039484, 37.69212883824029),
'The Mona Lisa': (1.6676774598611082, 58.162407907625514),
'sea turtle': (0.7386565039500725, 46.86228560536563),
'butterfly': (0.4342721164650034, 37.484845221008726),
'mosquito': (0.6493471316616555, 40.10938957349605),
'tennis racquet': (0.015468185574502526, 62.24923783294656),
'tornado': (0.7822439181013964, 45.2077352961338),
'computer': (3.0811423630717107, 60.20403781306317),
'bridge': (3.679091358194862, 120.07641800536442),
'toothbrush': (1.2915788907996562, 53.22105425492547),
'baseball bat': (0.410479892106175, 39.02003924116569),
'bench': (4.462592927663926, 85.6302327587279),
'finger': (-0.6637118888775841, 49.09874846625699),
'canoe': (2.9733556427417493, 68.6835039501244),
'baseball': (1.6959011615443051, 45.45130310748645),
'circle': (-0.39852915378672893, 31.77419572565473),
'banana': (1.3562427804512358, 42.94349204337924),
'bathtub': (2.3570421946852544, 65.3192157735626),
'axe': (1.0293065652442999, 54.84964062528346),
'lantern': (1.1476541043730428, 53.67189040723054),
'birthday cake': (0.15146259492252578, 55.89568012892327),
'castle': (1.804799071214271, 63.20589225473029),
'wine bottle': (1.034291851931799, 44.04598147244387),
'ant': (0.9303194592264448, 34.43552547266363),
'The Great Wall of China': (4.285709330181438, 131.23951199298298),
'bee': (0.43095116254566934, 40.56855963179127),
'apple': (-0.44780125973592305, 30.208033668691396),
'arm': (1.7757119621507091, 46.692967793920644),
'asparagus': (-0.2421902384249924, 48.97218720603324),
'angel': (0.5489444327750316, 41.66381961171915),
'cup': (2.673919370605991, 43.54406248924784),
'carrot': (0.6945175048056408, 46.104020850556616),
'bucket': (1.7396654767172537, 48.828570427954205),
'animal migration': (2.6285542168388782, 61.28180224245095),
'cell phone': (1.9267526020713346, 49.38973568488984),
'van': (1.9173825658872794, 54.8721828825201),
'dolphin': (0.9714616061928398, 42.83044052150523),
'bowtie': (1.168151585565935, 37.61503592501492),
'campfire': (0.2534712087647997, 42.286814756524535),
'ceiling fan': (1.0603067359693852, 40.52738328774831),
'boomerang': (0.5759666273292099, 39.78957492087158),
'aircraft carrier': (1.5172469688772912, 78.7478229402662),
'cactus': (-0.1281463623029328, 42.27573114632624),
'cake': (0.31108565857076187, 56.322115673526696),
'anvil': (1.471075424663743, 48.99321880248113),
'toothpaste': (1.8461911264030182, 51.53740072123023),
'swing set': (3.971684529151281, 98.99892200987023),
'feather': (-0.42952206263561854, 53.55639949373167),
'flashlight': (1.9317251715822668, 62.79624045193533),
'garden hose': (1.5452934595615202, 53.713569777275175),
'camel': (1.5165348305653266, 35.07846843003865),
'calculator': (1.991161645112966, 50.142844727554575),
'diving board': (1.7300484119947224, 75.61560569527323),
'chandelier': (1.991040877029286, 50.65396442677625),
'helmet': (1.9722019205320098, 45.87749730234627),
'squirrel': (0.729042851521045, 35.3641639039348),
'ambulance': (1.0598312283596059, 55.875842882074),
'bottlecap': (1.5970585109209756, 40.01592713375047),
'hospital': (1.7313904919786411, 72.37806984815816),
'coffee cup': (1.32151623967879, 41.665383540075005),
'watermelon': (1.8482342559051477, 59.31958622930048),
'dresser': (2.396722754292761, 79.1225545952145),
'bed': (2.588378888585306, 78.08870505568636),
'bird': (1.5906829218142842, 41.059856184169284),
'cookie': (0.7048879723978447, 34.29958258051739),
'underwear': (3.027964069514147, 54.355597943207094),
'drums': (1.1572575727426198, 54.68602043565278),
'cat': (0.9684180598296738, 43.19493215282525),
'calendar': (2.495118096854286, 82.1800159400022),
'bracelet': (0.4661401948292038, 31.127130949231766),
'eraser': (2.3490401085702235, 56.643670114244784),
'dog': (0.8907946320439043, 38.505287852990726),
'barn': (2.2770830828592583, 77.75086163641558),
'spoon': (0.5421543550003102, 37.016180276322515),
'sun': (-0.2008690561101928, 57.798300559005334),
'toilet': (1.291036016063847, 40.288417166228925),
'backpack': (1.3079276772602353, 46.33461078978928),
'trumpet': (1.233316766684717, 47.840050217395266),
'frying pan': (1.1317137754492954, 42.27197781360748),
'blueberry': (0.3428165650102726, 29.923143234478975),
'toaster': (1.3159036268033921, 59.46381954781093),
'floor lamp': (-1.4045719348973986, 52.73112796615196),
'crocodile': (1.2462846638010021, 51.83360295588419),
'police car': (0.6314716475098945, 51.402397657264785),
'cow': (0.6487350495428166, 44.82200063524666),
'basket': (1.781348034990179, 61.40405101602184),
'cello': (1.4380096549620986, 59.481368251629206),
'golf club': (2.935274820103259, 47.944997493610416),
'school bus': (1.3202131289388477, 61.70753264839142),
'hockey puck': (0.725588239742589, 48.55963543134594),
'fence': (3.8660243770815614, 92.36222788620427),
'donut': (0.431402194475543, 32.222374599013726),
'goatee': (1.2211961416317247, 39.81077215140121),
'traffic light': (1.269260032432163, 44.30942006032888),
'hamburger': (1.4103828007350085, 49.04022894395681),
'ear': (1.9563928536834947, 34.3747704500531),
'compass': (0.8636275744036599, 38.906947603746346),
'broccoli': (-0.08805269427735608, 30.880695648320078),
'skyscraper': (1.3477313197584702, 87.73974365488579),
'fan': (0.5595090068208328, 42.26975493031441),
'hot air balloon': (1.0010255829235684, 45.01559229242698),
'mountain': (5.349497596465423, 69.73739652862577),
'fork': (0.21995268515715857, 43.66291957421616),
'face': (1.1847102417517064, 41.81747854722619),
'crab': (0.5500211063457824, 48.30558365265961),
'ice cream': (0.5645385757011395, 41.72357855932428),
'foot': (1.6352285029716924, 40.86466847411941),
'hat': (2.1269765754849357, 53.181061994837336),
'candle': (-0.9566338163648833, 46.30537462785261),
'flip flops': (1.1195172002513105, 45.28787295602699),
'hammer': (0.40690889202283986, 45.31354440860368),
'guitar': (0.9118308447368665, 58.627968076179016),
'brain': (0.5667801625156502, 39.94893006675094),
'stove': (1.2695451153311437, 56.13115551721316),
'headphones': (1.7442579010033754, 38.05663003234409),
'flamingo': (1.3329066566304946, 44.20478550977875),
'flower': (0.014788800722293086, 28.686447255310085),
'bus': (1.5110163683563511, 65.58525727312637),
'hot tub': (0.9262199087425361, 63.37602990315963),
'elephant': (1.0286360401485168, 42.29328387209706),
'fire hydrant': (0.4353600099500871, 48.49174159770318),
'laptop': (2.5420362830209355, 63.093568635534155),
'leaf': (-0.07888685459428697, 51.531397540382116),
'potato': (0.7248796777877287, 36.04373128693473),
'hockey stick': (2.560198275283893, 47.75516557446046),
'lighter': (-0.10645657100081249, 38.600148168238576),
'hexagon': (2.7288170353096675, 50.79748328406929),
'garden': (0.881398058547382, 59.301002560708866),
'marker': (1.4847281646438588, 50.021490600998504),
'keyboard': (2.8496015722739236, 81.38936435354776),
'camouflage': (0.8524647599170719, 65.65432278791238),
'knee': (5.3541853695693575, 60.225209719801974),
'sheep': (1.2468686657122494, 35.19467195151128),
'microphone': (0.3006266208385552, 46.783442715555715),
'mushroom': (0.28405131561550195, 40.671965829362236),
'light bulb': (0.3093205629583717, 35.25819445171456),
'hand': (0.7429242999868996, 34.70475212985948),
'key': (0.7406380633244096, 34.13758650534517),
'house plant': (-0.4396176672108764, 40.515632771810296),
'eye': (0.8606006296728399, 44.889207403048424),
'matches': (0.3485948924638904, 47.42024782911991),
'broom': (2.9233557704577193, 49.52062851559808),
'knife': (1.4292202706092547, 51.01808033723662),
'crayon': (1.467668727844571, 51.82316360295973),
'ocean': (7.872452229036218, 89.99111246191521),
'dragon': (0.8266093687680877, 49.41364315921484),
'leg': (5.117580228531927, 54.01711580361819),
'horse': (0.9246973774561026, 48.65827974249926),
'zigzag': (9.770917367360767, 61.744673036996616),
'car': (1.1106827823007763, 47.60058589694208),
'grapes': (0.6046526027097275, 27.16306317679192),
'lightning': (4.090431090680993, 57.03172069825947),
'moustache': (1.7875824399413591, 37.731677498630745),
'mouth': (2.76090978291076, 57.20373539326289),
'vase': (0.5528729482101566, 36.996243257356014),
'fish': (0.8878609523273818, 44.34932077221152),
'string bean': (1.346485501392611, 54.7312484146683),
'lighthouse': (0.4274423658693314, 75.81546755799378),
'ladder': (5.90632648589332, 110.16555003310557),
'television': (1.3151946885305383, 62.90537952277926),
'helicopter': (0.7111156159770702, 56.6546344981718),
'pillow': (2.0992806701392936, 55.274535278488294),
'pencil': (2.0345830706124053, 62.90446034037889),
'rollerskates': (2.0053135688983006, 39.31457668947572),
'jail': (5.661515872939487, 115.47255551897983),
'mermaid': (0.3187352763659362, 39.8221589482459),
'jacket': (2.0929497013270537, 50.6087533539712),
'megaphone': (1.8135468059177202, 54.66219701027781),
'nose': (4.435118108240006, 36.01419720778613),
'pants': (1.4927018991320877, 55.47801110072461),
'octagon': (2.0144474110553916, 49.61164954802588),
'pizza': (0.9106006910867426, 49.75334623210151),
'passport': (2.09209268126368, 53.80930291521799),
'pool': (2.06494328488252, 67.72608882496336),
'motorbike': (0.4038001637130562, 46.94203574972685),
'snake': (1.5154800788642753, 49.350623204522535),
'pond': (0.7752730687547197, 47.62409950756826),
'frog': (0.8874821595962438, 39.61840650901404),
'pig': (0.47576581658267675, 39.5924494951546),
'penguin': (1.0164857569517498, 40.88730060878002),
'cannon': (0.8927868329478531, 53.019935221920896),
'parrot': (1.6070485082553567, 43.38710309821747),
'lobster': (0.5829596663716866, 42.78511651754868),
'saw': (1.6178343188617499, 43.19552103419441),
'strawberry': (0.6209739512011668, 32.08453043500838),
'firetruck': (1.125536302973774, 65.91057171556372),
'speedboat': (2.0848389958987257, 76.42986457816829),
'popsicle': (0.4813981088599971, 42.49229183387311),
'hurricane': (0.7079895622313991, 61.715710494552816),
'see saw': (1.8249850934378673, 70.89383197689017),
'saxophone': (0.9072383454069756, 36.470803735437904),
'mug': (2.5296236017401257, 42.26283334121334),
'piano': (2.6469411517060806, 73.27448246359215),
'mouse': (0.8020204927469491, 43.836228689128035),
'power outlet': (2.071476276483809, 46.822370189887785),
'hedgehog': (0.4703510415238984, 45.92192258266138),
'oven': (1.8548425634903463, 62.43067850281194),
'shoe': (1.297356215372919, 41.93847714957883),
'rifle': (2.5223233995449474, 60.73555429659974),
'roller coaster': (2.6065332991832584, 86.95567387367467),
'peas': (0.7749159834453123, 42.94847025647788),
'lion': (0.4463371384240275, 34.510210963204415),
'rake': (3.442498762575747, 57.38005406297777),
'postcard': (3.700937086574, 69.8261672011201),
'sock': (1.9223557134218592, 43.2687682421636),
'purse': (1.6872172724499956, 48.85082993380252),
'sleeping bag': (1.2484033851490541, 52.138238912603775),
'skateboard': (2.4819607493229663, 53.19362309156825),
'necklace': (2.392666309866489, 41.3064841582455),
'stairs': (5.195938168639603, 47.15470516213574),
'lollipop': (0.10920444361594842, 38.89025105370695),
'snowflake': (2.3415722082063986, 68.96721342968107),
'rabbit': (0.9078200152038035, 34.75862482451542),
'owl': (1.2457620241823235, 42.73803624793326),
'shovel': (1.970015817486029, 45.419236670608626),
'pear': (-0.45220059964010495, 30.843347488001527),
'remote control': (1.1358869210694837, 44.83511889242796),
'star': (0.3626996748657054, 52.65011227641426),
'scorpion': (0.4161827603069684, 38.88321413686467),
'washing machine': (1.5518183007862645, 51.91417194144562),
'monkey': (0.9748166731813579, 38.60787650590758),
'pineapple': (0.562007915664679, 43.7000843939721),
'sandwich': (1.6847535599541337, 57.542891294247035),
'shark': (1.272828952833183, 49.334895742299615),
'sailboat': (1.309450897368411, 66.09322028103158),
'steak': (0.8908929135892587, 46.82398060648129),
'stethoscope': (2.300526882061146, 43.63511505624273),
'wine glass': (2.1753360642422095, 42.95333738304328),
'smiley face': (1.4208837631558537, 43.864342591767816),
'streetlight': (-1.4343035375659503, 57.70810758721286),
'squiggle': (5.131557337201909, 48.02532522224354),
'stop sign': (1.3327274061718097, 42.78360537094287),
'line': (40.59167311123959, 112.02341955570965),
'pliers': (0.796279030471497, 45.67250007650481),
'paint can': (1.3512234721466652, 47.35796924253278),
'panda': (0.5475608600999033, 33.69643785103632),
'paintbrush': (0.20347385695100456, 47.341806442823824),
't-shirt': (0.9831120778329658, 42.21114938247829),
'fireplace': (1.3117628588460688, 61.01045131707993),
'river': (5.610367142280469, 117.56790294876312),
'snorkel': (1.2366543753832537, 43.709326082973476),
'rain': (3.6461954118834403, 61.31247784406768),
'triangle': (1.1218274781431306, 64.34926695455631),
'suitcase': (1.9098774305372213, 57.805580971303506),
'stitches': (4.142179481399166, 79.85573072340479),
'tooth': (0.7350361072423909, 34.97655580063578),
'snail': (0.3764966115255877, 34.91367713422217),
'spreadsheet': (4.333452826793876, 134.8852997594341)
}
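# Hedged usage sketch (an assumption inferred from the comment above and the
# commented-out divisor in QuickDrawDataset.__getitem__ below): each (mu, std)
# pair standardizes the real-valued stroke features of a sample while the
# binary pen-state feature is left untouched, e.g.:
#
#     mu, std = NORMALIZER['cat']
#     strokes[:, :2] = (strokes[:, :2] - mu) / std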
class QuickDrawDataset(torch.utils.data.Dataset):
def __init__(self, data_dict, normalizers, task_vector=None):
self.data_dict = data_dict
self.normalizers = normalizers
self.task_vector = task_vector
self.patterns_per_class = [len(v) for k,v in self.data_dict.items()]
self.min_class_id = min(list(self.data_dict.keys()))
def __getitem__(self, idx):
# select class based on idx
class_id = None
curr_idx = idx
ppc = [0] + self.patterns_per_class
for i in range(1, len(ppc)):
if curr_idx < ppc[i]:
class_id = self.min_class_id + (i - 1)
break
else:
curr_idx -= ppc[i]
if class_id is None:
raise IndexError('Out of range when indexing QuickDraw!')
        # normalization is currently disabled; the per-class std divisor stays commented out
        x_cur = torch.from_numpy(self.data_dict[class_id][curr_idx]).float()  # / self.normalizers[class_id][1]
y_cur = torch.tensor(class_id).long()
if self.task_vector is not None:
x_cur = torch.cat((self.task_vector.unsqueeze(0).repeat(x_cur.size(0),1), x_cur), dim=1)
return x_cur, y_cur
def __len__(self):
return sum(self.patterns_per_class)
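# Indexing scheme used by __getitem__ above: samples are laid out class by
# class, so a flat index is resolved by walking the per-class counts. With
# counts [3, 2], index 4 maps to class min_class_id + 1, sample 1 of that class.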
class CLQuickDraw():
def __init__(self, root, train_batch_size, test_batch_size,
                 len_task_vector=0, task_vector_at_test=False):
        self.root = root
        self.train_batch_size = train_batch_size
        self.test_batch_size = test_batch_size
        self.len_task_vector = len_task_vector
        self.task_vector_at_test = task_vector_at_test
        self.dataloaders = []
        self.current_class_id = 0
def _load_data(self, classes):
train_dict, test_dict, normalizer = {}, {}, {}
for classname in classes:
feature = np.load(os.path.join(self.root, f"{classname}.npz"), encoding='latin1', allow_pickle=True)
train, test = feature['train'], feature['test'] # discard feature['valid'] because we don't need it
train_dict[self.current_class_id] = train
test_dict[self.current_class_id] = test
normalizer[self.current_class_id] = NORMALIZER[classname]
self.current_class_id += 1
return train_dict, test_dict, normalizer
def get_task_loaders(self, classes=None, task_id=None):
if classes is not None:
train, test, normalizer = self._load_data(classes)
if self.len_task_vector > 0:
task_vector = torch.zeros(self.len_task_vector).float()
task_vector[len(self.dataloaders)] = 1.
else:
task_vector = None
train_dataset = QuickDrawDataset(train, normalizer,
task_vector=task_vector)
test_dataset = QuickDrawDataset(test, normalizer,
task_vector=task_vector if self.task_vector_at_test else None)
train_batch_size = len(train_dataset) if self.train_batch_size == 0 else self.train_batch_size
test_batch_size = len(test_dataset) if self.test_batch_size == 0 else self.test_batch_size
train_loader = DataLoader(train_dataset, batch_size=train_batch_size, shuffle=True, drop_last=True,
collate_fn=collate_sequences)
test_loader = DataLoader(test_dataset, batch_size=test_batch_size, shuffle=False, drop_last=True,
collate_fn=collate_sequences)
self.dataloaders.append([train_loader, test_loader])
return train_loader, test_loader
elif task_id is not None:
            return self.dataloaders[task_id]
http_bench.rs | #[macro_use]
extern crate derive_deref;
#[macro_use]
extern crate log;
use deno_core::CoreIsolate;
use deno_core::Op;
use deno_core::ResourceTable;
use deno_core::Script;
use deno_core::StartupData;
use deno_core::ZeroCopyBuf;
use futures::future::poll_fn;
use futures::prelude::*;
use futures::task::Context;
use futures::task::Poll;
use std::cell::RefCell;
use std::convert::TryInto;
use std::env;
use std::fmt::Debug;
use std::io::Error;
use std::io::ErrorKind;
use std::mem::size_of;
use std::net::SocketAddr;
use std::pin::Pin;
use std::ptr;
use std::rc::Rc;
use tokio::io::AsyncRead;
use tokio::io::AsyncWrite;
use tokio::net::TcpListener;
use tokio::net::TcpStream;
struct Logger;
impl log::Log for Logger {
fn enabled(&self, metadata: &log::Metadata) -> bool {
metadata.level() <= log::max_level()
}
fn log(&self, record: &log::Record) {
if self.enabled(record.metadata()) {
println!("{} - {}", record.level(), record.args());
}
}
fn flush(&self) {}
}
#[derive(Copy, Clone, Debug, PartialEq)]
struct Record {
pub promise_id: u32,
pub rid: u32,
pub result: i32,
}
type RecordBuf = [u8; size_of::<Record>()];
impl From<&[u8]> for Record {
fn from(buf: &[u8]) -> Self {
assert_eq!(buf.len(), size_of::<RecordBuf>());
unsafe { *(buf as *const _ as *const RecordBuf) }.into()
}
}
impl From<RecordBuf> for Record {
fn from(buf: RecordBuf) -> Self {
unsafe {
#[allow(clippy::cast_ptr_alignment)]
ptr::read_unaligned(&buf as *const _ as *const Self)
}
}
}
impl From<Record> for RecordBuf {
fn from(record: Record) -> Self {
unsafe { ptr::read(&record as *const _ as *const Self) }
}
}
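// Note on the conversions above: Record is reinterpreted as raw bytes via
// unaligned pointer reads. This relies on the compiler keeping the three
// 4-byte fields in declaration order; a #[repr(C)] on Record would make that
// layout guarantee explicit (the little-endian test at the bottom depends on it).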
struct Isolate {
core_isolate: Box<CoreIsolate>, // Unclear why CoreIsolate::new() returns a box.
state: State,
}
#[derive(Clone, Default, Deref)]
struct State(Rc<RefCell<StateInner>>);
#[derive(Default)]
struct StateInner {
resource_table: ResourceTable,
}
impl Isolate {
pub fn new() -> Self {
let startup_data = StartupData::Script(Script {
source: include_str!("http_bench.js"),
filename: "http_bench.js",
});
let mut isolate = Self {
core_isolate: CoreIsolate::new(startup_data, false),
state: Default::default(),
};
isolate.register_sync_op("listen", op_listen);
isolate.register_op("accept", op_accept);
isolate.register_op("read", op_read);
isolate.register_op("write", op_write);
isolate.register_sync_op("close", op_close);
isolate
}
fn register_sync_op<F>(&mut self, name: &'static str, handler: F)
where
F: 'static + Fn(State, u32, Option<ZeroCopyBuf>) -> Result<u32, Error>,
{
let state = self.state.clone();
let core_handler = move |_isolate: &mut CoreIsolate,
control_buf: &[u8],
zero_copy_buf: Option<ZeroCopyBuf>|
-> Op {
let state = state.clone();
let record = Record::from(control_buf);
let is_sync = record.promise_id == 0;
      assert!(is_sync);
      let result: i32 = match handler(state, record.rid, zero_copy_buf) {
        Ok(r) => r as i32,
Err(_) => -1,
};
let buf = RecordBuf::from(Record { result, ..record })[..].into();
Op::Sync(buf)
};
self.core_isolate.register_op(name, core_handler);
}
fn register_op<F>(
&mut self,
name: &'static str,
handler: impl Fn(State, u32, Option<ZeroCopyBuf>) -> F + Copy + 'static,
) where
F: TryFuture,
F::Ok: TryInto<i32>,
<F::Ok as TryInto<i32>>::Error: Debug,
{
let state = self.state.clone();
let core_handler = move |_isolate: &mut CoreIsolate,
control_buf: &[u8],
zero_copy_buf: Option<ZeroCopyBuf>|
-> Op {
let state = state.clone();
let record = Record::from(control_buf);
let is_sync = record.promise_id == 0;
assert!(!is_sync);
let fut = async move {
let op = handler(state, record.rid, zero_copy_buf);
let result = op
.map_ok(|r| r.try_into().expect("op result does not fit in i32"))
.unwrap_or_else(|_| -1)
.await;
RecordBuf::from(Record { result, ..record })[..].into()
};
Op::Async(fut.boxed_local())
};
self.core_isolate.register_op(name, core_handler);
}
}
impl Future for Isolate {
type Output = <CoreIsolate as Future>::Output;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
self.core_isolate.poll_unpin(cx)
}
}
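// Polling the Isolate just forwards to the inner CoreIsolate, so
// `runtime.block_on(isolate)` in main() drives the whole event loop,
// including the async ops registered above.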
fn op_close(
state: State,
rid: u32,
_buf: Option<ZeroCopyBuf>,
) -> Result<u32, Error> {
debug!("close rid={}", rid);
let resource_table = &mut state.borrow_mut().resource_table;
resource_table
.close(rid)
.map(|_| 0)
.ok_or_else(bad_resource)
}
fn op_listen(
state: State,
_rid: u32,
_buf: Option<ZeroCopyBuf>,
) -> Result<u32, Error> {
debug!("listen");
let addr = "127.0.0.1:4544".parse::<SocketAddr>().unwrap();
let std_listener = std::net::TcpListener::bind(&addr)?;
let listener = TcpListener::from_std(std_listener)?;
let resource_table = &mut state.borrow_mut().resource_table;
let rid = resource_table.add("tcpListener", Box::new(listener));
Ok(rid)
}
fn op_accept(
state: State,
rid: u32,
_buf: Option<ZeroCopyBuf>,
) -> impl TryFuture<Ok = u32, Error = Error> {
debug!("accept rid={}", rid);
poll_fn(move |cx| {
let resource_table = &mut state.borrow_mut().resource_table;
let listener = resource_table
.get_mut::<TcpListener>(rid)
.ok_or_else(bad_resource)?;
listener.poll_accept(cx).map_ok(|(stream, _addr)| {
resource_table.add("tcpStream", Box::new(stream))
})
})
}
fn op_read(
state: State,
rid: u32,
buf: Option<ZeroCopyBuf>,
) -> impl TryFuture<Ok = usize, Error = Error> {
let mut buf = buf.unwrap();
debug!("read rid={}", rid);
poll_fn(move |cx| {
let resource_table = &mut state.borrow_mut().resource_table;
let stream = resource_table
.get_mut::<TcpStream>(rid)
.ok_or_else(bad_resource)?;
Pin::new(stream).poll_read(cx, &mut buf)
})
}
fn op_write(
state: State,
rid: u32,
buf: Option<ZeroCopyBuf>,
) -> impl TryFuture<Ok = usize, Error = Error> {
let buf = buf.unwrap();
debug!("write rid={}", rid);
poll_fn(move |cx| {
let resource_table = &mut state.borrow_mut().resource_table;
let stream = resource_table
.get_mut::<TcpStream>(rid)
.ok_or_else(bad_resource)?;
Pin::new(stream).poll_write(cx, &buf)
})
}
fn bad_resource() -> Error {
Error::new(ErrorKind::NotFound, "bad resource id")
}
fn main() {
log::set_logger(&Logger).unwrap();
log::set_max_level(
env::args()
.find(|a| a == "-D")
.map(|_| log::LevelFilter::Debug)
.unwrap_or(log::LevelFilter::Warn),
);
// NOTE: `--help` arg will display V8 help and exit
deno_core::v8_set_flags(env::args().collect());
let isolate = Isolate::new();
let mut runtime = tokio::runtime::Builder::new()
.basic_scheduler()
.enable_all()
.build()
.unwrap();
runtime.block_on(isolate).expect("unexpected isolate error");
}
#[test]
fn test_record_from() {
let expected = Record {
promise_id: 1,
rid: 3,
result: 4,
};
let buf = RecordBuf::from(expected);
if cfg!(target_endian = "little") {
assert_eq!(buf, [1u8, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0]);
}
let actual = Record::from(buf);
assert_eq!(actual, expected);
}
forms.py | from django import forms
class SlackInviteForm(forms.Form):
    email = forms.EmailField(label="Email")
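# Hedged usage sketch (standard Django form handling; the view code is hypothetical):
#
#     form = SlackInviteForm(request.POST)
#     if form.is_valid():
#         email = form.cleaned_data["email"]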
index.tsx | import React from "react";
import Link from "next/link";
import Layout from "../../components/Layout";
import Table from "../../components/Table";
import useDatasetListing from "../../services/useDatasetListing";
import ErrorMessage from "../../components/ErrorMessage";
export default function DatasetListingPage() {
const { data: datasetsRaw, error } = useDatasetListing();
  const datasets = datasetsRaw?.success ? datasetsRaw.datasets : undefined;
  const rows = datasets?.map((d) => [
<Link key={d.id + "-1"} href={`/datasets/${d.id}`}>
<a>{d.name}</a>
</Link>,
<Link key={d.id} href={`/datasets/${d.id}`}>
<a>{d.id}</a>
</Link>,
    <Link key={d.id + "-2"} href={`/datasets/${d.id}/compare`}>
<a className="button is-link is-small">Secure Comparison</a>
</Link>,
]);
  return (
    <Layout title="SINE Benchmarking UI">
      <div className="container content mb-6">
        <div className="block">
{error && <ErrorMessage>{error.message}</ErrorMessage>}
<div className="is-flex-tablet is-justify-content-space-between">
<h1 className="title is-2">
Compare yourself against an existing dataset
</h1>
</div>
</div>
<div className="block">
{!rows && "Dataset information is being fetched..."}
{rows && <Table columns={["Name", "ID", ""]} rows={rows} />}
</div>
<div className="block">
<Link href="/">
<a className="button is-primary">Back to Overview</a>
</Link>
</div>
</div>
</Layout>
);
}
server.go | package prometheus
import (
"fmt"
"github.com/cosmos/cosmos-sdk/wire"
"github.com/irisnet/irishub/client/context"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/spf13/cobra"
"github.com/spf13/viper"
cmn "github.com/tendermint/tendermint/libs/common"
"log"
"net/http"
)
func MonitorCommand(cdc *wire.Codec) *cobra.Command {
cmd := &cobra.Command{
Use: "monitor",
Short: "irishub monitor",
RunE: func(cmd *cobra.Command, args []string) error {
ctx := context.NewCLIContext().WithCodec(cdc)
//ctx := tools.NewContext(storeName, cdc)
monitor := DefaultMonitor(ctx)
monitor.Start()
port := viper.GetInt("port")
srv := &http.Server{
Addr: fmt.Sprintf(":%d", port),
Handler: promhttp.Handler(),
}
go func() {
if err := srv.ListenAndServe(); err != http.ErrServerClosed {
log.Println("got ", err)
}
}()
cmn.TrapSignal(func() {
srv.Close()
})
return nil
},
}
cmd.Flags().Int("port", 36660, "port to connect to")
cmd.Flags().StringP("node", "n", "tcp://localhost:26657", "Node to connect to")
cmd.Flags().StringP("chain-id", "c", "fuxi", "Chain ID of tendermint node")
cmd.Flags().StringP("address", "a", "", `hex address of the validator that you want to
monitor`)
cmd.Flags().BoolP("recursively", "r", true, `specify whether the files in sub-directory is included,
included by default. If there are many files & sub-directories in home directory, this program may be very slow!`)
return cmd
}
/*
TODO:
1. Make the data polling interval configurable.
2. Monitor the proposer of the previous block.
*/
pages.go | package middleware
import (
"gorm.io/gorm"
"strconv"
)
// GetPages builds pagination info for the given model based on the current query.
func GetPages(db *gorm.DB, pageNow string, pageSizeNow string, value interface{}) (Pages, *gorm.DB) {
var pages Pages
page, _ := strconv.Atoi(pageNow)
pageSize, _ := strconv.Atoi(pageSizeNow)
Db := db.Model(value)
pages.TotalAmount = Db.Where("deleted_at IS NULL").Find(value).RowsAffected
if page > 0 && pageSize > 0 {
		Db.Limit(pageSize).Offset((page - 1) * pageSize)
		pages.Page = page
		pages.PageSize = pageSize
	} else if pageSize == -1 {
		pages.Page = page
		pages.PageSize = pageSize
	} else {
		Db = Db.Limit(15)
	}
	return pages, Db
}
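// Hedged usage sketch (the User model and inputs are hypothetical):
//
//	var users []User
//	pages, tx := GetPages(db, "2", "10", &users)
//	tx.Find(&users) // page 2, 10 rows per page
//
// A pageSize of "-1" records the requested paging values without applying a
// limit; any other non-positive combination falls back to Db.Limit(15).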