text
stringlengths 5
22M
| id
stringlengths 12
177
| metadata
dict | __index_level_0__
int64 0
1.37k
|
---|---|---|---|
import torch
from mmcv.cnn import constant_init, kaiming_init
from torch import nn
def last_zero_init(m):
if isinstance(m, nn.Sequential):
constant_init(m[-1], val=0)
else:
constant_init(m, val=0)
class ContextBlock(nn.Module):
def __init__(self,
inplanes,
ratio,
pooling_type='att',
fusion_types=('channel_add', )):
super(ContextBlock, self).__init__()
assert pooling_type in ['avg', 'att']
assert isinstance(fusion_types, (list, tuple))
valid_fusion_types = ['channel_add', 'channel_mul']
assert all([f in valid_fusion_types for f in fusion_types])
assert len(fusion_types) > 0, 'at least one fusion should be used'
self.inplanes = inplanes
self.ratio = ratio
self.planes = int(inplanes * ratio)
self.pooling_type = pooling_type
self.fusion_types = fusion_types
if pooling_type == 'att':
self.conv_mask = nn.Conv2d(inplanes, 1, kernel_size=1)
self.softmax = nn.Softmax(dim=2)
else:
self.avg_pool = nn.AdaptiveAvgPool2d(1)
if 'channel_add' in fusion_types:
self.channel_add_conv = nn.Sequential(
nn.Conv2d(self.inplanes, self.planes, kernel_size=1),
nn.LayerNorm([self.planes, 1, 1]),
nn.ReLU(inplace=True), # yapf: disable
nn.Conv2d(self.planes, self.inplanes, kernel_size=1))
else:
self.channel_add_conv = None
if 'channel_mul' in fusion_types:
self.channel_mul_conv = nn.Sequential(
nn.Conv2d(self.inplanes, self.planes, kernel_size=1),
nn.LayerNorm([self.planes, 1, 1]),
nn.ReLU(inplace=True), # yapf: disable
nn.Conv2d(self.planes, self.inplanes, kernel_size=1))
else:
self.channel_mul_conv = None
self.reset_parameters()
def reset_parameters(self):
if self.pooling_type == 'att':
kaiming_init(self.conv_mask, mode='fan_in')
self.conv_mask.inited = True
if self.channel_add_conv is not None:
last_zero_init(self.channel_add_conv)
if self.channel_mul_conv is not None:
last_zero_init(self.channel_mul_conv)
def spatial_pool(self, x):
batch, channel, height, width = x.size()
if self.pooling_type == 'att':
input_x = x
# [N, C, H * W]
input_x = input_x.view(batch, channel, height * width)
# [N, 1, C, H * W]
input_x = input_x.unsqueeze(1)
# [N, 1, H, W]
context_mask = self.conv_mask(x)
# [N, 1, H * W]
context_mask = context_mask.view(batch, 1, height * width)
# [N, 1, H * W]
context_mask = self.softmax(context_mask)
# [N, 1, H * W, 1]
context_mask = context_mask.unsqueeze(-1)
# [N, 1, C, 1]
context = torch.matmul(input_x, context_mask)
# [N, C, 1, 1]
context = context.view(batch, channel, 1, 1)
else:
# [N, C, 1, 1]
context = self.avg_pool(x)
return context
def forward(self, x):
# [N, C, 1, 1]
context = self.spatial_pool(x)
out = x
if self.channel_mul_conv is not None:
# [N, C, 1, 1]
channel_mul_term = torch.sigmoid(self.channel_mul_conv(context))
out = out * channel_mul_term
if self.channel_add_conv is not None:
# [N, C, 1, 1]
channel_add_term = self.channel_add_conv(context)
out = out + channel_add_term
return out
|
Cream/CDARTS/CDARTS_detection/mmdet/ops/gcb/context_block.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/ops/gcb/context_block.py",
"repo_id": "Cream",
"token_count": 1982
}
| 307 |
# ----------------------------------------------------------
# Soft-NMS: Improving Object Detection With One Line of Code
# Copyright (c) University of Maryland, College Park
# Licensed under The MIT License [see LICENSE for details]
# Written by Navaneeth Bodla and Bharat Singh
# Modified by Kai Chen
# ----------------------------------------------------------
# cython: language_level=3, boundscheck=False
import numpy as np
cimport numpy as np
cdef inline np.float32_t max(np.float32_t a, np.float32_t b):
return a if a >= b else b
cdef inline np.float32_t min(np.float32_t a, np.float32_t b):
return a if a <= b else b
def soft_nms_cpu(
np.ndarray[float, ndim=2] boxes_in,
float iou_thr,
unsigned int method=1,
float sigma=0.5,
float min_score=0.001,
):
boxes = boxes_in.copy()
cdef unsigned int N = boxes.shape[0]
cdef float iw, ih, box_area
cdef float ua
cdef int pos = 0
cdef float maxscore = 0
cdef int maxpos = 0
cdef float x1, x2, y1, y2, tx1, tx2, ty1, ty2, ts, area, weight, ov
inds = np.arange(N)
for i in range(N):
maxscore = boxes[i, 4]
maxpos = i
tx1 = boxes[i, 0]
ty1 = boxes[i, 1]
tx2 = boxes[i, 2]
ty2 = boxes[i, 3]
ts = boxes[i, 4]
ti = inds[i]
pos = i + 1
# get max box
while pos < N:
if maxscore < boxes[pos, 4]:
maxscore = boxes[pos, 4]
maxpos = pos
pos = pos + 1
# add max box as a detection
boxes[i, 0] = boxes[maxpos, 0]
boxes[i, 1] = boxes[maxpos, 1]
boxes[i, 2] = boxes[maxpos, 2]
boxes[i, 3] = boxes[maxpos, 3]
boxes[i, 4] = boxes[maxpos, 4]
inds[i] = inds[maxpos]
# swap ith box with position of max box
boxes[maxpos, 0] = tx1
boxes[maxpos, 1] = ty1
boxes[maxpos, 2] = tx2
boxes[maxpos, 3] = ty2
boxes[maxpos, 4] = ts
inds[maxpos] = ti
tx1 = boxes[i, 0]
ty1 = boxes[i, 1]
tx2 = boxes[i, 2]
ty2 = boxes[i, 3]
ts = boxes[i, 4]
pos = i + 1
# NMS iterations, note that N changes if detection boxes fall below
# threshold
while pos < N:
x1 = boxes[pos, 0]
y1 = boxes[pos, 1]
x2 = boxes[pos, 2]
y2 = boxes[pos, 3]
s = boxes[pos, 4]
area = (x2 - x1 + 1) * (y2 - y1 + 1)
iw = (min(tx2, x2) - max(tx1, x1) + 1)
if iw > 0:
ih = (min(ty2, y2) - max(ty1, y1) + 1)
if ih > 0:
ua = float((tx2 - tx1 + 1) * (ty2 - ty1 + 1) + area - iw * ih)
ov = iw * ih / ua # iou between max box and detection box
if method == 1: # linear
if ov > iou_thr:
weight = 1 - ov
else:
weight = 1
elif method == 2: # gaussian
weight = np.exp(-(ov * ov) / sigma)
else: # original NMS
if ov > iou_thr:
weight = 0
else:
weight = 1
boxes[pos, 4] = weight * boxes[pos, 4]
# if box score falls below threshold, discard the box by
# swapping with last box update N
if boxes[pos, 4] < min_score:
boxes[pos, 0] = boxes[N-1, 0]
boxes[pos, 1] = boxes[N-1, 1]
boxes[pos, 2] = boxes[N-1, 2]
boxes[pos, 3] = boxes[N-1, 3]
boxes[pos, 4] = boxes[N-1, 4]
inds[pos] = inds[N - 1]
N = N - 1
pos = pos - 1
pos = pos + 1
return boxes[:N], inds[:N]
|
Cream/CDARTS/CDARTS_detection/mmdet/ops/nms/src/soft_nms_cpu.pyx/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/ops/nms/src/soft_nms_cpu.pyx",
"repo_id": "Cream",
"token_count": 2233
}
| 308 |
from torch.nn.modules.module import Module
from ..functions.roi_pool import roi_pool
class RoIPool(Module):
def __init__(self, out_size, spatial_scale):
super(RoIPool, self).__init__()
self.out_size = out_size
self.spatial_scale = float(spatial_scale)
def forward(self, features, rois):
return roi_pool(features, rois, self.out_size, self.spatial_scale)
|
Cream/CDARTS/CDARTS_detection/mmdet/ops/roi_pool/modules/roi_pool.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/ops/roi_pool/modules/roi_pool.py",
"repo_id": "Cream",
"token_count": 159
}
| 309 |
import logging
from mmcv.runner import get_dist_info
def get_root_logger(log_file=None, log_level=logging.INFO):
"""Get the root logger.
The logger will be initialized if it has not been initialized. By default a
StreamHandler will be added. If `log_file` is specified, a FileHandler will
also be added. The name of the root logger is the top-level package name,
e.g., "mmdet".
Args:
log_file (str | None): The log filename. If specified, a FileHandler
will be added to the root logger.
log_level (int): The root logger level. Note that only the process of
rank 0 is affected, while other processes will set the level to
"Error" and be silent most of the time.
Returns:
logging.Logger: The root logger.
"""
logger = logging.getLogger(__name__.split('.')[0]) # i.e., mmdet
# if the logger has been initialized, just return it
if logger.hasHandlers():
return logger
format_str = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(format=format_str, level=log_level)
rank, _ = get_dist_info()
if rank != 0:
logger.setLevel('ERROR')
elif log_file is not None:
file_handler = logging.FileHandler(log_file, 'w')
file_handler.setFormatter(logging.Formatter(format_str))
file_handler.setLevel(log_level)
logger.addHandler(file_handler)
return logger
def print_log(msg, logger=None, level=logging.INFO):
"""Print a log message.
Args:
msg (str): The message to be logged.
logger (logging.Logger | str | None): The logger to be used. Some
special loggers are:
- "root": the root logger obtained with `get_root_logger()`.
- "silent": no message will be printed.
- None: The `print()` method will be used to print log messages.
level (int): Logging level. Only available when `logger` is a Logger
object or "root".
"""
if logger is None:
print(msg)
elif logger == 'root':
_logger = get_root_logger()
_logger.log(level, msg)
elif isinstance(logger, logging.Logger):
logger.log(level, msg)
elif logger != 'silent':
raise TypeError(
'logger should be either a logging.Logger object, "root", '
'"silent" or None, but got {}'.format(logger))
|
Cream/CDARTS/CDARTS_detection/mmdet/utils/logger.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/utils/logger.py",
"repo_id": "Cream",
"token_count": 960
}
| 310 |
from __future__ import division
import argparse
import torch
# torch.multiprocessing.set_sharing_strategy('file_system')
# for file_descriptor, but cause shm leak while nas optimizer
import os
from mmcv import Config
from mmdet import __version__
from mmdet.datasets import build_dataset
from mmdet.apis import (train_detector, init_dist, get_root_logger,
set_random_seed)
from mmdet.models import build_detector
def parse_args():
parser = argparse.ArgumentParser(description='Train a detector')
parser.add_argument('--config', help='train config file path')
parser.add_argument('--work_dir', default='/cache/tmp', help='path to save log and model')
parser.add_argument(
'--resume_from', help='the checkpoint file to resume from')
parser.add_argument(
'--validate',
action='store_true',
help='whether to evaluate the checkpoint during training')
parser.add_argument(
'--gpus',
type=int,
default=1,
help='number of gpus to use '
'(only applicable to non-distributed training)')
parser.add_argument('--seed', type=int, default=None, help='random seed')
parser.add_argument(
'--launcher',
choices=['none', 'pytorch', 'slurm', 'mpi'],
default='none',
help='job launcher')
parser.add_argument('--local_rank', type=int, default=0)
args, unparsed = parser.parse_known_args()
if 'LOCAL_RANK' not in os.environ:
os.environ['LOCAL_RANK'] = str(args.local_rank)
# Solve SyncBN deadlock
os.environ["NCCL_LL_THRESHOLD"] = '0'
return args
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
# set cudnn_benchmark
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
# update configs according to CLI args
cfg.work_dir = args.work_dir
cfg.gpus = args.gpus
if args.resume_from is not None:
cfg.resume_from = args.resume_from
# init distributed env first, since logger depends on the dist info.
if args.launcher == 'none':
distributed = False
else:
distributed = True
init_dist(args.launcher, **cfg.dist_params)
# init logger before other steps
logger = get_root_logger(cfg.log_level)
logger.info('Distributed training: {}'.format(distributed))
# set random seeds
if args.seed is not None:
logger.info('Set random seed to {}'.format(args.seed))
set_random_seed(args.seed)
model = build_detector(
cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg)
train_dataset = build_dataset(cfg.data.train)
model = torch.nn.parallel.DistributedDataParallel(
model.cuda(), find_unused_parameters=True, device_ids=[args.local_rank], output_device=args.local_rank)
print(model)
print("Model have {} paramerters.".format(sum(x.numel() for x in model.parameters()) / 1e6))
print("Model have {} backbone.".format(sum(x.numel() for x in model.module.backbone.parameters()) / 1e6))
print("Model have {} neck.".format(sum(x.numel() for x in model.module.neck.parameters()) / 1e6))
print("Model have {} head.".format(sum(x.numel() for x in model.module.bbox_head.parameters()) / 1e6))
if cfg.checkpoint_config is not None:
# save mmdet version, config file content and class names in
# checkpoints as meta data
cfg.checkpoint_config.meta = dict(
mmdet_version=__version__,
config=cfg.text,
CLASSES=train_dataset.CLASSES)
# add an attribute for visualization convenience
model.CLASSES = train_dataset.CLASSES
train_detector(
model,
train_dataset,
cfg,
distributed=distributed,
validate=args.validate,
logger=logger)
if __name__ == '__main__':
main()
|
Cream/CDARTS/CDARTS_detection/train.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/train.py",
"repo_id": "Cream",
"token_count": 1567
}
| 311 |
# ------------------------------------------------------------------------------
# Base class for loading a segmentation Dataset.
# Written by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
import os
import numpy as np
from PIL import Image, ImageOps
import torch
from torch.utils import data
class BaseDataset(data.Dataset):
"""
Base class for segmentation dataset.
Arguments:
root: Str, root directory.
split: Str, data split, e.g. train/val/test.
is_train: Bool, for training or testing.
crop_size: Tuple, crop size.
mirror: Bool, whether to apply random horizontal flip.
min_scale: Float, min scale in scale augmentation.
max_scale: Float, max scale in scale augmentation.
scale_step_size: Float, step size to select random scale.
mean: Tuple, image mean.
std: Tuple, image std.
"""
def __init__(self,
root,
split,
is_train=True,
crop_size=(513, 1025),
mirror=True,
min_scale=0.5,
max_scale=2.,
scale_step_size=0.25,
mean=(0.485, 0.456, 0.406),
std=(0.229, 0.224, 0.225)):
self.root = root
self.split = split
self.is_train = is_train
self.crop_h, self.crop_w = crop_size
self.mirror = mirror
self.min_scale = min_scale
self.max_scale = max_scale
self.scale_step_size = scale_step_size
self.mean = mean
self.std = std
self.pad_value = tuple([int(v * 255) for v in self.mean])
# ======== override the following fields ========
self.ignore_label = 255
self.label_pad_value = (self.ignore_label, )
self.label_dtype = 'uint8'
# list of image filename (required)
self.img_list = []
# list of label filename (required)
self.ann_list = []
# list of instance dictionary (optional)
self.ins_list = []
self.has_instance = False
self.label_divisor = 1000
self.raw_label_transform = None
self.pre_augmentation_transform = None
self.transform = None
self.target_transform = None
def __len__(self):
return len(self.img_list)
def __getitem__(self, index):
# TODO: handle transform properly when there is no label
dataset_dict = {}
assert os.path.exists(self.img_list[index]), 'Path does not exist: {}'.format(self.img_list[index])
image = self.read_image(self.img_list[index], 'RGB')
if not self.is_train:
# Do not save this during training.
dataset_dict['raw_image'] = image.copy()
if self.ann_list is not None:
assert os.path.exists(self.ann_list[index]), 'Path does not exist: {}'.format(self.ann_list[index])
label = self.read_label(self.ann_list[index], self.label_dtype)
else:
label = None
raw_label = label.copy()
if self.raw_label_transform is not None:
raw_label = self.raw_label_transform(raw_label, self.ins_list[index])['semantic']
if not self.is_train:
# Do not save this during training
dataset_dict['raw_label'] = raw_label
size = image.shape
dataset_dict['raw_size'] = np.array(size)
# To save prediction for official evaluation.
name = os.path.splitext(os.path.basename(self.ann_list[index]))[0]
# TODO: how to return the filename?
# dataset_dict['name'] = np.array(name)
# Resize and pad image to the same size before data augmentation.
if self.pre_augmentation_transform is not None:
image, label = self.pre_augmentation_transform(image, label)
size = image.shape
dataset_dict['size'] = np.array(size)
else:
dataset_dict['size'] = dataset_dict['raw_size']
# Apply data augmentation.
if self.transform is not None:
image, label = self.transform(image, label)
dataset_dict['image'] = image
if not self.has_instance:
dataset_dict['semantic'] = torch.as_tensor(label.astype('long'))
return dataset_dict
# Generate training target.
if self.target_transform is not None:
label_dict = self.target_transform(label, self.ins_list[index])
for key in label_dict.keys():
dataset_dict[key] = label_dict[key]
return dataset_dict
@staticmethod
def read_image(file_name, format=None):
image = Image.open(file_name)
# capture and ignore this bug: https://github.com/python-pillow/Pillow/issues/3973
try:
image = ImageOps.exif_transpose(image)
except Exception:
pass
if format is not None:
# PIL only supports RGB, so convert to RGB and flip channels over below
conversion_format = format
if format == "BGR":
conversion_format = "RGB"
image = image.convert(conversion_format)
image = np.asarray(image)
if format == "BGR":
# flip channels if needed
image = image[:, :, ::-1]
# PIL squeezes out the channel dimension for "L", so make it HWC
if format == "L":
image = np.expand_dims(image, -1)
return image
@staticmethod
def read_label(file_name, dtype='uint8'):
# In some cases, `uint8` is not enough for label
label = Image.open(file_name)
return np.asarray(label, dtype=dtype)
def reverse_transform(self, image_tensor):
"""Reverse the normalization on image.
Args:
image_tensor: torch.Tensor, the normalized image tensor.
Returns:
image: numpy.array, the original image before normalization.
"""
dtype = image_tensor.dtype
mean = torch.as_tensor(self.mean, dtype=dtype, device=image_tensor.device)
std = torch.as_tensor(self.std, dtype=dtype, device=image_tensor.device)
image_tensor.mul_(std[:, None, None]).add_(mean[:, None, None])
image = image_tensor.mul(255)\
.clamp(0, 255)\
.byte()\
.permute(1, 2, 0)\
.cpu().numpy()
return image
@staticmethod
def train_id_to_eval_id():
return None
|
Cream/CDARTS/CDARTS_segmentation/dataloaders/segdatasets/base_dataset.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/dataloaders/segdatasets/base_dataset.py",
"repo_id": "Cream",
"token_count": 2967
}
| 312 |
# ------------------------------------------------------------------------------
# Reference: https://github.com/facebookresearch/detectron2/blob/master/detectron2/evaluation/cityscapes_evaluation.py
# Modified by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
import logging
from collections import OrderedDict
import os
import glob
from fvcore.common.file_io import PathManager
from segmentation.utils import save_annotation
class CityscapesInstanceEvaluator:
"""
Evaluate Cityscapes instance segmentation
"""
def __init__(self, output_dir=None, train_id_to_eval_id=None, gt_dir='./datasets/cityscapes/gtFine/val'):
"""
Args:
output_dir (str): an output directory to dump results.
train_id_to_eval_id (list): maps training id to evaluation id.
gt_dir (str): path to ground truth annotations (gtFine).
"""
if output_dir is None:
raise ValueError('Must provide a output directory.')
self._output_dir = output_dir
if self._output_dir:
PathManager.mkdirs(self._output_dir)
self._mask_dir = os.path.join(self._output_dir, 'mask')
if self._mask_dir:
PathManager.mkdirs(self._mask_dir)
self._train_id_to_eval_id = train_id_to_eval_id
self._logger = logging.getLogger(__name__)
self._gt_dir = gt_dir
def update(self, instances, image_filename=None):
pred_txt = os.path.join(self._output_dir, image_filename + "_pred.txt")
num_instances = len(instances)
with open(pred_txt, "w") as fout:
for i in range(num_instances):
pred_class = instances[i]['pred_class']
if self._train_id_to_eval_id is not None:
pred_class = self._train_id_to_eval_id[pred_class]
score = instances[i]['score']
mask = instances[i]['pred_mask'].astype("uint8")
png_filename = os.path.join(
self._mask_dir, image_filename + "_{}_{}.png".format(i, pred_class)
)
save_annotation(mask, self._mask_dir, image_filename + "_{}_{}".format(i, pred_class),
add_colormap=False, scale_values=True)
fout.write("{} {} {}\n".format(os.path.join('mask', os.path.basename(png_filename)), pred_class, score))
def evaluate(self):
"""
Returns:
dict: has a key "segm", whose value is a dict of "AP" and "AP50".
"""
import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as cityscapes_eval
if self._gt_dir is None:
raise ValueError('Must provide cityscapes path for evaluation.')
self._logger.info("Evaluating results under {} ...".format(self._output_dir))
# set some global states in cityscapes evaluation API, before evaluating
cityscapes_eval.args.predictionPath = os.path.abspath(self._output_dir)
cityscapes_eval.args.predictionWalk = None
cityscapes_eval.args.JSONOutput = False
cityscapes_eval.args.colorized = False
cityscapes_eval.args.gtInstancesFile = os.path.join(self._output_dir, "gtInstances.json")
# These lines are adopted from
# https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa
gt_dir = PathManager.get_local_path(self._gt_dir)
groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_instanceIds.png"))
assert len(
groundTruthImgList
), "Cannot find any ground truth images to use for evaluation. Searched for: {}".format(
cityscapes_eval.args.groundTruthSearch
)
predictionImgList = []
for gt in groundTruthImgList:
predictionImgList.append(cityscapes_eval.getPrediction(gt, cityscapes_eval.args))
results = cityscapes_eval.evaluateImgLists(
predictionImgList, groundTruthImgList, cityscapes_eval.args
)["averages"]
ret = OrderedDict()
ret["segm"] = {"AP": results["allAp"] * 100, "AP50": results["allAp50%"] * 100}
return ret
|
Cream/CDARTS/CDARTS_segmentation/segmentation/evaluation/instance.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/evaluation/instance.py",
"repo_id": "Cream",
"token_count": 1811
}
| 313 |
# ------------------------------------------------------------------------------
# Panoptic-DeepLab decoder.
# Written by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
from collections import OrderedDict
from functools import partial
import torch
from torch import nn
from torch.nn import functional as F
from .aspp import ASPP
from .conv_module import stacked_conv
__all__ = ["PanopticDeepLabDecoder"]
class SinglePanopticDeepLabDecoder(nn.Module):
def __init__(self, in_channels, feature_key, low_level_channels, low_level_key, low_level_channels_project,
decoder_channels, atrous_rates, aspp_channels=None):
super(SinglePanopticDeepLabDecoder, self).__init__()
if aspp_channels is None:
aspp_channels = decoder_channels
self.aspp = ASPP(in_channels, out_channels=aspp_channels, atrous_rates=atrous_rates)
self.feature_key = feature_key
self.decoder_stage = len(low_level_channels)
assert self.decoder_stage == len(low_level_key)
assert self.decoder_stage == len(low_level_channels_project)
self.low_level_key = low_level_key
fuse_conv = partial(stacked_conv, kernel_size=5, num_stack=1, padding=2,
conv_type='depthwise_separable_conv')
# Transform low-level feature
project = []
# Fuse
fuse = []
# Top-down direction, i.e. starting from largest stride
for i in range(self.decoder_stage):
project.append(
nn.Sequential(
nn.Conv2d(low_level_channels[i], low_level_channels_project[i], 1, bias=False),
nn.BatchNorm2d(low_level_channels_project[i]),
nn.ReLU()
)
)
if i == 0:
fuse_in_channels = aspp_channels + low_level_channels_project[i]
else:
fuse_in_channels = decoder_channels + low_level_channels_project[i]
fuse.append(
fuse_conv(
fuse_in_channels,
decoder_channels,
)
)
self.project = nn.ModuleList(project)
self.fuse = nn.ModuleList(fuse)
def set_image_pooling(self, pool_size):
self.aspp.set_image_pooling(pool_size)
def forward(self, features):
x = features[self.feature_key]
x = self.aspp(x)
# build decoder
for i in range(self.decoder_stage):
l = features[self.low_level_key[i]]
l = self.project[i](l)
x = F.interpolate(x, size=l.size()[2:], mode='bilinear', align_corners=True)
x = torch.cat((x, l), dim=1)
x = self.fuse[i](x)
return x
class SinglePanopticDeepLabHead(nn.Module):
def __init__(self, decoder_channels, head_channels, num_classes, class_key):
super(SinglePanopticDeepLabHead, self).__init__()
fuse_conv = partial(stacked_conv, kernel_size=5, num_stack=1, padding=2,
conv_type='depthwise_separable_conv')
self.num_head = len(num_classes)
assert self.num_head == len(class_key)
classifier = {}
for i in range(self.num_head):
classifier[class_key[i]] = nn.Sequential(
fuse_conv(
decoder_channels,
head_channels,
),
nn.Conv2d(head_channels, num_classes[i], 1)
)
self.classifier = nn.ModuleDict(classifier)
self.class_key = class_key
def forward(self, x):
pred = OrderedDict()
# build classifier
for key in self.class_key:
pred[key] = self.classifier[key](x)
return pred
class PanopticDeepLabDecoder(nn.Module):
def __init__(self, in_channels, feature_key, low_level_channels, low_level_key, low_level_channels_project,
decoder_channels, atrous_rates, num_classes, **kwargs):
super(PanopticDeepLabDecoder, self).__init__()
# Build semantic decoder
self.semantic_decoder = SinglePanopticDeepLabDecoder(in_channels, feature_key, low_level_channels,
low_level_key, low_level_channels_project,
decoder_channels, atrous_rates)
self.semantic_head = SinglePanopticDeepLabHead(decoder_channels, decoder_channels, [num_classes], ['semantic'])
# Build instance decoder
self.instance_decoder = None
self.instance_head = None
if kwargs.get('has_instance', False):
instance_decoder_kwargs = dict(
in_channels=in_channels,
feature_key=feature_key,
low_level_channels=low_level_channels,
low_level_key=low_level_key,
low_level_channels_project=kwargs['instance_low_level_channels_project'],
decoder_channels=kwargs['instance_decoder_channels'],
atrous_rates=atrous_rates,
aspp_channels=kwargs['instance_aspp_channels']
)
self.instance_decoder = SinglePanopticDeepLabDecoder(**instance_decoder_kwargs)
instance_head_kwargs = dict(
decoder_channels=kwargs['instance_decoder_channels'],
head_channels=kwargs['instance_head_channels'],
num_classes=kwargs['instance_num_classes'],
class_key=kwargs['instance_class_key']
)
self.instance_head = SinglePanopticDeepLabHead(**instance_head_kwargs)
def set_image_pooling(self, pool_size):
self.semantic_decoder.set_image_pooling(pool_size)
if self.instance_decoder is not None:
self.instance_decoder.set_image_pooling(pool_size)
def forward(self, features):
pred = OrderedDict()
# Semantic branch
semantic = self.semantic_decoder(features)
semantic = self.semantic_head(semantic)
for key in semantic.keys():
pred[key] = semantic[key]
# Instance branch
if self.instance_decoder is not None:
instance = self.instance_decoder(features)
instance = self.instance_head(instance)
for key in instance.keys():
pred[key] = instance[key]
return pred
|
Cream/CDARTS/CDARTS_segmentation/segmentation/model/decoder/panoptic_deeplab.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/model/decoder/panoptic_deeplab.py",
"repo_id": "Cream",
"token_count": 3101
}
| 314 |
from .save_annotation import (
save_annotation, save_instance_annotation, save_panoptic_annotation, save_center_image, save_heatmap_image,
save_heatmap_and_center_image, save_offset_image)
from .flow_vis import flow_compute_color
from .utils import AverageMeter
from .debug import save_debug_images
|
Cream/CDARTS/CDARTS_segmentation/segmentation/utils/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/utils/__init__.py",
"repo_id": "Cream",
"token_count": 99
}
| 315 |
from .cityscapes import Cityscapes
__all__ = ['Cityscapes']
|
Cream/CDARTS/CDARTS_segmentation/tools/datasets/cityscapes/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/tools/datasets/cityscapes/__init__.py",
"repo_id": "Cream",
"token_count": 20
}
| 316 |
import os
import math
import numpy as np
import torch
import shutil
from torch.autograd import Variable
import time
from tqdm import tqdm
from genotypes import PRIMITIVES
import matplotlib
# Force matplotlib to not use any Xwindows backend.
matplotlib.use('Agg')
from matplotlib import pyplot as plt
from pdb import set_trace as bp
import warnings
class AvgrageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.avg = 0
self.sum = 0
self.cnt = 0
def update(self, val, n=1):
self.sum += val * n
self.cnt += n
self.avg = self.sum / self.cnt
class Cutout(object):
def __init__(self, length):
self.length = length
def __call__(self, img):
h, w = img.size(1), img.size(2)
mask = np.ones((h, w), np.float32)
y = np.random.randint(h)
x = np.random.randint(w)
y1 = np.clip(y - self.length // 2, 0, h)
y2 = np.clip(y + self.length // 2, 0, h)
x1 = np.clip(x - self.length // 2, 0, w)
x2 = np.clip(x + self.length // 2, 0, w)
mask[y1: y2, x1: x2] = 0.
mask = torch.from_numpy(mask)
mask = mask.expand_as(img)
img *= mask
return img
def count_parameters_in_MB(model):
return np.sum(np.prod(v.size()) for name, v in model.named_parameters() if "auxiliary" not in name)/1e6
def save_checkpoint(state, is_best, save):
filename = os.path.join(save, 'checkpoint.pth.tar')
torch.save(state, filename)
if is_best:
best_filename = os.path.join(save, 'model_best.pth.tar')
shutil.copyfile(filename, best_filename)
def save(model, model_path):
torch.save(model.state_dict(), model_path)
def load(model, model_path):
model.load_state_dict(torch.load(model_path))
def drop_path(x, drop_prob):
if drop_prob > 0.:
keep_prob = 1.-drop_prob
mask = Variable(torch.cuda.FloatTensor(x.size(0), 1, 1, 1).bernoulli_(keep_prob))
x.div_(keep_prob)
x.mul_(mask)
return x
def create_exp_dir(path, scripts_to_save=None):
if not os.path.exists(path):
# os.mkdir(path)
os.makedirs(path, exist_ok=True)
print('Experiment dir : {}'.format(path))
if scripts_to_save is not None:
# os.mkdir(os.path.join(path, 'scripts'))
os.makedirs(os.path.join(path, 'scripts'), exist_ok=True)
for script in scripts_to_save:
dst_file = os.path.join(path, 'scripts', os.path.basename(script))
shutil.copyfile(script, dst_file)
########################## TensorRT speed_test #################################
try:
import tensorrt as trt
import pycuda.driver as cuda
import pycuda.autoinit
MAX_BATCH_SIZE = 1
MAX_WORKSPACE_SIZE = 1 << 30
TRT_LOGGER = trt.Logger(trt.Logger.WARNING)
DTYPE = trt.float32
# Model
INPUT_NAME = 'input'
OUTPUT_NAME = 'output'
def allocate_buffers(engine):
h_input = cuda.pagelocked_empty(trt.volume(engine.get_binding_shape(0)), dtype=trt.nptype(DTYPE))
h_output = cuda.pagelocked_empty(trt.volume(engine.get_binding_shape(1)), dtype=trt.nptype(DTYPE))
d_input = cuda.mem_alloc(h_input.nbytes)
d_output = cuda.mem_alloc(h_output.nbytes)
return h_input, d_input, h_output, d_output
def build_engine(model_file):
with trt.Builder(TRT_LOGGER) as builder, builder.create_network() as network, trt.OnnxParser(network, TRT_LOGGER) as parser:
builder.max_workspace_size = MAX_WORKSPACE_SIZE
builder.max_batch_size = MAX_BATCH_SIZE
with open(model_file, 'rb') as model:
parser.parse(model.read())
last_layer = network.get_layer(network.num_layers - 1)
network.mark_output(last_layer.get_output(0))
return builder.build_cuda_engine(network)
def load_input(input_size, host_buffer):
assert len(input_size) == 4
b, c, h, w = input_size
dtype = trt.nptype(DTYPE)
img_array = np.random.randn(c, h, w).astype(dtype).ravel()
np.copyto(host_buffer, img_array)
def do_inference(context, h_input, d_input, h_output, d_output, iterations=None):
# Transfer input data to the GPU.
cuda.memcpy_htod(d_input, h_input)
# warm-up
for _ in range(10):
context.execute(batch_size=1, bindings=[int(d_input), int(d_output)])
# test proper iterations
if iterations is None:
elapsed_time = 0
iterations = 100
while elapsed_time < 1:
t_start = time.time()
for _ in range(iterations):
context.execute(batch_size=1, bindings=[int(d_input), int(d_output)])
elapsed_time = time.time() - t_start
iterations *= 2
FPS = iterations / elapsed_time
iterations = int(FPS * 3)
# Run inference.
t_start = time.time()
for _ in tqdm(range(iterations)):
context.execute(batch_size=1, bindings=[int(d_input), int(d_output)])
elapsed_time = time.time() - t_start
latency = elapsed_time / iterations * 1000
return latency
def compute_latency_ms_tensorrt(model, input_size, iterations=None):
model = model.cuda()
model.eval()
_, c, h, w = input_size
dummy_input = torch.randn(1, c, h, w, device='cuda')
torch.onnx.export(model, dummy_input, "model.onnx", verbose=False, input_names=["input"], output_names=["output"])
with build_engine("model.onnx") as engine:
h_input, d_input, h_output, d_output = allocate_buffers(engine)
load_input(input_size, h_input)
with engine.create_execution_context() as context:
latency = do_inference(context, h_input, d_input, h_output, d_output, iterations=iterations)
# FPS = 1000 / latency (in ms)
return latency
except:
warnings.warn("TensorRT (or pycuda) is not installed. compute_latency_ms_tensorrt() cannot be used.")
#########################################################################
def compute_latency_ms_pytorch(model, input_size, iterations=None, device=None):
torch.backends.cudnn.enabled = True
torch.backends.cudnn.benchmark = True
model.eval()
model = model.cuda()
input = torch.randn(*input_size).cuda()
with torch.no_grad():
for _ in range(10):
model(input)
if iterations is None:
elapsed_time = 0
iterations = 100
while elapsed_time < 1:
torch.cuda.synchronize()
torch.cuda.synchronize()
t_start = time.time()
for _ in range(iterations):
model(input)
torch.cuda.synchronize()
torch.cuda.synchronize()
elapsed_time = time.time() - t_start
iterations *= 2
FPS = iterations / elapsed_time
iterations = int(FPS * 6)
print('=========Speed Testing=========')
torch.cuda.synchronize()
torch.cuda.synchronize()
t_start = time.time()
for _ in tqdm(range(iterations)):
model(input)
torch.cuda.synchronize()
torch.cuda.synchronize()
elapsed_time = time.time() - t_start
latency = elapsed_time / iterations * 1000
torch.cuda.empty_cache()
# FPS = 1000 / latency (in ms)
return latency
def plot_path(lasts, paths=[]):
'''
paths: list of path0~path2
'''
assert len(paths) > 0
path0 = paths[0]
path1 = paths[1] if len(paths) > 1 else []
path2 = paths[2] if len(paths) > 2 else []
if path0[-1] != lasts[0]: path0.append(lasts[0])
if len(path1) != 0 and path1[-1] != lasts[1]: path1.append(lasts[1])
if len(path2) != 0 and path2[-1] != lasts[2]: path2.append(lasts[2])
x_len = max(len(path0), len(path1), len(path2))
f, ax = plt.subplots(figsize=(x_len, 3))
ax.plot(np.arange(len(path0)), 2 - np.array(path0), label='1/32', lw=2.5, color='#000000', linestyle='-')#, marker='o', markeredgecolor='r', markerfacecolor='r')
ax.plot(np.arange(len(path1)), 2 - np.array(path1) - 0.08, lw=1.8, label='1/16', color='#313131', linestyle='--')#, marker='^', markeredgecolor='b', markerfacecolor='b')
ax.plot(np.arange(len(path2)), 2 - np.array(path2) - 0.16, lw=1.2, label='1/8', color='#5a5858', linestyle='-.')#, marker='s', markeredgecolor='m', markerfacecolor='m')
plt.xticks(np.arange(x_len), list(range(1, x_len+1)))
plt.yticks(np.array([0, 1, 2]), ["1/32", "1/16", "1/8"])
plt.ylabel("Scale", fontsize=17)
plt.xlabel("Layer", fontsize=17)
for tick in ax.xaxis.get_major_ticks():
tick.label.set_fontsize(14)
for tick in ax.yaxis.get_major_ticks():
tick.label.set_fontsize(14)
f.tight_layout()
plt.legend(prop={'size': 14}, loc=3)
return f
def plot_path_width(lasts, paths=[], widths=[]):
'''
paths: list of path0~path2
'''
assert len(paths) > 0 and len(widths) > 0
path0 = paths[0]
path1 = paths[1] if len(paths) > 1 else []
path2 = paths[2] if len(paths) > 2 else []
width0 = widths[0]
width1 = widths[1] if len(widths) > 1 else []
width2 = widths[2] if len(widths) > 2 else []
# just for visualization purpose
if path0[-1] != lasts[0]: path0.append(lasts[0])
if len(path1) != 0 and path1[-1] != lasts[1]: path1.append(lasts[1])
if len(path2) != 0 and path2[-1] != lasts[2]: path2.append(lasts[2])
line_updown = -0.07
annotation_updown = 0.05; annotation_down_scale = 1.7
x_len = max(len(path0), len(path1), len(path2))
f, ax = plt.subplots(figsize=(x_len, 3))
assert len(path0) == len(width0) + 1 or len(path0) + len(width0) == 0, "path0 %d, width0 %d"%(len(path0), len(width0))
assert len(path1) == len(width1) + 1 or len(path1) + len(width1) == 0, "path1 %d, width1 %d"%(len(path1), len(width1))
assert len(path2) == len(width2) + 1 or len(path2) + len(width2) == 0, "path2 %d, width2 %d"%(len(path2), len(width2))
ax.plot(np.arange(len(path0)), 2 - np.array(path0), label='1/32', lw=2.5, color='#000000', linestyle='-')
ax.plot(np.arange(len(path1)), 2 - np.array(path1) + line_updown, lw=1.8, label='1/16', color='#313131', linestyle='--')
ax.plot(np.arange(len(path2)), 2 - np.array(path2) + line_updown*2, lw=1.2, label='1/8', color='#5a5858', linestyle='-.')
annotations = {} # (idx, scale, width, down): ((x, y), width)
for idx, width in enumerate(width2):
annotations[(idx, path2[idx], width, path2[idx+1]-path2[idx])] = ((0.35 + idx, 2 - path2[idx] + line_updown*2 + annotation_updown - (path2[idx+1]-path2[idx])/annotation_down_scale), width)
for idx, width in enumerate(width1):
annotations[(idx, path1[idx], width, path1[idx+1]-path1[idx])] = ((0.35 + idx, 2 - path1[idx] + line_updown + annotation_updown - (path1[idx+1]-path1[idx])/annotation_down_scale), width)
for idx, width in enumerate(width0):
annotations[(idx, path0[idx], width, path0[idx+1]-path0[idx])] = ((0.35 + idx, 2 - path0[idx] + annotation_updown - (path0[idx+1]-path0[idx])/annotation_down_scale), width)
for k, v in annotations.items():
plt.annotate("%.2f"%v[1], v[0], fontsize=12, color='red')
plt.xticks(np.arange(x_len), list(range(1, x_len+1)))
plt.yticks(np.array([0, 1, 2]), ["1/32", "1/16", "1/8"])
plt.ylim([-0.4, 2.5])
plt.ylabel("Scale", fontsize=17)
plt.xlabel("Layer", fontsize=17)
for tick in ax.xaxis.get_major_ticks():
tick.label.set_fontsize(14)
for tick in ax.yaxis.get_major_ticks():
tick.label.set_fontsize(14)
f.tight_layout()
plt.legend(prop={'size': 14}, loc=3)
return f
def plot_op(ops, path, width=[], head_width=None, F_base=16):
assert len(width) == 0 or len(width) == len(ops) - 1
table_vals = []
scales = {0: "1/8", 1: "1/16", 2: "1/32"}; base_scale = 3
for idx, op in enumerate(ops):
scale = path[idx]
if len(width) > 0:
if idx < len(width):
ch = int(F_base*2**(scale+base_scale)*width[idx])
else:
ch = int(F_base*2**(scale+base_scale)*head_width)
else:
ch = F_base*2**(scale+base_scale)
row = [idx+1, PRIMITIVES[op], scales[scale], ch]
table_vals.append(row)
# Based on http://stackoverflow.com/a/8531491/190597 (Andrey Sobolev)
col_labels = ['Stage', 'Operator', 'Scale', '#Channel_out']
plt.tight_layout()
fig = plt.figure(figsize=(3,3))
ax = fig.add_subplot(111, frame_on=False)
ax.xaxis.set_visible(False) # hide the x axis
ax.yaxis.set_visible(False) # hide the y axis
table = plt.table(cellText=table_vals,
colWidths=[0.22, 0.6, 0.25, 0.5],
colLabels=col_labels,
cellLoc='center',
loc='center')
table.auto_set_font_size(False)
table.set_fontsize(20)
table.scale(2, 2)
return fig
def objective_acc_lat(acc, lat, lat_target=8.3, alpha=-0.07, beta=-0.07):
if lat <= lat_target:
w = alpha
else:
w = beta
return acc * math.pow(lat / lat_target, w)
|
Cream/CDARTS/CDARTS_segmentation/tools/utils/darts_utils.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/tools/utils/darts_utils.py",
"repo_id": "Cream",
"token_count": 6285
}
| 317 |
# encoding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path as osp
import sys
import numpy as np
from easydict import EasyDict as edict
C = edict()
config = C
cfg = C
C.seed = 12345
"""please config ROOT_dir and user when u first using"""
C.abs_dir = osp.realpath(".")
C.root_dir = osp.realpath("..")
C.this_dir = C.abs_dir.split(osp.sep)[-1]
C.log_dir = osp.abspath(osp.join(C.root_dir, 'log', C.this_dir))
"""Data Dir"""
C.dataset_path = "/home/t-hongyuanyu/data/cityscapes/"
C.img_root_folder = C.dataset_path
C.gt_root_folder = C.dataset_path
C.train_source = osp.join(C.dataset_path, "cityscapes_train_fine.txt")
C.train_eval_source = osp.join(C.dataset_path, "cityscapes_train_val_fine.txt")
C.eval_source = osp.join(C.dataset_path, "cityscapes_val_fine.txt")
C.test_source = osp.join(C.dataset_path, "cityscapes_test.txt")
"""Path Config"""
def add_path(path):
if path not in sys.path:
sys.path.insert(0, path)
add_path(osp.join(C.root_dir, 'tools'))
add_path(C.root_dir)
"""Image Config"""
C.num_classes = 19
C.background = -1
C.image_mean = np.array([0.485, 0.456, 0.406])
C.image_std = np.array([0.229, 0.224, 0.225])
C.target_size = 1024
C.down_sampling = 1 # first down_sampling then crop ......
C.gt_down_sampling = 1
C.num_train_imgs = 2975
C.num_eval_imgs = 500
""" Settings for network, this would be different for each kind of model"""
C.bn_eps = 1e-5
C.bn_momentum = 0.1
"""Eval Config"""
C.eval_stride_rate = 5 / 6
C.eval_scale_array = [1, ]
C.eval_flip = False
C.eval_base_size = 1024
C.eval_crop_size = 1024
C.eval_height = 1024
C.eval_width = 2048
C.layers = 16
C.width_mult_list = [4./12, 6./12, 8./12, 10./12, 1.,]
C.stem_head_width = (1, 1)
C.Fch = 20
C.image_height = 512
C.image_width = 1024
########################################
C.save = "test"
C.is_test = False # if True, prediction files for the test set will be generated
C.is_eval = True # if True, the train.py will only do evaluation for once
C.json_file = "./jsons/3path_big2.json"
C.model_path = "./3path_big2.pth.tar" # path to pretrained directory to be evaluated
|
Cream/CDARTS/CDARTS_segmentation/train/config_test.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/train/config_test.py",
"repo_id": "Cream",
"token_count": 875
}
| 318 |
__all__ = ['ConvNorm', 'BasicResidual1x', 'BasicResidual_downup_1x', 'BasicResidual2x', 'BasicResidual_downup_2x', 'FactorizedReduce', 'OPS', 'OPS_name', 'OPS_Class', 'Self_Attn']
from pdb import set_trace as bp
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from thop import profile
import sys
import os.path as osp
from easydict import EasyDict as edict
from torch import nn, einsum
from einops import rearrange
C = edict()
"""please config ROOT_dir and user when u first using"""
# C.repo_name = 'FasterSeg'
C.abs_dir = osp.realpath(".")
C.root_dir = osp.realpath("..")
C.this_dir = C.abs_dir.split(osp.sep)[-1]
# C.root_dir = C.abs_dir[:C.abs_dir.index(C.repo_name) + len(C.repo_name)]
"""Path Config"""
def add_path(path):
if path not in sys.path:
sys.path.insert(0, path)
add_path(osp.join(C.root_dir, 'tools'))
try:
from utils.darts_utils import compute_latency_ms_tensorrt as compute_latency
print("use TensorRT for latency test")
except:
from utils.darts_utils import compute_latency_ms_pytorch as compute_latency
print("use PyTorch for latency test")
from slimmable_ops import USConv2d, USBatchNorm2d
from layers import NaiveSyncBatchNorm
latency_lookup_table = {}
# table_file_name = "latency_lookup_table.npy"
# if osp.isfile(table_file_name):
# latency_lookup_table = np.load(table_file_name).item()
# BatchNorm2d = nn.BatchNorm2d
BatchNorm2d = NaiveSyncBatchNorm
def drop_path_(x, drop_prob, training):
if training and drop_prob > 0.:
keep_prob = 1. - drop_prob
# per data point mask; assuming x in cuda.
mask = torch.cuda.FloatTensor(x.size(0), 1, 1, 1).bernoulli_(keep_prob)
x = torch.div(x, keep_prob)
x = torch.mul(x, mask)
# x.div_(keep_prob).mul_(mask)
return x
class DropPath_(nn.Module):
def __init__(self, p=0.):
""" [!] DropPath is inplace module
Args:
p: probability of an path to be zeroed.
"""
super().__init__()
self.p = p
def extra_repr(self):
return 'p={}, inplace'.format(self.p)
def forward(self, x):
drop_path_(x, self.p, self.training)
return x
def forward_latency(self, size):
c_in, h_in, w_in = size
latency = 0
return latency, (c_in, h_in, w_in)
class ConvNorm(nn.Module):
'''
conv => norm => activation
use native nn.Conv2d, not slimmable
'''
def __init__(self, C_in, C_out, kernel_size=3, stride=1, padding=None, dilation=1, groups=1, bias=False, slimmable=True, width_mult_list=[1.]):
super(ConvNorm, self).__init__()
self.C_in = C_in
self.C_out = C_out
self.kernel_size = kernel_size
assert stride in [1, 2]
self.stride = stride
if padding is None:
# assume h_out = h_in / s
self.padding = int(np.ceil((dilation * (kernel_size - 1) + 1 - stride) / 2.))
else:
self.padding = padding
self.dilation = dilation
assert type(groups) == int
if kernel_size == 1:
self.groups = 1
else:
self.groups = groups
self.bias = bias
self.slimmable = slimmable
self.width_mult_list = width_mult_list
self.ratio = (1., 1.)
if slimmable:
self.conv = nn.Sequential(
USConv2d(C_in, C_out, kernel_size, stride, padding=self.padding, dilation=dilation, groups=self.groups, bias=bias, width_mult_list=width_mult_list),
USBatchNorm2d(C_out, width_mult_list),
nn.ReLU(inplace=True),
)
else:
self.conv = nn.Sequential(
nn.Conv2d(C_in, C_out, kernel_size, stride, padding=self.padding, dilation=dilation, groups=self.groups, bias=bias),
# nn.BatchNorm2d(C_out),
BatchNorm2d(C_out),
nn.ReLU(inplace=True),
)
def set_ratio(self, ratio):
assert self.slimmable
assert len(ratio) == 2
self.ratio = ratio
self.conv[0].set_ratio(ratio)
self.conv[1].set_ratio(ratio[1])
@staticmethod
def _flops(h, w, C_in, C_out, kernel_size=3, stride=1, padding=None, dilation=1, groups=1, bias=False):
layer = ConvNorm(C_in, C_out, kernel_size, stride, padding, dilation, groups, bias, slimmable=False)
flops, params = profile(layer, inputs=(torch.randn(1, C_in, h, w),), verbose=False)
return flops
@staticmethod
def _latency(h, w, C_in, C_out, kernel_size=3, stride=1, padding=None, dilation=1, groups=1, bias=False):
layer = ConvNorm(C_in, C_out, kernel_size, stride, padding, dilation, groups, bias, slimmable=False)
latency = compute_latency(layer, (1, C_in, h, w))
return latency
def forward_latency(self, size):
c_in, h_in, w_in = size
if self.slimmable:
assert c_in == int(self.C_in * self.ratio[0]), "c_in %d, self.C_in * self.ratio[0] %d"%(c_in, self.C_in * self.ratio[0])
c_out = int(self.C_out * self.ratio[1])
else:
assert c_in == self.C_in, "c_in %d, self.C_in %d"%(c_in, self.C_in)
c_out = self.C_out
if self.stride == 1:
h_out = h_in; w_out = w_in
else:
h_out = h_in // 2; w_out = w_in // 2
name = "ConvNorm_H%d_W%d_Cin%d_Cout%d_kernel%d_stride%d"%(h_in, w_in, c_in, c_out, self.kernel_size, self.stride)
if name in latency_lookup_table:
latency = latency_lookup_table[name]
else:
print("not found in latency_lookup_table:", name)
latency = ConvNorm._latency(h_in, w_in, c_in, c_out, self.kernel_size, self.stride, self.padding, self.dilation, self.groups, self.bias)
latency_lookup_table[name] = latency
np.save(table_file_name, latency_lookup_table)
return latency, (c_out, h_out, w_out)
def forward(self, x):
assert x.size()[1] == self.C_in, "{} {}".format(x.size()[1], self.C_in)
x = self.conv(x)
return x
class BasicResidual1x(nn.Module):
def __init__(self, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1, slimmable=True, width_mult_list=[1.]):
super(BasicResidual1x, self).__init__()
# Both self.conv1 and self.downsample layers downsample the input when stride != 1
groups = 1
self.C_in = C_in
self.C_out = C_out
self.kernel_size = kernel_size
self.stride = stride
self.dilation = dilation
self.groups = groups
self.slimmable = slimmable
self.width_mult_list = width_mult_list
assert stride in [1, 2]
if self.stride == 2: self.dilation = 1
self.ratio = (1., 1.)
self.relu = nn.ReLU(inplace=True)
if slimmable:
self.conv1 = USConv2d(C_in, C_out, 3, stride, padding=dilation, dilation=dilation, groups=groups, bias=False, width_mult_list=width_mult_list)
self.bn1 = USBatchNorm2d(C_out, width_mult_list)
else:
self.conv1 = nn.Conv2d(C_in, C_out, 3, stride, padding=dilation, dilation=dilation, groups=groups, bias=False)
# self.bn1 = nn.BatchNorm2d(C_out)
self.bn1 = BatchNorm2d(C_out)
def set_ratio(self, ratio):
assert len(ratio) == 2
self.ratio = ratio
self.conv1.set_ratio(ratio)
self.bn1.set_ratio(ratio[1])
@staticmethod
def _flops(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
layer = BasicResidual1x(C_in, C_out, kernel_size, stride, dilation, groups, slimmable=False)
flops, params = profile(layer, inputs=(torch.randn(1, C_in, h, w),), verbose=False)
return flops
@staticmethod
def _latency(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
layer = BasicResidual1x(C_in, C_out, kernel_size, stride, dilation, groups, slimmable=False)
latency = compute_latency(layer, (1, C_in, h, w))
return latency
def forward_latency(self, size):
c_in, h_in, w_in = size
if self.slimmable:
assert c_in == int(self.C_in * self.ratio[0]), "c_in %d, int(self.C_in * self.ratio[0]) %d"%(c_in, int(self.C_in * self.ratio[0]))
c_out = int(self.C_out * self.ratio[1])
else:
assert c_in == self.C_in, "c_in %d, self.C_in %d"%(c_in, self.C_in)
c_out = self.C_out
if self.stride == 1:
h_out = h_in; w_out = w_in
else:
h_out = h_in // 2; w_out = w_in // 2
name = "BasicResidual1x_H%d_W%d_Cin%d_Cout%d_stride%d_dilation%d"%(h_in, w_in, c_in, c_out, self.stride, self.dilation)
if name in latency_lookup_table:
latency = latency_lookup_table[name]
else:
print("not found in latency_lookup_table:", name)
latency = BasicResidual1x._latency(h_in, w_in, c_in, c_out, self.kernel_size, self.stride, self.dilation, self.groups)
latency_lookup_table[name] = latency
np.save(table_file_name, latency_lookup_table)
return latency, (c_out, h_out, w_out)
def forward(self, x):
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
return out
class BasicResidual_downup_1x(nn.Module):
def __init__(self, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1, slimmable=True, width_mult_list=[1.]):
super(BasicResidual_downup_1x, self).__init__()
# Both self.conv1 and self.downsample layers downsample the input when stride != 1
groups = 1
self.C_in = C_in
self.C_out = C_out
self.kernel_size = kernel_size
self.stride = stride
self.dilation = dilation
self.groups = groups
self.slimmable = slimmable
self.width_mult_list = width_mult_list
assert stride in [1, 2]
if self.stride == 2: self.dilation = 1
self.ratio = (1., 1.)
self.relu = nn.ReLU(inplace=True)
if slimmable:
self.conv1 = USConv2d(C_in, C_out, 3, 1, padding=dilation, dilation=dilation, groups=groups, bias=False, width_mult_list=width_mult_list)
self.bn1 = USBatchNorm2d(C_out, width_mult_list)
if self.stride==1:
self.downsample = nn.Sequential(
USConv2d(C_in, C_out, 1, 1, padding=0, dilation=dilation, groups=groups, bias=False, width_mult_list=width_mult_list),
USBatchNorm2d(C_out, width_mult_list)
)
else:
self.conv1 = nn.Conv2d(C_in, C_out, 3, 1, padding=dilation, dilation=dilation, groups=groups, bias=False)
# self.bn1 = nn.BatchNorm2d(C_out)
self.bn1 = BatchNorm2d(C_out)
if self.stride==1:
self.downsample = nn.Sequential(
nn.Conv2d(C_in, C_out, 1, 1, padding=0, dilation=dilation, groups=groups, bias=False),
BatchNorm2d(C_out)
)
def set_ratio(self, ratio):
assert len(ratio) == 2
self.ratio = ratio
self.conv1.set_ratio(ratio)
self.bn1.set_ratio(ratio[1])
@staticmethod
def _flops(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
assert stride in [1, 2]
layer = BasicResidual_downup_1x(C_in, C_out, kernel_size, stride, dilation, groups, slimmable=False)
flops, params = profile(layer, inputs=(torch.randn(1, C_in, h, w),), verbose=False)
return flops
@staticmethod
def _latency(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
assert stride in [1, 2]
layer = BasicResidual_downup_1x(C_in, C_out, kernel_size, stride, dilation, groups, slimmable=False)
latency = compute_latency(layer, (1, C_in, h, w))
return latency
def forward_latency(self, size):
c_in, h_in, w_in = size
if self.slimmable:
assert c_in == int(self.C_in * self.ratio[0]), "c_in %d, int(self.C_in * self.ratio[0]) %d"%(c_in, int(self.C_in * self.ratio[0]))
c_out = int(self.C_out * self.ratio[1])
else:
assert c_in == self.C_in, "c_in %d, self.C_in %d"%(c_in, self.C_in)
c_out = self.C_out
if self.stride == 1:
h_out = h_in; w_out = w_in
else:
h_out = h_in // 2; w_out = w_in // 2
name = "BasicResidual_downup_1x_H%d_W%d_Cin%d_Cout%d_stride%d_dilation%d"%(h_in, w_in, c_in, c_out, self.stride, self.dilation)
if name in latency_lookup_table:
latency = latency_lookup_table[name]
else:
print("not found in latency_lookup_table:", name)
latency = BasicResidual_downup_1x._latency(h_in, w_in, c_in, c_out, self.kernel_size, self.stride, self.dilation, self.groups)
latency_lookup_table[name] = latency
np.save(table_file_name, latency_lookup_table)
return latency, (c_out, h_out, w_out)
def forward(self, x):
out = F.interpolate(x, size=(int(x.size(2))//2, int(x.size(3))//2), mode='bilinear', align_corners=False)
out = self.conv1(out)
out = self.bn1(out)
if self.stride == 1:
out = F.interpolate(out, size=(int(x.size(2)), int(x.size(3))), mode='bilinear', align_corners=False)
out = out + self.downsample(x)
out = self.relu(out)
return out
class BasicResidual2x(nn.Module):
def __init__(self, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1, slimmable=True, width_mult_list=[1.]):
super(BasicResidual2x, self).__init__()
# Both self.conv1 and self.downsample layers downsample the input when stride != 1
groups = 1
self.C_in = C_in
self.C_out = C_out
self.kernel_size = kernel_size
self.stride = stride
self.dilation = dilation
self.groups = groups
self.slimmable = slimmable
self.width_mult_list = width_mult_list
assert stride in [1, 2]
if self.stride == 2: self.dilation = 1
self.ratio = (1., 1.)
self.relu = nn.ReLU(inplace=True)
if self.slimmable:
self.conv1 = USConv2d(C_in, C_out, 3, stride, padding=dilation, dilation=dilation, groups=groups, bias=False, width_mult_list=width_mult_list)
self.bn1 = USBatchNorm2d(C_out, width_mult_list)
self.conv2 = USConv2d(C_out, C_out, 3, 1, padding=dilation, dilation=dilation, groups=groups, bias=False, width_mult_list=width_mult_list)
self.bn2 = USBatchNorm2d(C_out, width_mult_list)
else:
self.conv1 = nn.Conv2d(C_in, C_out, 3, stride, padding=dilation, dilation=dilation, groups=groups, bias=False)
# self.bn1 = nn.BatchNorm2d(C_out)
self.bn1 = BatchNorm2d(C_out)
self.conv2 = nn.Conv2d(C_out, C_out, 3, 1, padding=dilation, dilation=dilation, groups=groups, bias=False)
# self.bn2 = nn.BatchNorm2d(C_out)
self.bn2 = BatchNorm2d(C_out)
def set_ratio(self, ratio):
assert len(ratio) == 2
self.ratio = ratio
self.conv1.set_ratio(ratio)
self.bn1.set_ratio(ratio[1])
self.conv2.set_ratio((ratio[1], ratio[1]))
self.bn2.set_ratio(ratio[1])
@staticmethod
def _flops(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
layer = BasicResidual2x(C_in, C_out, kernel_size, stride, dilation, groups, slimmable=False)
flops, params = profile(layer, inputs=(torch.randn(1, C_in, h, w),), verbose=False)
return flops
@staticmethod
def _latency(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
layer = BasicResidual2x(C_in, C_out, kernel_size, stride, dilation, groups, slimmable=False)
latency = compute_latency(layer, (1, C_in, h, w))
return latency
def forward_latency(self, size):
c_in, h_in, w_in = size
if self.slimmable:
assert c_in == int(self.C_in * self.ratio[0])
c_out = int(self.C_out * self.ratio[1])
else:
assert c_in == self.C_in, "c_in %d, self.C_in%d"%(c_in, self.C_in)
c_out = self.C_out
if self.stride == 1:
h_out = h_in; w_out = w_in
else:
h_out = h_in // 2; w_out = w_in // 2
name = "BasicResidual2x_H%d_W%d_Cin%d_Cout%d_stride%d_dilation%d"%(h_in, w_in, c_in, c_out, self.stride, self.dilation)
if name in latency_lookup_table:
latency = latency_lookup_table[name]
else:
print("not found in latency_lookup_table:", name)
latency = BasicResidual2x._latency(h_in, w_in, c_in, c_out, self.kernel_size, self.stride, self.dilation, self.groups)
latency_lookup_table[name] = latency
np.save(table_file_name, latency_lookup_table)
return latency, (c_out, h_out, w_out)
def forward(self, x):
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
return out
class BasicResidual_downup_2x(nn.Module):
def __init__(self, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1, slimmable=True, width_mult_list=[1.]):
super(BasicResidual_downup_2x, self).__init__()
# Both self.conv1 and self.downsample layers downsample the input when stride != 1
groups = 1
self.C_in = C_in
self.C_out = C_out
self.kernel_size = kernel_size
self.stride = stride
self.dilation = dilation
self.groups = groups
self.slimmable = slimmable
self.width_mult_list = width_mult_list
assert stride in [1, 2]
if self.stride == 2: self.dilation = 1
self.ratio = (1., 1.)
self.relu = nn.ReLU(inplace=True)
if self.slimmable:
self.conv1 = USConv2d(C_in, C_out, 3, 1, padding=dilation, dilation=dilation, groups=groups, bias=False, width_mult_list=width_mult_list)
self.bn1 = USBatchNorm2d(C_out, width_mult_list)
self.conv2 = USConv2d(C_out, C_out, 3, 1, padding=dilation, dilation=dilation, groups=groups, bias=False, width_mult_list=width_mult_list)
self.bn2 = USBatchNorm2d(C_out, width_mult_list)
if self.stride==1:
self.downsample = nn.Sequential(
USConv2d(C_in, C_out, 1, 1, padding=0, dilation=dilation, groups=groups, bias=False, width_mult_list=width_mult_list),
USBatchNorm2d(C_out, width_mult_list)
)
else:
self.conv1 = nn.Conv2d(C_in, C_out, 3, 1, padding=dilation, dilation=dilation, groups=groups, bias=False)
# self.bn1 = nn.BatchNorm2d(C_out)
self.bn1 = BatchNorm2d(C_out)
self.conv2 = nn.Conv2d(C_out, C_out, 3, 1, padding=dilation, dilation=dilation, groups=groups, bias=False)
# self.bn2 = nn.BatchNorm2d(C_out)
self.bn2 = BatchNorm2d(C_out)
if self.stride==1:
self.downsample = nn.Sequential(
nn.Conv2d(C_in, C_out, 1, 1, padding=0, dilation=dilation, groups=groups, bias=False),
BatchNorm2d(C_out)
)
def set_ratio(self, ratio):
assert len(ratio) == 2
self.ratio = ratio
self.conv1.set_ratio(ratio)
self.bn1.set_ratio(ratio[1])
self.conv2.set_ratio((ratio[1], ratio[1]))
self.bn2.set_ratio(ratio[1])
@staticmethod
def _flops(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
assert stride in [1, 2]
layer = BasicResidual_downup_2x(C_in, C_out, kernel_size, stride, dilation, groups, slimmable=False)
flops, params = profile(layer, inputs=(torch.randn(1, C_in, h, w),), verbose=False)
return flops
@staticmethod
def _latency(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
assert stride in [1, 2]
layer = BasicResidual_downup_2x(C_in, C_out, kernel_size, stride, dilation, groups, slimmable=False)
latency = compute_latency(layer, (1, C_in, h, w))
return latency
def forward_latency(self, size):
c_in, h_in, w_in = size
if self.slimmable:
assert c_in == int(self.C_in * self.ratio[0])
c_out = int(self.C_out * self.ratio[1])
else:
assert c_in == self.C_in, "c_in %d, self.C_in%d"%(c_in, self.C_in)
c_out = self.C_out
if self.stride == 1:
h_out = h_in; w_out = w_in
else:
h_out = h_in // 2; w_out = w_in // 2
name = "BasicResidual2x_H%d_W%d_Cin%d_Cout%d_stride%d_dilation%d"%(h_in, w_in, c_in, c_out, self.stride, self.dilation)
if name in latency_lookup_table:
latency = latency_lookup_table[name]
else:
print("not found in latency_lookup_table:", name)
latency = BasicResidual2x._latency(h_in, w_in, c_in, c_out, self.kernel_size, self.stride, self.dilation, self.groups)
latency_lookup_table[name] = latency
np.save(table_file_name, latency_lookup_table)
return latency, (c_out, h_out, w_out)
def forward(self, x):
out = F.interpolate(x, size=(int(x.size(2))//2, int(x.size(3))//2), mode='bilinear', align_corners=False)
out = self.conv1(out)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.stride == 1:
out = F.interpolate(out, size=(int(x.size(2)), int(x.size(3))), mode='bilinear', align_corners=False)
out = out + self.downsample(x)
out = self.relu(out)
return out
class FactorizedReduce(nn.Module):
def __init__(self, C_in, C_out, stride=1, slimmable=True, width_mult_list=[1.]):
super(FactorizedReduce, self).__init__()
assert stride in [1, 2]
assert C_out % 2 == 0
self.C_in = C_in
self.C_out = C_out
self.stride = stride
self.slimmable = slimmable
self.width_mult_list = width_mult_list
self.ratio = (1., 1.)
if stride == 1 and slimmable:
self.conv1 = USConv2d(C_in, C_out, 1, stride=1, padding=0, bias=False, width_mult_list=width_mult_list)
self.bn = USBatchNorm2d(C_out, width_mult_list)
self.relu = nn.ReLU(inplace=True)
elif stride == 2:
self.relu = nn.ReLU(inplace=True)
if slimmable:
self.conv1 = USConv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False, width_mult_list=width_mult_list)
self.conv2 = USConv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False, width_mult_list=width_mult_list)
self.bn = USBatchNorm2d(C_out, width_mult_list)
else:
self.conv1 = nn.Conv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False)
self.conv2 = nn.Conv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False)
self.bn = BatchNorm2d(C_out)
def set_ratio(self, ratio):
assert len(ratio) == 2
if self.stride == 1:
self.ratio = ratio
self.conv1.set_ratio(ratio)
self.bn.set_ratio(ratio[1])
elif self.stride == 2:
self.ratio = ratio
self.conv1.set_ratio(ratio)
self.conv2.set_ratio(ratio)
self.bn.set_ratio(ratio[1])
@staticmethod
def _flops(h, w, C_in, C_out, stride=1):
layer = FactorizedReduce(C_in, C_out, stride, slimmable=False)
flops, params = profile(layer, inputs=(torch.randn(1, C_in, h, w),), verbose=False)
return flops
@staticmethod
def _latency(h, w, C_in, C_out, stride=1):
layer = FactorizedReduce(C_in, C_out, stride, slimmable=False)
latency = compute_latency(layer, (1, C_in, h, w))
return latency
def forward_latency(self, size):
c_in, h_in, w_in = size
if self.slimmable:
assert c_in == int(self.C_in * self.ratio[0])
c_out = int(self.C_out * self.ratio[1])
else:
assert c_in == self.C_in
c_out = self.C_out
if self.stride == 1:
h_out = h_in; w_out = w_in
else:
h_out = h_in // 2; w_out = w_in // 2
name = "FactorizedReduce_H%d_W%d_Cin%d_Cout%d_stride%d"%(h_in, w_in, c_in, c_out, self.stride)
if name in latency_lookup_table:
latency = latency_lookup_table[name]
else:
print("not found in latency_lookup_table:", name)
latency = FactorizedReduce._latency(h_in, w_in, c_in, c_out, self.stride)
latency_lookup_table[name] = latency
np.save(table_file_name, latency_lookup_table)
return latency, (c_out, h_out, w_out)
def forward(self, x):
if self.stride == 2:
out = torch.cat([self.conv1(x), self.conv2(x[:,:,1:,1:])], dim=1)
out = self.bn(out)
out = self.relu(out)
return out
else:
if self.slimmable:
out = self.conv1(x)
out = self.bn(out)
out = self.relu(out)
return out
else:
return x
def pair(x):
return (x, x) if not isinstance(x, tuple) else x
def expand_dim(t, dim, k):
t = t.unsqueeze(dim = dim)
expand_shape = [-1] * len(t.shape)
expand_shape[dim] = k
return t.expand(*expand_shape)
def rel_to_abs(x):
b, h, l, _, device, dtype = *x.shape, x.device, x.dtype
dd = {'device': device, 'dtype': dtype}
col_pad = torch.zeros((b, h, l, 1), **dd)
x = torch.cat((x, col_pad), dim = 3)
flat_x = rearrange(x, 'b h l c -> b h (l c)')
flat_pad = torch.zeros((b, h, l - 1), **dd)
flat_x_padded = torch.cat((flat_x, flat_pad), dim = 2)
final_x = flat_x_padded.reshape(b, h, l + 1, 2 * l - 1)
final_x = final_x[:, :, :l, (l-1):]
return final_x
def relative_logits_1d(q, rel_k):
b, heads, h, w, dim = q.shape
logits = einsum('b h x y d, r d -> b h x y r', q, rel_k)
logits = rearrange(logits, 'b h x y r -> b (h x) y r')
logits = rel_to_abs(logits)
logits = logits.reshape(b, heads, h, w, w)
logits = expand_dim(logits, dim = 3, k = h)
return logits
# positional embeddings
class AbsPosEmb(nn.Module):
def __init__(
self,
fmap_size,
dim_head
):
super().__init__()
height, width = pair(fmap_size)
scale = dim_head ** -0.5
self.height = nn.Parameter(torch.randn(height, dim_head) * scale)
self.width = nn.Parameter(torch.randn(width, dim_head) * scale)
def forward(self, q):
emb = rearrange(self.height, 'h d -> h () d') + rearrange(self.width, 'w d -> () w d')
emb = rearrange(emb, ' h w d -> (h w) d')
logits = einsum('b h i d, j d -> b h i j', q, emb)
return logits
class RelPosEmb(nn.Module):
def __init__(
self,
fmap_size,
dim_head
):
super().__init__()
height, width = pair(fmap_size)
scale = dim_head ** -0.5
self.fmap_size = fmap_size
self.rel_height = nn.Parameter(torch.randn(height * 2 - 1, dim_head) * scale)
self.rel_width = nn.Parameter(torch.randn(width * 2 - 1, dim_head) * scale)
def forward(self, q):
h, w = self.fmap_size
q = rearrange(q, 'b h (x y) d -> b h x y d', x = h, y = w)
rel_logits_w = relative_logits_1d(q, self.rel_width)
rel_logits_w = rearrange(rel_logits_w, 'b h x i y j-> b h (x y) (i j)')
q = rearrange(q, 'b h x y d -> b h y x d')
rel_logits_h = relative_logits_1d(q, self.rel_height)
rel_logits_h = rearrange(rel_logits_h, 'b h x i y j -> b h (y x) (j i)')
return rel_logits_w + rel_logits_h
# classes
class Attention(nn.Module):
def __init__(
self,
*,
dim,
fmap_size,
heads = 4,
dim_head = 128,
rel_pos_emb = False
):
super().__init__()
self.heads = heads
self.scale = dim_head ** -0.5
inner_dim = heads * dim_head
self.to_qkv = nn.Conv2d(dim, inner_dim * 3, 1, bias = False)
rel_pos_class = AbsPosEmb if not rel_pos_emb else RelPosEmb
self.pos_emb = rel_pos_class(fmap_size, dim_head)
def forward(self, fmap):
heads, b, c, h, w = self.heads, *fmap.shape
q, k, v = self.to_qkv(fmap).chunk(3, dim = 1)
q, k, v = map(lambda t: rearrange(t, 'b (h d) x y -> b h (x y) d', h = heads), (q, k, v))
q *= self.scale
sim = einsum('b h i d, b h j d -> b h i j', q, k)
sim += self.pos_emb(q)
attn = sim.softmax(dim = -1)
out = einsum('b h i j, b h j d -> b h i d', attn, v)
out = rearrange(out, 'b h (x y) d -> b (h d) x y', x = h, y = w)
return out
class Self_Attn(nn.Module):
def __init__(
self,
*,
dim,
fmap_size,
dim_out,
proj_factor,
downsample,
slimmable=True,
width_mult_list=[1.],
heads = 4,
dim_head = 128,
rel_pos_emb = False,
activation = nn.ReLU(inplace=True)
):
super().__init__()
# shortcut
# contraction and expansion
self.slimmable = slimmable
self.width_mult_list = width_mult_list
if slimmable:
kernel_size, stride, padding = (3, 2, 1) if downsample else (1, 1, 0)
self.sk = False
self.shortcut = nn.Sequential(
USConv2d(dim, dim_out, kernel_size, padding=padding, stride=stride, dilation=1, groups=1, bias=False, width_mult_list=width_mult_list),
USBatchNorm2d(dim_out, width_mult_list),
activation
)
else:
if dim != dim_out or downsample:
self.sk = False
kernel_size, stride, padding = (3, 2, 1) if downsample else (1, 1, 0)
self.shortcut = nn.Sequential(
nn.Conv2d(dim, dim_out, kernel_size, stride = stride, padding = padding, bias = False),
BatchNorm2d(dim_out),
activation
)
else:
self.sk = True
self.shortcut = nn.Identity()
self.mix_bn1 = nn.ModuleList([])
self.mix_bn2 = nn.ModuleList([])
self.mix_bn3 = nn.ModuleList([])
# attn_dim_in = dim_out // proj_factor
attn_dim_in = dim_out
# attn_dim_out = heads * dim_head
attn_dim_out = attn_dim_in
if self.slimmable:
self.mix_bn1.append(USBatchNorm2d(dim_out, width_mult_list))
self.mix_bn2.append(USBatchNorm2d(dim_out, width_mult_list))
self.mix_bn3.append(USBatchNorm2d(dim_out, width_mult_list))
nn.init.zeros_(self.mix_bn3[0].weight)
else:
self.mix_bn1.append(BatchNorm2d(dim_out))
self.mix_bn2.append(BatchNorm2d(dim_out))
self.mix_bn3.append(BatchNorm2d(dim_out))
nn.init.zeros_(self.mix_bn3[0].weight)
if self.slimmable:
self.net1 = USConv2d(dim, attn_dim_in, 1, padding=0, stride=1, dilation=1, groups=1, bias=False, width_mult_list=width_mult_list)
self.net2 = nn.Sequential(
activation,
ATT(attn_dim_in, slimmable=True, width_mult_list=width_mult_list),
nn.AvgPool2d((2, 2)) if downsample else nn.Identity()
)
self.net3 = nn.Sequential(
activation,
USConv2d(attn_dim_out, dim_out, 1, padding=0, stride=1, dilation=1, groups=1, bias=False, width_mult_list=width_mult_list),
)
else:
self.net1 = nn.Conv2d(dim, attn_dim_in, 1, bias = False)
self.net2 = nn.Sequential(
activation,
ATT(attn_dim_in, slimmable=False),
nn.AvgPool2d((2, 2)) if downsample else nn.Identity()
)
self.net3 = nn.Sequential(
activation,
nn.Conv2d(attn_dim_out, dim_out, 1, bias = False),
)
# init last batch norm gamma to zero
# nn.init.zeros_(self.net[-1].weight)
# final activation
self.activation = activation
def set_ratio(self, ratio):
if not self.sk:
self.shortcut[0].set_ratio(ratio)
self.shortcut[1].set_ratio(ratio[1])
for i in range(len(self.mix_bn1)):
self.mix_bn1[i].set_ratio(ratio[1])
self.mix_bn2[i].set_ratio(ratio[1])
self.mix_bn3[i].set_ratio(ratio[1])
self.net1.set_ratio(ratio)
self.net2[1].set_ratio((ratio[1], ratio[1]))
self.net3[1].set_ratio((ratio[1], ratio[1]))
@staticmethod
def _flops(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
assert stride in [1, 2]
layer = Self_Attn(dim=C_in, fmap_size=(128, 256), dim_out=C_out, downsample=(stride==2))
flops, params = profile(layer, inputs=(torch.randn(1, C_in, h, w),), verbose=False)
return flops
@staticmethod
def _latency(h, w, C_in, C_out, kernel_size=3, stride=1, dilation=1, groups=1):
assert stride in [1, 2]
layer = Self_Attn(dim=C_in, fmap_size=(128, 256), dim_out=C_out, downsample=(stride==2))
latency = compute_latency(layer, (1, C_in, h, w))
return latency
def forward_latency(self, size):
c_in, h_in, w_in = size
if self.slimmable:
assert c_in == int(self.C_in * self.ratio[0])
c_out = int(self.C_out * self.ratio[1])
else:
assert c_in == self.C_in, "c_in %d, self.C_in%d"%(c_in, self.C_in)
c_out = self.C_out
if self.stride == 1:
h_out = h_in; w_out = w_in
else:
h_out = h_in // 2; w_out = w_in // 2
name = "Self_Attn_H%d_W%d_Cin%d_Cout%d_stride%d_dilation%d"%(h_in, w_in, c_in, c_out, self.stride, self.dilation)
if name in latency_lookup_table:
latency = latency_lookup_table[name]
else:
print("not found in latency_lookup_table:", name)
latency = Self_Attn._latency(h_in, w_in, c_in, c_out, self.kernel_size, self.stride, self.dilation, self.groups)
latency_lookup_table[name] = latency
np.save(table_file_name, latency_lookup_table)
return latency, (c_out, h_out, w_out)
def forward(self, x):
branch = 0
shortcut = self.shortcut(x)
x = self.net1(x)
x = self.mix_bn1[branch](x)
x = self.net2(x)
x = self.mix_bn2[branch](x)
x = self.net3(x)
x = self.mix_bn3[branch](x)
x += shortcut
return self.activation(x)
class ATT(nn.Module):
""" Self attention Layer"""
def __init__(self, in_dim, slimmable=True, width_mult_list=[1.]):
super(ATT, self).__init__()
self.chanel_in = in_dim
self.slimmable = slimmable
self.width_mult_list = width_mult_list
self.ratio = (1., 1.)
if self.slimmable:
self.query_conv = USConv2d(in_dim , in_dim//8 , 1, padding=0, stride=1, bias=False, width_mult_list=width_mult_list)
self.key_conv = USConv2d(in_dim , in_dim//8 , 1, padding=0, stride=1, bias=False, width_mult_list=width_mult_list)
self.value_conv = USConv2d(in_dim , in_dim , 1, padding=0, stride=1, bias=False, width_mult_list=width_mult_list)
else:
self.query_conv = nn.Conv2d(in_channels = in_dim , out_channels = in_dim//8 , kernel_size= 1)
self.key_conv = nn.Conv2d(in_channels = in_dim , out_channels = in_dim//8 , kernel_size= 1)
self.value_conv = nn.Conv2d(in_channels = in_dim , out_channels = in_dim , kernel_size= 1)
self.gamma = nn.Parameter(torch.zeros(1))
self.softmax = nn.Softmax(dim=-1) #
def set_ratio(self, ratio):
assert len(ratio) == 2
self.ratio = ratio
self.query_conv.set_ratio(ratio)
self.key_conv.set_ratio(ratio)
self.value_conv.set_ratio(ratio)
def forward(self,x):
"""
inputs :
x : input feature maps( B X C X W X H)
returns :
out : self attention value + input feature
attention: B X N X N (N is Width*Height)
"""
m_batchsize, C, width, height = x.size()
proj_query = self.query_conv(x).view(m_batchsize, -1,width*height).permute(0,2,1) # B X CX(N)
proj_key = self.key_conv(x).view(m_batchsize,-1, width*height) # B X C x (*W*H)
energy = torch.bmm(proj_query, proj_key) # transpose check
attention = self.softmax(energy) # BX (N) X (N)
proj_value = self.value_conv(x).view(m_batchsize,-1,width*height) # B X C X N
out = torch.bmm(proj_value, attention.permute(0,2,1) )
out = out.view(m_batchsize, C, width,height)
out = self.gamma*out + x
return out
from collections import OrderedDict
OPS = {
'skip' : lambda C_in, C_out, stride, slimmable, width_mult_list, fmap_size: FactorizedReduce(C_in, C_out, stride, slimmable, width_mult_list),
'conv' : lambda C_in, C_out, stride, slimmable, width_mult_list, fmap_size: BasicResidual1x(C_in, C_out, kernel_size=3, stride=stride, dilation=1, slimmable=slimmable, width_mult_list=width_mult_list),
'conv_downup' : lambda C_in, C_out, stride, slimmable, width_mult_list, fmap_size: BasicResidual_downup_1x(C_in, C_out, kernel_size=3, stride=stride, dilation=1, slimmable=slimmable, width_mult_list=width_mult_list),
'conv_2x' : lambda C_in, C_out, stride, slimmable, width_mult_list, fmap_size: BasicResidual2x(C_in, C_out, kernel_size=3, stride=stride, dilation=1, slimmable=slimmable, width_mult_list=width_mult_list),
'conv_2x_downup' : lambda C_in, C_out, stride, slimmable, width_mult_list, fmap_size: BasicResidual_downup_2x(C_in, C_out, kernel_size=3, stride=stride, dilation=1, slimmable=slimmable, width_mult_list=width_mult_list),
'sa': lambda C_in, C_out, stride, slimmable, width_mult_list, fmap_size: Self_Attn(dim=C_in, fmap_size=(128, 256), dim_out=C_out, proj_factor=1, downsample=(stride==2), slimmable=slimmable, width_mult_list=width_mult_list)
}
OPS_name = ["FactorizedReduce", "BasicResidual1x", "BasicResidual_downup_1x", "BasicResidual2x", "BasicResidual_downup_2x", "Self_Attn"]
OPS_Class = OrderedDict()
OPS_Class['skip'] = FactorizedReduce
OPS_Class['conv'] = BasicResidual1x
OPS_Class['conv_downup'] = BasicResidual_downup_1x
OPS_Class['conv_2x'] = BasicResidual2x
OPS_Class['conv_2x_downup'] = BasicResidual_downup_2x
OPS_Class['sa'] = Self_Attn
|
Cream/CDARTS/CDARTS_segmentation/train/operations.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/train/operations.py",
"repo_id": "Cream",
"token_count": 19889
}
| 319 |
import torch
import torch.nn as nn
import torch.nn.functional as F
from utils import utils
from models.loss import Loss_interactive
def search(train_loader, valid_loader, model, optimizer, w_optim, alpha_optim, epoch, writer, logger, config):
# interactive retrain and kl
device = torch.device("cuda")
criterion = nn.CrossEntropyLoss().to(device)
top1 = utils.AverageMeter()
top5 = utils.AverageMeter()
losses = utils.AverageMeter()
losses_interactive = utils.AverageMeter()
losses_cls = utils.AverageMeter()
losses_reg = utils.AverageMeter()
step_num = len(train_loader)
step_num = int(step_num * config.sample_ratio)
cur_step = epoch*step_num
cur_lr_search = w_optim.param_groups[0]['lr']
cur_lr_main = optimizer.param_groups[0]['lr']
if config.local_rank == 0:
logger.info("Train Epoch {} Search LR {}".format(epoch, cur_lr_search))
logger.info("Train Epoch {} Main LR {}".format(epoch, cur_lr_main))
writer.add_scalar('retrain/lr', cur_lr_search, cur_step)
model.train()
for step, ((trn_X, trn_y), (val_X, val_y)) in enumerate(zip(train_loader, valid_loader)):
if step > step_num:
break
trn_X, trn_y = trn_X.to(device, non_blocking=True), trn_y.to(device, non_blocking=True)
val_X, val_y = val_X.to(device, non_blocking=True), val_y.to(device, non_blocking=True)
N = trn_X.size(0)
#use valid data
alpha_optim.zero_grad()
optimizer.zero_grad()
logits_search, emsemble_logits_search = model(val_X, super_flag=True)
logits_main, emsemble_logits_main= model(val_X, super_flag=False)
loss_cls = (criterion(logits_search, val_y) + criterion(logits_main, val_y)) / config.loss_alpha
loss_interactive = Loss_interactive(emsemble_logits_search, emsemble_logits_main, config.loss_T, config.interactive_type) * config.loss_alpha
loss_regular = 0 * loss_cls
if config.regular:
coeff = max(config.regular_coeff * (1 - float(epoch-config.pretrain_epochs)/((
config.search_iter-config.pretrain_epochs)*config.search_iter_epochs*config.regular_ratio)), 0)
# loss_regular += coeff * torch.sum(abs(model.module._arch_parameters[:, 0]))
loss_regular += coeff * model.module.l1_loss()
loss = loss_cls + loss_interactive + loss_regular
loss.backward()
nn.utils.clip_grad_norm_(model.module.parameters(), config.w_grad_clip)
optimizer.step()
alpha_optim.step()
prec1, prec5 = utils.accuracy(logits_search, val_y, topk=(1, 5))
if config.distributed:
reduced_loss = utils.reduce_tensor(loss.data, config.world_size)
reduced_loss_interactive = utils.reduce_tensor(loss_interactive.data, config.world_size)
reduced_loss_cls = utils.reduce_tensor(loss_cls.data, config.world_size)
reduced_loss_reg = utils.reduce_tensor(loss_regular.data, config.world_size)
prec1 = utils.reduce_tensor(prec1, config.world_size)
prec5 = utils.reduce_tensor(prec5, config.world_size)
else:
reduced_loss = loss.data
reduced_loss_interactive = loss_interactive.data
reduced_loss_cls = loss_cls.data
reduced_loss_reg = loss_regular.data
losses.update(reduced_loss.item(), N)
losses_interactive.update(reduced_loss_interactive.item(), N)
losses_cls.update(reduced_loss_cls.item(), N)
losses_reg.update(reduced_loss_reg.item(), N)
top1.update(prec1.item(), N)
top5.update(prec5.item(), N)
torch.cuda.synchronize()
if config.local_rank == 0 and (step % config.print_freq == 0 or step == step_num):
logger.info(
"Train_2: Epoch {:2d}/{} Step {:03d}/{:03d} Loss {losses.avg:.3f} "
"Loss_interactive {losses_interactive.avg:.3f} Losses_cls {losses_cls.avg:.3f} Losses_reg {losses_reg.avg:.3f} "
"Prec@(1,5) ({top1.avg:.1%}, {top5.avg:.1%})".format(
epoch+1, config.search_iter*config.search_iter_epochs, step,
step_num, losses=losses, losses_interactive=losses_interactive, losses_cls=losses_cls,
losses_reg=losses_reg, top1=top1, top5=top5))
if config.local_rank == 0:
writer.add_scalar('retrain/loss', reduced_loss.item(), cur_step)
writer.add_scalar('retrain/top1', prec1.item(), cur_step)
writer.add_scalar('retrain/top5', prec5.item(), cur_step)
cur_step += 1
w_optim.zero_grad()
logits_search_train, _ = model(trn_X, super_flag=True)
loss_cls_train = criterion(logits_search_train, trn_y)
loss_train = loss_cls_train
loss_train.backward()
# gradient clipping
nn.utils.clip_grad_norm_(model.module.parameters(), config.w_grad_clip)
# only update w
w_optim.step()
# alpha_optim.step()
if config.distributed:
reduced_loss_cls_train = utils.reduce_tensor(loss_cls_train.data, config.world_size)
reduced_loss_train = utils.reduce_tensor(loss_train.data, config.world_size)
else:
reduced_loss_cls_train = reduced_loss_cls_train.data
reduced_loss_train = reduced_loss_train.data
if config.local_rank == 0 and (step % config.print_freq == 0 or step == step_num-1):
logger.info(
"Train_1: Loss_cls: {:.3f} Loss: {:.3f}".format(
reduced_loss_cls_train.item(), reduced_loss_train.item())
)
if config.local_rank == 0:
logger.info("Train_2: Epoch {:2d}/{} Final Prec@1 {:.4%}".format(
epoch+1, config.search_iter*config.search_iter_epochs, top1.avg))
def retrain_warmup(valid_loader, model, optimizer, epoch, writer, logger, super_flag, retrain_epochs, config):
device = torch.device("cuda")
criterion = nn.CrossEntropyLoss().to(device)
top1 = utils.AverageMeter()
top5 = utils.AverageMeter()
losses = utils.AverageMeter()
step_num = len(valid_loader)
step_num = int(step_num * config.sample_ratio)
cur_step = epoch*step_num
cur_lr = optimizer.param_groups[0]['lr']
if config.local_rank == 0:
logger.info("Warmup Epoch {} LR {:.3f}".format(epoch+1, cur_lr))
writer.add_scalar('warmup/lr', cur_lr, cur_step)
model.train()
for step, (val_X, val_y) in enumerate(valid_loader):
if step > step_num:
break
val_X, val_y = val_X.to(device, non_blocking=True), val_y.to(device, non_blocking=True)
N = val_X.size(0)
optimizer.zero_grad()
logits_main, _ = model(val_X, super_flag=super_flag)
loss = criterion(logits_main, val_y)
loss.backward()
nn.utils.clip_grad_norm_(model.module.parameters(), config.w_grad_clip)
optimizer.step()
prec1, prec5 = utils.accuracy(logits_main, val_y, topk=(1, 5))
if config.distributed:
reduced_loss = utils.reduce_tensor(loss.data, config.world_size)
prec1 = utils.reduce_tensor(prec1, config.world_size)
prec5 = utils.reduce_tensor(prec5, config.world_size)
else:
reduced_loss = loss.data
losses.update(reduced_loss.item(), N)
top1.update(prec1.item(), N)
top5.update(prec5.item(), N)
torch.cuda.synchronize()
if config.local_rank == 0 and (step % config.print_freq == 0 or step == step_num):
logger.info(
"Warmup: Epoch {:2d}/{} Step {:03d}/{:03d} Loss {losses.avg:.3f} "
"Prec@(1,5) ({top1.avg:.1%}, {top5.avg:.1%})".format(
epoch+1, retrain_epochs, step,
step_num, losses=losses, top1=top1, top5=top5))
if config.local_rank == 0:
writer.add_scalar('retrain/loss', reduced_loss.item(), cur_step)
writer.add_scalar('retrain/top1', prec1.item(), cur_step)
writer.add_scalar('retrain/top5', prec5.item(), cur_step)
cur_step += 1
if config.local_rank == 0:
logger.info("Warmup: Epoch {:2d}/{} Final Prec@1 {:.4%}".format(
epoch+1, retrain_epochs, top1.avg))
def validate(valid_loader, model, epoch, cur_step, writer, logger, super_flag, config):
top1 = utils.AverageMeter()
top5 = utils.AverageMeter()
losses = utils.AverageMeter()
model.eval()
device = torch.device("cuda")
criterion = nn.CrossEntropyLoss().to(device)
with torch.no_grad():
for step, (X, y) in enumerate(valid_loader):
X, y = X.to(device, non_blocking=True), y.to(device, non_blocking=True)
N = X.size(0)
logits, _ = model(X, super_flag=False)
loss = criterion(logits, y)
prec1, prec5 = utils.accuracy(logits, y, topk=(1, 5))
reduced_loss = loss.data
losses.update(reduced_loss.item(), N)
top1.update(prec1.item(), N)
top5.update(prec5.item(), N)
torch.cuda.synchronize()
step_num = len(valid_loader)
if (step % config.print_freq == 0 or step == step_num-1) and config.local_rank == 0:
logger.info(
"Valid: Epoch {:2d}/{} Step {:03d}/{:03d} Loss {losses.avg:.3f} "
"Prec@(1,5) ({top1.avg:.1%}, {top5.avg:.1%})".format(
epoch+1, config.search_iter*config.search_iter_epochs, step, step_num,
losses=losses, top1=top1, top5=top5))
if config.local_rank == 0:
writer.add_scalar('val/loss', losses.avg, cur_step)
writer.add_scalar('val/top1', top1.avg, cur_step)
writer.add_scalar('val/top5', top5.avg, cur_step)
logger.info("Valid: Epoch {:2d}/{} Final Prec@1 {:.4%}".format(
epoch+1, config.search_iter*config.search_iter_epochs, top1.avg))
return top1.avg
|
Cream/CDARTS/benchmark201/core/search_function.py/0
|
{
"file_path": "Cream/CDARTS/benchmark201/core/search_function.py",
"repo_id": "Cream",
"token_count": 4846
}
| 320 |
import torch
import torch.nn as nn
class DistillHeadCIFAR(nn.Module):
def __init__(self, C, size, num_classes, bn_affine=True):
"""assuming input size 8x8 or 16x16"""
super(DistillHeadCIFAR, self).__init__()
self.features = nn.Sequential(
nn.ReLU(),
nn.AvgPool2d(size, stride=2, padding=0, count_include_pad=False), # image size = 2 x 2 / 6 x 6
nn.Conv2d(C, 128, 1, bias=False),
# nn.BatchNorm2d(128, affine=bn_affine, track_running_stats=False),
nn.BatchNorm2d(128, affine=bn_affine),
nn.ReLU(),
nn.Conv2d(128, 768, 2, bias=False),
nn.BatchNorm2d(768, affine=bn_affine),
nn.ReLU()
)
self.classifier = nn.Linear(768, num_classes)
self.gap = nn.AdaptiveAvgPool2d(1)
def forward(self, x):
x = self.features(x)
x = self.gap(x)
x = self.classifier(x.view(x.size(0),-1))
return x
class DistillHeadImagenet(nn.Module):
def __init__(self, C, size, num_classes, bn_affine=True):
"""assuming input size 7x7 or 14x14"""
super(DistillHeadImagenet, self).__init__()
self.features = nn.Sequential(
nn.ReLU(),
nn.AvgPool2d(size, stride=2, padding=0, count_include_pad=False), # image size = 2 x 2 / 6 x 6
nn.Conv2d(C, 128, 1, bias=False),
nn.BatchNorm2d(128, affine=bn_affine),
nn.ReLU(),
nn.Conv2d(128, 768, 2, bias=False),
nn.BatchNorm2d(768, affine=bn_affine),
nn.ReLU()
)
self.classifier = nn.Linear(768, num_classes)
self.gap = nn.AdaptiveAvgPool2d(1)
def forward(self, x):
x = self.features(x)
x = self.gap(x)
x = self.classifier(x.view(x.size(0),-1))
return x
class AuxiliaryHeadCIFAR(nn.Module):
def __init__(self, C, size=5, num_classes=10):
"""assuming input size 8x8"""
super(AuxiliaryHeadCIFAR, self).__init__()
self.features = nn.Sequential(
nn.ReLU(inplace=True),
nn.AvgPool2d(size, stride=3, padding=0, count_include_pad=False), # image size = 2 x 2
nn.Conv2d(C, 128, 1, bias=False),
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.Conv2d(128, 768, 2, bias=False),
nn.BatchNorm2d(768),
nn.ReLU(inplace=True)
)
self.classifier = nn.Linear(768, num_classes)
def forward(self, x):
x = self.features(x)
x = self.classifier(x.view(x.size(0),-1))
return x
class AuxiliaryHeadImageNet(nn.Module):
def __init__(self, C, size=5, num_classes=1000):
"""assuming input size 7x7"""
super(AuxiliaryHeadImageNet, self).__init__()
self.features = nn.Sequential(
nn.ReLU(inplace=True),
nn.AvgPool2d(size, stride=2, padding=0, count_include_pad=False),
nn.Conv2d(C, 128, 1, bias=False),
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.Conv2d(128, 768, 2, bias=False),
# NOTE: This batchnorm was omitted in my earlier implementation due to a typo.
# Commenting it out for consistency with the experiments in the paper.
# nn.BatchNorm2d(768),
nn.ReLU(inplace=True)
)
self.classifier = nn.Linear(768, num_classes)
def forward(self, x):
x = self.features(x)
x = self.classifier(x.view(x.size(0),-1))
return x
|
Cream/CDARTS/lib/models/aux_head.py/0
|
{
"file_path": "Cream/CDARTS/lib/models/aux_head.py",
"repo_id": "Cream",
"token_count": 1858
}
| 321 |
# Train Workspace
|
Cream/Cream/experiments/workspace/train/README.md/0
|
{
"file_path": "Cream/Cream/experiments/workspace/train/README.md",
"repo_id": "Cream",
"token_count": 5
}
| 322 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# Written by Hao Du and Houwen Peng
# email: [email protected] and [email protected]
# This dictionary is generated from calculating each operation of each layer to quickly search for layers.
# flops_op_dict[which_stage][which_operation] =
# (flops_of_operation_with_stride1, flops_of_operation_with_stride2)
flops_op_dict = {}
for i in range(5):
flops_op_dict[i] = {}
flops_op_dict[0][0] = (21.828704, 18.820752)
flops_op_dict[0][1] = (32.669328, 28.16048)
flops_op_dict[0][2] = (25.039968, 23.637648)
flops_op_dict[0][3] = (37.486224, 35.385824)
flops_op_dict[0][4] = (29.856864, 30.862992)
flops_op_dict[0][5] = (44.711568, 46.22384)
flops_op_dict[1][0] = (11.808656, 11.86712)
flops_op_dict[1][1] = (17.68624, 17.780848)
flops_op_dict[1][2] = (13.01288, 13.87416)
flops_op_dict[1][3] = (19.492576, 20.791408)
flops_op_dict[1][4] = (14.819216, 16.88472)
flops_op_dict[1][5] = (22.20208, 25.307248)
flops_op_dict[2][0] = (8.198, 10.99632)
flops_op_dict[2][1] = (12.292848, 16.5172)
flops_op_dict[2][2] = (8.69976, 11.99984)
flops_op_dict[2][3] = (13.045488, 18.02248)
flops_op_dict[2][4] = (9.4524, 13.50512)
flops_op_dict[2][5] = (14.174448, 20.2804)
flops_op_dict[3][0] = (12.006112, 15.61632)
flops_op_dict[3][1] = (18.028752, 23.46096)
flops_op_dict[3][2] = (13.009632, 16.820544)
flops_op_dict[3][3] = (19.534032, 25.267296)
flops_op_dict[3][4] = (14.514912, 18.62688)
flops_op_dict[3][5] = (21.791952, 27.9768)
flops_op_dict[4][0] = (11.307456, 15.292416)
flops_op_dict[4][1] = (17.007072, 23.1504)
flops_op_dict[4][2] = (11.608512, 15.894528)
flops_op_dict[4][3] = (17.458656, 24.053568)
flops_op_dict[4][4] = (12.060096, 16.797696)
flops_op_dict[4][5] = (18.136032, 25.40832)
|
Cream/Cream/lib/utils/op_by_layer_dict.py/0
|
{
"file_path": "Cream/Cream/lib/utils/op_by_layer_dict.py",
"repo_id": "Cream",
"token_count": 908
}
| 323 |
# EfficientViT for Image Classification
The codebase implements the image classification with EfficientViT.
## Model Zoo
|Model | Data | Input | Acc@1 | Acc@5 | #FLOPs | #Params | Throughput | Link |
| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
|EfficientViT-M0 | ImageNet-1k |224x224| 63.2 | 85.2 | 79 | 2.3M | 27644 | [model](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m0.pth)/[log](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m0_log.txt)/[onnx](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/EfficientViT_M0.onnx) |
|EfficientViT-M1 | ImageNet-1k |224x224| 68.4 | 88.7 | 167 | 3.0M | 20093 | [model](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m1.pth)/[log](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m1_log.txt)/[onnx](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/EfficientViT_M1.onnx)|
|EfficientViT-M2 | ImageNet-1k |224x224| 70.8 | 90.2 | 201 | 4.2M | 18218 | [model](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m2.pth)/[log](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m2_log.txt)/[onnx](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/EfficientViT_M2.onnx)|
|EfficientViT-M3 | ImageNet-1k |224x224| 73.4 | 91.4 | 263 | 6.9M | 16644 | [model](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m3.pth)/[log](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m3_log.txt)/[onnx](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/EfficientViT_M3.onnx) |
|EfficientViT-M4 | ImageNet-1k |224x224| 74.3 | 91.8 | 299 | 8.8M | 15914 | [model](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m4.pth)/[log](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m4_log.txt)/[onnx](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/EfficientViT_M4.onnx) |
|EfficientViT-M5 | ImageNet-1k |224x224| 77.1 | 93.4 | 522 | 12.4M | 10621 | [model](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m5.pth)/[log](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/efficientvit_m5_log.txt)/[onnx](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo/releases/download/v1.0/EfficientViT_M5.onnx) |
## Get Started
### Install requirements
Run the following command to install the dependences:
```bash
pip install -r requirements.txt
```
### Data preparation
We need to prepare ImageNet-1k dataset from [`http://www.image-net.org/`](http://www.image-net.org/).
- ImageNet-1k
ImageNet-1k contains 1.28 M images for training and 50 K images for validation.
The images shall be stored as individual files:
```
ImageNet/
├── train
│ ├── n01440764
│ │ ├── n01440764_10026.JPEG
│ │ ├── n01440764_10027.JPEG
...
├── val
│ ├── n01440764
│ │ ├── ILSVRC2012_val_00000293.JPEG
...
```
Our code also supports storing the train set and validation set as the `*.tar` archives:
```
ImageNet/
├── train.tar
│ ├── n01440764
│ │ ├── n01440764_10026.JPEG
...
└── val.tar
│ ├── n01440764
│ │ ├── ILSVRC2012_val_00000293.JPEG
...
```
## Evaluation
Before evaluation, we need to prepare the pre-trained models from [model-zoo](https://github.com/xinyuliu-jeffrey/EfficientViT_Model_Zoo).
Run the following command to evaluate a pre-trained EfficientViT-M4 on ImageNet val with a single GPU:
```bash
python main.py --eval --model EfficientViT_M4 --resume ./efficientvit_m4.pth --data-path $PATH_TO_IMAGENET
```
This should give
```
* Acc@1 74.266 Acc@5 91.788 loss 1.242
```
Here are the command lines for evaluating other pre-trained models:
<details>
<summary>
EfficientViT-M0
</summary>
```bash
python main.py --eval --model EfficientViT_M0 --resume ./efficientvit_m0.pth --data-path $PATH_TO_IMAGENET
```
giving
```
* Acc@1 63.296 Acc@5 85.150 loss 1.741
```
</details>
<details>
<summary>
EfficientViT-M1
</summary>
```bash
python main.py --eval --model EfficientViT_M1 --resume ./efficientvit_m1.pth --data-path $PATH_TO_IMAGENET
```
giving
```
* Acc@1 68.356 Acc@5 88.672 loss 1.513
```
</details>
<details>
<summary>
EfficientViT-M2
</summary>
```bash
python main.py --eval --model EfficientViT_M2 --resume ./efficientvit_m2.pth --data-path $PATH_TO_IMAGENET
```
giving
```
* Acc@1 70.786 Acc@5 90.150 loss 1.442
```
</details>
<details>
<summary>
EfficientViT-M3
</summary>
```bash
python main.py --eval --model EfficientViT_M3 --resume ./efficientvit_m3.pth --data-path $PATH_TO_IMAGENET
```
giving
```
* Acc@1 73.390 Acc@5 91.350 loss 1.285
```
</details>
<details>
<summary>
EfficientViT-M5
</summary>
```bash
python main.py --eval --model EfficientViT_M5 --resume ./efficientvit_m5.pth --data-path $PATH_TO_IMAGENET
```
giving
```
* Acc@1 77.124 Acc@5 93.360 loss 1.127
```
</details>
## Training
To train an EfficientViT-M4 model on a single node with 8 GPUs for 300 epochs and distributed evaluation, run:
```bash
python -m torch.distributed.launch --nproc_per_node=8 --master_port 12345 --use_env main.py --model EfficientViT_M4 --data-path $PATH_TO_IMAGENET --dist-eval
```
<details>
<summary>
EfficientViT-M0
</summary>
To train an EfficientViT-M0 model on a single node with 8 GPUs for 300 epochs and distributed evaluation, run:
```bash
python -m torch.distributed.launch --nproc_per_node=8 --master_port 12345 --use_env main.py --model EfficientViT_M0 --data-path $PATH_TO_IMAGENET --dist-eval
```
</details>
<details>
<summary>
EfficientViT-M1
</summary>
To train an EfficientViT-M1 model on a single node with 8 GPUs for 300 epochs and distributed evaluation, run:
```bash
python -m torch.distributed.launch --nproc_per_node=8 --master_port 12345 --use_env main.py --model EfficientViT_M1 --data-path $PATH_TO_IMAGENET --dist-eval
```
</details>
<details>
<summary>
EfficientViT-M2
</summary>
To train an EfficientViT-M2 model on a single node with 8 GPUs for 300 epochs and distributed evaluation, run:
```bash
python -m torch.distributed.launch --nproc_per_node=8 --master_port 12345 --use_env main.py --model EfficientViT_M2 --data-path $PATH_TO_IMAGENET --dist-eval
```
</details>
<details>
<summary>
EfficientViT-M3
</summary>
To train an EfficientViT-M3 model on a single node with 8 GPUs for 300 epochs and distributed evaluation, run:
```bash
python -m torch.distributed.launch --nproc_per_node=8 --master_port 12345 --use_env main.py --model EfficientViT_M3 --data-path $PATH_TO_IMAGENET --dist-eval
```
</details>
<details>
<summary>
EfficientViT-M5
</summary>
To train an EfficientViT-M5 model on a single node with 8 GPUs for 300 epochs and distributed evaluation, run:
```bash
python -m torch.distributed.launch --nproc_per_node=8 --master_port 12345 --use_env main.py --model EfficientViT_M5 --data-path $PATH_TO_IMAGENET --dist-eval
```
</details>
## Speed test
Run the following command to compare the throughputs on GPU/CPU:
```bash
python speed_test.py
```
which should give
```
EfficientViT_M0 cuda:0 27643.941865437002 images/s @ batch size 2048
EfficientViT_M1 cuda:0 20093.286204638334 images/s @ batch size 2048
EfficientViT_M2 cuda:0 18218.347390415714 images/s @ batch size 2048
EfficientViT_M3 cuda:0 16643.905520424512 images/s @ batch size 2048
EfficientViT_M4 cuda:0 15914.449955135608 images/s @ batch size 2048
EfficientViT_M5 cuda:0 10620.868156518267 images/s @ batch size 2048
```
## Acknowledge
We sincerely appreciate [Swin Transformer](https://github.com/microsoft/swin-transformer), [LeViT](https://github.com/facebookresearch/LeViT), [pytorch-image-models](https://github.com/rwightman/pytorch-image-models), and [PyTorch](https://github.com/pytorch/pytorch) for their awesome codebases.
## License
- [License](./LICENSE)
|
Cream/EfficientViT/classification/README.md/0
|
{
"file_path": "Cream/EfficientViT/classification/README.md",
"repo_id": "Cream",
"token_count": 3341
}
| 324 |
model = dict(
type='FasterRCNN',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=True,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[8],
ratios=[0.5, 1.0, 2.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0)),
roi_head=dict(
type='StandardRoIHead',
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=False,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0))),
# model training and testing settings
train_cfg=dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=-1,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_pre=2000,
max_per_img=1000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)),
test_cfg=dict(
rpn=dict(
nms_pre=1000,
max_per_img=1000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100)
# soft-nms is also supported for rcnn testing
# e.g., nms=dict(type='soft_nms', iou_threshold=0.5, min_score=0.05)
))
|
Cream/EfficientViT/downstream/configs/_base_/models/faster_rcnn_r50_fpn.py/0
|
{
"file_path": "Cream/EfficientViT/downstream/configs/_base_/models/faster_rcnn_r50_fpn.py",
"repo_id": "Cream",
"token_count": 2202
}
| 325 |
"""Build iRPE (image RPE) Functions"""
from setuptools import setup, Extension
import torch
from torch.utils import cpp_extension
ext_t = cpp_extension.CppExtension
ext_fnames = ['rpe_index.cpp']
define_macros = []
extra_compile_args = dict(cxx=['-fopenmp', '-O3'],
nvcc=['-O3'])
if torch.cuda.is_available():
ext_t = cpp_extension.CUDAExtension
ext_fnames.append('rpe_index_cuda.cu')
define_macros.append(('WITH_CUDA', None))
setup(name='rpe_index',
version="1.2.0",
ext_modules=[ext_t(
'rpe_index_cpp',
ext_fnames,
define_macros=define_macros,
extra_compile_args=extra_compile_args,
)],
cmdclass={'build_ext': cpp_extension.BuildExtension})
|
Cream/MiniViT/Mini-DeiT/rpe_ops/setup.py/0
|
{
"file_path": "Cream/MiniViT/Mini-DeiT/rpe_ops/setup.py",
"repo_id": "Cream",
"token_count": 402
}
| 326 |
from .build import build_loader
|
Cream/MiniViT/Mini-Swin/data/__init__.py/0
|
{
"file_path": "Cream/MiniViT/Mini-Swin/data/__init__.py",
"repo_id": "Cream",
"token_count": 7
}
| 327 |
from torch import optim as optim
def build_optimizer(config, model):
"""
Build optimizer, set weight decay of normalization to 0 by default.
"""
skip = {}
skip_keywords = {}
if hasattr(model, 'no_weight_decay'):
skip = model.no_weight_decay()
if hasattr(model, 'no_weight_decay_keywords'):
skip_keywords = model.no_weight_decay_keywords()
parameters = set_weight_decay(model, skip, skip_keywords)
opt_lower = config.TRAIN.OPTIMIZER.NAME.lower()
optimizer = None
if opt_lower == 'sgd':
optimizer = optim.SGD(parameters, momentum=config.TRAIN.OPTIMIZER.MOMENTUM, nesterov=True,
lr=config.TRAIN.BASE_LR, weight_decay=config.TRAIN.WEIGHT_DECAY)
elif opt_lower == 'adamw':
optimizer = optim.AdamW(parameters, eps=config.TRAIN.OPTIMIZER.EPS, betas=config.TRAIN.OPTIMIZER.BETAS,
lr=config.TRAIN.BASE_LR, weight_decay=config.TRAIN.WEIGHT_DECAY)
return optimizer
def set_weight_decay(model, skip_list=(), skip_keywords=()):
has_decay = []
no_decay = []
for name, param in model.named_parameters():
if not param.requires_grad:
continue # frozen weights
if len(param.shape) == 1 or name.endswith(".bias") or (name in skip_list) or \
check_keywords_in_name(name, skip_keywords):
no_decay.append(param)
# print(f"{name} has no weight decay")
else:
has_decay.append(param)
return [{'params': has_decay},
{'params': no_decay, 'weight_decay': 0.}]
def check_keywords_in_name(name, keywords=()):
isin = False
for keyword in keywords:
if keyword in name:
isin = True
return isin
|
Cream/MiniViT/Mini-Swin/optimizer.py/0
|
{
"file_path": "Cream/MiniViT/Mini-Swin/optimizer.py",
"repo_id": "Cream",
"token_count": 819
}
| 328 |
import torch
from PIL import Image
import open_clip
# manual inheritance
# arch = 'TinyCLIP-ViT-39M-16-Text-19M'
# model, _, preprocess = open_clip.create_model_and_transforms(arch, pretrained='YFCC15M')
# arch = 'TinyCLIP-ViT-8M-16-Text-3M'
# model, _, preprocess = open_clip.create_model_and_transforms(arch, pretrained='YFCC15M')
# arch = 'TinyCLIP-ResNet-30M-Text-29M'
# model, _, preprocess = open_clip.create_model_and_transforms(arch, pretrained='LAION400M')
# arch = 'TinyCLIP-ResNet-19M-Text-19M'
# model, _, preprocess = open_clip.create_model_and_transforms(arch, pretrained='LAION400M')
# arch = 'TinyCLIP-ViT-61M-32-Text-29M'
# model, _, preprocess = open_clip.create_model_and_transforms(arch, pretrained='LAION400M')
arch = 'TinyCLIP-ViT-40M-32-Text-19M'
model, _, preprocess = open_clip.create_model_and_transforms(arch, pretrained='LAION400M')
tokenizer = open_clip.get_tokenizer(arch)
image_fname = './figure/TinyCLIP.jpg'
image = preprocess(Image.open(image_fname)).unsqueeze(0)
text = tokenizer(["a diagram", "a dog", "a cat"])
with torch.no_grad(), torch.cuda.amp.autocast():
image_features = model.encode_image(image)
text_features = model.encode_text(text)
image_features /= image_features.norm(dim=-1, keepdim=True)
text_features /= text_features.norm(dim=-1, keepdim=True)
text_probs = (100.0 * image_features @ text_features.T).softmax(dim=-1)
print("Label probs:", text_probs)
|
Cream/TinyCLIP/inference.py/0
|
{
"file_path": "Cream/TinyCLIP/inference.py",
"repo_id": "Cream",
"token_count": 572
}
| 329 |
imagenet_classnames = ["tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
"stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
"indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
"kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
"smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
"tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
"box turtle", "banded gecko", "green iguana", "Carolina anole",
"desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
"Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
"American alligator", "triceratops", "worm snake", "ring-necked snake",
"eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
"vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
"green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
"sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
"barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
"tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
"quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
"coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
"red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
"koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
"snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
"fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
"isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
"great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
"bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
"pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
"Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
"Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
"Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
"English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
"Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
"Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
"Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
"Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
"Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
"Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
"Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
"Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
"Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
"Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
"English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
"English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
"Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
"Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
"Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
"Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
"Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
"French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
"Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
"Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
"Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
"Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
"red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
"kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
"Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
"cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
"meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
"dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
"cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
"lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
"monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
"starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
"hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
"zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
"ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
"gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
"black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
"gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
"langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
"howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
"ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
"giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
"sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
"accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
"amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
"backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
"baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
"wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
"bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
"beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
"ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
"bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
"breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
"high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
"can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
"car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
"CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
"storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
"church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
"coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
"candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
"cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
"Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
"rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
"dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
"drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
"electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
"feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
"folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
"freight car", "French horn", "frying pan", "fur coat", "garbage truck",
"gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
"gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
"hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
"handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
"holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
"horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
"T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
"ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
"lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
"music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
"mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
"matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
"microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
"mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
"moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
"mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
"neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
"odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
"oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
"pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
"parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
"pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
"picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
"pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
"plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
"pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
"printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
"quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
"recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
"remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
"rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
"sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
"CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
"shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
"shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
"slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
"solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
"space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
"stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
"stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
"submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
"mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
"table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
"thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
"toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
"tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
"triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
"umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
"velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
"waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
"washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
"hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
"wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
"comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
"plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
"bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
"cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
"artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
"lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
"hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
"red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
"geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
"bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
"rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
"earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]
openai_imagenet_template = [
lambda c: f'a bad photo of a {c}.',
lambda c: f'a photo of many {c}.',
lambda c: f'a sculpture of a {c}.',
lambda c: f'a photo of the hard to see {c}.',
lambda c: f'a low resolution photo of the {c}.',
lambda c: f'a rendering of a {c}.',
lambda c: f'graffiti of a {c}.',
lambda c: f'a bad photo of the {c}.',
lambda c: f'a cropped photo of the {c}.',
lambda c: f'a tattoo of a {c}.',
lambda c: f'the embroidered {c}.',
lambda c: f'a photo of a hard to see {c}.',
lambda c: f'a bright photo of a {c}.',
lambda c: f'a photo of a clean {c}.',
lambda c: f'a photo of a dirty {c}.',
lambda c: f'a dark photo of the {c}.',
lambda c: f'a drawing of a {c}.',
lambda c: f'a photo of my {c}.',
lambda c: f'the plastic {c}.',
lambda c: f'a photo of the cool {c}.',
lambda c: f'a close-up photo of a {c}.',
lambda c: f'a black and white photo of the {c}.',
lambda c: f'a painting of the {c}.',
lambda c: f'a painting of a {c}.',
lambda c: f'a pixelated photo of the {c}.',
lambda c: f'a sculpture of the {c}.',
lambda c: f'a bright photo of the {c}.',
lambda c: f'a cropped photo of a {c}.',
lambda c: f'a plastic {c}.',
lambda c: f'a photo of the dirty {c}.',
lambda c: f'a jpeg corrupted photo of a {c}.',
lambda c: f'a blurry photo of the {c}.',
lambda c: f'a photo of the {c}.',
lambda c: f'a good photo of the {c}.',
lambda c: f'a rendering of the {c}.',
lambda c: f'a {c} in a video game.',
lambda c: f'a photo of one {c}.',
lambda c: f'a doodle of a {c}.',
lambda c: f'a close-up photo of the {c}.',
lambda c: f'a photo of a {c}.',
lambda c: f'the origami {c}.',
lambda c: f'the {c} in a video game.',
lambda c: f'a sketch of a {c}.',
lambda c: f'a doodle of the {c}.',
lambda c: f'a origami {c}.',
lambda c: f'a low resolution photo of a {c}.',
lambda c: f'the toy {c}.',
lambda c: f'a rendition of the {c}.',
lambda c: f'a photo of the clean {c}.',
lambda c: f'a photo of a large {c}.',
lambda c: f'a rendition of a {c}.',
lambda c: f'a photo of a nice {c}.',
lambda c: f'a photo of a weird {c}.',
lambda c: f'a blurry photo of a {c}.',
lambda c: f'a cartoon {c}.',
lambda c: f'art of a {c}.',
lambda c: f'a sketch of the {c}.',
lambda c: f'a embroidered {c}.',
lambda c: f'a pixelated photo of a {c}.',
lambda c: f'itap of the {c}.',
lambda c: f'a jpeg corrupted photo of the {c}.',
lambda c: f'a good photo of a {c}.',
lambda c: f'a plushie {c}.',
lambda c: f'a photo of the nice {c}.',
lambda c: f'a photo of the small {c}.',
lambda c: f'a photo of the weird {c}.',
lambda c: f'the cartoon {c}.',
lambda c: f'art of the {c}.',
lambda c: f'a drawing of the {c}.',
lambda c: f'a photo of the large {c}.',
lambda c: f'a black and white photo of a {c}.',
lambda c: f'the plushie {c}.',
lambda c: f'a dark photo of a {c}.',
lambda c: f'itap of a {c}.',
lambda c: f'graffiti of the {c}.',
lambda c: f'a toy {c}.',
lambda c: f'itap of my {c}.',
lambda c: f'a photo of a cool {c}.',
lambda c: f'a photo of a small {c}.',
lambda c: f'a tattoo of the {c}.',
]
|
Cream/TinyCLIP/src/open_clip/imagenet_zeroshot_data.py/0
|
{
"file_path": "Cream/TinyCLIP/src/open_clip/imagenet_zeroshot_data.py",
"repo_id": "Cream",
"token_count": 10252
}
| 330 |
""" timm model adapter
Wraps timm (https://github.com/rwightman/pytorch-image-models) models for use as a vision tower in CLIP model.
"""
from collections import OrderedDict
import torch.nn as nn
try:
import timm
from timm.models.layers import Mlp, to_2tuple
except:
timm = None
try:
from timm.models.layers.attention_pool2d import RotAttentionPool2d
from timm.models.layers.attention_pool2d import AttentionPool2d as AbsAttentionPool2d
except:
pass
try:
from .utils import freeze_batch_norm_2d
except:
from utils import freeze_batch_norm_2d
class TimmModel(nn.Module):
""" timm model adapter
# FIXME this adapter is a work in progress, may change in ways that break weight compat
"""
def __init__(
self,
model_name,
embed_dim,
image_size=224,
pool='avg',
proj='linear',
drop=0.,
pretrained=False):
super().__init__()
if timm is None:
raise RuntimeError("Please `pip install timm` to use timm models.")
self.image_size = to_2tuple(image_size)
self.trunk = timm.create_model(model_name, pretrained=pretrained)
feat_size = self.trunk.default_cfg.get('pool_size', None)
feature_ndim = 1 if not feat_size else 2
if pool in ('abs_attn', 'rot_attn'):
assert feature_ndim == 2
# if attn pooling used, remove both classifier and default pool
self.trunk.reset_classifier(0, global_pool='')
else:
# reset global pool if pool config set, otherwise leave as network default
reset_kwargs = dict(global_pool=pool) if pool else {}
self.trunk.reset_classifier(0, **reset_kwargs)
prev_chs = self.trunk.num_features
head_layers = OrderedDict()
if pool == 'abs_attn':
head_layers['pool'] = AbsAttentionPool2d(
prev_chs, feat_size=feat_size, out_features=embed_dim)
prev_chs = embed_dim
elif pool == 'rot_attn':
head_layers['pool'] = RotAttentionPool2d(
prev_chs, out_features=embed_dim)
prev_chs = embed_dim
else:
assert proj, 'projection layer needed if non-attention pooling is used.'
# NOTE attention pool ends with a projection layer, so proj should usually be set to '' if such pooling is used
if proj == 'linear':
head_layers['drop'] = nn.Dropout(drop)
head_layers['proj'] = nn.Linear(prev_chs, embed_dim)
elif proj == 'mlp':
head_layers['mlp'] = Mlp(
prev_chs, 2 * embed_dim, embed_dim, drop=drop)
self.head = nn.Sequential(head_layers)
def lock(self, unlocked_groups=0, freeze_bn_stats=False):
""" lock modules
Args:
unlocked_groups (int): leave last n layer groups unlocked (default: 0)
"""
if not unlocked_groups:
# lock full model
for param in self.trunk.parameters():
param.requires_grad = False
if freeze_bn_stats:
freeze_batch_norm_2d(self.trunk)
else:
# NOTE: partial freeze requires latest timm (master) branch and is subject to change
try:
# FIXME import here until API stable and in an official release
from timm.models.helpers import group_parameters, group_modules
except ImportError:
raise RuntimeError(
'Please install latest timm `pip install git+https://github.com/rwightman/pytorch-image-models`')
matcher = self.trunk.group_matcher()
gparams = group_parameters(self.trunk, matcher)
max_layer_id = max(gparams.keys())
max_layer_id = max_layer_id - unlocked_groups
for group_idx in range(max_layer_id + 1):
group = gparams[group_idx]
for param in group:
self.trunk.get_parameter(param).requires_grad = False
if freeze_bn_stats:
gmodules = group_modules(self.trunk, matcher, reverse=True)
gmodules = {k for k, v in gmodules.items() if v <=
max_layer_id}
freeze_batch_norm_2d(self.trunk, gmodules)
def forward(self, x):
x = self.trunk(x)
x = self.head(x)
return x
if __name__ == '__main__':
model = TimmModel('vit_base_patch32_224_in21k',
512, pool='', pretrained=False)
print(model)
|
Cream/TinyCLIP/src/open_clip/timm_model.py/0
|
{
"file_path": "Cream/TinyCLIP/src/open_clip/timm_model.py",
"repo_id": "Cream",
"token_count": 2156
}
| 331 |
import argparse
def get_default_params(model_name):
# Params from paper (https://arxiv.org/pdf/2103.00020.pdf)
model_name = model_name.lower()
if "vit" in model_name:
return {"lr": 5.0e-4, "beta1": 0.9, "beta2": 0.98, "eps": 1.0e-6}
else:
return {"lr": 5.0e-4, "beta1": 0.9, "beta2": 0.999, "eps": 1.0e-8}
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--train-data",
type=str,
default=None,
help="Path to csv filewith training data",
)
parser.add_argument(
"--val-data",
type=str,
default=None,
help="Path to csv file with validation data",
)
parser.add_argument(
"--train-num-samples",
type=int,
default=None,
help="Number of samples in dataset. Required for webdataset if not available in info file.",
)
parser.add_argument(
"--val-num-samples",
type=int,
default=None,
help="Number of samples in dataset. Useful for webdataset if not available in info file.",
)
parser.add_argument(
"--dataset-type",
choices=["webdataset", "csv", "auto", "tsv", "blobchunk", "synthetic"],
default="auto",
help="Which type of dataset to process."
)
parser.add_argument(
"--dataset-resampled",
default=False,
action="store_true",
help="Whether to use sampling with replacement for webdataset shard selection."
)
parser.add_argument(
"--csv-separator",
type=str,
default="\t",
help="For csv-like datasets, which separator to use."
)
parser.add_argument(
"--csv-img-key",
type=str,
default="filepath",
help="For csv-like datasets, the name of the key for the image paths."
)
parser.add_argument(
"--csv-caption-key",
type=str,
default="title",
help="For csv-like datasets, the name of the key for the captions."
)
parser.add_argument(
"--imagenet-val",
type=str,
default=None,
help="Path to imagenet val set for conducting zero shot evaluation.",
)
parser.add_argument(
"--imagenet-v2",
type=str,
default=None,
help="Path to imagenet v2 for conducting zero shot evaluation.",
)
parser.add_argument(
"--logs",
type=str,
default="./logs/",
help="Where to store tensorboard logs. Use None to avoid storing logs.",
)
parser.add_argument(
"--log-local",
action="store_true",
default=False,
help="log files on local master, otherwise global master only.",
)
parser.add_argument(
"--name",
type=str,
default=None,
help="Optional identifier for the experiment when storing logs. Otherwise use current time.",
)
parser.add_argument(
"--workers", type=int, default=1, help="Number of dataloader workers per GPU."
)
parser.add_argument(
"--batch-size", type=int, default=64, help="Batch size per GPU."
)
parser.add_argument(
"--epochs", type=float, default=32, help="Number of epochs to train for."
)
parser.add_argument("--lr", type=float, default=None,
help="Learning rate.")
parser.add_argument("--beta1", type=float,
default=None, help="Adam beta 1.")
parser.add_argument("--beta2", type=float,
default=None, help="Adam beta 2.")
parser.add_argument("--eps", type=float, default=None,
help="Adam epsilon.")
parser.add_argument("--wd", type=float, default=0.2, help="Weight decay.")
parser.add_argument(
"--warmup", type=int, default=10000, help="Number of steps to warmup for."
)
parser.add_argument(
"--use-bn-sync",
default=False,
action="store_true",
help="Whether to use batch norm sync.")
parser.add_argument(
"--skip-scheduler",
action="store_true",
default=False,
help="Use this flag to skip the learning rate decay.",
)
parser.add_argument(
"--save-frequency", type=int, default=1, help="How often to save checkpoints."
)
parser.add_argument(
"--save-most-recent",
action="store_true",
default=False,
help="Always save the most recent model trained to epoch_latest.pt.",
)
parser.add_argument(
"--zeroshot-frequency", type=int, default=1, help="How often to run zero shot."
)
parser.add_argument(
"--val-frequency", type=int, default=1, help="How often to run evaluation with val data."
)
parser.add_argument(
"--resume",
default=None,
type=str,
help="path to latest checkpoint (default: none)",
)
parser.add_argument(
"--precision",
choices=["amp", "amp_bfloat16", "fp16", "fp32"],
default="amp",
help="Floating point precision."
)
parser.add_argument(
"--image-precision",
type=str,
help="Floating point precision for image encoder"
)
parser.add_argument(
"--text-precision",
type=str,
help="Floating point precision for text encoder"
)
parser.add_argument(
"--logit-precision",
type=str,
help="Floating point precision for logit scale"
)
parser.add_argument(
"--model",
type=str,
default="RN50",
help="Name of the vision backbone to use.",
)
parser.add_argument(
"--pretrained",
default='',
type=str,
help="Use a pretrained CLIP model weights with the specified tag or file path.",
)
parser.add_argument(
"--pretrained-image-file",
default='',
type=str,
help="Use a pretrained CLIP image model weights with the specified tag or file path.",
)
parser.add_argument(
"--pretrained-text-file",
default='',
type=str,
help="Use a pretrained CLIP text model weights with the specified tag or file path.",
)
parser.add_argument(
"--pretrained-image",
default=False,
action='store_true',
help="Load imagenet pretrained weights for image tower backbone if available.",
)
parser.add_argument(
"--lock-image",
default=False,
action='store_true',
help="Lock full image tower by disabling gradients.",
)
parser.add_argument(
"--lock-text",
default=False,
action='store_true',
help="Lock full text tower by disabling gradients.",
)
parser.add_argument(
"--use-teacher-image",
default=False,
action='store_true',
help="Use teacher image encoder",
)
parser.add_argument(
"--use-teacher-text",
default=False,
action='store_true',
help="Use teacher text encoder",
)
parser.add_argument(
"--lock-image-unlocked-groups",
type=int,
default=0,
help="Leave last n image tower layer groups unlocked.",
)
parser.add_argument(
"--lock-image-freeze-bn-stats",
default=False,
action='store_true',
help="Freeze BatchNorm running stats in image tower for any locked layers.",
)
parser.add_argument(
'--image-mean', type=float, nargs='+', default=None, metavar='MEAN',
help='Override default image mean value of dataset')
parser.add_argument(
'--image-std', type=float, nargs='+', default=None, metavar='STD',
help='Override default image std deviation of of dataset')
parser.add_argument(
"--grad-checkpointing",
default=False,
action='store_true',
help="Enable gradient checkpointing.",
)
parser.add_argument(
"--grad-cache-times",
type=int,
default=1,
help="Gradient cache times.",
)
parser.add_argument(
"--local-loss",
default=False,
action="store_true",
help="calculate loss w/ local features @ global (instead of realizing full global @ global matrix)"
)
parser.add_argument(
"--gather-with-grad",
default=False,
action="store_true",
help="enable full distributed gradient for feature gather"
)
parser.add_argument(
"--force-quick-gelu",
default=False,
action='store_true',
help="Force use of QuickGELU activation for non-OpenAI transformer models.",
)
parser.add_argument(
"--torchscript",
default=False,
action='store_true',
help="torch.jit.script the model, also uses jit version of OpenAI models if pretrained=='openai'",
)
parser.add_argument(
"--trace",
default=False,
action='store_true',
help="torch.jit.trace the model for inference / eval only",
)
# arguments for distributed training
parser.add_argument(
"--dist-url",
default="env://",
type=str,
help="url used to set up distributed training",
)
parser.add_argument(
"--dist-backend", default="nccl", type=str, help="distributed backend"
)
parser.add_argument(
"--report-to",
default='',
type=str,
help="Options are ['wandb', 'tensorboard', 'wandb,tensorboard']"
)
parser.add_argument(
"--wandb-notes",
default='',
type=str,
help="Notes if logging with wandb"
)
parser.add_argument(
"--debug",
default=False,
action="store_true",
help="If true, more information is logged."
)
parser.add_argument(
"--prune-image",
default=False,
action="store_true",
help="If true, use Image mask."
)
parser.add_argument(
"--prune-text",
default=False,
action="store_true",
help="If true, use text mask."
)
parser.add_argument(
"--prune-step",
type=int, default=3000,
help="prune model step, stop mask learn, warmup step."
)
parser.add_argument(
"--sparsity-warmup",
type=int, default=1000,
help="number of steps that mask sparsity reaches target sparsity."
)
parser.add_argument(
"--target-sparsity",
type=float, default=0.25,
help="target sparsity of this training stage."
)
parser.add_argument(
"--start-sparsity",
type=float, default=0,
help="start sparsity of this training stage."
)
parser.add_argument(
"--total-loss-flag",
default=False,
action="store_true",
help="use image and text branch to calculate overall sparsity"
)
parser.add_argument(
"--load-last-stage",
default=False,
action="store_true",
help="use image and text branch to calculate overall sparsity"
)
parser.add_argument(
"--l0lr", type=float, default=-0.02, help="mask Learning rate."
)
parser.add_argument(
"--copy-codebase",
default=False,
action="store_true",
help="If true, we copy the entire base on the log diretory, and execute from there."
)
parser.add_argument(
"--horovod",
default=False,
action="store_true",
help="Use horovod for distributed training."
)
parser.add_argument(
"--ddp-static-graph",
default=False,
action='store_true',
help="Enable static graph optimization for DDP in PyTorch >= 1.11.",
)
parser.add_argument(
"--no-set-device-rank",
default=False,
action="store_true",
help="Don't set device index from local rank (when CUDA_VISIBLE_DEVICES restricted to one per proc)."
)
parser.add_argument(
"--seed", type=int, default=0, help="Default random seed."
)
parser.add_argument(
"--norm_gradient_clip", type=float, default=None, help="Gradient clip."
)
parser.add_argument(
"--distillation",
default=False,
action="store_true",
)
parser.add_argument(
"--distillation-weight", # for soft label
type=float,
default=1.0,
help="Weight for distillation.",
)
parser.add_argument(
"--distillation-alpha", # for soft label
type=float,
default=1.0,
help="Alpha for distillation.",
)
parser.add_argument(
"--distillation-teacher",
type=str,
help='Teacher model for distillation.',
)
parser.add_argument(
"--eval",
default=False,
action="store_true",
)
parser.add_argument(
"--logit-scale",
type=float,
help="both student and teacher's logit scale, basic: 100"
)
args = parser.parse_args()
if args.distillation_teacher is not None:
args.distillation = True
# If some params are not passed, we use the default values based on model name.
default_params = get_default_params(args.model)
for name, val in default_params.items():
if getattr(args, name) is None:
setattr(args, name, val)
return args
|
Cream/TinyCLIP/src/training/params.py/0
|
{
"file_path": "Cream/TinyCLIP/src/training/params.py",
"repo_id": "Cream",
"token_count": 5891
}
| 332 |
MODEL:
NAME: TinyViT-11M-22k-distill
TYPE: tiny_vit
DROP_PATH_RATE: 0.0
TINY_VIT:
DEPTHS: [ 2, 2, 6, 2 ]
NUM_HEADS: [ 2, 4, 8, 14 ]
WINDOW_SIZES: [ 7, 7, 14, 7 ]
EMBED_DIMS: [64, 128, 256, 448]
TRAIN:
EPOCHS: 90
BASE_LR: 2.5e-4
WARMUP_EPOCHS: 5
WEIGHT_DECAY: 0.01
DATA:
DATASET: imagenet22k
AUG:
MIXUP: 0.0
CUTMIX: 0.0
|
Cream/TinyViT/configs/22k_distill/tiny_vit_11m_22k_distill.yaml/0
|
{
"file_path": "Cream/TinyViT/configs/22k_distill/tiny_vit_11m_22k_distill.yaml",
"repo_id": "Cream",
"token_count": 218
}
| 333 |
DEFAULT_CROP_PCT = 0.875
IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
IMAGENET_INCEPTION_MEAN = (0.5, 0.5, 0.5)
IMAGENET_INCEPTION_STD = (0.5, 0.5, 0.5)
IMAGENET_DPN_MEAN = (124 / 255, 117 / 255, 104 / 255)
IMAGENET_DPN_STD = tuple([1 / (.0167 * 255)] * 3)
|
Cream/TinyViT/data/augmentation/constants.py/0
|
{
"file_path": "Cream/TinyViT/data/augmentation/constants.py",
"repo_id": "Cream",
"token_count": 162
}
| 334 |
""" Dataset parser interface that wraps TFDS datasets
Wraps many (most?) TFDS image-classification datasets
from https://github.com/tensorflow/datasets
https://www.tensorflow.org/datasets/catalog/overview#image_classification
Hacked together by / Copyright 2020 Ross Wightman
"""
import math
import torch
import torch.distributed as dist
from PIL import Image
try:
import tensorflow as tf
tf.config.set_visible_devices([], 'GPU') # Hands off my GPU! (or pip install tensorflow-cpu)
import tensorflow_datasets as tfds
try:
tfds.even_splits('', 1, drop_remainder=False) # non-buggy even_splits has drop_remainder arg
has_buggy_even_splits = False
except TypeError:
print("Warning: This version of tfds doesn't have the latest even_splits impl. "
"Please update or use tfds-nightly for better fine-grained split behaviour.")
has_buggy_even_splits = True
except ImportError as e:
print(e)
print("Please install tensorflow_datasets package `pip install tensorflow-datasets`.")
exit(1)
from .parser import Parser
MAX_TP_SIZE = 8 # maximum TF threadpool size, only doing jpeg decodes and queuing activities
SHUFFLE_SIZE = 8192 # examples to shuffle in DS queue
PREFETCH_SIZE = 2048 # examples to prefetch
def even_split_indices(split, n, num_examples):
partitions = [round(i * num_examples / n) for i in range(n + 1)]
return [f"{split}[{partitions[i]}:{partitions[i + 1]}]" for i in range(n)]
def get_class_labels(info):
if 'label' not in info.features:
return {}
class_label = info.features['label']
class_to_idx = {n: class_label.str2int(n) for n in class_label.names}
return class_to_idx
class ParserTfds(Parser):
""" Wrap Tensorflow Datasets for use in PyTorch
There several things to be aware of:
* To prevent excessive examples being dropped per epoch w/ distributed training or multiplicity of
dataloader workers, the train iterator wraps to avoid returning partial batches that trigger drop_last
https://github.com/pytorch/pytorch/issues/33413
* With PyTorch IterableDatasets, each worker in each replica operates in isolation, the final batch
from each worker could be a different size. For training this is worked around by option above, for
validation extra examples are inserted iff distributed mode is enabled so that the batches being reduced
across replicas are of same size. This will slightly alter the results, distributed validation will not be
100% correct. This is similar to common handling in DistributedSampler for normal Datasets but a bit worse
since there are up to N * J extra examples with IterableDatasets.
* The sharding (splitting of dataset into TFRecord) files imposes limitations on the number of
replicas and dataloader workers you can use. For really small datasets that only contain a few shards
you may have to train non-distributed w/ 1-2 dataloader workers. This is likely not a huge concern as the
benefit of distributed training or fast dataloading should be much less for small datasets.
* This wrapper is currently configured to return individual, decompressed image examples from the TFDS
dataset. The augmentation (transforms) and batching is still done in PyTorch. It would be possible
to specify TF augmentation fn and return augmented batches w/ some modifications to other downstream
components.
"""
def __init__(
self,
root,
name,
split='train',
is_training=False,
batch_size=None,
download=False,
repeats=0,
seed=42,
input_name='image',
input_image='RGB',
target_name='label',
target_image='',
prefetch_size=None,
shuffle_size=None,
max_threadpool_size=None
):
""" Tensorflow-datasets Wrapper
Args:
root: root data dir (ie your TFDS_DATA_DIR. not dataset specific sub-dir)
name: tfds dataset name (eg `imagenet2012`)
split: tfds dataset split (can use all TFDS split strings eg `train[:10%]`)
is_training: training mode, shuffle enabled, dataset len rounded by batch_size
batch_size: batch_size to use to unsure total examples % batch_size == 0 in training across all dis nodes
download: download and build TFDS dataset if set, otherwise must use tfds CLI
repeats: iterate through (repeat) the dataset this many times per iteration (once if 0 or 1)
seed: common seed for shard shuffle across all distributed/worker instances
input_name: name of Feature to return as data (input)
input_image: image mode if input is an image (currently PIL mode string)
target_name: name of Feature to return as target (label)
target_image: image mode if target is an image (currently PIL mode string)
prefetch_size: override default tf.data prefetch buffer size
shuffle_size: override default tf.data shuffle buffer size
max_threadpool_size: override default threadpool size for tf.data
"""
super().__init__()
self.root = root
self.split = split
self.is_training = is_training
if self.is_training:
assert batch_size is not None, \
"Must specify batch_size in training mode for reasonable behaviour w/ TFDS wrapper"
self.batch_size = batch_size
self.repeats = repeats
self.common_seed = seed # a seed that's fixed across all worker / distributed instances
# performance settings
self.prefetch_size = prefetch_size or PREFETCH_SIZE
self.shuffle_size = shuffle_size or SHUFFLE_SIZE
self.max_threadpool_size = max_threadpool_size or MAX_TP_SIZE
# TFDS builder and split information
self.input_name = input_name # FIXME support tuples / lists of inputs and targets and full range of Feature
self.input_image = input_image
self.target_name = target_name
self.target_image = target_image
self.builder = tfds.builder(name, data_dir=root)
# NOTE: the tfds command line app can be used download & prepare datasets if you don't enable download flag
if download:
self.builder.download_and_prepare()
self.class_to_idx = get_class_labels(self.builder.info) if self.target_name == 'label' else {}
self.split_info = self.builder.info.splits[split]
self.num_examples = self.split_info.num_examples
# Distributed world state
self.dist_rank = 0
self.dist_num_replicas = 1
if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1:
self.dist_rank = dist.get_rank()
self.dist_num_replicas = dist.get_world_size()
# Attributes that are updated in _lazy_init, including the tf.data pipeline itself
self.global_num_workers = 1
self.worker_info = None
self.worker_seed = 0 # seed unique to each work instance
self.subsplit = None # set when data is distributed across workers using sub-splits
self.ds = None # initialized lazily on each dataloader worker process
def _lazy_init(self):
""" Lazily initialize the dataset.
This is necessary to init the Tensorflow dataset pipeline in the (dataloader) process that
will be using the dataset instance. The __init__ method is called on the main process,
this will be called in a dataloader worker process.
NOTE: There will be problems if you try to re-use this dataset across different loader/worker
instances once it has been initialized. Do not call any dataset methods that can call _lazy_init
before it is passed to dataloader.
"""
worker_info = torch.utils.data.get_worker_info()
# setup input context to split dataset across distributed processes
num_workers = 1
global_worker_id = 0
if worker_info is not None:
self.worker_info = worker_info
self.worker_seed = worker_info.seed
num_workers = worker_info.num_workers
self.global_num_workers = self.dist_num_replicas * num_workers
global_worker_id = self.dist_rank * num_workers + worker_info.id
""" Data sharding
InputContext will assign subset of underlying TFRecord files to each 'pipeline' if used.
My understanding is that using split, the underling TFRecord files will shuffle (shuffle_files=True)
between the splits each iteration, but that understanding could be wrong.
I am currently using a mix of InputContext shard assignment and fine-grained sub-splits for distributing
the data across workers. For training InputContext is used to assign shards to nodes unless num_shards
in dataset < total number of workers. Otherwise sub-split API is used for datasets without enough shards or
for validation where we can't drop examples and need to avoid minimize uneven splits to avoid padding.
"""
should_subsplit = self.global_num_workers > 1 and (
self.split_info.num_shards < self.global_num_workers or not self.is_training)
if should_subsplit:
# split the dataset w/o using sharding for more even examples / worker, can result in less optimal
# read patterns for distributed training (overlap across shards) so better to use InputContext there
if has_buggy_even_splits:
# my even_split workaround doesn't work on subsplits, upgrade tfds!
if not isinstance(self.split_info, tfds.core.splits.SubSplitInfo):
subsplits = even_split_indices(self.split, self.global_num_workers, self.num_examples)
self.subsplit = subsplits[global_worker_id]
else:
subsplits = tfds.even_splits(self.split, self.global_num_workers)
self.subsplit = subsplits[global_worker_id]
input_context = None
if self.global_num_workers > 1 and self.subsplit is None:
# set input context to divide shards among distributed replicas
input_context = tf.distribute.InputContext(
num_input_pipelines=self.global_num_workers,
input_pipeline_id=global_worker_id,
num_replicas_in_sync=self.dist_num_replicas # FIXME does this arg have any impact?
)
read_config = tfds.ReadConfig(
shuffle_seed=self.common_seed,
shuffle_reshuffle_each_iteration=True,
input_context=input_context)
ds = self.builder.as_dataset(
split=self.subsplit or self.split, shuffle_files=self.is_training, read_config=read_config)
# avoid overloading threading w/ combo of TF ds threads + PyTorch workers
options = tf.data.Options()
thread_member = 'threading' if hasattr(options, 'threading') else 'experimental_threading'
getattr(options, thread_member).private_threadpool_size = max(1, self.max_threadpool_size // num_workers)
getattr(options, thread_member).max_intra_op_parallelism = 1
ds = ds.with_options(options)
if self.is_training or self.repeats > 1:
# to prevent excessive drop_last batch behaviour w/ IterableDatasets
# see warnings at https://pytorch.org/docs/stable/data.html#multi-process-data-loading
ds = ds.repeat() # allow wrap around and break iteration manually
if self.is_training:
ds = ds.shuffle(min(self.num_examples, self.shuffle_size) // self.global_num_workers, seed=self.worker_seed)
ds = ds.prefetch(min(self.num_examples // self.global_num_workers, self.prefetch_size))
self.ds = tfds.as_numpy(ds)
def __iter__(self):
if self.ds is None:
self._lazy_init()
# Compute a rounded up sample count that is used to:
# 1. make batches even cross workers & replicas in distributed validation.
# This adds extra examples and will slightly alter validation results.
# 2. determine loop ending condition in training w/ repeat enabled so that only full batch_size
# batches are produced (underlying tfds iter wraps around)
target_example_count = math.ceil(max(1, self.repeats) * self.num_examples / self.global_num_workers)
if self.is_training:
# round up to nearest batch_size per worker-replica
target_example_count = math.ceil(target_example_count / self.batch_size) * self.batch_size
# Iterate until exhausted or sample count hits target when training (ds.repeat enabled)
example_count = 0
for example in self.ds:
input_data = example[self.input_name]
if self.input_image:
input_data = Image.fromarray(input_data, mode=self.input_image)
target_data = example[self.target_name]
if self.target_image:
target_data = Image.fromarray(target_data, mode=self.target_image)
yield input_data, target_data
example_count += 1
if self.is_training and example_count >= target_example_count:
# Need to break out of loop when repeat() is enabled for training w/ oversampling
# this results in extra examples per epoch but seems more desirable than dropping
# up to N*J batches per epoch (where N = num distributed processes, and J = num worker processes)
break
# Pad across distributed nodes (make counts equal by adding examples)
if not self.is_training and self.dist_num_replicas > 1 and self.subsplit is not None and \
0 < example_count < target_example_count:
# Validation batch padding only done for distributed training where results are reduced across nodes.
# For single process case, it won't matter if workers return different batch sizes.
# If using input_context or % based splits, sample count can vary significantly across workers and this
# approach should not be used (hence disabled if self.subsplit isn't set).
while example_count < target_example_count:
yield input_data, target_data # yield prev sample again
example_count += 1
def __len__(self):
# this is just an estimate and does not factor in extra examples added to pad batches based on
# complete worker & replica info (not available until init in dataloader).
return math.ceil(max(1, self.repeats) * self.num_examples / self.dist_num_replicas)
def _filename(self, index, basename=False, absolute=False):
assert False, "Not supported" # no random access to examples
def filenames(self, basename=False, absolute=False):
""" Return all filenames in dataset, overrides base"""
if self.ds is None:
self._lazy_init()
names = []
for sample in self.ds:
if len(names) > self.num_examples:
break # safety for ds.repeat() case
if 'file_name' in sample:
name = sample['file_name']
elif 'filename' in sample:
name = sample['filename']
elif 'id' in sample:
name = sample['id']
else:
assert False, "No supported name field present"
names.append(name)
return names
|
Cream/TinyViT/data/augmentation/parsers/parser_tfds.py/0
|
{
"file_path": "Cream/TinyViT/data/augmentation/parsers/parser_tfds.py",
"repo_id": "Cream",
"token_count": 6076
}
| 335 |
# --------------------------------------------------------
# Logger
# Copyright (c) 2022 Microsoft
# Based on the code: Swin Transformer
# (https://github.com/microsoft/swin-transformer)
# --------------------------------------------------------
import os
import sys
import logging
import functools
from termcolor import colored
@functools.lru_cache()
def create_logger(output_dir, dist_rank=0, name=''):
# create logger
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
logger.propagate = False
# create formatter
fmt = '[%(asctime)s %(name)s] (%(filename)s %(lineno)d): %(levelname)s %(message)s'
color_fmt = colored('[%(asctime)s %(name)s]', 'green') + \
colored('(%(filename)s %(lineno)d)', 'yellow') + \
': %(levelname)s %(message)s'
# create console handlers for master process
if dist_rank == 0:
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(
logging.Formatter(fmt=color_fmt, datefmt='%Y-%m-%d %H:%M:%S'))
logger.addHandler(console_handler)
# create file handlers
file_handler = logging.FileHandler(os.path.join(
output_dir, f'log_rank{dist_rank}.txt'), mode='a')
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter(
fmt=fmt, datefmt='%Y-%m-%d %H:%M:%S'))
logger.addHandler(file_handler)
return logger
|
Cream/TinyViT/logger.py/0
|
{
"file_path": "Cream/TinyViT/logger.py",
"repo_id": "Cream",
"token_count": 570
}
| 336 |
"""The implementation of iRPE (image relative position encoding)."""
from easydict import EasyDict as edict
import math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
try:
from rpe_ops.rpe_index import RPEIndexFunction
except ImportError:
RPEIndexFunction = None
import warnings
RED_STR = "\033[91m{}\033[00m"
warnings.warn(RED_STR.format("[WARNING] The module `rpe_ops` is not built. \
For better training performance, please build `rpe_ops`."),)
@torch.no_grad()
def piecewise_index(relative_position, alpha, beta, gamma, dtype):
"""piecewise index function defined in Eq. (18) in our paper.
Parameters
----------
relative_position: torch.Tensor, dtype: long or float
The shape of `relative_position` is (L, L).
alpha, beta, gamma: float
The coefficients of piecewise index function.
Returns
-------
idx: torch.Tensor, dtype: long
A tensor indexing relative distances to corresponding encodings.
`idx` is a long tensor, whose shape is (L, L) and each element is in [-beta, beta].
"""
rp_abs = relative_position.abs()
mask = rp_abs <= alpha
not_mask = ~mask
rp_out = relative_position[not_mask]
rp_abs_out = rp_abs[not_mask]
y_out = (torch.sign(rp_out) * (alpha +
torch.log(rp_abs_out / alpha) /
math.log(gamma / alpha) *
(beta - alpha)).round().clip(max=beta)).to(dtype)
idx = relative_position.clone()
if idx.dtype in [torch.float32, torch.float64]:
# round(x) when |x| <= alpha
idx = idx.round().to(dtype)
# assign the value when |x| > alpha
idx[not_mask] = y_out
return idx
def get_absolute_positions(height, width, dtype, device):
'''Get absolute positions
Take height = 3, width = 3 as an example:
rows: cols:
1 1 1 1 2 3
2 2 2 1 2 3
3 3 3 1 2 3
return stack([rows, cols], 2)
Parameters
----------
height, width: int
The height and width of feature map
dtype: torch.dtype
the data type of returned value
device: torch.device
the device of returned value
Return
------
2D absolute positions: torch.Tensor
The shape is (height, width, 2),
where 2 represents a 2D position (row, col).
'''
rows = torch.arange(height, dtype=dtype, device=device).view(
height, 1).repeat(1, width)
cols = torch.arange(width, dtype=dtype, device=device).view(
1, width).repeat(height, 1)
return torch.stack([rows, cols], 2)
@torch.no_grad()
def quantize_values(values):
"""Quantization: Map all values (long or float) into a discrte integer set.
Parameters
----------
values: torch.Tensor, dtype: long or float
arbitrary shape
Returns
-------
res: torch.Tensor, dtype: long
The quantization result starts at 0.
The shape is the same as that of `values`.
uq.numel(): long
The number of the quantization integers, namely `res` is in [0, uq.numel()).
"""
# quantize and re-assign bucket id
res = torch.empty_like(values)
uq = values.unique()
cnt = 0
for (tid, v) in enumerate(uq):
mask = (values == v)
cnt += torch.count_nonzero(mask)
res[mask] = tid
assert cnt == values.numel()
return res, uq.numel()
class METHOD:
"""define iRPE method IDs
We divide the implementation of CROSS into CROSS_ROWS and CROSS_COLS.
"""
EUCLIDEAN = 0
QUANT = 1
PRODUCT = 3
CROSS = 4
CROSS_ROWS = 41
CROSS_COLS = 42
@torch.no_grad()
def _rp_2d_euclidean(diff, **kwargs):
"""2D RPE with Euclidean method.
Parameters
----------
diff: torch.Tensor
The shape of `diff` is (L, L, 2),
where L is the sequence length,
and 2 represents a 2D offset (row_offset, col_offset).
Returns
-------
index: torch.Tensor, dtype: long
index to corresponding encodings.
The shape of `index` is (L, L),
where L is the sequence length.
"""
dis = diff.square().sum(2).float().sqrt().round()
return piecewise_index(dis, **kwargs)
@torch.no_grad()
def _rp_2d_quant(diff, **kwargs):
"""2D RPE with Quantization method.
Parameters
----------
diff: torch.Tensor
The shape of `diff` is (L, L, 2),
where L is the sequence length,
and 2 represents a 2D offset (row_offset, col_offset).
Returns
-------
index: torch.Tensor, dtype: long
index to corresponding encodings.
The shape of `index` is (L, L),
where L is the sequence length.
"""
dis = diff.square().sum(2)
return piecewise_index(dis, **kwargs)
@torch.no_grad()
def _rp_2d_product(diff, **kwargs):
"""2D RPE with Product method.
Parameters
----------
diff: torch.Tensor
The shape of `diff` is (L, L, 2),
where L is the sequence length,
and 2 represents a 2D offset (row_offset, col_offset).
Returns
-------
index: torch.Tensor, dtype: long
index to corresponding encodings.
The shape of `index` is (L, L),
where L is the sequence length.
"""
# convert beta to an integer since beta is a float number.
beta_int = int(kwargs['beta'])
S = 2 * beta_int + 1
# the output of piecewise index function is in [-beta_int, beta_int]
r = piecewise_index(diff[:, :, 0], **kwargs) + \
beta_int # [0, 2 * beta_int]
c = piecewise_index(diff[:, :, 1], **kwargs) + \
beta_int # [0, 2 * beta_int]
pid = r * S + c
return pid
@torch.no_grad()
def _rp_2d_cross_rows(diff, **kwargs):
"""2D RPE with Cross for rows.
Parameters
----------
diff: torch.Tensor
The shape of `diff` is (L, L, 2),
where L is the sequence length,
and 2 represents a 2D offset (row_offset, col_offset).
Returns
-------
index: torch.Tensor, dtype: long
index to corresponding encodings.
The shape of `index` is (L, L),
where L is the sequence length.
"""
dis = diff[:, :, 0]
return piecewise_index(dis, **kwargs)
@torch.no_grad()
def _rp_2d_cross_cols(diff, **kwargs):
"""2D RPE with Cross for columns.
Parameters
----------
diff: torch.Tensor
The shape of `diff` is (L, L, 2),
where L is the sequence length,
and 2 represents a 2D offset (row_offset, col_offset).
Returns
-------
index: torch.Tensor, dtype: long
index to corresponding encodings.
The shape of `index` is (L, L),
where L is the sequence length.
"""
dis = diff[:, :, 1]
return piecewise_index(dis, **kwargs)
# Define a mapping from METHOD_ID to Python function
_METHOD_FUNC = {
METHOD.EUCLIDEAN: _rp_2d_euclidean,
METHOD.QUANT: _rp_2d_quant,
METHOD.PRODUCT: _rp_2d_product,
METHOD.CROSS_ROWS: _rp_2d_cross_rows,
METHOD.CROSS_COLS: _rp_2d_cross_cols,
}
def get_num_buckets(method, alpha, beta, gamma):
""" Get number of buckets storing relative position encoding.
The buckets does not contain `skip` token.
Parameters
----------
method: METHOD
The method ID of image relative position encoding.
alpha, beta, gamma: float
The coefficients of piecewise index function.
Returns
-------
num_buckets: int
The number of buckets storing relative position encoding.
"""
beta_int = int(beta)
if method == METHOD.PRODUCT:
# IDs in [0, (2 * beta_int + 1)^2) for Product method
num_buckets = (2 * beta_int + 1) ** 2
else:
# IDs in [-beta_int, beta_int] except of Product method
num_buckets = 2 * beta_int + 1
return num_buckets
# (method, alpha, beta, gamma) -> (bucket_ids, num_buckets, height, width)
BUCKET_IDS_BUF = dict()
@torch.no_grad()
def get_bucket_ids_2d_without_skip(method, height, width,
alpha, beta, gamma,
dtype=torch.long, device=torch.device('cpu')):
"""Get bucket IDs for image relative position encodings without skip token
Parameters
----------
method: METHOD
The method ID of image relative position encoding.
height, width: int
The height and width of the feature map.
The sequence length is equal to `height * width`.
alpha, beta, gamma: float
The coefficients of piecewise index function.
dtype: torch.dtype
the data type of returned `bucket_ids`
device: torch.device
the device of returned `bucket_ids`
Returns
-------
bucket_ids: torch.Tensor, dtype: long
The bucket IDs which index to corresponding encodings.
The shape of `bucket_ids` is (skip + L, skip + L),
where `L = height * wdith`.
num_buckets: int
The number of buckets including `skip` token.
L: int
The sequence length
"""
key = (method, alpha, beta, gamma, dtype, device)
value = BUCKET_IDS_BUF.get(key, None)
if value is None or value[-2] < height or value[-1] < width:
if value is None:
max_height, max_width = height, width
else:
max_height = max(value[-2], height)
max_width = max(value[-1], width)
# relative position encoding mapping function
func = _METHOD_FUNC.get(method, None)
if func is None:
raise NotImplementedError(
f"[Error] The method ID {method} does not exist.")
pos = get_absolute_positions(max_height, max_width, dtype, device)
# compute the offset of a pair of 2D relative positions
max_L = max_height * max_width
pos1 = pos.view((max_L, 1, 2))
pos2 = pos.view((1, max_L, 2))
# diff: shape of (L, L, 2)
diff = pos1 - pos2
# bucket_ids: shape of (L, L)
bucket_ids = func(diff, alpha=alpha, beta=beta,
gamma=gamma, dtype=dtype)
beta_int = int(beta)
if method != METHOD.PRODUCT:
bucket_ids += beta_int
bucket_ids = bucket_ids.view(
max_height, max_width, max_height, max_width)
num_buckets = get_num_buckets(method, alpha, beta, gamma)
value = (bucket_ids, num_buckets, height, width)
BUCKET_IDS_BUF[key] = value
L = height * width
bucket_ids = value[0][:height, :width, :height, :width].reshape(L, L)
num_buckets = value[1]
return bucket_ids, num_buckets, L
@torch.no_grad()
def get_bucket_ids_2d(method, height, width,
skip, alpha, beta, gamma,
dtype=torch.long, device=torch.device('cpu')):
"""Get bucket IDs for image relative position encodings
Parameters
----------
method: METHOD
The method ID of image relative position encoding.
height, width: int
The height and width of the feature map.
The sequence length is equal to `height * width`.
skip: int
The number of skip token before spatial tokens.
When skip is 0, no classification token.
When skip is 1, there is a classification token before spatial tokens.
When skip > 1, there are `skip` extra tokens before spatial tokens.
alpha, beta, gamma: float
The coefficients of piecewise index function.
dtype: torch.dtype
the data type of returned `bucket_ids`
device: torch.device
the device of returned `bucket_ids`
Returns
-------
bucket_ids: torch.Tensor, dtype: long
The bucket IDs which index to corresponding encodings.
The shape of `bucket_ids` is (skip + L, skip + L),
where `L = height * wdith`.
num_buckets: int
The number of buckets including `skip` token.
"""
bucket_ids, num_buckets, L = get_bucket_ids_2d_without_skip(method, height, width,
alpha, beta, gamma,
dtype, device)
# add an extra encoding (id = num_buckets) for the classification token
if skip > 0:
new_bids = bucket_ids.new_empty(size=(skip + L, skip + L))
# if extra token exists, we add extra bucket as its encoding.
extra_bucket_id = num_buckets
num_buckets += 1
new_bids[:skip] = extra_bucket_id
new_bids[:, :skip] = extra_bucket_id
new_bids[skip:, skip:] = bucket_ids
bucket_ids = new_bids
bucket_ids = bucket_ids.contiguous()
return bucket_ids, num_buckets
class iRPE(nn.Module):
"""The implementation of image relative position encoding (excluding Cross method).
Parameters
----------
head_dim: int
The dimension for each head.
num_heads: int
The number of parallel attention heads.
mode: str or None
The mode of image relative position encoding.
Choices: [None, 'bias', 'contextual']
method: METHOD
The method ID of image relative position encoding.
The `METHOD` class is defined in `irpe.py`.
transposed: bool
Whether to transpose the input feature.
For iRPE on queries or keys, transposed should be `True`.
For iRPE on values, transposed should be `False`.
num_buckets: int
The number of buckets, which store encodings.
initializer: None or an inplace function
[Optional] The initializer to `lookup_table`.
Initalize `lookup_table` as zero by default.
rpe_config: RPEConfig
The config generated by the function `get_single_rpe_config`.
"""
# a buffer to store bucket index
# (key, rp_bucket, _ctx_rp_bucket_flatten)
_rp_bucket_buf = (None, None, None)
def __init__(self, head_dim, num_heads=8,
mode=None, method=None,
transposed=True, num_buckets=None,
initializer=None, rpe_config=None):
super().__init__()
self.num_heads = num_heads
self.head_dim = head_dim
# relative position
assert mode in [None, 'bias', 'contextual']
self.mode = mode
assert method is not None, 'method should be a METHOD ID rather than None'
self.method = method
self.transposed = transposed
self.num_buckets = num_buckets
if initializer is None:
def initializer(x): return None
self.initializer = initializer
self.reset_parameters()
self.rpe_config = rpe_config
@torch.no_grad()
def reset_parameters(self):
# initialize the parameters of iRPE
if self.transposed:
if self.mode == 'bias':
self.lookup_table_bias = nn.Parameter(
torch.zeros(self.num_heads, self.num_buckets))
self.initializer(self.lookup_table_bias)
elif self.mode == 'contextual':
self.lookup_table_weight = nn.Parameter(
torch.zeros(self.num_heads,
self.head_dim, self.num_buckets))
self.initializer(self.lookup_table_weight)
else:
if self.mode == 'bias':
raise NotImplementedError(
"[Error] Bias non-transposed RPE does not exist.")
elif self.mode == 'contextual':
self.lookup_table_weight = nn.Parameter(
torch.zeros(self.num_heads,
self.num_buckets, self.head_dim))
self.initializer(self.lookup_table_weight)
def forward(self, x, height=None, width=None):
"""forward function for iRPE.
Parameters
----------
x: torch.Tensor
Input Tensor whose shape is (B, H, L, head_dim),
where B is batch size,
H is the number of heads,
L is the sequence length,
equal to height * width (+1 if class token exists)
head_dim is the dimension of each head
Returns
-------
rpe_encoding: torch.Tensor
image Relative Position Encoding,
whose shape is (B, H, L, L)
"""
rp_bucket, self._ctx_rp_bucket_flatten = \
self._get_rp_bucket(x, height=height, width=width)
if self.transposed:
return self.forward_rpe_transpose(x, rp_bucket)
return self.forward_rpe_no_transpose(x, rp_bucket)
def _get_rp_bucket(self, x, height=None, width=None):
"""Get relative position encoding buckets IDs corresponding the input shape
Parameters
----------
x: torch.Tensor
Input Tensor whose shape is (B, H, L, head_dim),
where B is batch size,
H is the number of heads,
L is the sequence length,
equal to height * width (+1 if class token exists)
head_dim is the dimension of each head
height: int or None
[Optional] The height of the input
If not defined, height = floor(sqrt(L))
width: int or None
[Optional] The width of the input
If not defined, width = floor(sqrt(L))
Returns
-------
rp_bucket: torch.Tensor
relative position encoding buckets IDs
The shape is (L, L)
_ctx_rp_bucket_flatten: torch.Tensor or None
It is a private tensor for efficient computation.
"""
B, H, L, D = x.shape
device = x.device
if height is None:
E = int(math.sqrt(L))
height = width = E
key = (height, width, device)
# use buffer if the spatial shape and device is not changable.
if self._rp_bucket_buf[0] == key:
return self._rp_bucket_buf[1:3]
skip = L - height * width
config = self.rpe_config
if RPEIndexFunction is not None and self.mode == 'contextual' and self.transposed:
# RPEIndexFunction uses int32 index.
dtype = torch.int32
else:
dtype = torch.long
rp_bucket, num_buckets = get_bucket_ids_2d(method=self.method,
height=height, width=width,
skip=skip, alpha=config.alpha,
beta=config.beta, gamma=config.gamma,
dtype=dtype, device=device)
assert num_buckets == self.num_buckets
# transposed contextual
_ctx_rp_bucket_flatten = None
if self.mode == 'contextual' and self.transposed:
if RPEIndexFunction is None:
offset = torch.arange(0, L * self.num_buckets, self.num_buckets,
dtype=rp_bucket.dtype, device=rp_bucket.device).view(-1, 1)
_ctx_rp_bucket_flatten = (rp_bucket + offset).flatten()
self._rp_bucket_buf = (key, rp_bucket, _ctx_rp_bucket_flatten)
return rp_bucket, _ctx_rp_bucket_flatten
def forward_rpe_transpose(self, x, rp_bucket):
"""Forward function for iRPE (transposed version)
This version is utilized by RPE on Query or Key
Parameters
----------
x: torch.Tensor
Input Tensor whose shape is (B, H, L, head_dim),
where B is batch size,
H is the number of heads,
L is the sequence length,
equal to height * width (+1 if class token exists)
head_dim is the dimension of each head
rp_bucket: torch.Tensor
relative position encoding buckets IDs
The shape is (L, L)
Weights
-------
lookup_table_bias: torch.Tensor
The shape is (H or 1, num_buckets)
or
lookup_table_weight: torch.Tensor
The shape is (H or 1, head_dim, num_buckets)
Returns
-------
output: torch.Tensor
Relative position encoding on queries or keys.
The shape is (B or 1, H, L, L),
where D is the output dimension for each head.
"""
B = len(x) # batch_size
L_query, L_key = rp_bucket.shape
if self.mode == 'bias':
return self.lookup_table_bias[:, rp_bucket.flatten()].\
view(1, self.num_heads, L_query, L_key)
elif self.mode == 'contextual':
"""
ret[b, h, i, j] = lookup_table_weight[b, h, i, rp_bucket[i, j]]
ret[b, h, i * L_key + j] = \
lookup_table[b, h, i * num_buckets + rp_buckets[i, j]]
computational cost
------------------
matmul: B * H * L_query * head_dim * num_buckets
index: L_query + L_query * L_key + B * H * L_query * L_key
total: O(B * H * L_query * (head_dim * num_buckets + L_key))
"""
lookup_table = torch.matmul(
x.transpose(0, 1).reshape(-1, B * L_query, self.head_dim),
self.lookup_table_weight).\
view(-1, B, L_query, self.num_buckets).transpose(0, 1)
if RPEIndexFunction is not None:
return RPEIndexFunction.apply(lookup_table, rp_bucket)
else:
return lookup_table.flatten(2)[:, :, self._ctx_rp_bucket_flatten].\
view(B, -1, L_query, L_key)
def forward_rpe_no_transpose(self, x, rp_bucket):
"""Forward function for iRPE (non-transposed version)
This version is utilized by RPE on Value.
Parameters
----------
x: torch.Tensor
Input Tensor whose shape is (B, H, L, head_dim),
where B is batch size,
H is the number of heads,
L is the sequence length,
equal to height * width (+1 if class token exists)
head_dim is the dimension of each head
rp_bucket: torch.Tensor
relative position encoding buckets IDs
The shape is (L, L)
Weights
-------
lookup_table_weight: torch.Tensor
The shape is (H or 1, num_buckets, head_dim)
Returns
-------
output: torch.Tensor
Relative position encoding on values.
The shape is (B, H, L, D),
where D is the output dimension for each head.
"""
B = len(x) # batch_size
L_query, L_key = rp_bucket.shape
assert self.mode == 'contextual', "Only support contextual \
version in non-transposed version"
weight = self.lookup_table_weight[:, rp_bucket.flatten()].\
view(self.num_heads, L_query, L_key, self.head_dim)
# (H, L_query, B, L_key) @ (H, L_query, L_key, D) = (H, L_query, B, D)
# -> (B, H, L_query, D)
return torch.matmul(x.permute(1, 2, 0, 3), weight).permute(2, 0, 1, 3)
def __repr__(self):
return 'iRPE(head_dim={rpe.head_dim}, num_heads={rpe.num_heads}, \
mode="{rpe.mode}", method={rpe.method}, transposed={rpe.transposed}, \
num_buckets={rpe.num_buckets}, initializer={rpe.initializer}, \
rpe_config={rpe.rpe_config})'.format(rpe=self)
class iRPE_Cross(nn.Module):
"""The implementation of image relative position encoding (specific for Cross method).
Parameters
----------
head_dim: int
The dimension for each head.
num_heads: int
The number of parallel attention heads.
mode: str or None
The mode of image relative position encoding.
Choices: [None, 'bias', 'contextual']
method: METHOD
The method ID of image relative position encoding.
The `METHOD` class is defined in `irpe.py`.
transposed: bool
Whether to transpose the input feature.
For iRPE on queries or keys, transposed should be `True`.
For iRPE on values, transposed should be `False`.
num_buckets: int
The number of buckets, which store encodings.
initializer: None or an inplace function
[Optional] The initializer to `lookup_table`.
Initalize `lookup_table` as zero by default.
rpe_config: RPEConfig
The config generated by the function `get_single_rpe_config`.
"""
def __init__(self, method, **kwargs):
super().__init__()
assert method == METHOD.CROSS
self.rp_rows = iRPE(**kwargs, method=METHOD.CROSS_ROWS)
self.rp_cols = iRPE(**kwargs, method=METHOD.CROSS_COLS)
def forward(self, x, height=None, width=None):
"""forward function for iRPE.
Compute encoding on horizontal and vertical directions separately,
then summarize them.
Parameters
----------
x: torch.Tensor
Input Tensor whose shape is (B, H, L, head_dim),
where B is batch size,
H is the number of heads,
L is the sequence length,
equal to height * width (+1 if class token exists)
head_dim is the dimension of each head
height: int or None
[Optional] The height of the input
If not defined, height = floor(sqrt(L))
width: int or None
[Optional] The width of the input
If not defined, width = floor(sqrt(L))
Returns
-------
rpe_encoding: torch.Tensor
Image Relative Position Encoding,
whose shape is (B, H, L, L)
"""
rows = self.rp_rows(x, height=height, width=width)
cols = self.rp_cols(x, height=height, width=width)
return rows + cols
def __repr__(self):
return 'iRPE_Cross(head_dim={rpe.head_dim}, \
num_heads={rpe.num_heads}, mode="{rpe.mode}", method={rpe.method}, \
transposed={rpe.transposed}, num_buckets={rpe.num_buckets}, \
initializer={rpe.initializer}, \
rpe_config={rpe.rpe_config})'.format(rpe=self.rp_rows)
def get_single_rpe_config(ratio=1.9,
method=METHOD.PRODUCT,
mode='contextual',
shared_head=True,
skip=0):
"""Get the config of single relative position encoding
Parameters
----------
ratio: float
The ratio to control the number of buckets.
method: METHOD
The method ID of image relative position encoding.
The `METHOD` class is defined in `irpe.py`.
mode: str or None
The mode of image relative position encoding.
Choices: [None, 'bias', 'contextual']
shared_head: bool
Whether to share weight among different heads.
skip: int
The number of skip token before spatial tokens.
When skip is 0, no classification token.
When skip is 1, there is a classification token before spatial tokens.
When skip > 1, there are `skip` extra tokens before spatial tokens.
Returns
-------
config: RPEConfig
The config of single relative position encoding.
"""
config = edict()
# whether to share encodings across different heads
config.shared_head = shared_head
# mode: None, bias, contextual
config.mode = mode
# method: None, Bias, Quant, Cross, Product
config.method = method
# the coefficients of piecewise index function
config.alpha = 1 * ratio
config.beta = 2 * ratio
config.gamma = 8 * ratio
# set the number of buckets
config.num_buckets = get_num_buckets(method,
config.alpha,
config.beta,
config.gamma)
# add extra bucket for `skip` token (e.g. class token)
if skip > 0:
config.num_buckets += 1
return config
def get_rpe_config(ratio=1.9,
method=METHOD.PRODUCT,
mode='contextual',
shared_head=True,
skip=0,
rpe_on='k'):
"""Get the config of relative position encoding on queries, keys and values
Parameters
----------
ratio: float
The ratio to control the number of buckets.
method: METHOD or str
The method ID (or name) of image relative position encoding.
The `METHOD` class is defined in `irpe.py`.
mode: str or None
The mode of image relative position encoding.
Choices: [None, 'bias', 'contextual']
shared_head: bool
Whether to share weight among different heads.
skip: int
The number of skip token before spatial tokens.
When skip is 0, no classification token.
When skip is 1, there is a classification token before spatial tokens.
When skip > 1, there are `skip` extra tokens before spatial tokens.
rpe_on: str
Where RPE attaches.
"q": RPE on queries
"k": RPE on keys
"v": RPE on values
"qk": RPE on queries and keys
"qkv": RPE on queries, keys and values
Returns
-------
config: RPEConfigs
config.rpe_q: the config of relative position encoding on queries
config.rpe_k: the config of relative position encoding on keys
config.rpe_v: the config of relative position encoding on values
"""
# alias
if isinstance(method, str):
method_mapping = dict(
euc=METHOD.EUCLIDEAN,
quant=METHOD.QUANT,
cross=METHOD.CROSS,
product=METHOD.PRODUCT,
)
method = method_mapping[method.lower()]
if mode == 'ctx':
mode = 'contextual'
config = edict()
# relative position encoding on queries, keys and values
kwargs = dict(
ratio=ratio,
method=method,
mode=mode,
shared_head=shared_head,
skip=skip,
)
config.rpe_q = get_single_rpe_config(**kwargs) if 'q' in rpe_on else None
config.rpe_k = get_single_rpe_config(**kwargs) if 'k' in rpe_on else None
config.rpe_v = get_single_rpe_config(**kwargs) if 'v' in rpe_on else None
return config
def build_rpe(config, head_dim, num_heads):
"""Build iRPE modules on queries, keys and values.
Parameters
----------
config: RPEConfigs
config.rpe_q: the config of relative position encoding on queries
config.rpe_k: the config of relative position encoding on keys
config.rpe_v: the config of relative position encoding on values
None when RPE is not used.
head_dim: int
The dimension for each head.
num_heads: int
The number of parallel attention heads.
Returns
-------
modules: a list of nn.Module
The iRPE Modules on [queries, keys, values].
None when RPE is not used.
"""
if config is None:
return None, None, None
rpes = [config.rpe_q, config.rpe_k, config.rpe_v]
transposeds = [True, True, False]
def _build_single_rpe(rpe, transposed):
if rpe is None:
return None
rpe_cls = iRPE if rpe.method != METHOD.CROSS else iRPE_Cross
return rpe_cls(
head_dim=head_dim,
num_heads=1 if rpe.shared_head else num_heads,
mode=rpe.mode,
method=rpe.method,
transposed=transposed,
num_buckets=rpe.num_buckets,
rpe_config=rpe,
)
return [_build_single_rpe(rpe, transposed)
for rpe, transposed in zip(rpes, transposeds)]
if __name__ == '__main__':
config = get_rpe_config(skip=1)
rpe = build_rpe(config, head_dim=32, num_heads=4)
print(rpe)
|
Cream/iRPE/DETR-with-iRPE/models/rpe_attention/irpe.py/0
|
{
"file_path": "Cream/iRPE/DETR-with-iRPE/models/rpe_attention/irpe.py",
"repo_id": "Cream",
"token_count": 14289
}
| 337 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
@torch.no_grad()
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
if isinstance(output, list):
output = output[-1]
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.reshape(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
res.append(correct_k.mul_(100.0 / batch_size).item())
return res
|
CvT/lib/core/evaluate.py/0
|
{
"file_path": "CvT/lib/core/evaluate.py",
"repo_id": "CvT",
"token_count": 268
}
| 338 |
from .build import build_lr_scheduler
|
CvT/lib/scheduler/__init__.py/0
|
{
"file_path": "CvT/lib/scheduler/__init__.py",
"repo_id": "CvT",
"token_count": 12
}
| 339 |
import pandas as pd
from msanomalydetector import SpectralResidual, DetectMode
import matplotlib
import matplotlib.pyplot as plt
import logging
from azureml.core.run import Run
import os
def log_plot_result(input_df, output_df, col_name, mode):
fig = plt.figure(figsize=(20, 10))
ax1 = fig.add_subplot(211)
if mode == 'AnomalyAndMargin':
ax1.fill_between(output_df.index, output_df['lowerBoundary'], output_df['upperBoundary'], color='grey', alpha=0.2, zorder=1)
ax1.plot(output_df.index, output_df['expectedValue'], alpha=0.5, label='expected value', zorder=8)
ax1.plot(input_df.index, input_df['value'], label='value', zorder=5)
ax1.legend()
anomalies = input_df[output_df['isAnomaly']]
ax1.scatter(anomalies.index, anomalies['value'], c='red', zorder=10)
ax1.set_title(col_name)
ax2 = fig.add_subplot(212)
ax2.plot(output_df.index, output_df['mag'])
ax2.set_title('mag')
run = Run.get_context()
run.log_image(col_name, plot=plt)
def sr_detect(frame, detect_mode, batch_size, threshold, sensitivity):
model = SpectralResidual(frame, threshold=threshold, mag_window=3, score_window=40,
sensitivity=sensitivity, detect_mode=DetectMode(detect_mode), batch_size=batch_size)
result = model.detect()
if detect_mode == DetectMode.anomaly_and_margin.value:
return result[['isAnomaly', 'mag', 'score', 'expectedValue', 'lowerBoundary', 'upperBoundary']]
return result[['isAnomaly', 'mag', 'score']]
def detect(timestamp, data_to_detect, detect_mode, batch_size, threshold=0.3, sensitivity=99):
column_length = len(data_to_detect.columns)
if column_length == 1:
logging.debug('single column to detect')
frame = pd.DataFrame(columns=['timestamp', 'value'])
frame['timestamp'] = timestamp
frame['value'] = data_to_detect.iloc[:, 0]
output = sr_detect(frame, detect_mode, batch_size, threshold, sensitivity)
log_plot_result(frame, output, data_to_detect.columns[0], detect_mode)
else:
logging.debug(f'detect {column_length} columns')
output = pd.DataFrame()
for col in data_to_detect.columns:
frame = pd.DataFrame(columns=['timestamp', 'value'])
frame['timestamp'] = timestamp
frame['value'] = data_to_detect[col]
result = sr_detect(frame, detect_mode, batch_size, threshold, sensitivity)
log_plot_result(frame, result, col, detect_mode)
result.columns = [f'{rc}_{col}' for rc in result.columns]
output = pd.concat((output, result), axis=1)
return output
|
anomalydetector/aml_component/sr_detector.py/0
|
{
"file_path": "anomalydetector/aml_component/sr_detector.py",
"repo_id": "anomalydetector",
"token_count": 1091
}
| 340 |
"""
Copyright (C) Microsoft Corporation. All rights reserved.
Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. This license does not purport to express any claim of ownership over
data you may have shared with Microsoft in the creation of the Software Code.
Unless applicable law gives you more rights, Microsoft reserves all other
rights not expressly granted herein, whether by implication, estoppel or
otherwise.
THE SOFTWARE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
MICROSOFT OR ITS LICENSORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THE SOFTWARE CODE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import os
from srcnn.competition_metric import get_variance, evaluate_for_all_series
import time
import json
import argparse
from msanomalydetector.spectral_residual import SpectralResidual
from srcnn.utils import *
def auto():
path_auto = os.getcwd() + '/auto.json'
with open(path_auto, 'r+') as f:
store = json.load(f)
window = store['window']
epoch = store['epoch']
return window, epoch
def getfid(path):
return path.split('/')[-1]
def get_path(data_source):
if data_source == 'kpi':
dir_ = root + '/Test/'
trainfiles = [dir_ + _ for _ in os.listdir(dir_)]
files = trainfiles
else:
dir_ = root + '/' + data_source + '/'
files = [dir_ + _ for _ in os.listdir(dir_)]
return files
def get_score(data_source, files, thres, option):
total_time = 0
results = []
savedscore = []
for f in files:
print('reading', f)
if data_source == 'kpi' or data_source == 'test_kpi':
in_timestamp, in_value, in_label = read_csv_kpi(f)
else:
tmp_data = read_pkl(f)
in_timestamp, in_value, in_label = tmp_data['timestamp'], tmp_data['value'], tmp_data['label']
length = len(in_timestamp)
if model == 'sr_cnn' and len(in_value) < window:
print("length is shorter than win_size", len(in_value), window)
continue
time_start = time.time()
timestamp, label, pre, scores = models[model](in_timestamp, in_value, in_label, window, net, option, thres)
time_end = time.time()
total_time += time_end - time_start
results.append([timestamp, label, pre, f])
savedscore.append([label, scores, f, timestamp])
return total_time, results, savedscore
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='SRCNN')
parser.add_argument('--data', type=str, required=True, help='location of the data file')
parser.add_argument('--window', type=int, default=128, help='window size')
parser.add_argument('--epoch', type=int, default=10)
parser.add_argument('--model_path', type=str, default='snapshot', help='model path')
parser.add_argument('--delay', type=int, default=3, help='delay')
parser.add_argument('--thres', type=int, default=0.95, help='initial threshold of SR')
parser.add_argument('--auto', type=bool, default=False, help='Automatic filling parameters')
parser.add_argument('--model', type=str, default='sr_cnn', help='model')
parser.add_argument('--missing_option', type=str, default='anomaly',
help='missing data option, anomaly means treat missing data as anomaly')
args = parser.parse_args()
if args.auto:
window, epoch = auto()
else:
window = args.window
epoch = args.epoch
data_source = args.data
delay = args.delay
model = args.model
root = os.getcwd()
print(data, window, epoch)
models = {
'sr_cnn': sr_cnn_eval,
}
model_path = root + '/' + args.model_path + '/srcnn_retry' + str(epoch) + '_' + str(window) + '.bin'
srcnn_model = Anomaly(window)
net = load_model(srcnn_model, model_path).cuda()
files = get_path(data_source)
total_time, results, savedscore = get_score(data_source, files, args.thres, args.missing_option)
print('\n***********************************************')
print('data source:', data_source, ' model:', model)
print('-------------------------------')
total_fscore, pre, rec, TP, FP, TN, FN = evaluate_for_all_series(results, delay)
with open(data_source + '_saved_scores.json', 'w') as f:
json.dump(savedscore, f)
print('time used for making predictions:', total_time, 'seconds')
best = 0.
bestthre = 0.
print('delay :', delay)
if data_source == 'yahoo':
sru = {}
rf = open(data_source + 'sr3.json', 'r')
srres = json.load(rf)
for (srtime, srl, srpre, srf) in srres:
sru[getfid(srf)] = [srtime, srl, srpre]
for i in range(98):
newresults = []
threshold = 0.01 + i * 0.01
for f, (srtt, srlt, srpret, srft), (flabel, cnnscores, cnnf, cnnt) in zip(files, srres, savedscore):
fid = getfid(cnnf)
srtime = sru[fid][0]
srl = sru[fid][1]
srpre = sru[fid][2]
srtime = [(srtime[0] - 3600 * (64 - j)) for j in range(64)] + srtime
srl = [0] * 64 + srl
srpre = [0] * 64 + srpre
print(len(srl), len(flabel), '!!')
assert (len(srl) == len(flabel))
pre = [1 if item > threshold else 0 for item in cnnscores]
newresults.append([srtime, srpre, pre, f])
total_fscore, pre, rec, TP, FP, TN, FN = evaluate_for_all_series(newresults, delay, prt=False)
if total_fscore > best:
best = total_fscore
bestthre = threshold
results = []
threshold = bestthre
print('guided threshold :', threshold)
for f, (flabel, cnnscores, _, ftimestamp) in zip(files, savedscore):
pre = [1 if item > threshold else 0 for item in cnnscores]
results.append([ftimestamp, flabel, pre, f])
print('score\n')
total_fscore, pre, rec, TP, FP, TN, FN = evaluate_for_all_series(results, delay)
print(total_fscore)
best = 0.
for i in range(98):
newresults = []
threshold = 0.01 + i * 0.01
for f, (flabel, cnnscores, _, ftimestamp) in zip(files, savedscore):
pre = [1 if item > threshold else 0 for item in cnnscores]
newresults.append([ftimestamp, flabel, pre, f])
total_fscore, pre, rec, TP, FP, TN, FN = evaluate_for_all_series(newresults, delay, prt=False)
if total_fscore > best:
best = total_fscore
bestthre = threshold
print('tem best', best, threshold)
threshold = bestthre
print('best overall threshold :', threshold, 'best score :', best)
|
anomalydetector/srcnn/evalue.py/0
|
{
"file_path": "anomalydetector/srcnn/evalue.py",
"repo_id": "anomalydetector",
"token_count": 3083
}
| 341 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import logging
import os
import sys
from logging import Filter, Formatter, Logger, LogRecord, StreamHandler
from logging.handlers import TimedRotatingFileHandler
FORMATTER = Formatter("%(asctime)s - %(name)s — %(levelname)s — %(message)s")
LOG_FILE = "archai.log"
LOCAL_RANK = int(os.environ.get("LOCAL_RANK", 0))
class RankFilter(Filter):
"""A filter for logging records based on the rank of the process.
Only log records from the process with rank 0 will be logged,
while log records from other processes will be filtered out.
"""
def __init__(self, rank: int) -> None:
"""Initialize the filter with the rank of the process.
Args:
rank: The rank of the process that will generate log records.
"""
self.rank = rank
def filter(self, record: LogRecord) -> bool:
"""Filter a logging record based on the process rank.
Args:
record: The logging record to be filtered.
Returns:
`True` if the record should be logged, `False` otherwise.
"""
return self.rank == 0
def get_console_handler() -> StreamHandler:
"""Get a `StreamHandler` for logging to the console.
The `StreamHandler` can be used to log messages to the
console (i.e., `sys.stdout`) and is configured with a formatter.
Returns:
A `StreamHandler` for logging to the console.
"""
console_handler = StreamHandler(sys.stdout)
console_handler.setFormatter(FORMATTER)
return console_handler
def get_timed_file_handler() -> TimedRotatingFileHandler:
"""Get a `TimedRotatingFileHandler` for logging to timestamped files.
Returns:
A `TimedRotatingFileHandler` for logging to timestamped files.
"""
file_handler = TimedRotatingFileHandler(LOG_FILE, delay=True, when="midnight", encoding="utf-8")
file_handler.setFormatter(FORMATTER)
return file_handler
def get_logger(logger_name: str) -> Logger:
"""Get a logger with the specified name and default settings.
Args:
logger_name: The name of the logger.
Returns:
A `Logger` instance with the specified name and default settings.
"""
logger = logging.getLogger(logger_name)
logger.setLevel(logging.DEBUG)
logger.addHandler(get_console_handler())
logger.addHandler(get_timed_file_handler())
logger.addFilter(RankFilter(LOCAL_RANK))
logger.propagate = False
return logger
|
archai/archai/common/ordered_dict_logger_utils.py/0
|
{
"file_path": "archai/archai/common/ordered_dict_logger_utils.py",
"repo_id": "archai",
"token_count": 885
}
| 342 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Callable, Optional
from overrides import overrides
from torch.utils.data import Dataset
from torchvision.datasets import ImageFolder
from torchvision.transforms import ToTensor
from archai.api.dataset_provider import DatasetProvider
from archai.common.ordered_dict_logger import OrderedDictLogger
logger = OrderedDictLogger(source=__name__)
class ImageFolderDatasetProvider(DatasetProvider):
"""Image Folder dataset provider."""
def __init__(
self,
root: Optional[str] = "dataroot",
) -> None:
"""Initialize an image folder dataset provider.
Args:
root: Root directory of dataset where is saved.
"""
super().__init__()
self.root = root
@overrides
def get_train_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
loader: Optional[Callable] = None,
is_valid_file: Optional[Callable] = None,
) -> Dataset:
return ImageFolder(
self.root,
transform=transform or ToTensor(),
target_transform=target_transform,
loader=loader,
is_valid_file=is_valid_file,
)
@overrides
def get_val_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
loader: Optional[Callable] = None,
is_valid_file: Optional[Callable] = None,
) -> Dataset:
logger.warn("Validation set not available. Returning training set ...")
return self.get_train_dataset(
transform=transform, target_transform=target_transform, loader=loader, is_valid_file=is_valid_file
)
@overrides
def get_test_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
loader: Optional[Callable] = None,
is_valid_file: Optional[Callable] = None,
) -> Dataset:
logger.warn("Testing set not available. Returning validation set ...")
return self.get_test_dataset(
transform=transform, target_transform=target_transform, loader=loader, is_valid_file=is_valid_file
)
|
archai/archai/datasets/cv/image_folder_dataset_provider.py/0
|
{
"file_path": "archai/archai/datasets/cv/image_folder_dataset_provider.py",
"repo_id": "archai",
"token_count": 936
}
| 343 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import random
from itertools import chain
from typing import Any, Callable, Dict, List, Optional, Union
import numpy as np
from datasets.arrow_dataset import Dataset
from datasets.dataset_dict import DatasetDict, IterableDatasetDict
from datasets.download.download_manager import DownloadMode
from datasets.iterable_dataset import IterableDataset
from transformers.models.auto.tokenization_auto import AutoTokenizer
def should_refresh_cache(refresh: bool) -> DownloadMode:
"""Determine whether to refresh the cached dataset.
This function determines whether the cached dataset should be refreshed by
re-downloading or re-creating it based on the value of the `refresh`
parameter.
Args:
refresh: If `True`, the cache will be refreshed. If `False`, the
existing cache will be used if it exists.
Returns:
An enumerator indicating whether the cache should be refreshed or not.
"""
if refresh:
return DownloadMode.FORCE_REDOWNLOAD
return DownloadMode.REUSE_DATASET_IF_EXISTS
def tokenize_dataset(
examples: Dict[str, List[str]],
tokenizer: Optional[AutoTokenizer] = None,
mapping_column_name: Optional[List[str]] = None,
use_eos_token: Optional[bool] = False,
truncate: Optional[Union[bool, str]] = True,
padding: Optional[Union[bool, str]] = "max_length",
) -> Dict[str, Any]:
"""Tokenize a list of examples using a specified tokenizer.
Args:
examples: A list of examples to be tokenized.
tokenizer: The tokenizer to use.
mapping_column_name: The columns in `examples` that should be tokenized.
use_eos_token: Whether to append the EOS token to each example.
truncate: Whether truncation should be applied.
padding: Whether padding should be applied.
Returns:
Tokenized examples.
"""
def _add_eos_token(examples: List[str]) -> List[str]:
return [example + tokenizer.eos_token if example else example for example in examples]
if mapping_column_name is None:
mapping_column_name = ["text"]
examples_mapping = tuple(
_add_eos_token(examples[column_name]) if use_eos_token else examples[column_name]
for column_name in mapping_column_name
)
return tokenizer(*examples_mapping, truncation=truncate, padding=padding)
def tokenize_concatenated_dataset(
examples: Dict[str, List[str]],
tokenizer: Optional[AutoTokenizer] = None,
mapping_column_name: Optional[List[str]] = None,
use_eos_token: Optional[bool] = False,
dtype: Optional[np.dtype] = None,
) -> Dict[str, Any]:
"""Tokenize a list of examples using a specified tokenizer and
with concatenated batches (no truncation nor padding).
Args:
examples: A list of examples to be tokenized.
tokenizer: The tokenizer to use.
mapping_column_name: The columns in `examples` that should be tokenized.
use_eos_token: Whether to append the EOS token to each example.
dtype: Numpy data type of the tokenized examples.
Returns:
Concatenated tokenized examples.
"""
examples = tokenize_dataset(
examples,
tokenizer=tokenizer,
mapping_column_name=mapping_column_name,
use_eos_token=use_eos_token,
truncate=False,
padding=False,
)
tokenized_examples = np.fromiter(chain(*examples["input_ids"]), dtype=dtype)
return {"input_ids": [tokenized_examples], "length": [len(tokenized_examples)]}
def tokenize_contiguous_dataset(
examples: Dict[str, List[str]],
tokenizer: Optional[AutoTokenizer] = None,
mapping_column_name: Optional[List[str]] = None,
model_max_length: Optional[int] = 1024,
) -> Dict[str, Any]:
"""Tokenize a list of examples using a specified tokenizer and
with contiguous-length batches (no truncation nor padding).
Args:
examples: A list of examples to be tokenized.
tokenizer: The tokenizer to use.
mapping_column_name: The columns in `examples` that should be tokenized.
model_max_length: Maximum length of sequences.
Returns:
Contiguous-length tokenized examples.
"""
examples = tokenize_dataset(
examples, mapping_column_name=mapping_column_name, tokenizer=tokenizer, truncate=False, padding=False
)
concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()}
total_length = len(concatenated_examples[list(examples.keys())[0]])
if total_length >= model_max_length:
total_length = (total_length // model_max_length) * model_max_length
result = {
k: [t[i : i + model_max_length] for i in range(0, total_length, model_max_length)]
for k, t in concatenated_examples.items()
}
return result
def tokenize_nsp_dataset(
examples: Dict[str, List[str]],
tokenizer: Optional[AutoTokenizer] = None,
mapping_column_name: Optional[List[str]] = None,
truncate: Optional[Union[bool, str]] = True,
padding: Optional[Union[bool, str]] = "max_length",
) -> Dict[str, Any]:
"""Tokenize a list of examples using a specified tokenizer and
with next-sentence prediction (NSP).
Args:
examples: A list of examples to be tokenized.
tokenizer: The tokenizer to use.
mapping_column_name: The columns in `examples` that should be tokenized.
truncate: Whether truncation should be applied.
padding: Whether padding should be applied.
Returns:
Tokenized examples with NSP labels.
"""
if mapping_column_name is None:
mapping_column_name = ["text"]
assert len(mapping_column_name) == 1, "`mapping_column_name` must have a single value."
examples_mapping = examples[mapping_column_name[0]]
examples, next_sentence_labels = [], []
for i in range(len(examples_mapping)):
if random.random() < 0.5:
examples.append(examples_mapping[i])
next_sentence_labels.append(0)
else:
examples.append(random.choices(examples_mapping, k=2))
next_sentence_labels.append(1)
tokenized_examples = tokenizer(examples, truncation=truncate, padding=padding)
tokenized_examples["next_sentence_label"] = next_sentence_labels
return tokenized_examples
def encode_dataset(
dataset: Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict],
tokenizer: AutoTokenizer,
mapping_fn: Optional[Callable[[Any], Dict[str, Any]]] = None,
mapping_fn_kwargs: Optional[Dict[str, Any]] = None,
mapping_column_name: Optional[Union[str, List[str]]] = "text",
batched: Optional[bool] = True,
batch_size: Optional[int] = 1000,
writer_batch_size: Optional[int] = 1000,
num_proc: Optional[int] = None,
format_column_name: Optional[Union[str, List[str]]] = None,
) -> Union[DatasetDict, IterableDatasetDict]:
"""Encode a dataset using a tokenizer.
Args:
dataset: The dataset to be encoded.
tokenizer: The tokenizer to use for encoding.
mapping_fn: A function that maps the dataset. If not provided,
the default `tokenize_dataset` function will be used.
mapping_fn_kwargs: Keyword arguments to pass to `mapping_fn`.
mapping_column_name: The columns in the dataset to be tokenized.
If `str`, only one column will be tokenized.
If `List[str]`, multiple columns will be tokenized.
batched: Whether the mapping should be done in batches or not.
batch_size: The number of examples per batch when mapping in batches.
writer_batch_size: The number of examples per write operation to cache.
num_proc: The number of processes to use for multi-processing.
format_column_name: The columns that should be available on the resulting dataset.
If `str`, only one column will be available.
If `List[str]`, multiple columns will be available.
Returns:
The encoded dataset.
"""
if not mapping_fn:
mapping_fn = tokenize_dataset
if isinstance(mapping_column_name, str):
mapping_column_name = (mapping_column_name,)
elif isinstance(mapping_column_name, list):
mapping_column_name = tuple(mapping_column_name)
fn_kwargs = mapping_fn_kwargs or {}
fn_kwargs["tokenizer"] = tokenizer
fn_kwargs["mapping_column_name"] = mapping_column_name
mapping_kwargs = {"batched": batched}
if isinstance(dataset, DatasetDict):
remove_columns = [v.column_names for _, v in dataset.items()]
assert all([c[0] for c in remove_columns])
mapping_kwargs["remove_columns"] = remove_columns[0]
mapping_kwargs["batch_size"] = batch_size
mapping_kwargs["writer_batch_size"] = writer_batch_size
mapping_kwargs["num_proc"] = num_proc
dataset = dataset.map(mapping_fn, fn_kwargs=fn_kwargs, **mapping_kwargs)
if isinstance(dataset, DatasetDict):
dataset.set_format(type="torch", columns=format_column_name)
elif isinstance(dataset, IterableDatasetDict):
dataset = dataset.with_format(type="torch")
return dataset
|
archai/archai/datasets/nlp/hf_dataset_provider_utils.py/0
|
{
"file_path": "archai/archai/datasets/nlp/hf_dataset_provider_utils.py",
"repo_id": "archai",
"token_count": 3433
}
| 344 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import random
from pathlib import Path
from typing import List, Optional
from overrides import overrides
from tqdm import tqdm
from archai.common.ordered_dict_logger import OrderedDictLogger
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.search_objectives import SearchObjectives
from archai.discrete_search.api.search_results import SearchResults
from archai.discrete_search.api.search_space import EvolutionarySearchSpace
from archai.discrete_search.api.searcher import Searcher
from archai.discrete_search.utils.multi_objective import get_pareto_frontier
logger = OrderedDictLogger(source=__name__)
class RegularizedEvolutionSearch(Searcher):
"""Regularized Evolution algorithm.
It has been proposed in `Regularized Evolution for Image Classifier Architecture Search`.
Reference:
https://arxiv.org/abs/1802.01548v7.
"""
def __init__(
self,
search_space: EvolutionarySearchSpace,
search_objectives: SearchObjectives,
output_dir: str,
num_iters: Optional[int] = 10,
init_num_models: Optional[int] = 10,
initial_population_paths: Optional[List[str]] = None,
pareto_sample_size: Optional[int] = 40,
history_size: Optional[int] = 100,
clear_evaluated_models: Optional[bool] = True,
save_pareto_model_weights: bool = True,
seed: Optional[int] = 1,
) -> None:
"""Initialize the Regularized Evolution.
Args:
search_space: Discrete search space compatible with evolutionary algorithms.
search_objectives: Search objectives.
output_dir: Output directory.
num_iters: Number of iterations.
init_num_models: Number of initial models to evaluate.
initial_population_paths: Paths to initial population models.
pareto_sample_size: Number of models to sample from the pareto frontier.
history_size: Number of models to keep in the history.
clear_evaluated_models (bool, optional): Optimizes memory usage by clearing the architecture
of `ArchaiModel` after each iteration. Defaults to True.
save_pareto_model_weights: If `True`, saves the weights of the pareto models.
seed: Random seed.
"""
super(RegularizedEvolutionSearch, self).__init__()
assert isinstance(
search_space, EvolutionarySearchSpace
), f"{str(search_space.__class__)} is not compatible with {str(self.__class__)}"
self.iter_num = 0
self.search_space = search_space
self.so = search_objectives
self.output_dir = Path(output_dir)
self.output_dir.mkdir(exist_ok=True, parents=True)
# Algorithm settings
self.num_iters = num_iters
self.init_num_models = init_num_models
self.initial_population_paths = initial_population_paths
self.pareto_sample_size = pareto_sample_size
self.history_size = history_size
# Utils
self.clear_evaluated_models = clear_evaluated_models
self.save_pareto_model_weights = save_pareto_model_weights
self.search_state = SearchResults(search_space, self.so)
self.seed = seed
self.rng = random.Random(seed)
self.seen_archs = set()
self.num_sampled_archs = 0
assert self.init_num_models > 0
assert self.num_iters > 0
def sample_models(self, num_models: int, patience: Optional[int] = 5) -> List[ArchaiModel]:
"""Sample models from the search space.
Args:
num_models: Number of models to sample.
patience: Number of tries to sample a valid model.
Returns:
List of sampled models.
"""
nb_tries, valid_sample = 0, []
while len(valid_sample) < num_models and nb_tries < patience:
sample = [self.search_space.random_sample() for _ in range(num_models)]
_, valid_indices = self.so.validate_constraints(sample)
valid_sample += [sample[i] for i in valid_indices]
return valid_sample[:num_models]
def mutate_parents(
self, parents: List[ArchaiModel], mutations_per_parent: Optional[int] = 1, patience: Optional[int] = 20
) -> List[ArchaiModel]:
"""Mutate parents to generate new models.
Args:
parents: List of parent models.
mutations_per_parent: Number of mutations to apply to each parent.
patience: Number of tries to sample a valid model.
Returns:
List of mutated models.
"""
mutations = {}
for p in tqdm(parents, desc="Mutating parents"):
candidates = {}
nb_tries = 0
while len(candidates) < mutations_per_parent and nb_tries < patience:
mutated_model = self.search_space.mutate(p)
mutated_model.metadata["parent"] = p.archid
if not self.so.is_model_valid(mutated_model):
continue
if mutated_model.archid not in self.seen_archs:
mutated_model.metadata["generation"] = self.iter_num
candidates[mutated_model.archid] = mutated_model
nb_tries += 1
mutations.update(candidates)
return list(mutations.values())
@overrides
def search(self) -> SearchResults:
self.iter_num = 0
if self.initial_population_paths:
logger.info(f"Loading initial population from {len(self.initial_population_paths)} architectures ...")
iter_members = [self.search_space.load_arch(path) for path in self.initial_population_paths]
else:
logger.info(f"Using {self.init_num_models} random architectures as the initial population ...")
iter_members = self.sample_models(self.init_num_models)
self.all_pop = iter_members
for i in range(self.num_iters):
self.iter_num = i + 1
self.on_start_iteration(self.iter_num)
logger.info(f"Iteration {i+1}/{self.num_iters}")
if len(iter_members) == 0:
logger.info("No models to evaluate. Stopping search ...")
break
# Calculates objectives
logger.info(f"Calculating search objectives {list(self.so.objective_names)} for {len(iter_members)} models ...")
results = self.so.eval_all_objs(iter_members)
self.search_state.add_iteration_results(
iter_members,
results,
# Mutation and crossover info
extra_model_data={"parent": [p.metadata.get("parent", None) for p in iter_members]},
)
# Records evaluated archs to avoid computing the same architecture twice
self.seen_archs.update([m.archid for m in iter_members])
# Saves search iteration results
self.search_state.save_search_state(str(self.output_dir / f"search_state_{self.iter_num}.csv"))
self.search_state.save_pareto_frontier_models(
str(self.output_dir / f"pareto_models_iter_{self.iter_num}"),
save_weights=self.save_pareto_model_weights
)
self.search_state.save_all_2d_pareto_evolution_plots(str(self.output_dir))
# Clears models from memory if needed
if self.clear_evaluated_models:
logger.info("Optimzing memory usage ...")
[model.clear() for model in iter_members]
# Samples subset of models from the history buffer
history_indices = list(range(max(0, len(self.all_pop) - self.history_size), len(self.all_pop)))
sample_indices = self.rng.sample(history_indices, min(self.pareto_sample_size, self.history_size))
logger.info(f"Sampled {len(sample_indices)} models from the history ({len(history_indices)}) models.")
# Gets the pareto frontier of the history sample
logger.info("Calculating Pareto frontier of the sample ...")
pareto_sample = get_pareto_frontier(
[self.all_pop[sample_idx] for sample_idx in sample_indices],
{
obj_name: obj_results[sample_indices]
for obj_name, obj_results in self.search_state.all_evaluated_objs.items()
},
self.so,
)
logger.info(f"Found {len(pareto_sample)} pareto members from the sample.")
# mutate random 'k' subsets of the parents
# while ensuring the mutations fall within
# desired constraint limits
iter_members = self.mutate_parents(pareto_sample["models"], 1)
logger.info(f"Mutation: {len(iter_members)} new models.")
# update the set of architectures ever visited
self.all_pop.extend(iter_members)
return self.search_state
|
archai/archai/discrete_search/algos/regularized_evolution.py/0
|
{
"file_path": "archai/archai/discrete_search/algos/regularized_evolution.py",
"repo_id": "archai",
"token_count": 3881
}
| 345 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
import pathlib
import shutil
import timeit
from typing import Any, Dict, List, Optional
import numpy as np
import torch
from onnxruntime import InferenceSession
from overrides import overrides
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.model_evaluator import ModelEvaluator
from archai.discrete_search.search_spaces.nlp.transformer_flex.search_space import (
TransformerFlexSearchSpace,
)
from archai.onnx.config_utils.onnx_config_base import OnnxConfig
from archai.onnx.export import export_to_onnx
from archai.onnx.export_utils import prepare_model_for_onnx
from archai.onnx.onnx_loader import load_from_onnx
from archai.onnx.optimization import optimize_onnx
TMP_FOLDER = pathlib.Path("tmp")
class TransformerFlexOnnxLatency(ModelEvaluator):
"""Measure the average latency of models from the Transformer-Flex search space."""
def __init__(
self,
search_space: TransformerFlexSearchSpace,
providers: Optional[List[str]] = None,
batch_size: Optional[int] = 1,
seq_len: Optional[int] = 192,
past_seq_len: Optional[int] = 0,
n_trials: Optional[int] = 1,
use_median: Optional[bool] = False,
use_past: Optional[bool] = True,
validate: Optional[bool] = True,
share_weights: Optional[bool] = True,
opset: Optional[int] = 11,
optimize: Optional[bool] = True,
only_ort: Optional[bool] = False,
) -> None:
"""Initialize the evaluator.
This evaluator supports measuring in different ONNX Runtime providers. For measuring on
GPUs, use `providers=["CUDAExecutionProvider"]` and make sure that `onnxruntime-gpu`
package is installed.
Args:
search_space: The search space to use for loading the model.
providers: The list of ORT providers to use for benchmarking.
batch_size: The batch size to use when benchmarking the model.
seq_len: The sequence length to use when benchmarking the model.
past_seq_len: The past sequence length to use when benchmarking the model.
n_trials: The number of trials to use when benchmarking the model.
use_median: Whether to use the median or the mean of the measured
times as the result.
use_past: Whether to include past key/values in the model.
validate: Whether to validate the exported model.
share_weights: Whether to share the embedding and softmax weights.
opset: Set of operations to use with ONNX.
optimize: Whether to optimize the ONNX model.
only_ort: Whether to only apply ORT optimization.
"""
assert search_space.arch_type in ["codegen", "gpt2", "gpt2-flex"]
self.search_space = search_space
# Benchmark settings
self.providers = providers
self.batch_size = batch_size
self.seq_len = seq_len
self.past_seq_len = past_seq_len
self.n_trials = n_trials
self.use_median = use_median
self.use_past = use_past
self.validate = validate
self.share_weights = share_weights
self.opset = opset
self.optimize = optimize
self.only_ort = only_ort
def _load_and_prepare(self, config: Dict[str, Any]) -> torch.nn.Module:
config = copy.deepcopy(config)
if self.use_past:
config["use_cache"] = True
model = self.search_space._load_model_from_config(config)
return prepare_model_for_onnx(model, self.search_space.arch_type)
def _benchmark_model(self, session: InferenceSession, model_config: OnnxConfig) -> float:
inputs = model_config.generate_dummy_inputs(self.batch_size, self.seq_len, self.past_seq_len)
if self.use_past:
past_inputs = inputs.pop("past_key_values")
for i, past in enumerate(past_inputs):
inputs[f"past_{i}"] = past
timer = timeit.Timer(
stmt="onnx_model_session(None, inputs)",
globals={"inputs": {k: v.numpy() for k, v in inputs.items()}, "onnx_model_session": session.run},
)
# Perform a quick warmup prior to the calculation
_ = timer.timeit(number=max(int(self.n_trials // 100), 2))
# Calculate proper set of times (instead of sum)
runner = timer.repeat(repeat=self.n_trials, number=self.n_trials)
runner = [r / self.n_trials for r in runner]
return float(np.median(runner) if self.use_median else np.mean(runner))
@overrides
def evaluate(self, arch: ArchaiModel, budget: Optional[float] = None) -> float:
model = self._load_and_prepare(arch.metadata["config"])
# There is a bug for Python < 3.10 when using TemporaryFile with Windows,
# thus, we opted to manually save and remove the temporary file
TMP_FOLDER.mkdir(parents=True, exist_ok=True)
onnx_path = TMP_FOLDER / "model.onnx"
onnx_config = export_to_onnx(
model,
onnx_path.as_posix(),
task="causal-lm",
use_past=self.use_past,
validate=self.validate,
share_weights=self.share_weights,
opset=self.opset,
)
if self.optimize:
onnx_path = optimize_onnx(onnx_path.as_posix(), onnx_config, opt_level=0, only_ort=self.only_ort)
session = load_from_onnx(onnx_path, providers=self.providers)
latency = self._benchmark_model(session, onnx_config)
shutil.rmtree(TMP_FOLDER)
return latency
|
archai/archai/discrete_search/evaluators/nlp/transformer_flex_latency.py/0
|
{
"file_path": "archai/archai/discrete_search/evaluators/nlp/transformer_flex_latency.py",
"repo_id": "archai",
"token_count": 2384
}
| 346 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from archai.discrete_search.search_spaces.config.arch_config import ArchConfig
from archai.discrete_search.search_spaces.config.arch_param_tree import ArchParamTree
from archai.discrete_search.search_spaces.config.discrete_choice import DiscreteChoice
from archai.discrete_search.search_spaces.config.helpers import repeat_config
from archai.discrete_search.search_spaces.config.search_space import ConfigSearchSpace
|
archai/archai/discrete_search/search_spaces/config/__init__.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/config/__init__.py",
"repo_id": "archai",
"token_count": 139
}
| 347 |
from typing import Optional
import torch
import torch.nn as nn
from torch import Tensor
from transformers import PretrainedConfig
from archai.discrete_search.search_spaces.config import ArchConfig
try:
from flash_attn.modules.mlp import FusedMLP
except ImportError:
FusedMLP = None
from ...utils import get_optim_flag
from ...mixed_op import MixedAttentionBlock
# From https://github.com/HazyResearch/flash-attention (Copyright Tri Dao)
class Mlp(nn.Module):
def __init__(self, in_features, hidden_features=None, out_features=None, activation=nn.functional.gelu,
return_residual=False, device=None, dtype=None):
factory_kwargs = {'device': device, 'dtype': dtype}
super().__init__()
out_features = out_features or in_features
hidden_features = hidden_features or in_features
self.return_residual = return_residual
self.fc1 = nn.Linear(in_features, hidden_features, **factory_kwargs)
self.activation = activation
self.fc2 = nn.Linear(hidden_features, out_features, **factory_kwargs)
def forward(self, x):
y = self.fc1(x)
y = self.activation(y)
y = self.fc2(y)
return y if not self.return_residual else (y, x)
class CodeGenBlock(nn.Module):
def __init__(self, arch_config: ArchConfig, hf_config: PretrainedConfig,
hidden_size: int, layer_idx: Optional[int] = None):
super().__init__()
self.inner_dim = arch_config.pick('d_inner')
self.attn = MixedAttentionBlock(arch_config, hf_config, hidden_size, layer_idx=layer_idx)
self.fused_mlp = get_optim_flag(hf_config, 'fused_mlp')
if self.fused_mlp:
assert FusedMLP is not None, 'Need to install fused_mlp'
self.mlp = FusedMLP(hidden_size, self.inner_dim)
else:
self.mlp = Mlp(hidden_size, self.inner_dim)
self.resid_dropout = nn.Dropout(hf_config.resid_pdrop)
self.norm = nn.LayerNorm(hidden_size)
if getattr(hf_config, 'fused_dropout_add_ln', False):
raise NotImplementedError
def forward(self, hidden_states: Tensor, mixer_subset=None, mixer_kwargs=None, **kwargs):
r"""Pass the input through the encoder layer.
Args:
hidden_states: the sequence to the encoder layer (required).
mixer_subset: for cross-attention only. If not None, will take a subset of x
before applying the query projection. Useful for e.g., ViT where we only care
about the CLS token in the last layer.
"""
mixer_kwargs = mixer_kwargs or {}
mixer_kwargs.update(**kwargs)
residual = hidden_states
hidden_states = self.norm(hidden_states.to(dtype=self.norm.weight.dtype))
attn_output, _ = self.attn(hidden_states, **mixer_kwargs)
attn_output = self.resid_dropout(attn_output)
mlp_output = self.resid_dropout(self.mlp(hidden_states))
return residual + attn_output + mlp_output
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/backbones/codegen/block.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/backbones/codegen/block.py",
"repo_id": "archai",
"token_count": 1308
}
| 348 |
''' Modified from https://github.com/HazyResearch/flash-attention/ '''
import math
from warnings import warn
from typing import Tuple, Optional
import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers import PretrainedConfig
from einops import rearrange
try:
from flash_attn.ops.fused_dense import FusedDense
from flash_attn.layers.rotary import RotaryEmbedding as FlashRotaryEmbedding
except ImportError:
FusedDense = None
FlashRotaryEmbedding = None
try:
import ft_attention
except ImportError:
ft_attention = None
try:
from flash_attn.modules.mha import FlashSelfAttention, _update_kv_cache
except ImportError:
FlashSelfAttention, _update_kv_cache = None, None
from ..utils import get_optim_flag
class BaseRotaryEmbedding(nn.Module):
def __init__(self, dim: int, base=10000, scale_base=0, device=None):
super().__init__()
if scale_base > 0:
raise NotImplementedError
# Generate and save the inverse frequency buffer (non trainable)
inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, device=device,
dtype=torch.float32) / dim))
self.register_buffer("inv_freq", inv_freq)
self.scale_base = scale_base
scale = ((torch.arange(0, dim, 2, device=device, dtype=torch.float32) + 0.4 * dim)
/ (1.4 * dim) if scale_base > 0 else None)
self.register_buffer("scale", scale)
self._seq_len_cached = 0
self._cos_cached = None
self._sin_cached = None
self._cos_k_cached = None
self._sin_k_cached = None
def _update_cos_sin_cache(self, x, seqlen_offset=0):
"""x: (batch, seqlen, nheads, headdim) or (batch, seqlen, 3, nheads, headdim)
"""
seqlen = x.shape[1] + seqlen_offset
# Reset the tables if the sequence length has changed,
# or if we're on a new device (possibly due to tracing for instance)
if (seqlen > self._seq_len_cached or self._cos_cached.device != x.device
or self._cos_cached.dtype != x.dtype):
self._seq_len_cached = seqlen
t = torch.arange(seqlen, device=x.device, dtype=self.inv_freq.dtype)
# Don't do einsum, it converts fp32 to fp16
# freqs = torch.einsum("i,j->ij", t, self.inv_freq)
freqs = torch.outer(t, self.inv_freq.to(device=t.device))
if self.scale is None:
self._cos_cached = torch.cos(freqs).to(x.dtype)
self._sin_cached = torch.sin(freqs).to(x.dtype)
else:
power = ((torch.arange(seqlen, dtype=self.scale.dtype, device=self.scale.device)
- seqlen // 2) / self.scale_base)
scale = self.scale.to(device=power.device) ** rearrange(power, 's -> s 1')
# We want the multiplication by scale to happen in fp32
self._cos_cached = (torch.cos(freqs) * scale).to(x.dtype)
self._sin_cached = (torch.sin(freqs) * scale).to(x.dtype)
self._cos_k_cached = (torch.cos(freqs) / scale).to(x.dtype)
self._sin_k_cached = (torch.sin(freqs) / scale).to(x.dtype)
def apply_rotary_emb_qkv(self, qkv: torch.FloatTensor,
sin: torch.FloatTensor,
cos: torch.FloatTensor,
sin_k: Optional[torch.FloatTensor] = None,
cos_k: Optional[torch.FloatTensor] = None) -> torch.FloatTensor:
_, seqlen, three, _, headdim = qkv.shape
assert three == 3
rotary_seqlen, rotary_dim = cos.shape
rotary_dim *= 2
assert rotary_dim <= headdim
assert seqlen <= rotary_seqlen
cos_k = cos if cos_k is None else cos_k
sin_k = sin if sin_k is None else sin_k
assert sin.shape == cos_k.shape == sin_k.shape == (rotary_seqlen, rotary_dim // 2)
q_rot = qkv[:, :, 0, :, :rotary_dim]
q_pass = qkv[:, :, 0, :, rotary_dim:]
k_rot = qkv[:, :, 1, :, :rotary_dim]
k_pass = qkv[:, :, 1, :, rotary_dim:]
# Queries
q1, q2 = q_rot.chunk(2, dim=-1)
c, s = rearrange(cos[:seqlen], 's d -> s 1 d'), rearrange(sin[:seqlen], 's d -> s 1 d')
q_rot = torch.cat([
q1 * c - q2 * s,
q1 * s + q2 * c
], axis=-1)
# Keys
k1, k2 = qkv[:, :, 1, :, :rotary_dim].chunk(2, dim=-1)
c, s = rearrange(cos_k[:seqlen], 's d -> s 1 d'), rearrange(sin_k[:seqlen], 's d -> s 1 d')
k_rot = torch.cat([
k1 * c - k2 * s,
k1 * s + k2 * c
], axis=-1)
q = torch.cat([
q_rot, q_pass
], axis=-1)
k = torch.cat([
k_rot, k_pass
], axis=-1)
qkv = torch.cat([
q.unsqueeze(2), k.unsqueeze(2), qkv[:, :, 2:3, :, :]
], axis=2)
# inplace, but we still return it for convenience
return qkv
def forward(self, qkv: torch.Tensor, seqlen_offset: int = 0) -> Tuple[torch.Tensor, torch.Tensor]:
"""
seqlen_offset: can be used in generation where the qkv being passed in is only the last
token in the batch.
"""
self._update_cos_sin_cache(qkv, seqlen_offset)
return self.apply_rotary_emb_qkv(
qkv, self._sin_cached[seqlen_offset:], self._cos_cached[seqlen_offset:]
)
class SelfAttention(nn.Module):
"""Implement the scaled dot product attention with softmax.
Arguments
---------
softmax_scale: The temperature to use for the softmax attention.
(default: 1/sqrt(d_keys) where d_keys is computed at
runtime)
attention_dropout: The dropout rate to apply to the attention
(default: 0.0)
"""
def __init__(self, causal=False, softmax_scale=None, attention_dropout=0.0):
super().__init__()
self.causal = causal
self.softmax_scale = softmax_scale
self.dropout_p = attention_dropout
def forward(self, qkv, causal=None, key_padding_mask=None):
"""Implements the multihead softmax attention.
Arguments
---------
qkv: The tensor containing the query, key, and value. (B, S, 3, H, D)
causal: if passed, will override self.causal
key_padding_mask: boolean mask to apply to the attention weights. True means to keep,
False means to mask out. (B, S)
"""
batch_size, seqlen = qkv.shape[0], qkv.shape[1]
causal = self.causal if causal is None else causal
q, k, v = qkv.unbind(dim=2)
softmax_scale = self.softmax_scale or 1.0 / math.sqrt(q.shape[-1])
scores = torch.einsum('bthd,bshd->bhts', q, k * softmax_scale)
if key_padding_mask is not None:
padding_mask = torch.full((batch_size, seqlen), -10000.0, dtype=scores.dtype,
device=scores.device)
padding_mask.masked_fill_(key_padding_mask, 0.0)
# TD [2022-09-30]: Adding is faster than masked_fill_ (idk why, just better kernel I guess)
scores = scores + rearrange(padding_mask, 'b s -> b 1 1 s')
if causal:
# "triu_tril_cuda_template" not implemented for 'BFloat16'
# So we have to construct the mask in float
causal_mask = torch.triu(torch.full((seqlen, seqlen), -10000.0, device=scores.device), 1)
# TD [2022-09-30]: Adding is faster than masked_fill_ (idk why, just better kernel I guess)
scores = scores + causal_mask.to(dtype=scores.dtype)
attention = torch.softmax(scores, dim=-1, dtype=v.dtype)
attention_drop = F.dropout(attention, self.dropout_p if self.training else 0.0)
output = torch.einsum('bhts,bshd->bthd', attention_drop, v)
return output
class MHA(nn.Module):
def __init__(self, hf_config: PretrainedConfig,
hidden_size: int, total_heads: int, op_heads: int,
bias=True, dropout=0.0, softmax_scale=None, causal=True, layer_idx=None,
rotary_emb_scale_base=0, return_residual=False,
checkpointing=False, device=None, dtype=None, **kwargs) -> None:
factory_kwargs = {'device': device, 'dtype': dtype}
super().__init__()
self.hidden_size = hidden_size
self.total_heads = total_heads
self.op_heads = op_heads
assert self.hidden_size % op_heads == 0, "hiddden_size must be divisible by op_heads"
self.head_dim = self.hidden_size // total_heads
self.op_size = op_heads * self.head_dim
self.causal = causal
self.layer_idx = layer_idx
self.rotary_emb_dim = getattr(hf_config, 'rotary_dim', 0)
self.fused_dense = get_optim_flag(hf_config, 'fused_dense')
self.flash_attn = get_optim_flag(hf_config, 'flash_attn')
self.return_residual = return_residual
self.checkpointing = checkpointing
if self.rotary_emb_dim > 0:
if get_optim_flag(hf_config, 'flash_rotary_emb'):
assert FlashRotaryEmbedding is not None, 'rotary_emb is not installed'
self.rotary_emb = FlashRotaryEmbedding(self.rotary_emb_dim, scale_base=rotary_emb_scale_base,
device=device)
else:
self.rotary_emb = BaseRotaryEmbedding(self.rotary_emb_dim, scale_base=rotary_emb_scale_base,
device=device)
else:
warn('MHA: rotary_emb_dim is 0, no rotary embedding will be used. Performance may degrade.')
linear_cls = nn.Linear
if self.fused_dense:
assert FusedDense is not None, 'Need to install fused_dense'
linear_cls = FusedDense
self.Wqkv = linear_cls(hidden_size, 3 * self.op_size, bias=bias, **factory_kwargs)
if self.flash_attn:
assert FlashSelfAttention is not None, 'flash_attn is not installed'
self.inner_attn = FlashSelfAttention(causal=causal, softmax_scale=softmax_scale,
attention_dropout=dropout)
else:
self.inner_attn = SelfAttention(causal=causal, softmax_scale=softmax_scale,
attention_dropout=dropout)
def _update_kv_cache(self, kv, inference_params):
"""kv: (batch_size, seqlen, 2, nheads, head_dim) or (batch_size, 1, 2, nheads, head_dim)
"""
assert not self.dwconv, 'Generation does not support dwconv yet'
assert self.layer_idx is not None, 'Generation requires layer_idx in the constructor'
return _update_kv_cache(kv, inference_params, self.layer_idx)
def forward(self, x, x_kv=None, key_padding_mask=None, cu_seqlens=None, max_seqlen=None,
mixer_subset=None, inference_params=None, **kwargs):
"""
Arguments:
x: (batch, seqlen, hidden_dim) (where hidden_dim = num heads * head dim) if
cu_seqlens is None and max_seqlen is None, else (total, hidden_dim) where total
is the is the sum of the sequence lengths in the batch.
x_kv: (batch, seqlen, hidden_dim), only applicable for cross-attention. If None, use x.
cu_seqlens: (batch_size + 1,), dtype torch.int32. The cumulative sequence lengths
of the sequences in the batch, used to index into x. Only applicable when using
FlashAttention.
max_seqlen: int. Maximum sequence length in the batch.
key_padding_mask: boolean mask, True means to keep, False means to mask out.
(batch, seqlen). Only applicable when not using FlashAttention.
mixer_subset: for cross-attention only. If not None, will take a subset of x
before applying the query projection. Useful for e.g., ViT where we only care
about the CLS token in the last layer.
inference_params: for generation. Adapted from Megatron-LM (and Apex)
https://github.com/NVIDIA/apex/blob/3ff1a10f72ec07067c4e44759442329804ac5162/apex/transformer/testing/standalone_transformer_lm.py#L470
"""
if cu_seqlens is not None:
assert max_seqlen is not None
assert key_padding_mask is None
assert self.flash_attn
assert not self.dwconv
assert self.rotary_emb_dim == 0
if key_padding_mask is not None:
assert cu_seqlens is None
assert max_seqlen is None
assert not self.flash_attn
if inference_params is not None:
assert key_padding_mask is None
assert cu_seqlens is None and max_seqlen is None
assert not self.dwconv
attn_kwargs = ({'cu_seqlens': cu_seqlens, 'max_seqlen': max_seqlen}
if self.flash_attn else {'key_padding_mask': key_padding_mask})
assert x_kv is None and mixer_subset is None
qkv = self.Wqkv(x)
qkv = rearrange(qkv, '... (three h d) -> ... three h d', three=3, d=self.head_dim)
if inference_params is None:
if self.rotary_emb_dim > 0:
qkv = self.rotary_emb(qkv)
if not self.checkpointing:
context = self.inner_attn(qkv, **attn_kwargs)
else:
context = torch.utils.checkpoint.checkpoint(self.inner_attn, qkv, **attn_kwargs)
else:
if (not inference_params.fused_ft_kernel) or inference_params.sequence_len_offset == 0:
if self.rotary_emb_dim > 0:
qkv = self.rotary_emb(qkv, seqlen_offset=inference_params.sequence_len_offset)
q = qkv[:, :, 0]
kv = self._update_kv_cache(qkv[:, :, 1:], inference_params)
# If we're processing the prompt, causal=None (use self.causal).
# If we're decoding, then causal=False.
causal = None if inference_params.sequence_len_offset == 0 else False
context = self.inner_cross_attn(q, kv, causal=causal)
else:
assert inference_params.fused_ft_kernel
assert ft_attention is not None
context = ft_attention.single_query_attention(
*rearrange(qkv, 'b 1 three h d -> b three h d').unbind(dim=1),
*inference_params.key_value_memory_dict[self.layer_idx],
inference_params.lengths_per_sample, inference_params.sequence_len_offset,
self.rotary_emb_dim
)
context = rearrange(context, 'b h d -> b 1 h d')
out = rearrange(context, '... h d -> ... (h d)')
return (out, None) if not self.return_residual else ((out, x), None)
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/ops/mha.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/ops/mha.py",
"repo_id": "archai",
"token_count": 7395
}
| 349 |
from typing import Any, Dict, Union, List, Tuple
from itertools import chain, product
import os
import json
import yaml
import warnings
from transformers import PretrainedConfig
import numpy as np
import torch
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
def get_optim_flag(config: PretrainedConfig, flag_name: str):
if hasattr(config, flag_name):
return getattr(config, flag_name)
warnings.warn(f'{flag_name} is not set, using default value False')
return False
def from_json_file(json_file: Union[str, os.PathLike]) -> Dict[str, Any]:
with open(json_file, "r") as f:
try:
output_dict = json.load(f)
except json.decoder.JSONDecodeError:
output_dict = None
if output_dict is None:
return {}
return output_dict
def from_yaml_file(yaml_file: Union[str, os.PathLike]) -> Dict[str, Any]:
with open(yaml_file, "r") as f:
output_dict = yaml.load(f, Loader=Loader)
if output_dict is None:
return {}
return output_dict
def group_texts(examples, tokenizer, **kwargs):
block_size = tokenizer.model_max_length
# Concatenate all texts.
concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()}
total_length = len(concatenated_examples[list(examples.keys())[0]])
# We drop the small remainder, we could add padding if the model supported it instead of this drop, you can
# customize this part to your needs.
if total_length >= block_size:
total_length = (total_length // block_size) * block_size
# Split by chunks of max_len.
result = {
k: [t[i : i + block_size] for i in range(0, total_length, block_size)]
for k, t in concatenated_examples.items()
}
result["labels"] = result["input_ids"].copy()
return result
def split_heads(tensor, num_heads, attn_head_size):
"""
Splits hidden_size dim into attn_head_size and num_heads
"""
new_shape = tensor.size()[:-1] + (num_heads, attn_head_size)
tensor = tensor.view(new_shape)
return tensor.permute(0, 2, 1, 3) # (batch, head, seq_length, head_features)
def merge_heads(tensor, num_heads, attn_head_size):
"""
Merges attn_head_size dim and num_attn_heads dim into hidden_size
"""
tensor = tensor.permute(0, 2, 1, 3).contiguous()
new_shape = tensor.size()[:-2] + (num_heads * attn_head_size,)
return tensor.view(new_shape)
def make_asso_map(input_ids, mask):
assert mask is not None
hc_attn = (input_ids.unsqueeze(-1) == input_ids.unsqueeze(1)).float()
diag_idx = torch.eye(*input_ids.shape[1:]).bool()
hc_attn[:, diag_idx] = 0
hc_attn *= mask.unsqueeze(-1) * mask.unsqueeze(1)
hc_attn /= (hc_attn.sum(-1, keepdim=True) + 1e-6)
return hc_attn
def make_broadcast_map(input_ids, mask, eos_id=103):
T = input_ids.shape[1]
eos_map = (input_ids == eos_id).float()
eos_map = eos_map.unsqueeze(1).expand(-1, T, -1)
eos_mapp = eos_map * (mask.unsqueeze(-1) * mask.unsqueeze(1))
eos_map = eos_mapp / (eos_map.sum(dim=-1, keepdim=True) + 1e-6)
return eos_map
def get_attn_head_simplex(total_attn_heads: Union[int, List[int]],
ops_list: List[str],
grid_scale: int = 3) -> List[Tuple]:
if not isinstance(total_attn_heads, (list, tuple)):
total_attn_heads = [total_attn_heads]
n_ops = len(ops_list)
grid = [t for t in product(*[range(grid_scale) for _ in range(n_ops)])]
grid = grid[1:] # Removes point (0, 0, ..., 0)
simplex = np.unique(
np.array(grid) / np.sum(grid, axis=1, keepdims=True), axis=0
)
# Stores valid allocations (sum(heads) == total_heads)
filtered_simplex = []
for total_heads in total_attn_heads:
heads = np.round(total_heads * simplex)
filtered_simplex.append(simplex[heads.sum(axis=1) == total_heads])
filtered_simplex = np.concatenate(filtered_simplex, axis=0)
filtered_simplex = [tuple(a) for a in np.unique(filtered_simplex, axis=0)]
return [
tuple([(op_name, float(item)) for op_name, item in zip(ops_list, alloc)])
for alloc in filtered_simplex
]
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/utils.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/utils.py",
"repo_id": "archai",
"token_count": 1900
}
| 350 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Any, Dict, List
import numpy as np
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.search_objectives import SearchObjectives
def get_pareto_frontier(
models: List[ArchaiModel], evaluation_results: Dict[str, np.ndarray], objectives: SearchObjectives
) -> Dict[str, Any]:
"""Get the pareto frontier of the given models and evaluation results.
Args:
models: List of models.
evaluation_results: Dictionary of evaluation results.
objectives: Search objectives.
Returns:
Dictionary with models, evaluation results and whether they are pareto optimal.
"""
assert all(obj_name in objectives.objectives for obj_name in evaluation_results)
assert all(len(r) == len(models) for r in evaluation_results.values())
# Inverts maximization objectives
inverted_results = {
obj_name: (-obj_r if objectives.objectives[obj_name].higher_is_better else obj_r)
for obj_name, obj_r in evaluation_results.items()
}
# Converts results to an array of shape (len(models), len(objectives))
results_array = np.vstack(list(inverted_results.values())).T
pareto_points = np.array(_find_pareto_frontier_points(results_array))
return {
"models": [models[idx] for idx in pareto_points],
"evaluation_results": {
obj_name: obj_results[pareto_points] for obj_name, obj_results in evaluation_results.items()
},
"indices": pareto_points,
}
def get_non_dominated_sorting(
models: List[ArchaiModel], evaluation_results: Dict[str, np.ndarray], objectives: SearchObjectives
) -> List[Dict[str, Any]]:
"""Get the non-dominated sorting frontier of the given models and evaluation results.
Args:
models: List of models.
evaluation_results: Dictionary of evaluation results.
objectives: Search objectives.
Returns:
Dictionary with models, evaluation results and whether they are pareto optimal.
"""
assert all(obj_name in objectives.objectives for obj_name in evaluation_results)
assert all(len(r) == len(models) for r in evaluation_results.values())
# Inverts maximization objectives
inverted_results = {
obj_name: (-obj_r if objectives.objectives[obj_name].higher_is_better else obj_r)
for obj_name, obj_r in evaluation_results.items()
}
# Converts results to an array of shape (len(models), len(objectives))
results_array = np.vstack(list(inverted_results.values())).T
frontiers = [np.array(frontier) for frontier in _find_non_dominated_sorting(results_array)]
return [
{
"models": [models[idx] for idx in frontier],
"evaluation_results": {
obj_name: obj_results[frontier] for obj_name, obj_results in evaluation_results.items()
},
"indices": frontier,
}
for frontier in frontiers
]
def _find_pareto_frontier_points(all_points: np.ndarray) -> List[int]:
"""Takes in a list of n-dimensional points, one per row, returns the list of row indices
which are Pareto-frontier points.
Assumes that lower values on every dimension are better.
Args:
all_points: N-dimensional points.
Returns:
List of Pareto-frontier indexes.
"""
# For each point see if there exists any other point which dominates it on all dimensions
# If that is true, then it is not a pareto point and vice-versa
# Inputs should alwyas be a two-dimensional array
assert len(all_points.shape) == 2
pareto_inds = []
dim = all_points.shape[1]
# Gets the indices of unique points
_, unique_indices = np.unique(all_points, axis=0, return_index=True)
for i in unique_indices:
this_point = all_points[i, :]
is_pareto = True
for j in unique_indices:
if j == i:
continue
other_point = all_points[j, :]
diff = this_point - other_point
if sum(diff >= 0) == dim:
# Other point is smaller/larger on all dimensions
# so we have found at least one dominating point
is_pareto = False
break
if is_pareto:
pareto_inds.append(i)
return pareto_inds
def _find_non_dominated_sorting(all_points: np.ndarray) -> List[List[int]]:
"""Finds non-dominated sorting frontiers from a matrix (#points, #objectives).
Args:
all_points: N-dimensional points.
Returns:
List of frontier indices.
References:
Adapted from: https://github.com/anyoptimization/pymoo/blob/main/pymoo/util/nds/efficient_non_dominated_sort.py
Algorithm:
X. Zhang, Y. Tian, R. Cheng, and Y. Jin,
An efficient approach to nondominated sorting for evolutionary multiobjective optimization,
IEEE Transactions on Evolutionary Computation, 2015, 19(2): 201-213.
"""
lex_sorting = np.lexsort(all_points.T[::-1])
all_points = all_points.copy()[lex_sorting]
fronts = []
for idx in range(all_points.shape[0]):
front_rank = _find_front_rank(all_points, idx, fronts)
if front_rank >= len(fronts):
fronts.append([])
fronts[front_rank].append(idx)
ret = []
for front in fronts:
ret.append(lex_sorting[front])
return ret
def _find_front_rank(all_points: np.ndarray, idx: int, fronts: List[List[int]]) -> int:
"""Finds the front rank for all_points[idx] given `fronts`.
Args:
all_points: N-dimensional points.
idx: Point index.
fronts: Current NDS fronts.
Returns:
Front rank for `all_points[idx]`.
Reference:
Adapted from https://github.com/anyoptimization/pymoo/blob/main/pymoo/util/nds/efficient_non_dominated_sort.py
"""
def dominates(x, y):
for i in range(len(x)):
if y[i] < x[i]:
return False
return True
num_found_fronts = len(fronts)
rank = 0
current = all_points[idx]
while True:
if num_found_fronts == 0:
return 0
fk_indices = fronts[rank]
solutions = all_points[fk_indices[::-1]]
non_dominated = True
for s in solutions:
if dominates(s, current):
non_dominated = False
break
if non_dominated:
return rank
else:
rank += 1
if rank >= num_found_fronts:
return num_found_fronts
|
archai/archai/discrete_search/utils/multi_objective.py/0
|
{
"file_path": "archai/archai/discrete_search/utils/multi_objective.py",
"repo_id": "archai",
"token_count": 2700
}
| 351 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from __future__ import annotations
from typing import Any, Dict, Optional
import torch
from torch.nn import functional as F
from archai.quantization.quantizers import FakeDynamicQuant
class FakeQuantEmbedding(torch.nn.Embedding):
"""Translate a torch-based Embedding layer into a QAT-ready Embedding layer."""
def __init__(self, *args, **kwargs) -> None:
"""Initialize a fake quantized Embedding layer."""
bits = kwargs.pop("bits", 8)
onnx_compatible = kwargs.pop("onnx_compatible", False)
super().__init__(*args, **kwargs)
self.weight_fake_quant = FakeDynamicQuant(
dtype=torch.qint8,
reduce_range=False,
bits=bits,
onnx_compatible=onnx_compatible,
)
@property
def fake_quant_weight(self) -> torch.Tensor:
"""Return a fake quantization over the weight matrix."""
return self.weight_fake_quant(self.weight)
def forward(self, x: torch.Tensor) -> torch.Tensor:
return self.fake_quant_weight[x]
@classmethod
def from_float(
cls: FakeQuantEmbedding,
mod: torch.nn.Module,
qconfig: Optional[Dict[torch.nn.Module, Any]] = None,
**kwargs,
) -> FakeQuantEmbedding:
"""Map module from float to QAT-ready.
Args:
mod: Module to be mapped.
qconfig: Quantization configuration.
Returns:
QAT-ready module.
"""
module = cls(mod.num_embeddings, mod.embedding_dim, **kwargs)
module.weight = mod.weight
module.weight.model_parallel = False
return module
def to_float(self) -> torch.nn.Module:
"""Map module from QAT-ready to float.
Returns:
Float-based module.
"""
module = torch.nn.Embedding(self.num_embeddings, self.embedding_dim)
module.weight.data = self.weight_fake_quant(self.weight.data)
module.weight.model_parallel = True
return module
class FakeQuantEmbeddingForOnnx(FakeQuantEmbedding):
"""Allow a QAT-ready Embedding layer to be exported with ONNX."""
def __init__(self, *args, **kwargs) -> None:
"""Initialize a fake quantized Embedding layer compatible with ONNX."""
kwargs["onnx_compatible"] = True
super().__init__(*args, **kwargs)
class FakeDynamicQuantLinear(torch.nn.Linear):
"""Translate a torch-based Linear layer into a QAT-ready Linear layer."""
_FLOAT_MODULE = torch.nn.Linear
def __init__(
self,
*args,
dynamic_weight: Optional[bool] = True,
activation_reduce_range: Optional[bool] = True,
bits: Optional[int] = 8,
onnx_compatible: Optional[bool] = False,
qconfig: Optional[Dict[torch.nn.Module, Any]] = None,
**kwargs,
) -> None:
"""Initialize a fake quantized Linear layer.
Args:
dynamic_weight: Whether to use dynamic weights.
activation_reduce_range: Whether to reduce the range of activations.
bits: Number of quantization bits.
onnx_compatible: Whether quantization is compatible with ONNX.
qconfig: Quantization configuration.
"""
super().__init__(*args, **kwargs)
self.dynamic_weight = dynamic_weight
if dynamic_weight:
self.weight_fake_quant = FakeDynamicQuant(
dtype=torch.qint8,
reduce_range=False,
bits=bits,
onnx_compatible=onnx_compatible,
)
self.input_pre_process = FakeDynamicQuant(
reduce_range=activation_reduce_range,
bits=bits,
onnx_compatible=onnx_compatible,
)
@property
def fake_quant_weight(self) -> torch.Tensor:
"""Return a fake quantization over the weight matrix."""
return self.weight_fake_quant(self.weight)
def forward(self, x: torch.Tensor) -> torch.Tensor:
x = self.input_pre_process(x)
return F.linear(x, self.fake_quant_weight, self.bias)
@classmethod
def from_float(
cls: FakeDynamicQuantLinear,
mod: torch.nn.Module,
qconfig: Optional[Dict[torch.nn.Module, Any]] = None,
activation_reduce_range: Optional[bool] = True,
**kwargs,
) -> FakeDynamicQuantLinear:
"""Map module from float to QAT-ready.
Args:
mod: Module to be mapped.
qconfig: Quantization configuration.
activation_reduce_range: Whether to reduce the range of activations.
Returns:
QAT-ready module.
"""
assert type(mod) == cls._FLOAT_MODULE, (
" qat." + cls.__name__ + ".from_float only works for " + cls._FLOAT_MODULE.__name__
)
if not qconfig:
assert hasattr(mod, "qconfig"), "Input float module must have qconfig defined"
assert mod.qconfig, "Input float module must have a valid qconfig"
qconfig = mod.qconfig
qat_linear = cls(
mod.in_features,
mod.out_features,
bias=mod.bias is not None,
activation_reduce_range=activation_reduce_range,
qconfig=qconfig,
**kwargs,
)
qat_linear.weight = mod.weight
qat_linear.bias = mod.bias
return qat_linear
def to_float(self) -> torch.nn.Module:
"""Map module from QAT-ready to float.
Returns:
Float-based module.
"""
weight = self.weight_fake_quant(self.weight)
float_linear = torch.nn.Linear(self.in_features, self.out_features, bias=self.bias is not None)
float_linear.weight = torch.nn.Parameter(weight)
float_linear.bias = self.bias
return float_linear
class FakeDynamicQuantLinearForOnnx(FakeDynamicQuantLinear):
"""Allow a QAT-ready Linear layer to be exported with ONNX."""
def __init__(self, *args, **kwargs) -> None:
"""Initialize a fake quantized Linear layer compatible with ONNX."""
kwargs["activation_reduce_range"] = False
kwargs["onnx_compatible"] = True
super().__init__(*args, **kwargs)
class FakeDynamicQuantConv1d(torch.nn.Conv1d):
"""Translate a torch-based Conv1d layer into a QAT-ready Conv1d layer."""
_FLOAT_MODULE = torch.nn.Conv1d
def __init__(
self,
*args,
dynamic_weight: Optional[bool] = True,
activation_reduce_range: Optional[bool] = True,
bits: Optional[int] = 8,
onnx_compatible: Optional[bool] = False,
qconfig: Optional[Dict[torch.nn.Module, Any]] = None,
**kwargs,
) -> None:
"""Initialize a fake quantized Conv1d layer.
Args:
dynamic_weight: Whether to use dynamic weights.
activation_reduce_range: Whether to reduce the range of activations.
bits: Number of quantization bits.
onnx_compatible: Whether quantization is compatible with ONNX.
qconfig: Quantization configuration.
"""
super().__init__(*args, **kwargs)
self.dynamic_weight = dynamic_weight
if dynamic_weight:
self.weight_fake_quant = FakeDynamicQuant(
dtype=torch.qint8,
reduce_range=False,
bits=bits,
onnx_compatible=onnx_compatible,
)
self.input_pre_process = FakeDynamicQuant(
reduce_range=activation_reduce_range,
bits=bits,
onnx_compatible=onnx_compatible,
)
@property
def fake_quant_weight(self) -> torch.Tensor:
"""Return a fake quantization over the weight matrix."""
return self.weight_fake_quant(self.weight)
def forward(self, x: torch.Tensor) -> torch.Tensor:
x = self.input_pre_process(x)
return self._conv_forward(x, self.fake_quant_weight, self.bias)
@classmethod
def from_float(
cls: FakeDynamicQuantConv1d,
mod: torch.nn.Module,
qconfig: Optional[Dict[torch.nn.Module, Any]] = None,
activation_reduce_range: Optional[bool] = True,
**kwargs,
) -> FakeDynamicQuantConv1d:
"""Map module from float to QAT-ready.
Args:
mod: Module to be mapped.
qconfig: Quantization configuration.
activation_reduce_range: Whether to reduce the range of activations.
Returns:
QAT-ready module.
"""
assert type(mod) == cls._FLOAT_MODULE, (
" qat." + cls.__name__ + ".from_float only works for " + cls._FLOAT_MODULE.__name__
)
if not qconfig:
assert hasattr(mod, "qconfig"), "Input float module must have qconfig defined"
assert mod.qconfig, "Input float module must have a valid qconfig"
qconfig = mod.qconfig
qat_conv1d = cls(
in_channels=mod.in_channels,
out_channels=mod.out_channels,
kernel_size=mod.kernel_size,
stride=mod.stride,
padding=mod.padding,
dilation=mod.dilation,
groups=mod.groups,
padding_mode=mod.padding_mode,
bias=mod.bias is not None,
activation_reduce_range=activation_reduce_range,
qconfig=qconfig,
**kwargs,
)
qat_conv1d.weight = mod.weight
qat_conv1d.bias = mod.bias
return qat_conv1d
def to_float(self) -> torch.nn.Module:
"""Map module from QAT-ready to float.
Returns:
Float-based module.
"""
weight = self.weight_fake_quant(self.weight)
float_conv1d = torch.nn.Conv1d(
in_channels=self.in_channels,
out_channels=self.out_channels,
kernel_size=self.kernel_size,
stride=self.stride,
padding=self.padding,
dilation=self.dilation,
groups=self.groups,
padding_mode=self.padding_mode,
bias=self.bias is not None,
)
float_conv1d.weight = torch.nn.Parameter(weight)
float_conv1d.bias = self.bias
return float_conv1d
class FakeDynamicQuantConv1dForOnnx(FakeDynamicQuantConv1d):
"""Allow a QAT-ready Conv1d layer to be exported with ONNX."""
def __init__(self, *args, **kwargs) -> None:
"""Initialize a fake quantized Conv1d layer compatible with ONNX."""
kwargs["activation_reduce_range"] = False
kwargs["onnx_compatible"] = True
super().__init__(*args, **kwargs)
|
archai/archai/quantization/modules.py/0
|
{
"file_path": "archai/archai/quantization/modules.py",
"repo_id": "archai",
"token_count": 4841
}
| 352 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Iterator, Optional, Tuple
import torch
import torch.nn.functional as F
from overrides import overrides
from torch import nn
from archai.common.utils import zip_eq
from archai.supergraph.nas.arch_params import ArchParams
from archai.supergraph.nas.model_desc import OpDesc
from archai.supergraph.nas.operations import Op
# TODO: reduction cell might have output reduced by 2^1=2X due to
# stride 2 through input nodes however FactorizedReduce does only
# 4X reduction. Is this correct?
class MixedOp(Op):
"""The output of MixedOp is weighted output of all allowed primitives.
"""
PRIMITIVES = [
'max_pool_3x3',
'avg_pool_3x3',
'skip_connect', # identity
'sep_conv_3x3',
'sep_conv_5x5',
'dil_conv_3x3',
'dil_conv_5x5',
'none' # this must be at the end so top1 doesn't chose it
]
def __init__(self, op_desc:OpDesc, arch_params:Optional[ArchParams],
affine:bool):
super().__init__()
# assume last PRIMITIVE is 'none'
assert MixedOp.PRIMITIVES[-1] == 'none'
self._ops = nn.ModuleList()
for primitive in MixedOp.PRIMITIVES:
op = Op.create(
OpDesc(primitive, op_desc.params, in_len=1, trainables=None),
affine=affine, arch_params=None)
self._ops.append(op)
# we do this at the end so that we can capture all arch params registered by
# any previous child modules
self._setup_arch_params(arch_params)
@overrides
def forward(self, x):
asm = F.softmax(self._alphas[0], dim=0)
return sum(w * op(x) for w, op in zip(asm, self._ops))
@overrides
def finalize(self) -> Tuple[OpDesc, Optional[float]]:
with torch.no_grad():
# select except 'none' op
val, i = torch.topk(self._alphas[0][:-1], 1)
desc, _ = self._ops[i].finalize()
return desc, float(val.item())
@overrides
def can_drop_path(self) -> bool:
return False
@overrides
def ops(self)->Iterator[Tuple['Op', float]]: # type: ignore
return iter(sorted(zip_eq(self._ops, self._alphas[0]),
key=lambda t:t[1], reverse=True))
def _setup_arch_params(self, arch_params:Optional[ArchParams])->None:
# do we have shared arch params?
if arch_params is None:
# create our own arch params
new_p = nn.Parameter( # TODO: use better init than uniform random?
1.0e-3*torch.randn(len(MixedOp.PRIMITIVES)), requires_grad=True)
self.create_arch_params([('alphas', new_p)])
else:
assert arch_params.has_kind('alphas')
self.set_arch_params(arch_params)
# we store alphas in list so Pytorch don't register them
self._alphas = list(self.arch_params().param_by_kind('alphas'))
assert len(self._alphas)==1
|
archai/archai/supergraph/algos/darts/mixed_op.py/0
|
{
"file_path": "archai/archai/supergraph/algos/darts/mixed_op.py",
"repo_id": "archai",
"token_count": 1335
}
| 353 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from overrides import overrides
from archai.supergraph.algos.gumbelsoftmax.gs_arch_trainer import GsArchTrainer
from archai.supergraph.algos.gumbelsoftmax.gs_finalizers import GsFinalizers
from archai.supergraph.algos.gumbelsoftmax.gs_model_desc_builder import (
GsModelDescBuilder,
)
from archai.supergraph.nas.arch_trainer import TArchTrainer
from archai.supergraph.nas.exp_runner import ExperimentRunner
from archai.supergraph.nas.finalizers import Finalizers
class GsExperimentRunner(ExperimentRunner):
@overrides
def model_desc_builder(self)->GsModelDescBuilder:
return GsModelDescBuilder()
@overrides
def trainer_class(self)->TArchTrainer:
return GsArchTrainer
@overrides
def finalizers(self)->Finalizers:
return GsFinalizers()
|
archai/archai/supergraph/algos/gumbelsoftmax/gs_exp_runner.py/0
|
{
"file_path": "archai/archai/supergraph/algos/gumbelsoftmax/gs_exp_runner.py",
"repo_id": "archai",
"token_count": 300
}
| 354 |
import copy
from typing import List
import numpy as np
def prune(model_matrix:np.ndarray, vertex_ops:List[str]):
"""Prune the extraneous parts of the graph.
General procedure:
1) Remove parts of graph not connected to input.
2) Remove parts of graph not connected to output.
3) Reorder the vertices so that they are consecutive after steps 1 and 2.
These 3 steps can be combined by deleting the rows and columns of the
vertices that are not reachable from both the input and output (in reverse).
"""
shape = np.shape(model_matrix)
num_vertices = shape[0]
if len(shape) != 2 or shape[0] != shape[1]:
raise ValueError('model_matrix must be square')
if shape[0] != len(vertex_ops):
raise ValueError('length of vertex_ops must match model_matrix dimensions')
if not _is_upper_triangular(model_matrix):
raise ValueError('model_matrix must be upper triangular')
# DFS forward from input
visited_from_input = set([0])
frontier = [0]
while frontier:
top = frontier.pop()
for v in range(top + 1, num_vertices):
if model_matrix[top, v] and v not in visited_from_input:
visited_from_input.add(v)
frontier.append(v)
# DFS backward from output
visited_from_output = set([num_vertices - 1])
frontier = [num_vertices - 1]
while frontier:
top = frontier.pop()
for v in range(0, top):
if model_matrix[v, top] and v not in visited_from_output:
visited_from_output.add(v)
frontier.append(v)
# Any vertex that isn't connected to both input and output is extraneous to
# the computation graph.
extraneous = set(range(num_vertices)).difference(
visited_from_input.intersection(visited_from_output))
# If the non-extraneous graph is less than 2 vertices, the input is not
# connected to the output and the spec is invalid.
if len(extraneous) > num_vertices - 2:
raise RuntimeError(f'Cannot build model because there are {extraneous} vertices which are larger than total vertices {num_vertices}-2')
model_matrix = copy.deepcopy(model_matrix)
model_matrix = np.delete(model_matrix, list(extraneous), axis=0)
model_matrix = np.delete(model_matrix, list(extraneous), axis=1)
vertex_ops = copy.deepcopy(vertex_ops)
for index in sorted(extraneous, reverse=True):
del vertex_ops[index]
return model_matrix, vertex_ops
def _is_upper_triangular(model_matrix:np.ndarray):
# TODO: just use np.allclose(mat, np.triu(mat))
"""True if matrix is 0 on diagonal and below."""
for src in range(np.shape(model_matrix)[0]):
for dst in range(0, src + 1):
if model_matrix[src, dst] != 0:
return False
return True
|
archai/archai/supergraph/algos/nasbench101/model_matrix.py/0
|
{
"file_path": "archai/archai/supergraph/algos/nasbench101/model_matrix.py",
"repo_id": "archai",
"token_count": 1097
}
| 355 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
import random
from typing import List, Optional, Tuple
from overrides import overrides
from archai.common.config import Config
from archai.supergraph.nas.model_desc import (
AuxTowerDesc,
CellDesc,
CellType,
ConvMacroParams,
EdgeDesc,
NodeDesc,
OpDesc,
TensorShape,
TensorShapes,
TensorShapesList,
)
from archai.supergraph.nas.model_desc_builder import ModelDescBuilder
class RandOps:
"""Container to store (op_names, to_states) for each nodes"""
PRIMITIVES = [
'max_pool_3x3',
'avg_pool_3x3',
'skip_connect', # identity
'sep_conv_3x3',
'sep_conv_5x5',
'dil_conv_3x3',
'dil_conv_5x5',
# we don't allow none edge for random ops
# 'none' # this must be at the end so top1 doesn't choose it
]
def __init__(self, n_nodes:int, max_edges:int) -> None:
self.ops_and_ins:List[Tuple[List[str], List[int]]] = []
for i in range(n_nodes):
op_names = random.choices(RandOps.PRIMITIVES, k=max_edges)
to_states = random.sample(list(range(i+2)), k=max_edges)
self.ops_and_ins.append((op_names, to_states))
class RandomModelDescBuilder(ModelDescBuilder):
@overrides
def build_cells(self, in_shapes:TensorShapesList, conf_model_desc:Config)\
->Tuple[List[CellDesc], List[Optional[AuxTowerDesc]]]:
max_edges = conf_model_desc['num_edges_to_sample']
node_count = self.get_node_count(0)
# create two sets of random ops, one for each cell type
self._normal_ops = RandOps(node_count, max_edges)
self._reduction_ops = RandOps(node_count, max_edges)
return super().build_cells(in_shapes, conf_model_desc)
@overrides
def build_nodes(self, stem_shapes:TensorShapes, conf_cell:Config,
cell_index:int, cell_type:CellType, node_count:int,
in_shape:TensorShape, out_shape:TensorShape) \
->Tuple[TensorShapes, List[NodeDesc]]:
assert in_shape[0]==out_shape[0]
reduction = (cell_type==CellType.Reduction)
ops = self._reduction_ops if reduction else self._normal_ops
assert node_count == len(ops.ops_and_ins)
nodes:List[NodeDesc] = []
conv_params = ConvMacroParams(in_shape[0], out_shape[0])
for op_names, to_states in ops.ops_and_ins:
edges=[]
# add random edges
for op_name, to_state in zip(op_names, to_states):
op_desc = OpDesc(op_name,
params={
'conv': conv_params,
'stride': 2 if reduction and to_state < 2 else 1
}, in_len=1, trainables=None, children=None)
edge = EdgeDesc(op_desc, input_ids=[to_state])
edges.append(edge)
nodes.append(NodeDesc(edges=edges, conv_params=conv_params))
out_shapes = [copy.deepcopy(out_shape) for _ in range(node_count)]
return out_shapes, nodes
|
archai/archai/supergraph/algos/random/random_model_desc_builder.py/0
|
{
"file_path": "archai/archai/supergraph/algos/random/random_model_desc_builder.py",
"repo_id": "archai",
"token_count": 1525
}
| 356 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torchvision
from overrides import overrides
from torchvision.transforms import transforms
from archai.common import utils
from archai.common.config import Config
from archai.supergraph.datasets.dataset_provider import (
DatasetProvider,
ImgSize,
TrainTestDatasets,
register_dataset_provider,
)
class Cifar100Provider(DatasetProvider):
def __init__(self, conf_dataset:Config):
super().__init__(conf_dataset)
self._dataroot = utils.full_path(conf_dataset['dataroot'])
@overrides
def get_datasets(self, load_train:bool, load_test:bool,
transform_train, transform_test)->TrainTestDatasets:
trainset, testset = None, None
if load_train:
trainset = torchvision.datasets.CIFAR100(root=self._dataroot, train=True,
download=True, transform=transform_train)
if load_test:
testset = torchvision.datasets.CIFAR100(root=self._dataroot, train=False,
download=True, transform=transform_test)
return trainset, testset
@overrides
def get_transforms(self, img_size:ImgSize)->tuple:
MEAN = [0.507, 0.487, 0.441]
STD = [0.267, 0.256, 0.276]
transf = [
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip()
]
normalize = [
transforms.ToTensor(),
transforms.Normalize(MEAN, STD)
]
train_transform = transforms.Compose(transf + normalize)
test_transform = transforms.Compose(normalize)
return train_transform, test_transform
register_dataset_provider('cifar100', Cifar100Provider)
|
archai/archai/supergraph/datasets/providers/cifar100_provider.py/0
|
{
"file_path": "archai/archai/supergraph/datasets/providers/cifar100_provider.py",
"repo_id": "archai",
"token_count": 745
}
| 357 |
import os
from collections import namedtuple
import torch
import torch.nn as nn
import torch.nn.functional as F
__all__ = ['GoogLeNet', 'googlenet']
_GoogLeNetOuputs = namedtuple('GoogLeNetOuputs', ['logits', 'aux_logits2', 'aux_logits1'])
def googlenet(pretrained=False, progress=True, device='cpu', **kwargs):
r"""GoogLeNet (Inception v1) model architecture from
`"Going Deeper with Convolutions" <https://arxiv.org/abs/1409.4842>`_.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
aux_logits (bool): If True, adds two auxiliary branches that can improve training.
Default: *False* when pretrained is True otherwise *True*
transform_input (bool): If True, preprocesses the input according to the method with which it
was trained on ImageNet. Default: *False*
"""
model = GoogLeNet()
if pretrained:
script_dir = os.path.dirname(__file__)
state_dict = torch.load(script_dir + '/state_dicts/googlenet.pt', map_location=device)
model.load_state_dict(state_dict)
return model
class GoogLeNet(nn.Module):
## CIFAR10: aux_logits True->False
def __init__(self, num_classes=10, aux_logits=False, transform_input=False):
super(GoogLeNet, self).__init__()
self.aux_logits = aux_logits
self.transform_input = transform_input
## CIFAR10: out_channels 64->192, kernel_size 7->3, stride 2->1, padding 3->1
self.conv1 = BasicConv2d(3, 192, kernel_size=3, stride=1, padding=1)
# self.maxpool1 = nn.MaxPool2d(3, stride=2, ceil_mode=True)
# self.conv2 = BasicConv2d(64, 64, kernel_size=1)
# self.conv3 = BasicConv2d(64, 192, kernel_size=3, padding=1)
# self.maxpool2 = nn.MaxPool2d(3, stride=2, ceil_mode=True)
## END
self.inception3a = Inception(192, 64, 96, 128, 16, 32, 32)
self.inception3b = Inception(256, 128, 128, 192, 32, 96, 64)
## CIFAR10: padding 0->1, ciel_model True->False
self.maxpool3 = nn.MaxPool2d(3, stride=2, padding=1, ceil_mode=False)
## END
self.inception4a = Inception(480, 192, 96, 208, 16, 48, 64)
self.inception4b = Inception(512, 160, 112, 224, 24, 64, 64)
self.inception4c = Inception(512, 128, 128, 256, 24, 64, 64)
self.inception4d = Inception(512, 112, 144, 288, 32, 64, 64)
self.inception4e = Inception(528, 256, 160, 320, 32, 128, 128)
## CIFAR10: kernel_size 2->3, padding 0->1, ciel_model True->False
self.maxpool4 = nn.MaxPool2d(3, stride=2, padding=1, ceil_mode=False)
## END
self.inception5a = Inception(832, 256, 160, 320, 32, 128, 128)
self.inception5b = Inception(832, 384, 192, 384, 48, 128, 128)
if aux_logits:
self.aux1 = InceptionAux(512, num_classes)
self.aux2 = InceptionAux(528, num_classes)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.dropout = nn.Dropout(0.2)
self.fc = nn.Linear(1024, num_classes)
# if init_weights:
# self._initialize_weights()
# def _initialize_weights(self):
# for m in self.modules():
# if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):
# import scipy.stats as stats
# X = stats.truncnorm(-2, 2, scale=0.01)
# values = torch.as_tensor(X.rvs(m.weight.numel()), dtype=m.weight.dtype)
# values = values.view(m.weight.size())
# with torch.no_grad():
# m.weight.copy_(values)
# elif isinstance(m, nn.BatchNorm2d):
# nn.init.constant_(m.weight, 1)
# nn.init.constant_(m.bias, 0)
def forward(self, x):
if self.transform_input:
x_ch0 = torch.unsqueeze(x[:, 0], 1) * (0.229 / 0.5) + (0.485 - 0.5) / 0.5
x_ch1 = torch.unsqueeze(x[:, 1], 1) * (0.224 / 0.5) + (0.456 - 0.5) / 0.5
x_ch2 = torch.unsqueeze(x[:, 2], 1) * (0.225 / 0.5) + (0.406 - 0.5) / 0.5
x = torch.cat((x_ch0, x_ch1, x_ch2), 1)
# N x 3 x 224 x 224
x = self.conv1(x)
## CIFAR10
# N x 64 x 112 x 112
# x = self.maxpool1(x)
# N x 64 x 56 x 56
# x = self.conv2(x)
# N x 64 x 56 x 56
# x = self.conv3(x)
# N x 192 x 56 x 56
# x = self.maxpool2(x)
## END
# N x 192 x 28 x 28
x = self.inception3a(x)
# N x 256 x 28 x 28
x = self.inception3b(x)
# N x 480 x 28 x 28
x = self.maxpool3(x)
# N x 480 x 14 x 14
x = self.inception4a(x)
# N x 512 x 14 x 14
if self.training and self.aux_logits:
aux1 = self.aux1(x)
x = self.inception4b(x)
# N x 512 x 14 x 14
x = self.inception4c(x)
# N x 512 x 14 x 14
x = self.inception4d(x)
# N x 528 x 14 x 14
if self.training and self.aux_logits:
aux2 = self.aux2(x)
x = self.inception4e(x)
# N x 832 x 14 x 14
x = self.maxpool4(x)
# N x 832 x 7 x 7
x = self.inception5a(x)
# N x 832 x 7 x 7
x = self.inception5b(x)
# N x 1024 x 7 x 7
x = self.avgpool(x)
# N x 1024 x 1 x 1
x = x.view(x.size(0), -1)
# N x 1024
x = self.dropout(x)
x = self.fc(x)
# N x 1000 (num_classes)
if self.training and self.aux_logits:
return _GoogLeNetOuputs(x, aux2, aux1)
return x
class Inception(nn.Module):
def __init__(self, in_channels, ch1x1, ch3x3red, ch3x3, ch5x5red, ch5x5, pool_proj):
super(Inception, self).__init__()
self.branch1 = BasicConv2d(in_channels, ch1x1, kernel_size=1)
self.branch2 = nn.Sequential(
BasicConv2d(in_channels, ch3x3red, kernel_size=1),
BasicConv2d(ch3x3red, ch3x3, kernel_size=3, padding=1)
)
self.branch3 = nn.Sequential(
BasicConv2d(in_channels, ch5x5red, kernel_size=1),
BasicConv2d(ch5x5red, ch5x5, kernel_size=3, padding=1)
)
self.branch4 = nn.Sequential(
nn.MaxPool2d(kernel_size=3, stride=1, padding=1, ceil_mode=True),
BasicConv2d(in_channels, pool_proj, kernel_size=1)
)
def forward(self, x):
branch1 = self.branch1(x)
branch2 = self.branch2(x)
branch3 = self.branch3(x)
branch4 = self.branch4(x)
outputs = [branch1, branch2, branch3, branch4]
return torch.cat(outputs, 1)
class InceptionAux(nn.Module):
def __init__(self, in_channels, num_classes):
super(InceptionAux, self).__init__()
self.conv = BasicConv2d(in_channels, 128, kernel_size=1)
self.fc1 = nn.Linear(2048, 1024)
self.fc2 = nn.Linear(1024, num_classes)
def forward(self, x):
# aux1: N x 512 x 14 x 14, aux2: N x 528 x 14 x 14
x = F.adaptive_avg_pool2d(x, (4, 4))
# aux1: N x 512 x 4 x 4, aux2: N x 528 x 4 x 4
x = self.conv(x)
# N x 128 x 4 x 4
x = x.view(x.size(0), -1)
# N x 2048
x = F.relu(self.fc1(x), inplace=True)
# N x 2048
x = F.dropout(x, 0.7, training=self.training)
# N x 2048
x = self.fc2(x)
# N x 1024
return x
class BasicConv2d(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(BasicConv2d, self).__init__()
self.conv = nn.Conv2d(in_channels, out_channels, bias=False, **kwargs)
self.bn = nn.BatchNorm2d(out_channels, eps=0.001)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
return F.relu(x, inplace=True)
|
archai/archai/supergraph/models/googlenet.py/0
|
{
"file_path": "archai/archai/supergraph/models/googlenet.py",
"repo_id": "archai",
"token_count": 4034
}
| 358 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from collections import UserDict
from typing import Iterable, Iterator, Optional, Tuple, Union
from torch import nn
_param_suffix = '_arch_param' # all arch parameter names must have this suffix
NNTypes = Union[nn.Parameter, nn.ParameterDict, nn.ParameterList]
class ArchParams(UserDict):
"""This class holds set of learnable architecture parameter(s) for a given module. For example, one instance of this class would hold alphas for one instance of MixedOp. For sharing parameters, instance of this class can be passed around. Different algorithms may add learnable parameters for their need."""
def __init__(self, arch_params:Iterable[Tuple[str, NNTypes]], registrar:Optional[nn.Module]=None):
"""Create architecture parameters and register them
Arguments:
registrar {Optional[nn.Module]} -- If this parameter is beingly newly created instead of being shared by other module then owner should be specified. When owner is not None, this method will create a variable in the owning module with suffix _arch_param so that the parameter gets registered with Pytorch and becomes available in module's .parameters() calls.
"""
super().__init__()
for name, param in arch_params:
self.data[name] = param
if registrar is not None:
setattr(registrar, name + _param_suffix, param)
def __setitem__(self, name:str, param:NNTypes)->None:
raise RuntimeError(f'ArchParams is immutable hence adding/updating key {name} is not allowed.')
def __delitem__(self, name:str) -> None:
raise RuntimeError(f'ArchParams is immutable hence removing key {name} is not allowed.')
def _by_kind(self, kind:Optional[str])->Iterator[NNTypes]:
# TODO: may be optimize to avoid split() calls?
for name, param in self.items():
if kind is None or name.split('.')[-1]==kind:
yield param
def param_by_kind(self, kind:Optional[str])->Iterator[nn.Parameter]:
# TODO: enforce type checking if debugger is active?
return self._by_kind(kind) # type: ignore
def paramlist_by_kind(self, kind:Optional[str])->Iterator[nn.ParameterList]:
# TODO: enforce type checking if debugger is active?
return self._by_kind(kind) # type: ignore
def paramdict_by_kind(self, kind:Optional[str])->Iterator[nn.ParameterDict]:
# TODO: enforce type checking if debugger is active?
return self._by_kind(kind) # type: ignore
def has_kind(self, kind:str)->bool:
# TODO: may be optimize to avoid split() calls?
for name in self.keys():
if name.split('.')[-1]==kind:
return True
return False
@staticmethod
def from_module(module:nn.Module, recurse:bool=False)->'ArchParams':
suffix_len = len(_param_suffix)
# Pytorch named params have . in name for each module, we pick last part and remove _arch_params prefix
arch_params = ((name[:-suffix_len], param) \
for name, param in module.named_parameters(recurse=recurse)
if name.endswith(_param_suffix))
return ArchParams(arch_params)
@staticmethod
def nonarch_from_module(module:nn.Module, recurse:bool=False)->Iterator[nn.Parameter]:
# Pytorch named params have . in name for each module, we pick last part and remove _arch_params prefix
return (param for name, param in module.named_parameters(recurse=recurse)
if not name.endswith(_param_suffix))
@staticmethod
def empty()->'ArchParams':
return ArchParams([])
|
archai/archai/supergraph/nas/arch_params.py/0
|
{
"file_path": "archai/archai/supergraph/nas/arch_params.py",
"repo_id": "archai",
"token_count": 1336
}
| 359 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Optional
import torch
import torch.nn.functional as F
from torch.nn.modules.loss import _WeightedLoss
class SmoothCrossEntropyLoss(_WeightedLoss):
"""Cross entropy loss with label smoothing support."""
def __init__(
self, weight: Optional[torch.Tensor] = None, reduction: Optional[str] = "mean", smoothing: Optional[float] = 0.0
) -> None:
"""Initialize the loss function.
Args:
weight: Weight for each class.
reduction: Reduction method.
smoothing: Label smoothing factor.
"""
super().__init__(weight=weight, reduction=reduction)
self.smoothing = smoothing
self.weight = weight
self.reduction = reduction
@staticmethod
def _smooth_one_hot(targets: torch.Tensor, n_classes: int, smoothing: Optional[float] = 0.0) -> torch.Tensor:
assert 0 <= smoothing < 1
with torch.no_grad():
# For label smoothing, we replace 1-hot vector with 0.9-hot vector instead.
# Create empty vector of same size as targets, fill them up with smoothing/(n-1)
# then replace element where 1 supposed to go and put there 1-smoothing instead
targets = (
torch.empty(size=(targets.size(0), n_classes), device=targets.device)
.fill_(smoothing / (n_classes - 1))
.scatter_(1, targets.data.unsqueeze(1), 1.0 - smoothing)
)
return targets
def forward(self, inputs: torch.Tensor, targets: torch.Tensor) -> torch.Tensor:
targets = SmoothCrossEntropyLoss._smooth_one_hot(targets, inputs.size(-1), self.smoothing)
lsm = F.log_softmax(inputs, -1)
if self.weight is not None: # To support weighted targets
lsm = lsm * self.weight.unsqueeze(0)
loss = -(targets * lsm).sum(-1)
if self.reduction == "sum":
loss = loss.sum()
elif self.reduction == "mean":
loss = loss.mean()
return loss
|
archai/archai/trainers/losses.py/0
|
{
"file_path": "archai/archai/trainers/losses.py",
"repo_id": "archai",
"token_count": 876
}
| 360 |
dataset:
name: imagenet
autoaug:
model:
type: resnet200
loader:
aug: fa_reduced_imagenet
cutout: 0
batch: 256
epochs: 270
lr_schedule:
type: 'resnet'
warmup:
multiplier: 2
epochs: 3
optimizer:
type: sgd
lr: 0.05
nesterov: True
decay: 0.0001
clip: 0
|
archai/confs/aug/resnet200_b256.yaml/0
|
{
"file_path": "archai/confs/aug/resnet200_b256.yaml",
"repo_id": "archai",
"token_count": 163
}
| 361 |
name: nas-env
channels:
- conda-forge
dependencies:
- python=3.10
- pip
- pip:
- "archai[cv,nlp] @ git+https://github.com/microsoft/archai.git"
|
archai/docs/advanced_guide/cloud/azure/notebooks/quickstart/conda.yaml/0
|
{
"file_path": "archai/docs/advanced_guide/cloud/azure/notebooks/quickstart/conda.yaml",
"repo_id": "archai",
"token_count": 67
}
| 362 |
<jupyter_start><jupyter_text>Training NLP-based Models with Hugging FaceTraining an NLP-based model involves several steps, including loading the data, encoding the data, defining the model architecture, and conducting the actual training process.Archai implements abstract base classes that defines the expected behavior of some classes, such as datasets (`DatasetProvider`) and trainers (`TrainerBase`). Additionally, we offer boilerplate classes for the most common frameworks, such as a `DatasetProvider` compatible with `huggingface/datasets` and a `TrainerBase` compatible with `huggingface/transformers`. Loading and Encoding the DataWhen using a dataset provider, such as Hugging Face's `datasets` library, the data loading process is simplified, as the provider takes care of downloading and pre-processing the required dataset. Next, the data needs to be encoded, typically by converting text data into numerical representations that can be fed into the model. This step is accomplished in the same way as the [previous notebook](./hf_dataset_provider.ipynb):<jupyter_code>from transformers import AutoTokenizer, DataCollatorForLanguageModeling
from archai.datasets.nlp.hf_dataset_provider import HfHubDatasetProvider
from archai.datasets.nlp.hf_dataset_provider_utils import tokenize_dataset
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-350M-mono", model_max_length=1024)
tokenizer.pad_token = tokenizer.eos_token
collator = DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm=False)
dataset_provider = HfHubDatasetProvider("wikitext", dataset_config_name="wikitext-103-raw-v1")
# When loading `train_dataset`, we will override the split argument to only load 1%
# of the data and speed up its encoding
train_dataset = dataset_provider.get_train_dataset(split="train[:1%]")
encoded_train_dataset = train_dataset.map(tokenize_dataset, batched=True, fn_kwargs={"tokenizer": tokenizer})<jupyter_output>Found cached dataset wikitext (C:/Users/gderosa/.cache/huggingface/datasets/wikitext/wikitext-103-raw-v1/1.0.0/a241db52902eaf2c6aa732210bead40c090019a499ceb13bcbfa3f8ab646a126)
Loading cached processed dataset at C:\Users\gderosa\.cache\huggingface\datasets\wikitext\wikitext-103-raw-v1\1.0.0\a241db52902eaf2c6aa732210bead40c090019a499ceb13bcbfa3f8ab646a126\cache-04d7ff93d438ade6.arrow<jupyter_text>Defining the ModelOnce the data is encoded, we can define any NLP-based model. In this example, we will use a CodeGen architecture from `huggingface/transformers`.<jupyter_code>from transformers import CodeGenConfig, CodeGenForCausalLM
config = CodeGenConfig(
n_positions=1024,
n_embd=768,
n_layer=12,
n_head=12,
rotary_dim=16,
bos_token_id=0,
eos_token_id=0,
vocab_size=50295,
)
model = CodeGenForCausalLM(config=config)<jupyter_output><empty_output><jupyter_text>Running the TrainerThe final step is to use the Hugging Face trainer abstraction (`HfTrainer`) to conduct the training process, which involves optimizing the model's parameters using a pre-defined optimization algorithm and loss function, and updating the model's parameters based on the training data. This process is repeated until the model converges to a satisfactory accuracy or performance level.<jupyter_code>from transformers import TrainingArguments
from archai.trainers.nlp.hf_trainer import HfTrainer
training_args = TrainingArguments(
"hf-codegen",
evaluation_strategy="no",
logging_steps=1,
per_device_train_batch_size=1,
learning_rate=0.01,
weight_decay=0.1,
max_steps=1,
)
trainer = HfTrainer(
model=model,
args=training_args,
data_collator=collator,
train_dataset=encoded_train_dataset,
)
trainer.train()<jupyter_output>c:\Users\gderosa\Anaconda3\envs\archai\lib\site-packages\transformers\optimization.py:395: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning
FutureWarning,
|
archai/docs/getting_started/notebooks/nlp/hf_trainer.ipynb/0
|
{
"file_path": "archai/docs/getting_started/notebooks/nlp/hf_trainer.ipynb",
"repo_id": "archai",
"token_count": 1305
}
| 363 |
Natural Language Processing
===========================
.. toctree::
:maxdepth: 2
archai.datasets.nlp.tokenizer_utils
Hugging Face
------------
Dataset Provider
^^^^^^^^^^^^^^^^
.. automodule:: archai.datasets.nlp.hf_dataset_provider
:members:
:undoc-members:
Dataset Provider (Utilities)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: archai.datasets.nlp.hf_dataset_provider_utils
:members:
:undoc-members:
Fast Dataset Provider
^^^^^^^^^^^^^^^^^^^^^
.. automodule:: archai.datasets.nlp.fast_hf_dataset_provider
:members:
:undoc-members:
Fast Dataset Provider (Utilities)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: archai.datasets.nlp.fast_hf_dataset_provider_utils
:members:
:undoc-members:
NVIDIA
------
Data Loader (Utilities)
^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: archai.datasets.nlp.nvidia_data_loader_utils
:members:
:undoc-members:
Dataset Provider
^^^^^^^^^^^^^^^^
.. automodule:: archai.datasets.nlp.nvidia_dataset_provider
:members:
:undoc-members:
Dataset Provider (Utilities)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: archai.datasets.nlp.nvidia_dataset_provider_utils
:members:
:undoc-members:
|
archai/docs/reference/api/archai.datasets.nlp.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.datasets.nlp.rst",
"repo_id": "archai",
"token_count": 458
}
| 364 |
Transformer++
=============
Backbones
^^^^^^^^^
CodeGen
-------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.backbones.codegen.block
:members:
:undoc-members:
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.backbones.codegen.model
:members:
:undoc-members:
Operators
^^^^^^^^^
Causal Self-Attention
---------------------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.ops.causal_self_attn
:members:
:undoc-members:
Fast Fourier Transform Convolution
----------------------------------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.ops.fftconv_
:members:
:undoc-members:
Local Attention
---------------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.ops.local_attention
:members:
:undoc-members:
Locality Sensitive Hashing Attention
------------------------------------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.ops.lsh_attn
:members:
:undoc-members:
Multi-Head Attention
--------------------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.ops.mha
:members:
:undoc-members:
Separable 1D-Convolution
------------------------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.ops.sep_conv1d
:members:
:undoc-members:
Structured Global Convolution
-----------------------------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.ops.sgconv
:members:
:undoc-members:
Structured Global Convolution 3
-------------------------------
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.ops.sgconv3
:members:
:undoc-members:
Mixed Attention
^^^^^^^^^^^^^^^
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.mixed_attention
:members:
:undoc-members:
Mixed Operators
^^^^^^^^^^^^^^^
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.mixed_op
:members:
:undoc-members:
Model
^^^^^
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.model
:members:
:undoc-members:
Search Space
^^^^^^^^^^^^
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.search_space
:members:
:undoc-members:
Utilities
^^^^^^^^^
.. automodule:: archai.discrete_search.search_spaces.nlp.tfpp.utils
:members:
:undoc-members:
|
archai/docs/reference/api/archai.discrete_search.search_spaces.nlp.tfpp.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.discrete_search.search_spaces.nlp.tfpp.rst",
"repo_id": "archai",
"token_count": 847
}
| 365 |
Random
======
Experiment Runner
-----------------
.. automodule:: archai.supergraph.algos.random.random_exp_runner
:members:
:undoc-members:
Model Description Builder
-------------------------
.. automodule:: archai.supergraph.algos.random.random_model_desc_builder
:members:
:undoc-members:
|
archai/docs/reference/api/archai.supergraph.algos.random.rst/0
|
{
"file_path": "archai/docs/reference/api/archai.supergraph.algos.random.rst",
"repo_id": "archai",
"token_count": 98
}
| 366 |
Copyright
=========
This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow Microsoft's Trademark & Brand Guidelines. Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party's policies.
Trademark
---------
This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow Microsoft's Trademark & Brand Guidelines. Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party's policies.
License
-------
This project is released under the MIT License. Please review the `file <https://github.com/microsoft/archai/blob/main/LICENSE>`_ for more details.
Credits
-------
Archai is built upon several open-source codebases:
* `Fast AutoAugment <https://github.com/kakaobrain/fast-autoaugment>`_;
* `pt.darts <https://github.com/khanrc/pt.darts>`_;
* `DARTS-PyTorch <https://github.com/dragen1860/DARTS-PyTorch>`_;
* `DARTS <https://github.com/quark0/darts>`_;
* `petridishnn <https://github.com/microsoft/petridishnn>`_;
* `PyTorch CIFAR-10 Models <https://github.com/huyvnphan/PyTorch-CIFAR10>`_;
* `NVidia DeepLearning Examples <https://github.com/NVIDIA/DeepLearningExamples>`_;
* `PyTorch Warmup Scheduler <https://github.com/ildoonet/pytorch-gradual-warmup-lr>`_;
* `NAS Evaluation is Frustratingly Hard <https://github.com/antoyang/NAS-Benchmark>`_;
* `NASBench-PyTorch <https://github.com/romulus0914/NASBench-PyTorch>`_.
Please see :github:`setup.py` for up-to-date dependencies list. If you feel credit to any material is missing, please let us know by filing an `issue <https://github.com/microsoft/archai/issues>`_.
|
archai/docs/support/copyright.rst/0
|
{
"file_path": "archai/docs/support/copyright.rst",
"repo_id": "archai",
"token_count": 586
}
| 367 |
from archai.common.config import Config
def get_dataroot() -> str:
conf = Config(config_filepath="confs/algos/manual.yaml")
return conf["dataset"]["dataroot"]
|
archai/scripts/supergraph/download_datasets/dataset_utils.py/0
|
{
"file_path": "archai/scripts/supergraph/download_datasets/dataset_utils.py",
"repo_id": "archai",
"token_count": 64
}
| 368 |
import argparse
import math
import os
import time
from typing import List, Mapping, Optional, Tuple
import numpy as np
import torch
import torchvision
import torchvision.transforms as transforms
import yaml
from torch import nn
from torch.nn.modules.loss import _Loss
from torch.optim.lr_scheduler import _LRScheduler
from torch.optim.optimizer import Optimizer
from torch.utils.data import DataLoader
from archai.common import common, utils
from archai.common.ordered_dict_logger import get_global_logger
from archai.supergraph.algos.nasbench101.nasbench101_dataset import Nasbench101Dataset
logger = get_global_logger()
def train(
epochs, train_dl, val_dal, net, device, crit, optim, sched, sched_on_epoch, half, quiet, grad_clip: float
) -> List[Mapping]:
train_acc, _ = 0.0, 0.0
metrics = []
for epoch in range(epochs):
lr = optim.param_groups[0]["lr"]
train_acc, loss = train_epoch(epoch, net, train_dl, device, crit, optim, sched, sched_on_epoch, half, grad_clip)
val_acc = test(net, val_dal, device, half) if val_dal is not None else math.nan
metrics.append({"val_top1": val_acc, "train_top1": train_acc, "lr": lr, "epoch": epoch, "train_loss": loss})
if not quiet:
logger.info(f"train_epoch={epoch}, val_top1={val_acc}," f" train_top1={train_acc}, lr={lr:.4g}")
return metrics
def optim_sched_resnet(net, epochs):
lr, momentum, weight_decay = 0.1, 0.9, 1.0e-4
optim = torch.optim.SGD(net.parameters(), lr, momentum=momentum, weight_decay=weight_decay)
logger.info(f"lr={lr}, momentum={momentum}, weight_decay={weight_decay}")
sched = torch.optim.lr_scheduler.MultiStepLR(optim, milestones=[100, 150, 200, 400, 600]) # resnet original paper
sched_on_epoch = True
logger.info(f"sched_on_epoch={sched_on_epoch}, sched={str(sched)}")
return optim, sched, sched_on_epoch
def optim_sched_paper(net, epochs):
lr, momentum, weight_decay = 0.2, 0.9, 0.0001
optim = torch.optim.RMSprop(net.parameters(), lr=lr, momentum=momentum, weight_decay=weight_decay)
logger.info(f"optim=RMSprop, lr={lr}, momentum={momentum}, weight_decay={weight_decay}")
sched = torch.optim.lr_scheduler.CosineAnnealingLR(optim, epochs)
sched_on_epoch = True
logger.info(f"sched_on_epoch={sched_on_epoch}, sched={str(sched)}")
return optim, sched, sched_on_epoch
def optim_sched_darts(net, epochs):
lr, momentum, weight_decay = 0.025, 0.9, 3.0e-4
optim = torch.optim.SGD(net.parameters(), lr, momentum=momentum, weight_decay=weight_decay)
logger.info(f"optim=SGD, lr={lr}, momentum={momentum}, weight_decay={weight_decay}")
sched = torch.optim.lr_scheduler.CosineAnnealingLR(optim, epochs)
sched_on_epoch = True
logger.info(f"sched_on_epoch={sched_on_epoch}, sched={str(sched)}")
return optim, sched, sched_on_epoch
def get_data(
datadir: str,
train_batch_size=256,
test_batch_size=256,
cutout=0,
train_num_workers=-1,
test_num_workers=-1,
val_percent=20.0,
) -> Tuple[DataLoader, Optional[DataLoader], DataLoader]:
if utils.is_debugging():
train_num_workers = test_num_workers = 0
logger.info("debugger=true, num_workers=0")
if train_num_workers <= -1:
train_num_workers = torch.cuda.device_count() * 4
if test_num_workers <= -1:
test_num_workers = torch.cuda.device_count() * 4
train_transform = cifar10_transform(aug=True, cutout=cutout)
trainset = torchvision.datasets.CIFAR10(root=datadir, train=True, download=True, transform=train_transform)
val_len = int(len(trainset) * val_percent / 100.0)
train_len = len(trainset) - val_len
valset = None
if val_len:
trainset, valset = torch.utils.data.random_split(trainset, [train_len, val_len])
train_dl = torch.utils.data.DataLoader(
trainset, batch_size=train_batch_size, shuffle=True, num_workers=train_num_workers, pin_memory=True
)
if valset is not None:
val_dl = torch.utils.data.DataLoader(
valset, batch_size=test_batch_size, shuffle=False, num_workers=test_num_workers, pin_memory=True
)
else:
val_dl = None
test_transform = cifar10_transform(aug=False, cutout=0)
testset = torchvision.datasets.CIFAR10(root=datadir, train=False, download=True, transform=test_transform)
test_dl = torch.utils.data.DataLoader(
testset, batch_size=test_batch_size, shuffle=False, num_workers=test_num_workers, pin_memory=True
)
logger.info(f"train_len={train_len}, val_len={val_len}, test_len={len(testset)}")
return train_dl, val_dl, test_dl
def train_epoch(
epoch, net, train_dl, device, crit, optim, sched, sched_on_epoch, half, grad_clip: float
) -> Tuple[float, float]:
correct, total, loss_total = 0, 0, 0.0
net.train()
for batch_idx, (inputs, targets) in enumerate(train_dl):
inputs = inputs.to(device, non_blocking=True)
targets = targets.to(device, non_blocking=True)
if half:
inputs = inputs.half()
outputs, loss = train_step(net, crit, optim, sched, sched_on_epoch, inputs, targets, grad_clip)
loss_total += loss
_, predicted = outputs.max(1)
total += targets.size(0)
correct += predicted.eq(targets).sum().item()
if sched and sched_on_epoch:
sched.step()
return 100.0 * correct / total, loss_total
def train_step(
net: nn.Module,
crit: _Loss,
optim: Optimizer,
sched: _LRScheduler,
sched_on_epoch: bool,
inputs: torch.Tensor,
targets: torch.Tensor,
grad_clip: float,
) -> Tuple[torch.Tensor, float]:
outputs = net(inputs)
loss = crit(outputs, targets)
optim.zero_grad()
loss.backward()
nn.utils.clip_grad_norm_(net.parameters(), grad_clip)
optim.step()
if sched and not sched_on_epoch:
sched.step()
return outputs, loss.item()
def test(net, test_dl, device, half) -> float:
correct, total = 0, 0
net.eval()
with torch.no_grad():
for batch_idx, (inputs, targets) in enumerate(test_dl):
inputs = inputs.to(device, non_blocking=False)
targets = targets.to(device)
if half:
inputs = inputs.half()
outputs = net(inputs)
_, predicted = outputs.max(1)
total += targets.size(0)
correct += predicted.eq(targets).sum().item()
return 100.0 * correct / total
def param_size(model: torch.nn.Module) -> int:
"""count all parameters excluding auxiliary"""
return sum(v.numel() for name, v in model.named_parameters() if "auxiliary" not in name)
def cifar10_transform(aug: bool, cutout=0):
MEAN = [0.49139968, 0.48215827, 0.44653124]
STD = [0.24703233, 0.24348505, 0.26158768]
transf = [transforms.ToTensor(), transforms.Normalize(MEAN, STD)]
if aug:
aug_transf = [transforms.RandomCrop(32, padding=4), transforms.RandomHorizontalFlip()]
transf = aug_transf + transf
if cutout > 0: # must be after normalization
transf += [CutoutDefault(cutout)]
return transforms.Compose(transf)
class CutoutDefault:
"""
Reference : https://github.com/quark0/darts/blob/master/cnn/utils.py
"""
def __init__(self, length):
self.length = length
def __call__(self, img):
h, w = img.size(1), img.size(2)
mask = np.ones((h, w), np.float32)
y = np.random.randint(h)
x = np.random.randint(w)
y1 = np.clip(y - self.length // 2, 0, h)
y2 = np.clip(y + self.length // 2, 0, h)
x1 = np.clip(x - self.length // 2, 0, w)
x2 = np.clip(x + self.length // 2, 0, w)
mask[y1:y2, x1:x2] = 0.0
mask = torch.from_numpy(mask)
mask = mask.expand_as(img)
img *= mask
return img
def log_metrics(
expdir: str, filename: str, metrics, test_acc: float, args, nsds: Nasbench101Dataset, model_id: int
) -> None:
print(
f"filename: {filename}",
f"test_acc: {test_acc}",
f"nasbenc101_test_acc: {nsds.get_test_acc(model_id)}",
metrics[-1],
)
results = [
("test_acc", test_acc),
("nasbenc101_test_acc", nsds.get_test_acc(model_id)),
("val_acc", metrics[-1]["val_top1"]),
("epochs", args.epochs),
("train_batch_size", args.train_batch_size),
("test_batch_size", args.test_batch_size),
("model_name", args.model_name),
("exp_name", args.experiment_name),
("exp_desc", args.experiment_description),
("seed", args.seed),
("devices", utils.cuda_device_names()),
("half", args.half),
("cutout", args.cutout),
("train_acc", metrics[-1]["train_top1"]),
("loader_workers", args.loader_workers),
("date", str(time.time())),
]
utils.append_csv_file(os.path.join(expdir, f"{filename}.tsv"), results)
with open(os.path.join(expdir, f"{filename}_metrics.yaml"), "w") as f:
yaml.dump(metrics, f)
with open(os.path.join(expdir, f"{filename}_nasbench101.yaml"), "w") as f:
yaml.dump(nsds[model_id], f)
def create_crit(device, half):
crit = nn.CrossEntropyLoss().to(device)
if half:
crit.half()
return crit
def create_model(nsds, index, device, half) -> nn.Module:
net = nsds.create_model(index)
logger.info(f"param_size_m={param_size(net):.1e}")
net = net.to(device)
if half:
net.half()
return net
def main():
parser = argparse.ArgumentParser(description="Pytorch cifar training")
parser.add_argument("--experiment-name", "-n", default="train_pytorch")
parser.add_argument("--experiment-description", "-d", default="Train cifar usin pure PyTorch code")
parser.add_argument("--epochs", "-e", type=int, default=108)
parser.add_argument("--model-name", "-m", default="5")
parser.add_argument("--device", default="", help='"cuda" or "cpu" or "" in which case use cuda if available')
parser.add_argument("--train-batch-size", "-b", type=int, default=128)
parser.add_argument("--test-batch-size", type=int, default=256)
parser.add_argument("--seed", "-s", type=float, default=42)
parser.add_argument("--half", type=lambda x: x.lower() == "true", nargs="?", const=True, default=False)
parser.add_argument("--cutout", type=int, default=0)
parser.add_argument("--grad-clip", type=float, default=5.0)
parser.add_argument("--datadir", default="", help="where to find dataset files, default is ~/torchvision_data_dir")
parser.add_argument("--outdir", default="", help="where to put results, default is ~/logdir")
parser.add_argument(
"--loader-workers", type=int, default=-1, help="number of thread/workers for data loader (-1 means auto)"
)
args = parser.parse_args()
if not args.datadir:
args.datadir = common.default_dataroot()
nsds_dir = args.datadir
if os.environ.get("PT_DATA_DIR", ""):
nsds_dir = os.environ.get("PT_DATA_DIR")
if not args.outdir:
args.outdir = os.environ.get("PT_OUTPUT_DIR", "")
if not args.outdir:
args.outdir = os.path.join("~/logdir", "nasbench101", args.experiment_name)
assert isinstance(nsds_dir, str)
expdir = utils.full_path(args.outdir)
os.makedirs(expdir, exist_ok=True)
utils.setup_cuda(args.seed)
datadir = utils.full_path(args.datadir)
os.makedirs(datadir, exist_ok=True)
# log config for reference
logger.info(f'exp_name="{args.experiment_name}", exp_desc="{args.experiment_description}"')
logger.info(f'model_name="{args.model_name}", seed={args.seed}, epochs={args.epochs}')
logger.info(f"half={args.half}, cutout={args.cutout}")
logger.info(f'datadir="{datadir}"')
logger.info(f'expdir="{expdir}"')
logger.info(f"train_batch_size={args.train_batch_size}")
if args.device:
device = torch.device(args.device)
else:
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
nsds = Nasbench101Dataset(os.path.join(nsds_dir, "nasbench_ds", "nasbench_full.pkl"))
# load data just before train start so any errors so far is not delayed
train_dl, val_dl, test_dl = get_data(
datadir=datadir,
train_batch_size=args.train_batch_size,
test_batch_size=args.test_batch_size,
train_num_workers=args.loader_workers,
test_num_workers=args.loader_workers,
cutout=args.cutout,
)
model_id = int(args.model_name) # 5, 401, 4001, 40001, 400001
epochs = args.epochs
net = create_model(nsds, model_id, device, args.half)
crit = create_crit(device, args.half)
optim, sched, sched_on_epoch = optim_sched_darts(net, epochs) # optim_sched_darts optim_sched_paper
train_metrics = train(
epochs,
train_dl,
val_dl,
net,
device,
crit,
optim,
sched,
sched_on_epoch,
args.half,
False,
grad_clip=args.grad_clip,
)
test_acc = test(net, test_dl, device, args.half)
log_metrics(expdir, f"metrics_{model_id}", train_metrics, test_acc, args, nsds, model_id)
if __name__ == "__main__":
main()
|
archai/scripts/supergraph/nasbench101/nasbench101_var.py/0
|
{
"file_path": "archai/scripts/supergraph/nasbench101/nasbench101_var.py",
"repo_id": "archai",
"token_count": 5657
}
| 369 |
# Experiment: {exp_name}
Job count: {job_count}
{summary_text}
|
archai/scripts/supergraph/reports/summary.md/0
|
{
"file_path": "archai/scripts/supergraph/reports/summary.md",
"repo_id": "archai",
"token_count": 25
}
| 370 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import argparse
import os
import sys
import uuid
from archai.common.store import ArchaiStore
CONNECTION_NAME = 'MODEL_STORAGE_CONNECTION_STRING'
USAGE_TABLE_NAME = 'USAGE_TABLE_NAME'
USAGE_TABLE = 'usage'
CONNECTION_STRING = ''
def get_all_usage_entities(store, name_filter=None):
""" Get all usage entities with optional device name filter """
table_client = store._get_table_client()
entities = []
query = "PartitionKey eq 'main'"
if name_filter:
query += f" and name eq '{name_filter}'"
try:
for e in table_client.query_entities(query_filter=query):
entities += [e]
except Exception as e:
print(f"### error reading table: {e}")
return entities
def add_usage(store, name, start, end):
e = store.get_status(str(uuid.uuid4()))
del e['status']
e['name'] = name
e['start'] = start
e['end'] = end
store.update_status_entity(e)
return e
def usage(con_str):
parser = argparse.ArgumentParser(
description='Print usage in .csv format using ' +
f'{CONNECTION_NAME} environment variable.')
parser.add_argument('--device', help='Optional match for the name column (default None).')
args = parser.parse_args()
storage_account_name, storage_account_key = ArchaiStore.parse_connection_string(con_str)
store = ArchaiStore(storage_account_name, storage_account_key, table_name=USAGE_TABLE)
entities = get_all_usage_entities(store, args.device)
store.print_entities(entities)
if __name__ == '__main__':
con_str = os.getenv(CONNECTION_NAME)
if not con_str:
print(f"Please specify your {CONNECTION_NAME} environment variable.")
sys.exit(1)
usage(con_str)
|
archai/tasks/face_segmentation/aml/azure/usage.py/0
|
{
"file_path": "archai/tasks/face_segmentation/aml/azure/usage.py",
"repo_id": "archai",
"token_count": 671
}
| 371 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import argparse
from onnxruntime import InferenceSession, get_available_providers
import os
import numpy as np
import cv2
import sys
import tqdm
from create_data import DataGenerator
def test_onnx(dataset_dir, model, out_dir, test_size=1000, show=False):
os.makedirs(out_dir, exist_ok=True)
provider_list = ['CPUExecutionProvider']
if 'CUDAExecutionProvider' in get_available_providers():
print("using gpu")
provider_list = ['CUDAExecutionProvider'] + provider_list
sess = InferenceSession(model, providers=provider_list)
if len(sess._sess.inputs_meta) > 1:
raise Exception("Cannot handle models with more than one input")
if len(sess._sess.outputs_meta) > 1:
raise Exception("Cannot handle models more than one output")
input_meta = sess._sess.inputs_meta[0]
output_meta = sess._sess.outputs_meta[0]
shape = output_meta.shape
if len(shape) == 4:
shape = shape[1:] # remove match dimension.
oc, ow, oh = shape
if oh < 20:
ow, oh, oc = input_meta.shape
shape = input_meta.shape
print(f"input shape: {shape}")
print(f"output shape: {shape}")
if len(shape) == 4:
shape = shape[1:] # remove match dimension.
w, h, c = shape
transpose = (0, 1, 2)
reverse = (0, 1, 2)
if shape[0] == 3:
# then we need to transpose the input.
print("transposing to move RGB channel")
transpose = (2, 0, 1)
reverse = (1, 2, 0)
c, w, h = shape
input_name = input_meta.name
data_gen = DataGenerator(dataset_dir, (w, h), subset='test', count=test_size, transpose=transpose)
with tqdm.tqdm(total=len(data_gen)) as pbar:
for fname, img in data_gen():
inf = sess.run(None, {input_name: img[None, ...]})[0]
inf = inf.reshape(inf.shape[1:]) # remove batch dimension
inf = inf.transpose(reverse).reshape((ow, oh, -1))
basename = os.path.splitext(os.path.basename(fname))[0]
filename = os.path.join(out_dir, basename + ".raw")
inf.tofile(filename)
if show:
# debug visualize
img = img.transpose(reverse)
cls_seg = np.argmax(inf, axis=-1)
img = (255 * img).astype(np.uint8)
norm = cv2.normalize(cls_seg, None, 0, 255, cv2.NORM_MINMAX, dtype=cv2.CV_8UC1)
cls_seg_color = cv2.applyColorMap(norm, cv2.COLORMAP_JET)
canvas = np.concatenate([img[..., ::-1], cls_seg_color], axis=1)
cv2.imshow('img', canvas)
key = cv2.waitKey() & 0xFF
if key == 27:
break
pbar.update(1)
if __name__ == '__main__':
model = os.path.join('model', 'model.onnx')
output = os.path.join('onnx_outputs')
parser = argparse.ArgumentParser(description='Run an ONNX model test on a batch of input images and write ' +
'the outputs to a given folder')
parser.add_argument('--input', help='Location of the original input images ' +
'(default INPUT_DATASET environment variable')
parser.add_argument('--model', '-m', help="Name of model to test (e.g. model/model.onnx)", default=model)
parser.add_argument('--output', '-o', help="Location to write outputs (default 'onnx_outputs')", default=output)
parser.add_argument('--show', '-s', help="Show each inference image", action="store_true")
args = parser.parse_args()
dataset = args.input
if not dataset:
dataset = os.getenv("INPUT_DATASET")
if not dataset:
print("please provide --input or set your INPUT_DATASET environment vairable")
sys.exit(1)
test_onnx(dataset, args.model, args.output, show=args.show)
|
archai/tasks/face_segmentation/aml/snpe/test_onnx.py/0
|
{
"file_path": "archai/tasks/face_segmentation/aml/snpe/test_onnx.py",
"repo_id": "archai",
"token_count": 1734
}
| 372 |
from pathlib import Path
from typing import Optional
from overrides import overrides
from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from archai.discrete_search.api import ModelEvaluator, DatasetProvider, ArchaiModel
from .pl_trainer import SegmentationTrainingLoop
class PartialTrainingValIOU(ModelEvaluator):
def __init__(self, dataset_provider: DatasetProvider,
output_dir: str, tr_epochs: float = 1.0,
batch_size: int = 16, lr: float = 2e-4,
tr_dl_workers: int = 8, val_dl_workers: int = 8,
val_check_interval: float = 1.0):
self.dataset_provider = dataset_provider
self.output_dir = Path(output_dir)
self.tr_epochs = tr_epochs
self.batch_size = batch_size
self.lr = lr
self.tr_dl_workers = tr_dl_workers
self.val_dl_workers = val_dl_workers
self.val_check_interval = val_check_interval
@overrides
def evaluate(self, model: ArchaiModel, budget: Optional[float] = None) -> float:
tr_dataset = self.dataset_provider.get_train_dataset()
val_dataset = self.dataset_provider.get_val_dataset()
tr_dataloader = DataLoader(
tr_dataset, batch_size=self.batch_size, shuffle=True,
num_workers=self.tr_dl_workers
)
val_dataloader = DataLoader(
val_dataset, batch_size=self.batch_size, shuffle=False,
num_workers=self.val_dl_workers
)
trainer = Trainer(
default_root_dir=str(self.output_dir), accelerator='gpu', devices=1,
val_check_interval=int(self.val_check_interval * len(tr_dataloader)),
max_steps=int(self.tr_epochs * len(tr_dataloader)),
)
trainer.fit(
SegmentationTrainingLoop(model.arch, lr=self.lr),
tr_dataloader, val_dataloader,
)
return trainer.validate(trainer.model, val_dataloader)[0]['validation_mIOU']
|
archai/tasks/face_segmentation/training/partial_training_evaluator.py/0
|
{
"file_path": "archai/tasks/face_segmentation/training/partial_training_evaluator.py",
"repo_id": "archai",
"token_count": 912
}
| 373 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import math
from typing import Tuple
import cv2
import numpy as np
import torch
from torch import Tensor
from torchvision.transforms import Compose, ToTensor
from torchvision.transforms import functional as F
class Sample:
"""A sample of an image and its landmarks."""
def __init__(self, image=None, landmarks=None):
self.image = np.array(image)
self.landmarks = landmarks
class ExtractRegionOfInterest:
"""Extracts a region of interest from an image and its landmarks."""
class Rectangle:
"""A rectangle defined by its top-left and bottom-right corners."""
def __init__(self, top_left, bottom_right):
assert isinstance(top_left, np.ndarray)
assert isinstance(bottom_right, np.ndarray)
self.top_left = top_left
self.bottom_right = bottom_right
@property
def corners(self):
top_right = np.array([self.bottom_right[0], self.top_left[1]])
bottom_left = np.array([self.top_left[0], self.bottom_right[1]])
return np.array([self.top_left, top_right, bottom_left, self.bottom_right])
def __init__(self, roi_size, scale=2):
self.roi_size = roi_size
self.rect_dst = self.Rectangle(np.array([0, 0]), np.array([self.roi_size, self.roi_size]))
self.scale = scale
@property
def src_to_dst_mapping(self):
"""Returns the homography matrix that maps the source ROI to the destination ROI."""
return cv2.findHomography(self.rect_src.corners, self.rect_dst.corners)[0][:3, :3]
def transform_image(self, image):
"""Transforms an image to the destination ROI."""
return cv2.warpPerspective(image, self.src_to_dst_mapping, (self.roi_size, self.roi_size))
def transform_points(self, points):
"""Transforms points to the destination ROI."""
assert points.ndim == 2 and points.shape[-1] == 2, "Expecting a 2D array of points."
points_h = np.hstack([points, np.ones((points.shape[0], 1))]) # Homogenize
points_h = points_h.dot(self.src_to_dst_mapping.T)
return points_h[:, :2] / points_h[:, 2][..., None] # Dehomogenize
def find_src_roi(self, sample: Sample):
"""Finds the source ROI that encloses the landmarks. Enlarged with scale a factor"""
bbox = self._get_bbox(sample.landmarks)
center = np.mean(bbox.corners, axis=0)
M = cv2.getRotationMatrix2D(center, angle=0, scale=self.scale)
corners = np.hstack([bbox.corners, np.ones((bbox.corners.shape[0], 1))])
corners_scaled = corners.dot(M.T)
self.rect_src = self.Rectangle(corners_scaled[0], corners_scaled[3])
return
def _get_bbox(self, points):
"""Gets the square bounding box that enclose points."""
min_point = np.min(points, axis=0)
max_point = np.max(points, axis=0)
size = max(max_point - min_point)
center = (min_point + max_point) / 2
top_left = center - size / 2
bottom_right = center + size / 2
return self.Rectangle(top_left, bottom_right)
def __call__(self, sample: tuple):
"""Extracts a region of interest from an image and its landmarks."""
assert sample.image is not None
assert sample.landmarks is not None
self.find_src_roi(sample)
sample.image = self.transform_image(sample.image)
sample.landmarks = self.transform_points(sample.landmarks)
return sample
class NormalizeCoordinates:
"""Normalize coordinates from pixel units to [-1, 1]."""
def __call__(self, sample: Sample):
assert sample.landmarks is not None
roi_size = torch.tensor(sample.image.shape[-2::], dtype=sample.landmarks.dtype)
sample.landmarks = (sample.landmarks - (roi_size / 2)) / (roi_size / 2)
return sample
class SampleToTensor:
"""Turns a NumPy data in a Sample into PyTorch data"""
def __call__(self, sample: Sample):
sample.image = torch.from_numpy(np.transpose(sample.image, (2, 0, 1)))
sample.image = sample.image / 255.0
sample.image = sample.image.float()
if sample.landmarks is not None:
sample.landmarks = torch.from_numpy(sample.landmarks).float()
return sample
class FaceLandmarkTransform:
"""Transforms a sample of an image and its landmarks."""
def __init__(
self,
crop_size,
):
self.transform = Compose(
[ExtractRegionOfInterest(roi_size=crop_size), SampleToTensor(), NormalizeCoordinates()]
)
def __call__(self, sample: Sample):
return self.transform(sample)
|
archai/tasks/facial_landmark_detection/transforms.py/0
|
{
"file_path": "archai/tasks/facial_landmark_detection/transforms.py",
"repo_id": "archai",
"token_count": 1923
}
| 374 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import warnings
from archai.common.deprecation_utils import deprecated
def test_deprecated_decorator():
def my_func():
pass
def my_func2():
pass
def my_func3():
pass
def my_func4():
pass
def my_func5():
pass
# Assert that it works without arguments
deprecated_func = deprecated()(my_func)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
deprecated_func()
assert len(w) == 1
assert str(w[0].message) == "`my_func` has been deprecated and will be removed."
# Assert that it works with message argument
deprecated_func_message = deprecated(message="Use another function instead.")(my_func2)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
deprecated_func_message()
assert len(w) == 1
assert str(w[0].message) == "`my_func2` has been deprecated and will be removed. Use another function instead."
# Assert that it works with deprecated_version argument
deprecated_func_version = deprecated(deprecate_version="1.0.0")(my_func3)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
deprecated_func_version()
assert len(w) == 1
assert str(w[0].message) == "`my_func3` has been deprecated in v1.0.0 and will be removed."
# Assert that it works with remove_version argument
deprecated_func_remove = deprecated(remove_version="2.0.0")(my_func4)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
deprecated_func_remove()
assert len(w) == 1
assert str(w[0].message) == "`my_func4` has been deprecated and will be removed in v2.0.0."
# Assert that it works with both deprecated_version and remove_version arguments
deprecated_func_both = deprecated(deprecate_version="1.0.0", remove_version="2.0.0")(my_func5)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
deprecated_func_both()
assert len(w) == 1
assert str(w[0].message) == "`my_func5` has been deprecated in v1.0.0 and will be removed in v2.0.0."
|
archai/tests/common/test_deprecation_utils.py/0
|
{
"file_path": "archai/tests/common/test_deprecation_utils.py",
"repo_id": "archai",
"token_count": 865
}
| 375 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from random import Random
import pytest
from archai.discrete_search.api.search_objectives import SearchObjectives
from archai.discrete_search.evaluators.functional import EvaluationFunction
@pytest.fixture
def search_objectives():
rng1 = Random(1)
rng2 = Random(2)
rng3 = Random(3)
o1 = EvaluationFunction(lambda m, b: rng1.random())
o2 = EvaluationFunction(lambda m, b: rng2.random())
r = EvaluationFunction(lambda m, b: rng3.random())
so = SearchObjectives()
so.add_objective("Random1", o1, higher_is_better=False, compute_intensive=False, constraint=(0.0, 0.4))
so.add_objective("Random2", o2, higher_is_better=True)
so.add_constraint("Random3 constraint", r, constraint=(0.0, 0.6))
return so
|
archai/tests/discrete_search/algos/fixtures/objectives.py/0
|
{
"file_path": "archai/tests/discrete_search/algos/fixtures/objectives.py",
"repo_id": "archai",
"token_count": 291
}
| 376 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import pytest
from torch import nn
from torch.nn import functional as F
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.evaluators.pt_profiler import TorchNumParameters
from archai.discrete_search.evaluators.nlp.parameters import (
NonEmbeddingParamsProxy,
)
@pytest.fixture
def model():
class Model(nn.Module):
def __init__(self):
super().__init__()
self.embd = nn.Embedding(10, 10)
self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
self.fc1 = nn.Linear(320, 50)
self.fc2 = nn.Linear(50, 10)
def forward(self, x):
x = self.embd(x)
x = F.relu(F.max_pool2d(self.conv1(x), 2))
x = F.relu(F.max_pool2d(self.conv2(x), 2))
x = x.view(-1, 320)
x = F.relu(self.fc1(x))
x = self.fc2(x)
return x
return ArchaiModel(Model(), archid="test")
def test_total_params_proxy(model):
# Assert that the number of trainable parameters is correct
proxy = TorchNumParameters(trainable_only=True)
num_params = proxy.evaluate(model)
assert num_params == sum(param.numel() for param in model.arch.parameters() if param.requires_grad)
# Assert that the number of all parameters is correct
proxy = TorchNumParameters(trainable_only=False)
num_params = proxy.evaluate(model)
assert num_params == sum(param.numel() for param in model.arch.parameters())
def test_non_embedding_params_proxy(model):
# Assert that the number of non-embedding trainable parameters is correct
proxy = NonEmbeddingParamsProxy(trainable_only=True)
non_embedding_params = proxy.evaluate(model)
embedding_params = sum(param.numel() for param in model.arch.embd.parameters() if param.requires_grad)
assert non_embedding_params + embedding_params == sum(
param.numel() for param in model.arch.parameters() if param.requires_grad
)
# Assert that the number of non-embedding parameters is correct
proxy = NonEmbeddingParamsProxy(trainable_only=False)
non_embedding_params = proxy.evaluate(model)
embedding_params = sum(param.numel() for param in model.arch.embd.parameters())
assert non_embedding_params + embedding_params == sum(param.numel() for param in model.arch.parameters())
|
archai/tests/discrete_search/evaluators/nlp/test_parameters.py/0
|
{
"file_path": "archai/tests/discrete_search/evaluators/nlp/test_parameters.py",
"repo_id": "archai",
"token_count": 977
}
| 377 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import pytest
import torch
from transformers import PretrainedConfig
from archai.onnx.config_utils.onnx_config_base import OnnxConfig, OnnxConfigWithPast
@pytest.fixture
def dummy_config():
class DummyConfig(PretrainedConfig):
max_position_embeddings = 16
hidden_size = 32
num_layers = 3
num_attention_heads = 4
return DummyConfig()
def test_onnx_config(dummy_config):
# Assert that default values are set correctly
onnx_config = OnnxConfig(dummy_config)
assert onnx_config.config == dummy_config
assert onnx_config.task == "causal-lm"
def test_onnx_config_get_inputs(dummy_config):
# Assert that input names and shapes are set correctly
onnx_config = OnnxConfig(dummy_config)
inputs = onnx_config.get_inputs()
assert inputs == {"input_ids": {0: "batch_size", 1: "seq_len"}}
def test_onnx_config_get_outputs(dummy_config):
# Assert that output names and shapes are set correctly
onnx_config = OnnxConfig(dummy_config)
outputs = onnx_config.get_outputs()
assert outputs == {"probs": {0: "batch_size"}}
def test_onnx_config_generate_dummy_inputs(dummy_config):
# Assert that dummy inputs are generated correctly
onnx_config = OnnxConfig(dummy_config)
inputs = onnx_config.generate_dummy_inputs(batch_size=3, seq_len=10)
assert torch.equal(inputs["input_ids"], torch.zeros((3, 10), dtype=torch.long))
def test_onnx_config_generate_dummy_inputs_exceeds_max_position_embeddings(dummy_config):
# Assert that dummy inputs can't exceed max_position_embeddings
onnx_config = OnnxConfig(dummy_config)
with pytest.raises(AssertionError):
onnx_config.generate_dummy_inputs(batch_size=3, seq_len=20)
def test_onnx_config_unsupported_task(dummy_config):
# Assert that unsupported tasks raise an error
with pytest.raises(AssertionError):
OnnxConfig(dummy_config, task="unsupported_task")
def test_onnx_config_with_past_default_values(dummy_config):
# Assert that default values are set correctly
onnx_config = OnnxConfigWithPast(dummy_config)
assert onnx_config.config == dummy_config
assert onnx_config.task == "causal-lm"
assert onnx_config.use_past is False
def test_onnx_config_with_past_get_inputs(dummy_config):
# Assert that input names and shapes are set correctly
onnx_config = OnnxConfigWithPast(dummy_config, use_past=True)
inputs = onnx_config.get_inputs()
assert inputs == {
"input_ids": {0: "batch_size", 1: "seq_len"},
"past_0": {1: "batch_size", 3: "past_seq_len"},
"past_1": {1: "batch_size", 3: "past_seq_len"},
"past_2": {1: "batch_size", 3: "past_seq_len"},
}
def test_onnx_config_with_past_get_outputs(dummy_config):
# Assert that output names and shapes are set correctly
onnx_config = OnnxConfigWithPast(dummy_config, use_past=True)
outputs = onnx_config.get_outputs()
assert outputs == {
"probs": {0: "batch_size"},
"present_0": {1: "batch_size", 3: "total_seq_len"},
"present_1": {1: "batch_size", 3: "total_seq_len"},
"present_2": {1: "batch_size", 3: "total_seq_len"},
}
def test_onnx_config_with_past_generate_dummy_inputs(dummy_config):
# Assert that dummy inputs are generated correctly
onnx_config = OnnxConfigWithPast(dummy_config, use_past=True)
inputs = onnx_config.generate_dummy_inputs(batch_size=3, seq_len=4, past_seq_len=2)
assert torch.equal(inputs["input_ids"], torch.zeros((3, 4), dtype=torch.long))
assert torch.equal(inputs["past_key_values"][0], torch.zeros((2, 3, 4, 2, 8)))
assert torch.equal(inputs["past_key_values"][1], torch.zeros((2, 3, 4, 2, 8)))
assert torch.equal(inputs["past_key_values"][2], torch.zeros((2, 3, 4, 2, 8)))
def test_onnx_config_with_past_generate_dummy_inputs_exceeds_max_position_embeddings(dummy_config):
# Assert that dummy inputs can't exceed max_position_embeddings
onnx_config = OnnxConfigWithPast(dummy_config, use_past=True)
with pytest.raises(AssertionError):
onnx_config.generate_dummy_inputs(batch_size=3, seq_len=10, past_seq_len=8)
|
archai/tests/onnx/config_utils/test_onnx_config_base.py/0
|
{
"file_path": "archai/tests/onnx/config_utils/test_onnx_config_base.py",
"repo_id": "archai",
"token_count": 1704
}
| 378 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import unittest
from copy import deepcopy
from typing import Callable, List, Tuple
import numpy as np
from tqdm import tqdm
import archai.supergraph.algos.divnas.analyse_activations as aa
from archai.supergraph.algos.divnas.analyse_activations import (
_compute_mi,
compute_brute_force_sol,
create_submod_f,
)
from archai.supergraph.algos.divnas.seqopt import SeqOpt
from archai.supergraph.algos.divnas.wmr import Wmr
def create_rbf_func(first: np.array, sigma: float) -> Callable:
assert len(first.shape) == 1
assert sigma >= 0.0
def rbf_bound(second: np.array):
assert len(second.shape) == 1
val = aa.rbf(first, second, sigma)
return val
return rbf_bound
def synthetic_data2() -> List[Tuple[np.array, np.array]]:
# num grid locations
num_loc = 10
# plop some kernels on 0, 3, 9
k_0_func = create_rbf_func(np.array([0.0]), 3.0)
k_3_func = create_rbf_func(np.array([3.0]), 0.1)
k_6_func = create_rbf_func(np.array([6.0]), 0.1)
k_9_func = create_rbf_func(np.array([9.0]), 0.5)
Y = []
for i in range(num_loc):
i_arr = np.array([i])
y = 20.0 * k_0_func(i_arr) - 25.0 * k_3_func(i_arr) + 100.0 * k_6_func(i_arr) - 100.0 * k_9_func(i_arr)
y_arr = np.array([y])
Y.append((y_arr, i_arr))
return Y
def synthetic_data() -> List[Tuple[np.array, np.array]]:
# num grid locations
num_loc = 10
# plop some kernels on 0, 3, 9
k_0_func = create_rbf_func(np.array([0.0]), 3.0)
k_3_func = create_rbf_func(np.array([3.0]), 0.1)
k_6_func = create_rbf_func(np.array([6.0]), 0.1)
k_9_func = create_rbf_func(np.array([9.0]), 0.5)
Y = []
for i in range(num_loc):
i_arr = np.array([i])
y = -10.0 * k_0_func(i_arr) + 25.0 * k_3_func(i_arr) - 100.0 * k_6_func(i_arr) - 100.0 * k_9_func(i_arr)
y_arr = np.array([y])
Y.append((y_arr, i_arr))
return Y
def compute_synthetic_data_covariance(Y: List[Tuple[np.array, np.array]], sigma=0.8):
num_obsvs = len(Y)
covariance = np.zeros((num_obsvs, num_obsvs), np.float32)
for i in range(num_obsvs):
for j in range(num_obsvs):
if i == j:
covariance[i][j] = covariance[j][i] = 1.0
continue
obsv_i = Y[i][0]
obsv_j = Y[j][0]
assert obsv_i.shape == obsv_j.shape
if len(obsv_i.shape) == 1:
obsv_i = np.reshape(obsv_i, (obsv_i.shape[0], 1))
obsv_j = np.reshape(obsv_j, (obsv_j.shape[0], 1))
rbfs = np.exp(-np.sum(np.square(obsv_i - obsv_j), axis=1) / (2 * sigma * sigma))
avg_cov = np.sum(rbfs) / obsv_i.shape[0]
covariance[i][j] = covariance[j][i] = avg_cov
return covariance
class SeqOptSyntheticDataTestCase(unittest.TestCase):
def setUp(self):
self.Y = synthetic_data2()
self.vals = [item[0] for item in self.Y]
self.cov_kernel = compute_synthetic_data_covariance(self.Y)
def test_marginal_gain_calculation(self):
"""Tests that marginal gain calculation is correct"""
V = set(range(self.cov_kernel.shape[0]))
A_random = set([1])
V_minus_A_random = V - A_random
y = 2
I_A_random = _compute_mi(self.cov_kernel, A_random, V_minus_A_random)
A_aug = deepcopy(A_random)
A_aug.add(y)
V_minus_A_aug = V - A_aug
I_A_aug = _compute_mi(self.cov_kernel, A_aug, V_minus_A_aug)
diff_via_direct = abs(I_A_aug - I_A_random)
print(f"MI(A) {I_A_random}, MI(A U y) {I_A_aug}, diff {diff_via_direct}")
diff = aa.compute_marginal_gain(y, A_random, V, self.cov_kernel)
# the marginal_gain leaves out 0.5 * log term as it does not
# matter for ranking elements
half_log_diff = 0.5 * np.log(diff)
print(f"Diff via aa.compute {half_log_diff}")
self.assertAlmostEqual(diff_via_direct, half_log_diff, delta=0.01)
def test_greedy(self):
# budgeted number of sensors
budget = 4
# brute force solution
bf_sensors, bf_val = compute_brute_force_sol(self.cov_kernel, budget)
print(f"Brute force max subset {bf_sensors}, max mi {bf_val}")
# greedy
greedy_sensors = aa.greedy_op_selection(self.cov_kernel, budget)
# find MI of the greedy solution
V = set(range(self.cov_kernel.shape[0]))
A_greedy = set(greedy_sensors)
V_minus_A_greedy = V - A_greedy
I_greedy = _compute_mi(self.cov_kernel, A_greedy, V_minus_A_greedy)
print(f"Greedy solution is {greedy_sensors}, mi is {I_greedy}")
self.assertAlmostEqual(bf_val, I_greedy, delta=0.1)
def test_wmr(self):
eta = 0.01
num_rounds = 10000
gt_distrib = [0.15, 0.5, 0.3, 0.05]
num_items = len(gt_distrib)
wmr = Wmr(num_items, eta)
for _ in range(num_rounds):
sampled_index = np.random.choice(num_items, p=gt_distrib)
rewards = np.zeros((num_items))
rewards[sampled_index] = 1.0
wmr.update(rewards)
print(wmr.weights)
self.assertTrue(wmr.weights[1] > 0.4)
def test_seqopt(self):
# budgeted number of sensors
budget = 4
# brute force solution
bf_sensors, bf_val = compute_brute_force_sol(self.cov_kernel, budget)
print(f"Brute force max subset {bf_sensors}, max mi {bf_val}")
# greedy
greedy_sensors = aa.greedy_op_selection(self.cov_kernel, budget)
# find MI of the greedy solution
V = set(range(self.cov_kernel.shape[0]))
A_greedy = set(greedy_sensors)
V_minus_A_greedy = V - A_greedy
I_greedy = _compute_mi(self.cov_kernel, A_greedy, V_minus_A_greedy)
print(f"Greedy solution is {greedy_sensors}, mi is {I_greedy}")
# online greedy
eps = 0.1
num_items = self.cov_kernel.shape[0]
seqopt = SeqOpt(num_items, eps)
num_rounds = 100
for i in tqdm(range(num_rounds)):
# sample a list of activations from seqopt
sel_list = seqopt.sample_sequence(with_replacement=False)
# NOTE: we are going to use the batch covariance
# every round as this is a toy setting and we want to
# verify that seqopt is converging to good solutions
# update seqopt
compute_marginal_gain_func = create_submod_f(self.cov_kernel)
seqopt.update(sel_list, compute_marginal_gain_func)
# now sample a list of ops and hope it is diverse
seqopt_sensors = seqopt.sample_sequence(with_replacement=False)
seqopt_sensors = seqopt_sensors[:budget]
V = set(range(self.cov_kernel.shape[0]))
A_seqopt = set(seqopt_sensors)
V_minus_A_seqopt = V - A_seqopt
I_seqopt = _compute_mi(self.cov_kernel, A_seqopt, V_minus_A_seqopt)
print(f"SeqOpt solution is {seqopt_sensors}, mi is {I_seqopt}")
self.assertAlmostEqual(I_seqopt, I_greedy, delta=0.1)
self.assertAlmostEqual(I_greedy, bf_val, delta=0.1)
def main():
unittest.main()
# # generate some synthetic 1d data
# Y = synthetic_data2()
# vals = [item[0] for item in Y]
# print(f'{np.unique(vals).shape[0]} unique observations' )
# plt.figure()
# plt.plot(vals)
# # plt.show()
# # budget on sensor
# budget = 4
# # compute kernel covariance of observations
# cov_kernel = compute_synthetic_data_covariance(Y)
# print(f'Det of cov_kernel is {np.linalg.det(cov_kernel)}')
# plt.figure()
# sns.heatmap(cov_kernel, annot=False, cmap='coolwarm')
# # plt.show()
# # brute force solution
# bf_sensors, bf_val = compute_brute_force_sol(cov_kernel, budget)
# print(f'Brute force max subset {bf_sensors}, max mi {bf_val}')
# # greedy
# greedy_sensors = aa.greedy_op_selection(cov_kernel, budget)
# # find MI of the greedy solution
# V = set(range(cov_kernel.shape[0]))
# A_greedy = set(greedy_sensors)
# V_minus_A_greedy = V - A_greedy
# I_greedy = _compute_mi(cov_kernel, A_greedy, V_minus_A_greedy)
# print(f'Greedy solution is {greedy_sensors}, mi is {I_greedy}')
# # online greedy
# eps = 0.1
# num_items = cov_kernel.shape[0]
# seqopt = SeqOpt(num_items, eps)
# num_rounds = 100
# for i in range(num_rounds):
# print(f'Round {i}/{num_rounds}')
# # sample a list of activations from seqopt
# sel_list = seqopt.sample_sequence(with_replacement=False)
# # NOTE: we are going to use the batch covariance
# # every round as this is a toy setting and we want to
# # verify that seqopt is converging to good solutions
# # update seqopt
# compute_marginal_gain_func = create_submod_f(cov_kernel)
# seqopt.update(sel_list, compute_marginal_gain_func)
# # now sample a list of ops and hope it is diverse
# seqopt_sensors = seqopt.sample_sequence(with_replacement=False)
# V = set(range(cov_kernel.shape[0]))
# A_seqopt = set(seqopt_sensors)
# V_minus_A_seqopt = V - A_seqopt
# I_seqopt = _compute_mi(cov_kernel, A_seqopt, V_minus_A_seqopt)
# print(f'SeqOpt solution is {seqopt_sensors}, mi is {I_seqopt}')
if __name__ == "__main__":
main()
|
archai/tests/supergraph/test_divnas.py/0
|
{
"file_path": "archai/tests/supergraph/test_divnas.py",
"repo_id": "archai",
"token_count": 4502
}
| 379 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torch
from archai.trainers.losses import SmoothCrossEntropyLoss
def test_smooth_cross_entropy_loss():
inputs = torch.randn(3, 5)
targets = torch.tensor([1, 2, 3])
# Assert that the loss is reduced correctly (mean)
loss_fn = SmoothCrossEntropyLoss(reduction="mean")
loss = loss_fn(inputs, targets)
assert loss.shape == torch.Size([])
# Assert that the loss is reduced correctly (sum)
loss_fn = SmoothCrossEntropyLoss(reduction="sum")
loss = loss_fn(inputs, targets)
assert loss.shape == torch.Size([])
# Assert that the loss is weighted correctly
weight = torch.tensor([0.1, 0.2, 0.3, 0.4, 0.5])
loss_fn = SmoothCrossEntropyLoss(weight=weight, reduction="mean")
loss = loss_fn(inputs, targets)
assert loss.shape == torch.Size([])
# Assert that the loss is smoothed correctly
smoothing = 0.1
loss_fn = SmoothCrossEntropyLoss(smoothing=smoothing, reduction="mean")
loss = loss_fn(inputs, targets)
assert loss.shape == torch.Size([])
|
archai/tests/trainers/test_losses.py/0
|
{
"file_path": "archai/tests/trainers/test_losses.py",
"repo_id": "archai",
"token_count": 392
}
| 380 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from ...v7_0.task.models import *
from .task_client import TaskClient
__all__ = [
'Issue',
'JobOption',
'MaskHint',
'PlanEnvironment',
'ProjectReference',
'ReferenceLinks',
'TaskAgentJob',
'TaskAgentJobStep',
'TaskAgentJobTask',
'TaskAgentJobVariable',
'TaskAttachment',
'TaskLog',
'TaskLogReference',
'TaskOrchestrationContainer',
'TaskOrchestrationItem',
'TaskOrchestrationOwner',
'TaskOrchestrationPlan',
'TaskOrchestrationPlanGroupsQueueMetrics',
'TaskOrchestrationPlanReference',
'TaskOrchestrationQueuedPlan',
'TaskOrchestrationQueuedPlanGroup',
'TaskReference',
'Timeline',
'TimelineAttempt',
'TimelineRecord',
'TimelineRecordFeedLinesWrapper',
'TimelineReference',
'VariableValue',
'TaskClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/released/task/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/released/task/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 395
}
| 381 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from ...v7_0.work.models import *
from .work_client import WorkClient
__all__ = [
'Activity',
'BacklogColumn',
'BacklogConfiguration',
'BacklogFields',
'BacklogLevel',
'BacklogLevelConfiguration',
'BacklogLevelWorkItems',
'Board',
'BoardBadge',
'BoardCardRuleSettings',
'BoardCardSettings',
'BoardColumn',
'BoardFields',
'BoardChart',
'BoardChartReference',
'BoardReference',
'BoardRow',
'BoardSuggestedValue',
'BoardUserSettings',
'CapacityContractBase',
'CapacityPatch',
'CategoryConfiguration',
'CreatePlan',
'DateRange',
'DeliveryViewData',
'FieldReference',
'FilterClause',
'GraphSubjectBase',
'IdentityRef',
'ITaskboardColumnMapping',
'IterationCapacity',
'IterationWorkItems',
'Link',
'Member',
'ParentChildWIMap',
'Plan',
'PlanViewData',
'PredefinedQuery',
'ProcessConfiguration',
'ReferenceLinks',
'ReorderOperation',
'ReorderResult',
'Rule',
'TaskboardColumn',
'TaskboardColumnMapping',
'TaskboardColumns',
'TaskboardWorkItemColumn',
'TeamCapacity',
'TeamCapacityTotals',
'TeamContext',
'TeamFieldValue',
'TeamFieldValues',
'TeamFieldValuesPatch',
'TeamIterationAttributes',
'TeamMemberCapacity',
'TeamMemberCapacityIdentityRef',
'TeamSetting',
'TeamSettingsDataContractBase',
'TeamSettingsDaysOff',
'TeamSettingsDaysOffPatch',
'TeamSettingsIteration',
'TeamSettingsPatch',
'TimelineCriteriaStatus',
'TimelineIterationStatus',
'TimelineTeamData',
'TimelineTeamIteration',
'TimelineTeamStatus',
'UpdatePlan',
'UpdateTaskboardColumn',
'UpdateTaskboardWorkItemColumn',
'WorkItem',
'WorkItemColor',
'WorkItemCommentVersionRef',
'WorkItemFieldReference',
'WorkItemLink',
'WorkItemReference',
'WorkItemRelation',
'WorkItemTrackingResource',
'WorkItemTrackingResourceReference',
'WorkItemTypeReference',
'WorkItemTypeStateInfo',
'WorkClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/released/work/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/released/work/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 880
}
| 382 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class BuildClient(Client):
"""Build
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(BuildClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = '965220d5-5bb9-42cf-8d67-9b146df2a5a4'
def create_artifact(self, artifact, project, build_id):
"""CreateArtifact.
Associates an artifact with a build.
:param :class:`<BuildArtifact> <azure.devops.v7_0.build.models.BuildArtifact>` artifact: The artifact.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: :class:`<BuildArtifact> <azure.devops.v7_0.build.models.BuildArtifact>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
content = self._serialize.body(artifact, 'BuildArtifact')
response = self._send(http_method='POST',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('BuildArtifact', response)
def get_artifact(self, project, build_id, artifact_name):
"""GetArtifact.
Gets a specific artifact for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str artifact_name: The name of the artifact.
:rtype: :class:`<BuildArtifact> <azure.devops.v7_0.build.models.BuildArtifact>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if artifact_name is not None:
query_parameters['artifactName'] = self._serialize.query('artifact_name', artifact_name, 'str')
response = self._send(http_method='GET',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('BuildArtifact', response)
def get_artifact_content_zip(self, project, build_id, artifact_name, **kwargs):
"""GetArtifactContentZip.
Gets a specific artifact for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str artifact_name: The name of the artifact.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if artifact_name is not None:
query_parameters['artifactName'] = self._serialize.query('artifact_name', artifact_name, 'str')
response = self._send(http_method='GET',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_artifacts(self, project, build_id):
"""GetArtifacts.
Gets all artifacts for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [BuildArtifact]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='7.0',
route_values=route_values)
return self._deserialize('[BuildArtifact]', self._unwrap_collection(response))
def get_file(self, project, build_id, artifact_name, file_id, file_name, **kwargs):
"""GetFile.
Gets a file from the build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str artifact_name: The name of the artifact.
:param str file_id: The primary key for the file.
:param str file_name: The name that the file will be set to.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if artifact_name is not None:
query_parameters['artifactName'] = self._serialize.query('artifact_name', artifact_name, 'str')
if file_id is not None:
query_parameters['fileId'] = self._serialize.query('file_id', file_id, 'str')
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
response = self._send(http_method='GET',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_attachments(self, project, build_id, type):
"""GetAttachments.
Gets the list of attachments of a specific type that are associated with a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str type: The type of attachment.
:rtype: [Attachment]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
response = self._send(http_method='GET',
location_id='f2192269-89fa-4f94-baf6-8fb128c55159',
version='7.0',
route_values=route_values)
return self._deserialize('[Attachment]', self._unwrap_collection(response))
def get_attachment(self, project, build_id, timeline_id, record_id, type, name, **kwargs):
"""GetAttachment.
Gets a specific attachment.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str timeline_id: The ID of the timeline.
:param str record_id: The ID of the timeline record.
:param str type: The type of the attachment.
:param str name: The name of the attachment.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if timeline_id is not None:
route_values['timelineId'] = self._serialize.url('timeline_id', timeline_id, 'str')
if record_id is not None:
route_values['recordId'] = self._serialize.url('record_id', record_id, 'str')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
if name is not None:
route_values['name'] = self._serialize.url('name', name, 'str')
response = self._send(http_method='GET',
location_id='af5122d3-3438-485e-a25a-2dbbfde84ee6',
version='7.0',
route_values=route_values,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def authorize_project_resources(self, resources, project):
"""AuthorizeProjectResources.
[Preview API]
:param [DefinitionResourceReference] resources:
:param str project: Project ID or project name
:rtype: [DefinitionResourceReference]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(resources, '[DefinitionResourceReference]')
response = self._send(http_method='PATCH',
location_id='398c85bc-81aa-4822-947c-a194a05f0fef',
version='7.0-preview.1',
route_values=route_values,
content=content)
return self._deserialize('[DefinitionResourceReference]', self._unwrap_collection(response))
def get_project_resources(self, project, type=None, id=None):
"""GetProjectResources.
[Preview API]
:param str project: Project ID or project name
:param str type:
:param str id:
:rtype: [DefinitionResourceReference]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if type is not None:
query_parameters['type'] = self._serialize.query('type', type, 'str')
if id is not None:
query_parameters['id'] = self._serialize.query('id', id, 'str')
response = self._send(http_method='GET',
location_id='398c85bc-81aa-4822-947c-a194a05f0fef',
version='7.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[DefinitionResourceReference]', self._unwrap_collection(response))
def list_branches(self, project, provider_name, service_endpoint_id=None, repository=None, branch_name=None):
"""ListBranches.
Gets a list of branches for the given source code repository.
:param str project: Project ID or project name
:param str provider_name: The name of the source provider.
:param str service_endpoint_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit.
:param str repository: The vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories.
:param str branch_name: If supplied, the name of the branch to check for specifically.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if provider_name is not None:
route_values['providerName'] = self._serialize.url('provider_name', provider_name, 'str')
query_parameters = {}
if service_endpoint_id is not None:
query_parameters['serviceEndpointId'] = self._serialize.query('service_endpoint_id', service_endpoint_id, 'str')
if repository is not None:
query_parameters['repository'] = self._serialize.query('repository', repository, 'str')
if branch_name is not None:
query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
response = self._send(http_method='GET',
location_id='e05d4403-9b81-4244-8763-20fde28d1976',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[str]', self._unwrap_collection(response))
def get_build_badge(self, project, repo_type, repo_id=None, branch_name=None):
"""GetBuildBadge.
[Preview API] Gets a badge that indicates the status of the most recent build for the specified branch.
:param str project: Project ID or project name
:param str repo_type: The repository type.
:param str repo_id: The repository ID.
:param str branch_name: The branch name.
:rtype: :class:`<BuildBadge> <azure.devops.v7_0.build.models.BuildBadge>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if repo_type is not None:
route_values['repoType'] = self._serialize.url('repo_type', repo_type, 'str')
query_parameters = {}
if repo_id is not None:
query_parameters['repoId'] = self._serialize.query('repo_id', repo_id, 'str')
if branch_name is not None:
query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
response = self._send(http_method='GET',
location_id='21b3b9ce-fad5-4567-9ad0-80679794e003',
version='7.0-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('BuildBadge', response)
def get_build_badge_data(self, project, repo_type, repo_id=None, branch_name=None):
"""GetBuildBadgeData.
[Preview API] Gets a badge that indicates the status of the most recent build for the specified branch.
:param str project: Project ID or project name
:param str repo_type: The repository type.
:param str repo_id: The repository ID.
:param str branch_name: The branch name.
:rtype: str
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if repo_type is not None:
route_values['repoType'] = self._serialize.url('repo_type', repo_type, 'str')
query_parameters = {}
if repo_id is not None:
query_parameters['repoId'] = self._serialize.query('repo_id', repo_id, 'str')
if branch_name is not None:
query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
response = self._send(http_method='GET',
location_id='21b3b9ce-fad5-4567-9ad0-80679794e003',
version='7.0-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('str', response)
def get_retention_leases_for_build(self, project, build_id):
"""GetRetentionLeasesForBuild.
Gets all retention leases that apply to a specific build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [RetentionLease]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='3da19a6a-f088-45c4-83ce-2ad3a87be6c4',
version='7.0',
route_values=route_values)
return self._deserialize('[RetentionLease]', self._unwrap_collection(response))
def delete_build(self, project, build_id):
"""DeleteBuild.
Deletes a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
self._send(http_method='DELETE',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='7.0',
route_values=route_values)
def get_build(self, project, build_id, property_filters=None):
"""GetBuild.
Gets a build
:param str project: Project ID or project name
:param int build_id:
:param str property_filters:
:rtype: :class:`<Build> <azure.devops.v7_0.build.models.Build>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if property_filters is not None:
query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
response = self._send(http_method='GET',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Build', response)
def get_builds(self, project, definitions=None, queues=None, build_number=None, min_time=None, max_time=None, requested_for=None, reason_filter=None, status_filter=None, result_filter=None, tag_filters=None, properties=None, top=None, continuation_token=None, max_builds_per_definition=None, deleted_filter=None, query_order=None, branch_name=None, build_ids=None, repository_id=None, repository_type=None):
"""GetBuilds.
Gets a list of builds.
:param str project: Project ID or project name
:param [int] definitions: A comma-delimited list of definition IDs. If specified, filters to builds for these definitions.
:param [int] queues: A comma-delimited list of queue IDs. If specified, filters to builds that ran against these queues.
:param str build_number: If specified, filters to builds that match this build number. Append * to do a prefix search.
:param datetime min_time: If specified, filters to builds that finished/started/queued after this date based on the queryOrder specified.
:param datetime max_time: If specified, filters to builds that finished/started/queued before this date based on the queryOrder specified.
:param str requested_for: If specified, filters to builds requested for the specified user.
:param str reason_filter: If specified, filters to builds that match this reason.
:param str status_filter: If specified, filters to builds that match this status.
:param str result_filter: If specified, filters to builds that match this result.
:param [str] tag_filters: A comma-delimited list of tags. If specified, filters to builds that have the specified tags.
:param [str] properties: A comma-delimited list of properties to retrieve.
:param int top: The maximum number of builds to return.
:param str continuation_token: A continuation token, returned by a previous call to this method, that can be used to return the next set of builds.
:param int max_builds_per_definition: The maximum number of builds to return per definition.
:param str deleted_filter: Indicates whether to exclude, include, or only return deleted builds.
:param str query_order: The order in which builds should be returned.
:param str branch_name: If specified, filters to builds that built branches that built this branch.
:param [int] build_ids: A comma-delimited list that specifies the IDs of builds to retrieve.
:param str repository_id: If specified, filters to builds that built from this repository.
:param str repository_type: If specified, filters to builds that built from repositories of this type.
:rtype: :class:`<[Build]> <azure.devops.v7_0.build.models.[Build]>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if definitions is not None:
definitions = ",".join(map(str, definitions))
query_parameters['definitions'] = self._serialize.query('definitions', definitions, 'str')
if queues is not None:
queues = ",".join(map(str, queues))
query_parameters['queues'] = self._serialize.query('queues', queues, 'str')
if build_number is not None:
query_parameters['buildNumber'] = self._serialize.query('build_number', build_number, 'str')
if min_time is not None:
query_parameters['minTime'] = self._serialize.query('min_time', min_time, 'iso-8601')
if max_time is not None:
query_parameters['maxTime'] = self._serialize.query('max_time', max_time, 'iso-8601')
if requested_for is not None:
query_parameters['requestedFor'] = self._serialize.query('requested_for', requested_for, 'str')
if reason_filter is not None:
query_parameters['reasonFilter'] = self._serialize.query('reason_filter', reason_filter, 'str')
if status_filter is not None:
query_parameters['statusFilter'] = self._serialize.query('status_filter', status_filter, 'str')
if result_filter is not None:
query_parameters['resultFilter'] = self._serialize.query('result_filter', result_filter, 'str')
if tag_filters is not None:
tag_filters = ",".join(tag_filters)
query_parameters['tagFilters'] = self._serialize.query('tag_filters', tag_filters, 'str')
if properties is not None:
properties = ",".join(properties)
query_parameters['properties'] = self._serialize.query('properties', properties, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if max_builds_per_definition is not None:
query_parameters['maxBuildsPerDefinition'] = self._serialize.query('max_builds_per_definition', max_builds_per_definition, 'int')
if deleted_filter is not None:
query_parameters['deletedFilter'] = self._serialize.query('deleted_filter', deleted_filter, 'str')
if query_order is not None:
query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str')
if branch_name is not None:
query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
if build_ids is not None:
build_ids = ",".join(map(str, build_ids))
query_parameters['buildIds'] = self._serialize.query('build_ids', build_ids, 'str')
if repository_id is not None:
query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str')
if repository_type is not None:
query_parameters['repositoryType'] = self._serialize.query('repository_type', repository_type, 'str')
response = self._send(http_method='GET',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Build]', self._unwrap_collection(response))
def queue_build(self, build, project, ignore_warnings=None, check_in_ticket=None, source_build_id=None, definition_id=None):
"""QueueBuild.
Queues a build
:param :class:`<Build> <azure.devops.v7_0.build.models.Build>` build:
:param str project: Project ID or project name
:param bool ignore_warnings:
:param str check_in_ticket:
:param int source_build_id:
:param int definition_id: Optional definition id to queue a build without a body. Ignored if there's a valid body
:rtype: :class:`<Build> <azure.devops.v7_0.build.models.Build>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if ignore_warnings is not None:
query_parameters['ignoreWarnings'] = self._serialize.query('ignore_warnings', ignore_warnings, 'bool')
if check_in_ticket is not None:
query_parameters['checkInTicket'] = self._serialize.query('check_in_ticket', check_in_ticket, 'str')
if source_build_id is not None:
query_parameters['sourceBuildId'] = self._serialize.query('source_build_id', source_build_id, 'int')
if definition_id is not None:
query_parameters['definitionId'] = self._serialize.query('definition_id', definition_id, 'int')
content = self._serialize.body(build, 'Build')
response = self._send(http_method='POST',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Build', response)
def update_build(self, build, project, build_id, retry=None):
"""UpdateBuild.
Updates a build.
:param :class:`<Build> <azure.devops.v7_0.build.models.Build>` build: The build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param bool retry:
:rtype: :class:`<Build> <azure.devops.v7_0.build.models.Build>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if retry is not None:
query_parameters['retry'] = self._serialize.query('retry', retry, 'bool')
content = self._serialize.body(build, 'Build')
response = self._send(http_method='PATCH',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Build', response)
def update_builds(self, builds, project):
"""UpdateBuilds.
Updates multiple builds.
:param [Build] builds: The builds to update.
:param str project: Project ID or project name
:rtype: [Build]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(builds, '[Build]')
response = self._send(http_method='PATCH',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[Build]', self._unwrap_collection(response))
def get_build_controller(self, controller_id):
"""GetBuildController.
Gets a controller
:param int controller_id:
:rtype: :class:`<BuildController> <azure.devops.v7_0.build.models.BuildController>`
"""
route_values = {}
if controller_id is not None:
route_values['controllerId'] = self._serialize.url('controller_id', controller_id, 'int')
response = self._send(http_method='GET',
location_id='fcac1932-2ee1-437f-9b6f-7f696be858f6',
version='7.0',
route_values=route_values)
return self._deserialize('BuildController', response)
def get_build_controllers(self, name=None):
"""GetBuildControllers.
Gets controller, optionally filtered by name
:param str name:
:rtype: [BuildController]
"""
query_parameters = {}
if name is not None:
query_parameters['name'] = self._serialize.query('name', name, 'str')
response = self._send(http_method='GET',
location_id='fcac1932-2ee1-437f-9b6f-7f696be858f6',
version='7.0',
query_parameters=query_parameters)
return self._deserialize('[BuildController]', self._unwrap_collection(response))
def create_definition(self, definition, project, definition_to_clone_id=None, definition_to_clone_revision=None):
"""CreateDefinition.
Creates a new definition.
:param :class:`<BuildDefinition> <azure.devops.v7_0.build.models.BuildDefinition>` definition: The definition.
:param str project: Project ID or project name
:param int definition_to_clone_id:
:param int definition_to_clone_revision:
:rtype: :class:`<BuildDefinition> <azure.devops.v7_0.build.models.BuildDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if definition_to_clone_id is not None:
query_parameters['definitionToCloneId'] = self._serialize.query('definition_to_clone_id', definition_to_clone_id, 'int')
if definition_to_clone_revision is not None:
query_parameters['definitionToCloneRevision'] = self._serialize.query('definition_to_clone_revision', definition_to_clone_revision, 'int')
content = self._serialize.body(definition, 'BuildDefinition')
response = self._send(http_method='POST',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('BuildDefinition', response)
def delete_definition(self, project, definition_id):
"""DeleteDefinition.
Deletes a definition and all associated builds.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
self._send(http_method='DELETE',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='7.0',
route_values=route_values)
def get_definition(self, project, definition_id, revision=None, min_metrics_time=None, property_filters=None, include_latest_builds=None):
"""GetDefinition.
Gets a definition, optionally at a specific revision.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param int revision: The revision number to retrieve. If this is not specified, the latest version will be returned.
:param datetime min_metrics_time: If specified, indicates the date from which metrics should be included.
:param [str] property_filters: A comma-delimited list of properties to include in the results.
:param bool include_latest_builds:
:rtype: :class:`<BuildDefinition> <azure.devops.v7_0.build.models.BuildDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if revision is not None:
query_parameters['revision'] = self._serialize.query('revision', revision, 'int')
if min_metrics_time is not None:
query_parameters['minMetricsTime'] = self._serialize.query('min_metrics_time', min_metrics_time, 'iso-8601')
if property_filters is not None:
property_filters = ",".join(property_filters)
query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
if include_latest_builds is not None:
query_parameters['includeLatestBuilds'] = self._serialize.query('include_latest_builds', include_latest_builds, 'bool')
response = self._send(http_method='GET',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('BuildDefinition', response)
def get_definitions(self, project, name=None, repository_id=None, repository_type=None, query_order=None, top=None, continuation_token=None, min_metrics_time=None, definition_ids=None, path=None, built_after=None, not_built_after=None, include_all_properties=None, include_latest_builds=None, task_id_filter=None, process_type=None, yaml_filename=None):
"""GetDefinitions.
Gets a list of definitions.
:param str project: Project ID or project name
:param str name: If specified, filters to definitions whose names match this pattern.
:param str repository_id: A repository ID. If specified, filters to definitions that use this repository.
:param str repository_type: If specified, filters to definitions that have a repository of this type.
:param str query_order: Indicates the order in which definitions should be returned.
:param int top: The maximum number of definitions to return.
:param str continuation_token: A continuation token, returned by a previous call to this method, that can be used to return the next set of definitions.
:param datetime min_metrics_time: If specified, indicates the date from which metrics should be included.
:param [int] definition_ids: A comma-delimited list that specifies the IDs of definitions to retrieve.
:param str path: If specified, filters to definitions under this folder.
:param datetime built_after: If specified, filters to definitions that have builds after this date.
:param datetime not_built_after: If specified, filters to definitions that do not have builds after this date.
:param bool include_all_properties: Indicates whether the full definitions should be returned. By default, shallow representations of the definitions are returned.
:param bool include_latest_builds: Indicates whether to return the latest and latest completed builds for this definition.
:param str task_id_filter: If specified, filters to definitions that use the specified task.
:param int process_type: If specified, filters to definitions with the given process type.
:param str yaml_filename: If specified, filters to YAML definitions that match the given filename. To use this filter includeAllProperties should be set to true
:rtype: :class:`<[BuildDefinitionReference]> <azure.devops.v7_0.build.models.[BuildDefinitionReference]>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if name is not None:
query_parameters['name'] = self._serialize.query('name', name, 'str')
if repository_id is not None:
query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str')
if repository_type is not None:
query_parameters['repositoryType'] = self._serialize.query('repository_type', repository_type, 'str')
if query_order is not None:
query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if min_metrics_time is not None:
query_parameters['minMetricsTime'] = self._serialize.query('min_metrics_time', min_metrics_time, 'iso-8601')
if definition_ids is not None:
definition_ids = ",".join(map(str, definition_ids))
query_parameters['definitionIds'] = self._serialize.query('definition_ids', definition_ids, 'str')
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
if built_after is not None:
query_parameters['builtAfter'] = self._serialize.query('built_after', built_after, 'iso-8601')
if not_built_after is not None:
query_parameters['notBuiltAfter'] = self._serialize.query('not_built_after', not_built_after, 'iso-8601')
if include_all_properties is not None:
query_parameters['includeAllProperties'] = self._serialize.query('include_all_properties', include_all_properties, 'bool')
if include_latest_builds is not None:
query_parameters['includeLatestBuilds'] = self._serialize.query('include_latest_builds', include_latest_builds, 'bool')
if task_id_filter is not None:
query_parameters['taskIdFilter'] = self._serialize.query('task_id_filter', task_id_filter, 'str')
if process_type is not None:
query_parameters['processType'] = self._serialize.query('process_type', process_type, 'int')
if yaml_filename is not None:
query_parameters['yamlFilename'] = self._serialize.query('yaml_filename', yaml_filename, 'str')
response = self._send(http_method='GET',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[BuildDefinitionReference]', self._unwrap_collection(response))
def restore_definition(self, project, definition_id, deleted):
"""RestoreDefinition.
Restores a deleted definition
:param str project: Project ID or project name
:param int definition_id: The identifier of the definition to restore.
:param bool deleted: When false, restores a deleted definition.
:rtype: :class:`<BuildDefinition> <azure.devops.v7_0.build.models.BuildDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if deleted is not None:
query_parameters['deleted'] = self._serialize.query('deleted', deleted, 'bool')
response = self._send(http_method='PATCH',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('BuildDefinition', response)
def update_definition(self, definition, project, definition_id, secrets_source_definition_id=None, secrets_source_definition_revision=None):
"""UpdateDefinition.
Updates an existing build definition. In order for this operation to succeed, the value of the "Revision" property of the request body must match the existing build definition's. It is recommended that you obtain the existing build definition by using GET, modify the build definition as necessary, and then submit the modified definition with PUT.
:param :class:`<BuildDefinition> <azure.devops.v7_0.build.models.BuildDefinition>` definition: The new version of the definition. Its "Revision" property must match the existing definition for the update to be accepted.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param int secrets_source_definition_id:
:param int secrets_source_definition_revision:
:rtype: :class:`<BuildDefinition> <azure.devops.v7_0.build.models.BuildDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if secrets_source_definition_id is not None:
query_parameters['secretsSourceDefinitionId'] = self._serialize.query('secrets_source_definition_id', secrets_source_definition_id, 'int')
if secrets_source_definition_revision is not None:
query_parameters['secretsSourceDefinitionRevision'] = self._serialize.query('secrets_source_definition_revision', secrets_source_definition_revision, 'int')
content = self._serialize.body(definition, 'BuildDefinition')
response = self._send(http_method='PUT',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('BuildDefinition', response)
def get_file_contents(self, project, provider_name, service_endpoint_id=None, repository=None, commit_or_branch=None, path=None, **kwargs):
"""GetFileContents.
Gets the contents of a file in the given source code repository.
:param str project: Project ID or project name
:param str provider_name: The name of the source provider.
:param str service_endpoint_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit.
:param str repository: If specified, the vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories.
:param str commit_or_branch: The identifier of the commit or branch from which a file's contents are retrieved.
:param str path: The path to the file to retrieve, relative to the root of the repository.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if provider_name is not None:
route_values['providerName'] = self._serialize.url('provider_name', provider_name, 'str')
query_parameters = {}
if service_endpoint_id is not None:
query_parameters['serviceEndpointId'] = self._serialize.query('service_endpoint_id', service_endpoint_id, 'str')
if repository is not None:
query_parameters['repository'] = self._serialize.query('repository', repository, 'str')
if commit_or_branch is not None:
query_parameters['commitOrBranch'] = self._serialize.query('commit_or_branch', commit_or_branch, 'str')
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
response = self._send(http_method='GET',
location_id='29d12225-b1d9-425f-b668-6c594a981313',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='text/plain')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def create_folder(self, folder, project, path):
"""CreateFolder.
[Preview API] Creates a new folder.
:param :class:`<Folder> <azure.devops.v7_0.build.models.Folder>` folder: The folder.
:param str project: Project ID or project name
:param str path: The full path of the folder.
:rtype: :class:`<Folder> <azure.devops.v7_0.build.models.Folder>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
content = self._serialize.body(folder, 'Folder')
response = self._send(http_method='PUT',
location_id='a906531b-d2da-4f55-bda7-f3e676cc50d9',
version='7.0-preview.2',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Folder', response)
def delete_folder(self, project, path):
"""DeleteFolder.
[Preview API] Deletes a definition folder. Definitions and their corresponding builds will also be deleted.
:param str project: Project ID or project name
:param str path: The full path to the folder.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
self._send(http_method='DELETE',
location_id='a906531b-d2da-4f55-bda7-f3e676cc50d9',
version='7.0-preview.2',
route_values=route_values,
query_parameters=query_parameters)
def get_folders(self, project, path=None, query_order=None):
"""GetFolders.
[Preview API] Gets a list of build definition folders.
:param str project: Project ID or project name
:param str path: The path to start with.
:param str query_order: The order in which folders should be returned.
:rtype: [Folder]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if path is not None:
route_values['path'] = self._serialize.url('path', path, 'str')
query_parameters = {}
if query_order is not None:
query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str')
response = self._send(http_method='GET',
location_id='a906531b-d2da-4f55-bda7-f3e676cc50d9',
version='7.0-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Folder]', self._unwrap_collection(response))
def update_folder(self, folder, project, path):
"""UpdateFolder.
[Preview API] Updates an existing folder at given existing path
:param :class:`<Folder> <azure.devops.v7_0.build.models.Folder>` folder: The new version of the folder.
:param str project: Project ID or project name
:param str path: The full path to the folder.
:rtype: :class:`<Folder> <azure.devops.v7_0.build.models.Folder>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
content = self._serialize.body(folder, 'Folder')
response = self._send(http_method='POST',
location_id='a906531b-d2da-4f55-bda7-f3e676cc50d9',
version='7.0-preview.2',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Folder', response)
def get_build_general_settings(self, project):
"""GetBuildGeneralSettings.
Gets pipeline general settings.
:param str project: Project ID or project name
:rtype: :class:`<PipelineGeneralSettings> <azure.devops.v7_0.build.models.PipelineGeneralSettings>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='c4aefd19-30ff-405b-80ad-aca021e7242a',
version='7.0',
route_values=route_values)
return self._deserialize('PipelineGeneralSettings', response)
def update_build_general_settings(self, new_settings, project):
"""UpdateBuildGeneralSettings.
Updates pipeline general settings.
:param :class:`<PipelineGeneralSettings> <azure.devops.v7_0.build.models.PipelineGeneralSettings>` new_settings:
:param str project: Project ID or project name
:rtype: :class:`<PipelineGeneralSettings> <azure.devops.v7_0.build.models.PipelineGeneralSettings>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(new_settings, 'PipelineGeneralSettings')
response = self._send(http_method='PATCH',
location_id='c4aefd19-30ff-405b-80ad-aca021e7242a',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('PipelineGeneralSettings', response)
def get_retention_history(self, days_to_lookback=None):
"""GetRetentionHistory.
Returns the retention history for the project collection. This includes pipelines that have custom retention rules that may prevent the retention job from cleaning them up, runs per pipeline with retention type, files associated with pipelines owned by the collection with retention type, and the number of files per pipeline.
:param int days_to_lookback:
:rtype: :class:`<BuildRetentionHistory> <azure.devops.v7_0.build.models.BuildRetentionHistory>`
"""
query_parameters = {}
if days_to_lookback is not None:
query_parameters['daysToLookback'] = self._serialize.query('days_to_lookback', days_to_lookback, 'int')
response = self._send(http_method='GET',
location_id='1a9c48be-0ef5-4ec2-b94f-f053bdd2d3bf',
version='7.0',
query_parameters=query_parameters)
return self._deserialize('BuildRetentionHistory', response)
def get_build_changes(self, project, build_id, continuation_token=None, top=None, include_source_change=None):
"""GetBuildChanges.
Gets the changes associated with a build
:param str project: Project ID or project name
:param int build_id:
:param str continuation_token:
:param int top: The maximum number of changes to return
:param bool include_source_change:
:rtype: :class:`<[Change]> <azure.devops.v7_0.build.models.[Change]>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if include_source_change is not None:
query_parameters['includeSourceChange'] = self._serialize.query('include_source_change', include_source_change, 'bool')
response = self._send(http_method='GET',
location_id='54572c7b-bbd3-45d4-80dc-28be08941620',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Change]', self._unwrap_collection(response))
def get_changes_between_builds(self, project, from_build_id=None, to_build_id=None, top=None):
"""GetChangesBetweenBuilds.
Gets the changes made to the repository between two given builds.
:param str project: Project ID or project name
:param int from_build_id: The ID of the first build.
:param int to_build_id: The ID of the last build.
:param int top: The maximum number of changes to return.
:rtype: [Change]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if from_build_id is not None:
query_parameters['fromBuildId'] = self._serialize.query('from_build_id', from_build_id, 'int')
if to_build_id is not None:
query_parameters['toBuildId'] = self._serialize.query('to_build_id', to_build_id, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='f10f0ea5-18a1-43ec-a8fb-2042c7be9b43',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Change]', self._unwrap_collection(response))
def get_latest_build(self, project, definition, branch_name=None):
"""GetLatestBuild.
[Preview API] Gets the latest build for a definition, optionally scoped to a specific branch.
:param str project: Project ID or project name
:param str definition: definition name with optional leading folder path, or the definition id
:param str branch_name: optional parameter that indicates the specific branch to use. If not specified, the default branch is used.
:rtype: :class:`<Build> <azure.devops.v7_0.build.models.Build>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition is not None:
route_values['definition'] = self._serialize.url('definition', definition, 'str')
query_parameters = {}
if branch_name is not None:
query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
response = self._send(http_method='GET',
location_id='54481611-01f4-47f3-998f-160da0f0c229',
version='7.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Build', response)
def add_retention_leases(self, new_leases, project):
"""AddRetentionLeases.
Adds new leases for pipeline runs.
:param [NewRetentionLease] new_leases:
:param str project: Project ID or project name
:rtype: [RetentionLease]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(new_leases, '[NewRetentionLease]')
response = self._send(http_method='POST',
location_id='272051e4-9af1-45b5-ae22-8d960a5539d4',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[RetentionLease]', self._unwrap_collection(response))
def delete_retention_leases_by_id(self, project, ids):
"""DeleteRetentionLeasesById.
Removes specific retention leases.
:param str project: Project ID or project name
:param [int] ids:
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if ids is not None:
ids = ",".join(map(str, ids))
query_parameters['ids'] = self._serialize.query('ids', ids, 'str')
self._send(http_method='DELETE',
location_id='272051e4-9af1-45b5-ae22-8d960a5539d4',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
def get_retention_lease(self, project, lease_id):
"""GetRetentionLease.
Returns the details of the retention lease given a lease id.
:param str project: Project ID or project name
:param int lease_id:
:rtype: :class:`<RetentionLease> <azure.devops.v7_0.build.models.RetentionLease>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if lease_id is not None:
route_values['leaseId'] = self._serialize.url('lease_id', lease_id, 'int')
response = self._send(http_method='GET',
location_id='272051e4-9af1-45b5-ae22-8d960a5539d4',
version='7.0',
route_values=route_values)
return self._deserialize('RetentionLease', response)
def get_retention_leases_by_minimal_retention_leases(self, project, leases_to_fetch):
"""GetRetentionLeasesByMinimalRetentionLeases.
Returns any leases matching the specified MinimalRetentionLeases
:param str project: Project ID or project name
:param [MinimalRetentionLease] leases_to_fetch: List of JSON-serialized MinimalRetentionLeases separated by '|'
:rtype: [RetentionLease]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if leases_to_fetch is not None:
leases_to_fetch = "|".join(map(str, leases_to_fetch))
query_parameters['leasesToFetch'] = self._serialize.query('leases_to_fetch', leases_to_fetch, 'str')
response = self._send(http_method='GET',
location_id='272051e4-9af1-45b5-ae22-8d960a5539d4',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[RetentionLease]', self._unwrap_collection(response))
def get_retention_leases_by_owner_id(self, project, owner_id=None, definition_id=None, run_id=None):
"""GetRetentionLeasesByOwnerId.
Returns any leases owned by the specified entity, optionally scoped to a single pipeline definition and run.
:param str project: Project ID or project name
:param str owner_id:
:param int definition_id: An optional parameter to limit the search to a specific pipeline definition.
:param int run_id: An optional parameter to limit the search to a single pipeline run. Requires definitionId.
:rtype: [RetentionLease]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if owner_id is not None:
query_parameters['ownerId'] = self._serialize.query('owner_id', owner_id, 'str')
if definition_id is not None:
query_parameters['definitionId'] = self._serialize.query('definition_id', definition_id, 'int')
if run_id is not None:
query_parameters['runId'] = self._serialize.query('run_id', run_id, 'int')
response = self._send(http_method='GET',
location_id='272051e4-9af1-45b5-ae22-8d960a5539d4',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[RetentionLease]', self._unwrap_collection(response))
def get_retention_leases_by_user_id(self, project, user_owner_id, definition_id=None, run_id=None):
"""GetRetentionLeasesByUserId.
Returns any leases owned by the specified user, optionally scoped to a single pipeline definition and run.
:param str project: Project ID or project name
:param str user_owner_id: The user id to search for.
:param int definition_id: An optional parameter to limit the search to a specific pipeline definition.
:param int run_id: An optional parameter to limit the search to a single pipeline run. Requires definitionId.
:rtype: [RetentionLease]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if user_owner_id is not None:
query_parameters['userOwnerId'] = self._serialize.query('user_owner_id', user_owner_id, 'str')
if definition_id is not None:
query_parameters['definitionId'] = self._serialize.query('definition_id', definition_id, 'int')
if run_id is not None:
query_parameters['runId'] = self._serialize.query('run_id', run_id, 'int')
response = self._send(http_method='GET',
location_id='272051e4-9af1-45b5-ae22-8d960a5539d4',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[RetentionLease]', self._unwrap_collection(response))
def update_retention_lease(self, lease_update, project, lease_id):
"""UpdateRetentionLease.
Updates the duration or pipeline protection status of a retention lease.
:param :class:`<RetentionLeaseUpdate> <azure.devops.v7_0.build.models.RetentionLeaseUpdate>` lease_update: The new data for the retention lease.
:param str project: Project ID or project name
:param int lease_id: The ID of the lease to update.
:rtype: :class:`<RetentionLease> <azure.devops.v7_0.build.models.RetentionLease>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if lease_id is not None:
route_values['leaseId'] = self._serialize.url('lease_id', lease_id, 'int')
content = self._serialize.body(lease_update, 'RetentionLeaseUpdate')
response = self._send(http_method='PATCH',
location_id='272051e4-9af1-45b5-ae22-8d960a5539d4',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('RetentionLease', response)
def get_build_log(self, project, build_id, log_id, start_line=None, end_line=None, **kwargs):
"""GetBuildLog.
Gets an individual log file for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int log_id: The ID of the log file.
:param long start_line: The start line.
:param long end_line: The end line.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if log_id is not None:
route_values['logId'] = self._serialize.url('log_id', log_id, 'int')
query_parameters = {}
if start_line is not None:
query_parameters['startLine'] = self._serialize.query('start_line', start_line, 'long')
if end_line is not None:
query_parameters['endLine'] = self._serialize.query('end_line', end_line, 'long')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='text/plain')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_build_log_lines(self, project, build_id, log_id, start_line=None, end_line=None):
"""GetBuildLogLines.
Gets an individual log file for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int log_id: The ID of the log file.
:param long start_line: The start line.
:param long end_line: The end line.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if log_id is not None:
route_values['logId'] = self._serialize.url('log_id', log_id, 'int')
query_parameters = {}
if start_line is not None:
query_parameters['startLine'] = self._serialize.query('start_line', start_line, 'long')
if end_line is not None:
query_parameters['endLine'] = self._serialize.query('end_line', end_line, 'long')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[str]', self._unwrap_collection(response))
def get_build_logs(self, project, build_id):
"""GetBuildLogs.
Gets the logs for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [BuildLog]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='7.0',
route_values=route_values)
return self._deserialize('[BuildLog]', self._unwrap_collection(response))
def get_build_logs_zip(self, project, build_id, **kwargs):
"""GetBuildLogsZip.
Gets the logs for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='7.0',
route_values=route_values,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_build_log_zip(self, project, build_id, log_id, start_line=None, end_line=None, **kwargs):
"""GetBuildLogZip.
Gets an individual log file for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int log_id: The ID of the log file.
:param long start_line: The start line.
:param long end_line: The end line.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if log_id is not None:
route_values['logId'] = self._serialize.url('log_id', log_id, 'int')
query_parameters = {}
if start_line is not None:
query_parameters['startLine'] = self._serialize.query('start_line', start_line, 'long')
if end_line is not None:
query_parameters['endLine'] = self._serialize.query('end_line', end_line, 'long')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_project_metrics(self, project, metric_aggregation_type=None, min_metrics_time=None):
"""GetProjectMetrics.
[Preview API] Gets build metrics for a project.
:param str project: Project ID or project name
:param str metric_aggregation_type: The aggregation type to use (hourly, daily).
:param datetime min_metrics_time: The date from which to calculate metrics.
:rtype: [BuildMetric]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if metric_aggregation_type is not None:
route_values['metricAggregationType'] = self._serialize.url('metric_aggregation_type', metric_aggregation_type, 'str')
query_parameters = {}
if min_metrics_time is not None:
query_parameters['minMetricsTime'] = self._serialize.query('min_metrics_time', min_metrics_time, 'iso-8601')
response = self._send(http_method='GET',
location_id='7433fae7-a6bc-41dc-a6e2-eef9005ce41a',
version='7.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[BuildMetric]', self._unwrap_collection(response))
def get_definition_metrics(self, project, definition_id, min_metrics_time=None):
"""GetDefinitionMetrics.
[Preview API] Gets build metrics for a definition.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param datetime min_metrics_time: The date from which to calculate metrics.
:rtype: [BuildMetric]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if min_metrics_time is not None:
query_parameters['minMetricsTime'] = self._serialize.query('min_metrics_time', min_metrics_time, 'iso-8601')
response = self._send(http_method='GET',
location_id='d973b939-0ce0-4fec-91d8-da3940fa1827',
version='7.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[BuildMetric]', self._unwrap_collection(response))
def get_build_option_definitions(self, project=None):
"""GetBuildOptionDefinitions.
Gets all build definition options supported by the system.
:param str project: Project ID or project name
:rtype: [BuildOptionDefinition]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='591cb5a4-2d46-4f3a-a697-5cd42b6bd332',
version='7.0',
route_values=route_values)
return self._deserialize('[BuildOptionDefinition]', self._unwrap_collection(response))
def get_path_contents(self, project, provider_name, service_endpoint_id=None, repository=None, commit_or_branch=None, path=None):
"""GetPathContents.
Gets the contents of a directory in the given source code repository.
:param str project: Project ID or project name
:param str provider_name: The name of the source provider.
:param str service_endpoint_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit.
:param str repository: If specified, the vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories.
:param str commit_or_branch: The identifier of the commit or branch from which a file's contents are retrieved.
:param str path: The path contents to list, relative to the root of the repository.
:rtype: [SourceRepositoryItem]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if provider_name is not None:
route_values['providerName'] = self._serialize.url('provider_name', provider_name, 'str')
query_parameters = {}
if service_endpoint_id is not None:
query_parameters['serviceEndpointId'] = self._serialize.query('service_endpoint_id', service_endpoint_id, 'str')
if repository is not None:
query_parameters['repository'] = self._serialize.query('repository', repository, 'str')
if commit_or_branch is not None:
query_parameters['commitOrBranch'] = self._serialize.query('commit_or_branch', commit_or_branch, 'str')
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
response = self._send(http_method='GET',
location_id='7944d6fb-df01-4709-920a-7a189aa34037',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[SourceRepositoryItem]', self._unwrap_collection(response))
def get_build_properties(self, project, build_id, filter=None):
"""GetBuildProperties.
Gets properties for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param [str] filter: A comma-delimited list of properties. If specified, filters to these specific properties.
:rtype: :class:`<object> <azure.devops.v7_0.build.models.object>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if filter is not None:
filter = ",".join(filter)
query_parameters['filter'] = self._serialize.query('filter', filter, 'str')
response = self._send(http_method='GET',
location_id='0a6312e9-0627-49b7-8083-7d74a64849c9',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('object', response)
def update_build_properties(self, document, project, build_id):
"""UpdateBuildProperties.
Updates properties for a build.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_0.build.models.[JsonPatchOperation]>` document: A json-patch document describing the properties to update.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: :class:`<object> <azure.devops.v7_0.build.models.object>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='PATCH',
location_id='0a6312e9-0627-49b7-8083-7d74a64849c9',
version='7.0',
route_values=route_values,
content=content,
media_type='application/json-patch+json')
return self._deserialize('object', response)
def get_definition_properties(self, project, definition_id, filter=None):
"""GetDefinitionProperties.
Gets properties for a definition.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param [str] filter: A comma-delimited list of properties. If specified, filters to these specific properties.
:rtype: :class:`<object> <azure.devops.v7_0.build.models.object>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if filter is not None:
filter = ",".join(filter)
query_parameters['filter'] = self._serialize.query('filter', filter, 'str')
response = self._send(http_method='GET',
location_id='d9826ad7-2a68-46a9-a6e9-677698777895',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('object', response)
def update_definition_properties(self, document, project, definition_id):
"""UpdateDefinitionProperties.
Updates properties for a definition.
:param :class:`<[JsonPatchOperation]> <azure.devops.v7_0.build.models.[JsonPatchOperation]>` document: A json-patch document describing the properties to update.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:rtype: :class:`<object> <azure.devops.v7_0.build.models.object>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
content = self._serialize.body(document, '[JsonPatchOperation]')
response = self._send(http_method='PATCH',
location_id='d9826ad7-2a68-46a9-a6e9-677698777895',
version='7.0',
route_values=route_values,
content=content,
media_type='application/json-patch+json')
return self._deserialize('object', response)
def get_pull_request(self, project, provider_name, pull_request_id, repository_id=None, service_endpoint_id=None):
"""GetPullRequest.
Gets a pull request object from source provider.
:param str project: Project ID or project name
:param str provider_name: The name of the source provider.
:param str pull_request_id: Vendor-specific id of the pull request.
:param str repository_id: Vendor-specific identifier or the name of the repository that contains the pull request.
:param str service_endpoint_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit.
:rtype: :class:`<PullRequest> <azure.devops.v7_0.build.models.PullRequest>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if provider_name is not None:
route_values['providerName'] = self._serialize.url('provider_name', provider_name, 'str')
if pull_request_id is not None:
route_values['pullRequestId'] = self._serialize.url('pull_request_id', pull_request_id, 'str')
query_parameters = {}
if repository_id is not None:
query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str')
if service_endpoint_id is not None:
query_parameters['serviceEndpointId'] = self._serialize.query('service_endpoint_id', service_endpoint_id, 'str')
response = self._send(http_method='GET',
location_id='d8763ec7-9ff0-4fb4-b2b2-9d757906ff14',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('PullRequest', response)
def get_build_report(self, project, build_id, type=None):
"""GetBuildReport.
Gets a build report.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str type:
:rtype: :class:`<BuildReportMetadata> <azure.devops.v7_0.build.models.BuildReportMetadata>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if type is not None:
query_parameters['type'] = self._serialize.query('type', type, 'str')
response = self._send(http_method='GET',
location_id='45bcaa88-67e1-4042-a035-56d3b4a7d44c',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('BuildReportMetadata', response)
def get_build_report_html_content(self, project, build_id, type=None, **kwargs):
"""GetBuildReportHtmlContent.
Gets a build report.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str type:
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if type is not None:
query_parameters['type'] = self._serialize.query('type', type, 'str')
response = self._send(http_method='GET',
location_id='45bcaa88-67e1-4042-a035-56d3b4a7d44c',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='text/html')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def list_repositories(self, project, provider_name, service_endpoint_id=None, repository=None, result_set=None, page_results=None, continuation_token=None):
"""ListRepositories.
Gets a list of source code repositories.
:param str project: Project ID or project name
:param str provider_name: The name of the source provider.
:param str service_endpoint_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit.
:param str repository: If specified, the vendor-specific identifier or the name of a single repository to get.
:param str result_set: 'top' for the repositories most relevant for the endpoint. If not set, all repositories are returned. Ignored if 'repository' is set.
:param bool page_results: If set to true, this will limit the set of results and will return a continuation token to continue the query.
:param str continuation_token: When paging results, this is a continuation token, returned by a previous call to this method, that can be used to return the next set of repositories.
:rtype: :class:`<SourceRepositories> <azure.devops.v7_0.build.models.SourceRepositories>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if provider_name is not None:
route_values['providerName'] = self._serialize.url('provider_name', provider_name, 'str')
query_parameters = {}
if service_endpoint_id is not None:
query_parameters['serviceEndpointId'] = self._serialize.query('service_endpoint_id', service_endpoint_id, 'str')
if repository is not None:
query_parameters['repository'] = self._serialize.query('repository', repository, 'str')
if result_set is not None:
query_parameters['resultSet'] = self._serialize.query('result_set', result_set, 'str')
if page_results is not None:
query_parameters['pageResults'] = self._serialize.query('page_results', page_results, 'bool')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
response = self._send(http_method='GET',
location_id='d44d1680-f978-4834-9b93-8c6e132329c9',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('SourceRepositories', response)
def authorize_definition_resources(self, resources, project, definition_id):
"""AuthorizeDefinitionResources.
[Preview API]
:param [DefinitionResourceReference] resources:
:param str project: Project ID or project name
:param int definition_id:
:rtype: [DefinitionResourceReference]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
content = self._serialize.body(resources, '[DefinitionResourceReference]')
response = self._send(http_method='PATCH',
location_id='ea623316-1967-45eb-89ab-e9e6110cf2d6',
version='7.0-preview.1',
route_values=route_values,
content=content)
return self._deserialize('[DefinitionResourceReference]', self._unwrap_collection(response))
def get_definition_resources(self, project, definition_id):
"""GetDefinitionResources.
[Preview API]
:param str project: Project ID or project name
:param int definition_id:
:rtype: [DefinitionResourceReference]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
response = self._send(http_method='GET',
location_id='ea623316-1967-45eb-89ab-e9e6110cf2d6',
version='7.0-preview.1',
route_values=route_values)
return self._deserialize('[DefinitionResourceReference]', self._unwrap_collection(response))
def get_resource_usage(self):
"""GetResourceUsage.
[Preview API] Gets information about build resources in the system.
:rtype: :class:`<BuildResourceUsage> <azure.devops.v7_0.build.models.BuildResourceUsage>`
"""
response = self._send(http_method='GET',
location_id='3813d06c-9e36-4ea1-aac3-61a485d60e3d',
version='7.0-preview.2')
return self._deserialize('BuildResourceUsage', response)
def get_retention_settings(self, project):
"""GetRetentionSettings.
Gets the project's retention settings.
:param str project: Project ID or project name
:rtype: :class:`<ProjectRetentionSetting> <azure.devops.v7_0.build.models.ProjectRetentionSetting>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='dadb46e7-5851-4c72-820e-ae8abb82f59f',
version='7.0',
route_values=route_values)
return self._deserialize('ProjectRetentionSetting', response)
def update_retention_settings(self, update_model, project):
"""UpdateRetentionSettings.
Updates the project's retention settings.
:param :class:`<UpdateProjectRetentionSettingModel> <azure.devops.v7_0.build.models.UpdateProjectRetentionSettingModel>` update_model:
:param str project: Project ID or project name
:rtype: :class:`<ProjectRetentionSetting> <azure.devops.v7_0.build.models.ProjectRetentionSetting>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(update_model, 'UpdateProjectRetentionSettingModel')
response = self._send(http_method='PATCH',
location_id='dadb46e7-5851-4c72-820e-ae8abb82f59f',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('ProjectRetentionSetting', response)
def get_definition_revisions(self, project, definition_id):
"""GetDefinitionRevisions.
Gets all revisions of a definition.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:rtype: [BuildDefinitionRevision]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
response = self._send(http_method='GET',
location_id='7c116775-52e5-453e-8c5d-914d9762d8c4',
version='7.0',
route_values=route_values)
return self._deserialize('[BuildDefinitionRevision]', self._unwrap_collection(response))
def get_build_settings(self, project=None):
"""GetBuildSettings.
Gets the build settings.
:param str project: Project ID or project name
:rtype: :class:`<BuildSettings> <azure.devops.v7_0.build.models.BuildSettings>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d',
version='7.0',
route_values=route_values)
return self._deserialize('BuildSettings', response)
def update_build_settings(self, settings, project=None):
"""UpdateBuildSettings.
Updates the build settings.
:param :class:`<BuildSettings> <azure.devops.v7_0.build.models.BuildSettings>` settings: The new settings.
:param str project: Project ID or project name
:rtype: :class:`<BuildSettings> <azure.devops.v7_0.build.models.BuildSettings>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(settings, 'BuildSettings')
response = self._send(http_method='PATCH',
location_id='aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('BuildSettings', response)
def list_source_providers(self, project):
"""ListSourceProviders.
Get a list of source providers and their capabilities.
:param str project: Project ID or project name
:rtype: [SourceProviderAttributes]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='3ce81729-954f-423d-a581-9fea01d25186',
version='7.0',
route_values=route_values)
return self._deserialize('[SourceProviderAttributes]', self._unwrap_collection(response))
def update_stage(self, update_parameters, build_id, stage_ref_name, project=None):
"""UpdateStage.
Update a build stage
:param :class:`<UpdateStageParameters> <azure.devops.v7_0.build.models.UpdateStageParameters>` update_parameters:
:param int build_id:
:param str stage_ref_name:
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if stage_ref_name is not None:
route_values['stageRefName'] = self._serialize.url('stage_ref_name', stage_ref_name, 'str')
content = self._serialize.body(update_parameters, 'UpdateStageParameters')
self._send(http_method='PATCH',
location_id='b8aac6c9-744b-46e1-88fc-3550969f9313',
version='7.0',
route_values=route_values,
content=content)
def get_status_badge(self, project, definition, branch_name=None, stage_name=None, job_name=None, configuration=None, label=None):
"""GetStatusBadge.
[Preview API] <p>Gets the build status for a definition, optionally scoped to a specific branch, stage, job, and configuration.</p> <p>If there are more than one, then it is required to pass in a stageName value when specifying a jobName, and the same rule then applies for both if passing a configuration parameter.</p>
:param str project: Project ID or project name
:param str definition: Either the definition name with optional leading folder path, or the definition id.
:param str branch_name: Only consider the most recent build for this branch. If not specified, the default branch is used.
:param str stage_name: Use this stage within the pipeline to render the status.
:param str job_name: Use this job within a stage of the pipeline to render the status.
:param str configuration: Use this job configuration to render the status
:param str label: Replaces the default text on the left side of the badge.
:rtype: str
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition is not None:
route_values['definition'] = self._serialize.url('definition', definition, 'str')
query_parameters = {}
if branch_name is not None:
query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
if stage_name is not None:
query_parameters['stageName'] = self._serialize.query('stage_name', stage_name, 'str')
if job_name is not None:
query_parameters['jobName'] = self._serialize.query('job_name', job_name, 'str')
if configuration is not None:
query_parameters['configuration'] = self._serialize.query('configuration', configuration, 'str')
if label is not None:
query_parameters['label'] = self._serialize.query('label', label, 'str')
response = self._send(http_method='GET',
location_id='07acfdce-4757-4439-b422-ddd13a2fcc10',
version='7.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('str', response)
def add_build_tag(self, project, build_id, tag):
"""AddBuildTag.
Adds a tag to a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str tag: The tag to add.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if tag is not None:
route_values['tag'] = self._serialize.url('tag', tag, 'str')
response = self._send(http_method='PUT',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='7.0',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def add_build_tags(self, tags, project, build_id):
"""AddBuildTags.
Adds tags to a build.
:param [str] tags: The tags to add.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
content = self._serialize.body(tags, '[str]')
response = self._send(http_method='POST',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[str]', self._unwrap_collection(response))
def delete_build_tag(self, project, build_id, tag):
"""DeleteBuildTag.
Removes a tag from a build. NOTE: This API will not work for tags with special characters. To remove tags with special characters, use the PATCH method instead (in 6.0+)
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str tag: The tag to remove.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if tag is not None:
route_values['tag'] = self._serialize.url('tag', tag, 'str')
response = self._send(http_method='DELETE',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='7.0',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def get_build_tags(self, project, build_id):
"""GetBuildTags.
Gets the tags for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='7.0',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def update_build_tags(self, update_parameters, project, build_id):
"""UpdateBuildTags.
Adds/Removes tags from a build.
:param :class:`<UpdateTagParameters> <azure.devops.v7_0.build.models.UpdateTagParameters>` update_parameters: The tags to add/remove.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
content = self._serialize.body(update_parameters, 'UpdateTagParameters')
response = self._send(http_method='PATCH',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[str]', self._unwrap_collection(response))
def add_definition_tag(self, project, definition_id, tag):
"""AddDefinitionTag.
Adds a tag to a definition
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param str tag: The tag to add.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
if tag is not None:
route_values['tag'] = self._serialize.url('tag', tag, 'str')
response = self._send(http_method='PUT',
location_id='cb894432-134a-4d31-a839-83beceaace4b',
version='7.0',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def add_definition_tags(self, tags, project, definition_id):
"""AddDefinitionTags.
Adds multiple tags to a definition.
:param [str] tags: The tags to add.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
content = self._serialize.body(tags, '[str]')
response = self._send(http_method='POST',
location_id='cb894432-134a-4d31-a839-83beceaace4b',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[str]', self._unwrap_collection(response))
def delete_definition_tag(self, project, definition_id, tag):
"""DeleteDefinitionTag.
Removes a tag from a definition. NOTE: This API will not work for tags with special characters. To remove tags with special characters, use the PATCH method instead (in 6.0+)
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param str tag: The tag to remove.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
if tag is not None:
route_values['tag'] = self._serialize.url('tag', tag, 'str')
response = self._send(http_method='DELETE',
location_id='cb894432-134a-4d31-a839-83beceaace4b',
version='7.0',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def get_definition_tags(self, project, definition_id, revision=None):
"""GetDefinitionTags.
Gets the tags for a definition.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param int revision: The definition revision number. If not specified, uses the latest revision of the definition.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if revision is not None:
query_parameters['revision'] = self._serialize.query('revision', revision, 'int')
response = self._send(http_method='GET',
location_id='cb894432-134a-4d31-a839-83beceaace4b',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[str]', self._unwrap_collection(response))
def update_definition_tags(self, update_parameters, project, definition_id):
"""UpdateDefinitionTags.
Adds/Removes tags from a definition.
:param :class:`<UpdateTagParameters> <azure.devops.v7_0.build.models.UpdateTagParameters>` update_parameters: The tags to add/remove.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
content = self._serialize.body(update_parameters, 'UpdateTagParameters')
response = self._send(http_method='PATCH',
location_id='cb894432-134a-4d31-a839-83beceaace4b',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[str]', self._unwrap_collection(response))
def delete_tag(self, project, tag):
"""DeleteTag.
Removes a tag from builds, definitions, and from the tag store
:param str project: Project ID or project name
:param str tag: The tag to remove.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if tag is not None:
route_values['tag'] = self._serialize.url('tag', tag, 'str')
response = self._send(http_method='DELETE',
location_id='d84ac5c6-edc7-43d5-adc9-1b34be5dea09',
version='7.0',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def get_tags(self, project):
"""GetTags.
Gets a list of all build tags in the project.
:param str project: Project ID or project name
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='d84ac5c6-edc7-43d5-adc9-1b34be5dea09',
version='7.0',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def delete_template(self, project, template_id):
"""DeleteTemplate.
Deletes a build definition template.
:param str project: Project ID or project name
:param str template_id: The ID of the template.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
self._send(http_method='DELETE',
location_id='e884571e-7f92-4d6a-9274-3f5649900835',
version='7.0',
route_values=route_values)
def get_template(self, project, template_id):
"""GetTemplate.
Gets a specific build definition template.
:param str project: Project ID or project name
:param str template_id: The ID of the requested template.
:rtype: :class:`<BuildDefinitionTemplate> <azure.devops.v7_0.build.models.BuildDefinitionTemplate>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
response = self._send(http_method='GET',
location_id='e884571e-7f92-4d6a-9274-3f5649900835',
version='7.0',
route_values=route_values)
return self._deserialize('BuildDefinitionTemplate', response)
def get_templates(self, project):
"""GetTemplates.
Gets all definition templates.
:param str project: Project ID or project name
:rtype: [BuildDefinitionTemplate]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='e884571e-7f92-4d6a-9274-3f5649900835',
version='7.0',
route_values=route_values)
return self._deserialize('[BuildDefinitionTemplate]', self._unwrap_collection(response))
def save_template(self, template, project, template_id):
"""SaveTemplate.
Updates an existing build definition template.
:param :class:`<BuildDefinitionTemplate> <azure.devops.v7_0.build.models.BuildDefinitionTemplate>` template: The new version of the template.
:param str project: Project ID or project name
:param str template_id: The ID of the template.
:rtype: :class:`<BuildDefinitionTemplate> <azure.devops.v7_0.build.models.BuildDefinitionTemplate>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
content = self._serialize.body(template, 'BuildDefinitionTemplate')
response = self._send(http_method='PUT',
location_id='e884571e-7f92-4d6a-9274-3f5649900835',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('BuildDefinitionTemplate', response)
def get_build_timeline(self, project, build_id, timeline_id=None, change_id=None, plan_id=None):
"""GetBuildTimeline.
Gets details for a build
:param str project: Project ID or project name
:param int build_id:
:param str timeline_id:
:param int change_id:
:param str plan_id:
:rtype: :class:`<Timeline> <azure.devops.v7_0.build.models.Timeline>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if timeline_id is not None:
route_values['timelineId'] = self._serialize.url('timeline_id', timeline_id, 'str')
query_parameters = {}
if change_id is not None:
query_parameters['changeId'] = self._serialize.query('change_id', change_id, 'int')
if plan_id is not None:
query_parameters['planId'] = self._serialize.query('plan_id', plan_id, 'str')
response = self._send(http_method='GET',
location_id='8baac422-4c6e-4de5-8532-db96d92acffa',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Timeline', response)
def restore_webhooks(self, trigger_types, project, provider_name, service_endpoint_id=None, repository=None):
"""RestoreWebhooks.
Recreates the webhooks for the specified triggers in the given source code repository.
:param [DefinitionTriggerType] trigger_types: The types of triggers to restore webhooks for.
:param str project: Project ID or project name
:param str provider_name: The name of the source provider.
:param str service_endpoint_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit.
:param str repository: If specified, the vendor-specific identifier or the name of the repository to get webhooks. Can only be omitted for providers that do not support multiple repositories.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if provider_name is not None:
route_values['providerName'] = self._serialize.url('provider_name', provider_name, 'str')
query_parameters = {}
if service_endpoint_id is not None:
query_parameters['serviceEndpointId'] = self._serialize.query('service_endpoint_id', service_endpoint_id, 'str')
if repository is not None:
query_parameters['repository'] = self._serialize.query('repository', repository, 'str')
content = self._serialize.body(trigger_types, '[DefinitionTriggerType]')
self._send(http_method='POST',
location_id='793bceb8-9736-4030-bd2f-fb3ce6d6b478',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
def list_webhooks(self, project, provider_name, service_endpoint_id=None, repository=None):
"""ListWebhooks.
Gets a list of webhooks installed in the given source code repository.
:param str project: Project ID or project name
:param str provider_name: The name of the source provider.
:param str service_endpoint_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit.
:param str repository: If specified, the vendor-specific identifier or the name of the repository to get webhooks. Can only be omitted for providers that do not support multiple repositories.
:rtype: [RepositoryWebhook]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if provider_name is not None:
route_values['providerName'] = self._serialize.url('provider_name', provider_name, 'str')
query_parameters = {}
if service_endpoint_id is not None:
query_parameters['serviceEndpointId'] = self._serialize.query('service_endpoint_id', service_endpoint_id, 'str')
if repository is not None:
query_parameters['repository'] = self._serialize.query('repository', repository, 'str')
response = self._send(http_method='GET',
location_id='8f20ff82-9498-4812-9f6e-9c01bdc50e99',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[RepositoryWebhook]', self._unwrap_collection(response))
def get_build_work_items_refs(self, project, build_id, top=None):
"""GetBuildWorkItemsRefs.
Gets the work items associated with a build. Only work items in the same project are returned.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int top: The maximum number of work items to return.
:rtype: [ResourceRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='5a21f5d2-5642-47e4-a0bd-1356e6731bee',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[ResourceRef]', self._unwrap_collection(response))
def get_build_work_items_refs_from_commits(self, commit_ids, project, build_id, top=None):
"""GetBuildWorkItemsRefsFromCommits.
Gets the work items associated with a build, filtered to specific commits.
:param [str] commit_ids: A comma-delimited list of commit IDs.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int top: The maximum number of work items to return, or the number of commits to consider if no commit IDs are specified.
:rtype: [ResourceRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
content = self._serialize.body(commit_ids, '[str]')
response = self._send(http_method='POST',
location_id='5a21f5d2-5642-47e4-a0bd-1356e6731bee',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('[ResourceRef]', self._unwrap_collection(response))
def get_work_items_between_builds(self, project, from_build_id, to_build_id, top=None):
"""GetWorkItemsBetweenBuilds.
Gets all the work items between two builds.
:param str project: Project ID or project name
:param int from_build_id: The ID of the first build.
:param int to_build_id: The ID of the last build.
:param int top: The maximum number of work items to return.
:rtype: [ResourceRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if from_build_id is not None:
query_parameters['fromBuildId'] = self._serialize.query('from_build_id', from_build_id, 'int')
if to_build_id is not None:
query_parameters['toBuildId'] = self._serialize.query('to_build_id', to_build_id, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='52ba8915-5518-42e3-a4bb-b0182d159e2d',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[ResourceRef]', self._unwrap_collection(response))
def get_definition_yaml(self, project, definition_id, revision=None, min_metrics_time=None, property_filters=None, include_latest_builds=None):
"""GetDefinitionYaml.
Converts a definition to YAML, optionally at a specific revision.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param int revision: The revision number to retrieve. If this is not specified, the latest version will be returned.
:param datetime min_metrics_time: If specified, indicates the date from which metrics should be included.
:param [str] property_filters: A comma-delimited list of properties to include in the results.
:param bool include_latest_builds:
:rtype: :class:`<YamlBuild> <azure.devops.v7_0.build.models.YamlBuild>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if revision is not None:
query_parameters['revision'] = self._serialize.query('revision', revision, 'int')
if min_metrics_time is not None:
query_parameters['minMetricsTime'] = self._serialize.query('min_metrics_time', min_metrics_time, 'iso-8601')
if property_filters is not None:
property_filters = ",".join(property_filters)
query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
if include_latest_builds is not None:
query_parameters['includeLatestBuilds'] = self._serialize.query('include_latest_builds', include_latest_builds, 'bool')
response = self._send(http_method='GET',
location_id='7c3df3a1-7e51-4150-8cf7-540347f8697f',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('YamlBuild', response)
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/build/build_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/build/build_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 57157
}
| 383 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class CustomerIntelligenceClient(Client):
"""CustomerIntelligence
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(CustomerIntelligenceClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = None
def publish_events(self, events):
"""PublishEvents.
[Preview API]
:param [CustomerIntelligenceEvent] events:
"""
content = self._serialize.body(events, '[CustomerIntelligenceEvent]')
self._send(http_method='POST',
location_id='b5cc35c2-ff2b-491d-a085-24b6e9f396fd',
version='7.0-preview.1',
content=content)
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/customer_intelligence/customer_intelligence_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/customer_intelligence/customer_intelligence_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 504
}
| 384 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ContributedFeature(Model):
"""
A feature that can be enabled or disabled
:param _links: Named links describing the feature
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.feature_management.models.ReferenceLinks>`
:param default_state: If true, the feature is enabled unless overridden at some scope
:type default_state: bool
:param default_value_rules: Rules for setting the default value if not specified by any setting/scope. Evaluated in order until a rule returns an Enabled or Disabled state (not Undefined)
:type default_value_rules: list of :class:`ContributedFeatureValueRule <azure.devops.v7_0.feature_management.models.ContributedFeatureValueRule>`
:param description: The description of the feature
:type description: str
:param feature_properties: Extra properties for the feature
:type feature_properties: dict
:param feature_state_changed_listeners: Handler for listening to setter calls on feature value. These listeners are only invoked after a successful set has occurred
:type feature_state_changed_listeners: list of :class:`ContributedFeatureListener <azure.devops.v7_0.feature_management.models.ContributedFeatureListener>`
:param id: The full contribution id of the feature
:type id: str
:param include_as_claim: If this is set to true, then the id for this feature will be added to the list of claims for the request.
:type include_as_claim: bool
:param name: The friendly name of the feature
:type name: str
:param order: Suggested order to display feature in.
:type order: int
:param override_rules: Rules for overriding a feature value. These rules are run before explicit user/host state values are checked. They are evaluated in order until a rule returns an Enabled or Disabled state (not Undefined)
:type override_rules: list of :class:`ContributedFeatureValueRule <azure.devops.v7_0.feature_management.models.ContributedFeatureValueRule>`
:param scopes: The scopes/levels at which settings can set the enabled/disabled state of this feature
:type scopes: list of :class:`ContributedFeatureSettingScope <azure.devops.v7_0.feature_management.models.ContributedFeatureSettingScope>`
:param service_instance_type: The service instance id of the service that owns this feature
:type service_instance_type: str
:param tags: Tags associated with the feature.
:type tags: list of str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'default_state': {'key': 'defaultState', 'type': 'bool'},
'default_value_rules': {'key': 'defaultValueRules', 'type': '[ContributedFeatureValueRule]'},
'description': {'key': 'description', 'type': 'str'},
'feature_properties': {'key': 'featureProperties', 'type': '{object}'},
'feature_state_changed_listeners': {'key': 'featureStateChangedListeners', 'type': '[ContributedFeatureListener]'},
'id': {'key': 'id', 'type': 'str'},
'include_as_claim': {'key': 'includeAsClaim', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'order': {'key': 'order', 'type': 'int'},
'override_rules': {'key': 'overrideRules', 'type': '[ContributedFeatureValueRule]'},
'scopes': {'key': 'scopes', 'type': '[ContributedFeatureSettingScope]'},
'service_instance_type': {'key': 'serviceInstanceType', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'}
}
def __init__(self, _links=None, default_state=None, default_value_rules=None, description=None, feature_properties=None, feature_state_changed_listeners=None, id=None, include_as_claim=None, name=None, order=None, override_rules=None, scopes=None, service_instance_type=None, tags=None):
super(ContributedFeature, self).__init__()
self._links = _links
self.default_state = default_state
self.default_value_rules = default_value_rules
self.description = description
self.feature_properties = feature_properties
self.feature_state_changed_listeners = feature_state_changed_listeners
self.id = id
self.include_as_claim = include_as_claim
self.name = name
self.order = order
self.override_rules = override_rules
self.scopes = scopes
self.service_instance_type = service_instance_type
self.tags = tags
class ContributedFeatureHandlerSettings(Model):
"""
:param name: Name of the handler to run
:type name: str
:param properties: Properties to feed to the handler
:type properties: dict
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'}
}
def __init__(self, name=None, properties=None):
super(ContributedFeatureHandlerSettings, self).__init__()
self.name = name
self.properties = properties
class ContributedFeatureListener(ContributedFeatureHandlerSettings):
"""
An identifier and properties used to pass into a handler for a listener or plugin
:param name: Name of the handler to run
:type name: str
:param properties: Properties to feed to the handler
:type properties: dict
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
}
def __init__(self, name=None, properties=None):
super(ContributedFeatureListener, self).__init__(name=name, properties=properties)
class ContributedFeatureSettingScope(Model):
"""
The scope to which a feature setting applies
:param setting_scope: The name of the settings scope to use when reading/writing the setting
:type setting_scope: str
:param user_scoped: Whether this is a user-scope or this is a host-wide (all users) setting
:type user_scoped: bool
"""
_attribute_map = {
'setting_scope': {'key': 'settingScope', 'type': 'str'},
'user_scoped': {'key': 'userScoped', 'type': 'bool'}
}
def __init__(self, setting_scope=None, user_scoped=None):
super(ContributedFeatureSettingScope, self).__init__()
self.setting_scope = setting_scope
self.user_scoped = user_scoped
class ContributedFeatureState(Model):
"""
A contributed feature/state pair
:param feature_id: The full contribution id of the feature
:type feature_id: str
:param overridden: True if the effective state was set by an override rule (indicating that the state cannot be managed by the end user)
:type overridden: bool
:param reason: Reason that the state was set (by a plugin/rule).
:type reason: str
:param scope: The scope at which this state applies
:type scope: :class:`ContributedFeatureSettingScope <azure.devops.v7_0.feature_management.models.ContributedFeatureSettingScope>`
:param state: The current state of this feature
:type state: object
"""
_attribute_map = {
'feature_id': {'key': 'featureId', 'type': 'str'},
'overridden': {'key': 'overridden', 'type': 'bool'},
'reason': {'key': 'reason', 'type': 'str'},
'scope': {'key': 'scope', 'type': 'ContributedFeatureSettingScope'},
'state': {'key': 'state', 'type': 'object'}
}
def __init__(self, feature_id=None, overridden=None, reason=None, scope=None, state=None):
super(ContributedFeatureState, self).__init__()
self.feature_id = feature_id
self.overridden = overridden
self.reason = reason
self.scope = scope
self.state = state
class ContributedFeatureStateQuery(Model):
"""
A query for the effective contributed feature states for a list of feature ids
:param feature_ids: The list of feature ids to query
:type feature_ids: list of str
:param feature_states: The query result containing the current feature states for each of the queried feature ids
:type feature_states: dict
:param scope_values: A dictionary of scope values (project name, etc.) to use in the query (if querying across scopes)
:type scope_values: dict
"""
_attribute_map = {
'feature_ids': {'key': 'featureIds', 'type': '[str]'},
'feature_states': {'key': 'featureStates', 'type': '{ContributedFeatureState}'},
'scope_values': {'key': 'scopeValues', 'type': '{str}'}
}
def __init__(self, feature_ids=None, feature_states=None, scope_values=None):
super(ContributedFeatureStateQuery, self).__init__()
self.feature_ids = feature_ids
self.feature_states = feature_states
self.scope_values = scope_values
class ContributedFeatureValueRule(ContributedFeatureHandlerSettings):
"""
A rule for dynamically getting the enabled/disabled state of a feature
:param name: Name of the handler to run
:type name: str
:param properties: Properties to feed to the handler
:type properties: dict
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
}
def __init__(self, name=None, properties=None):
super(ContributedFeatureValueRule, self).__init__(name=name, properties=properties)
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
__all__ = [
'ContributedFeature',
'ContributedFeatureHandlerSettings',
'ContributedFeatureListener',
'ContributedFeatureSettingScope',
'ContributedFeatureState',
'ContributedFeatureStateQuery',
'ContributedFeatureValueRule',
'ReferenceLinks',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/feature_management/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/feature_management/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 3539
}
| 385 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class Avatar(Model):
"""
:param is_auto_generated:
:type is_auto_generated: bool
:param size:
:type size: object
:param time_stamp:
:type time_stamp: datetime
:param value:
:type value: str
"""
_attribute_map = {
'is_auto_generated': {'key': 'isAutoGenerated', 'type': 'bool'},
'size': {'key': 'size', 'type': 'object'},
'time_stamp': {'key': 'timeStamp', 'type': 'iso-8601'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, is_auto_generated=None, size=None, time_stamp=None, value=None):
super(Avatar, self).__init__()
self.is_auto_generated = is_auto_generated
self.size = size
self.time_stamp = time_stamp
self.value = value
class GraphCachePolicies(Model):
"""
:param cache_size: Size of the cache
:type cache_size: int
"""
_attribute_map = {
'cache_size': {'key': 'cacheSize', 'type': 'int'}
}
def __init__(self, cache_size=None):
super(GraphCachePolicies, self).__init__()
self.cache_size = cache_size
class GraphDescriptorResult(Model):
"""
Subject descriptor of a Graph entity
:param _links: This field contains zero or more interesting links about the graph descriptor. These links may be invoked to obtain additional relationships or more detailed information about this graph descriptor.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param value:
:type value: :class:`str <azure.devops.v7_0.graph.models.str>`
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, _links=None, value=None):
super(GraphDescriptorResult, self).__init__()
self._links = _links
self.value = value
class GraphFederatedProviderData(Model):
"""
Represents a set of data used to communicate with a federated provider on behalf of a particular user.
:param access_token: The access token that can be used to communicated with the federated provider on behalf on the target identity, if we were able to successfully acquire one, otherwise <code>null</code>, if we were not.
:type access_token: str
:param provider_name: The name of the federated provider, e.g. "github.com".
:type provider_name: str
:param subject_descriptor: The descriptor of the graph subject to which this federated provider data corresponds.
:type subject_descriptor: str
:param version: The version number of this federated provider data, which corresponds to when it was last updated. Can be used to prevent returning stale provider data from the cache when the caller is aware of a newer version, such as to prevent local cache poisoning from a remote cache or store. This is the app layer equivalent of the data layer sequence ID.
:type version: long
"""
_attribute_map = {
'access_token': {'key': 'accessToken', 'type': 'str'},
'provider_name': {'key': 'providerName', 'type': 'str'},
'subject_descriptor': {'key': 'subjectDescriptor', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'}
}
def __init__(self, access_token=None, provider_name=None, subject_descriptor=None, version=None):
super(GraphFederatedProviderData, self).__init__()
self.access_token = access_token
self.provider_name = provider_name
self.subject_descriptor = subject_descriptor
self.version = version
class GraphGroupCreationContext(Model):
"""
Do not attempt to use this type to create a new group. This type does not contain sufficient fields to create a new group.
:param storage_key: Optional: If provided, we will use this identifier for the storage key of the created group
:type storage_key: str
"""
_attribute_map = {
'storage_key': {'key': 'storageKey', 'type': 'str'}
}
def __init__(self, storage_key=None):
super(GraphGroupCreationContext, self).__init__()
self.storage_key = storage_key
class GraphMembership(Model):
"""
Relationship between a container and a member
:param _links: This field contains zero or more interesting links about the graph membership. These links may be invoked to obtain additional relationships or more detailed information about this graph membership.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param container_descriptor:
:type container_descriptor: str
:param member_descriptor:
:type member_descriptor: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'container_descriptor': {'key': 'containerDescriptor', 'type': 'str'},
'member_descriptor': {'key': 'memberDescriptor', 'type': 'str'}
}
def __init__(self, _links=None, container_descriptor=None, member_descriptor=None):
super(GraphMembership, self).__init__()
self._links = _links
self.container_descriptor = container_descriptor
self.member_descriptor = member_descriptor
class GraphMembershipState(Model):
"""
Status of a Graph membership (active/inactive)
:param _links: This field contains zero or more interesting links about the graph membership state. These links may be invoked to obtain additional relationships or more detailed information about this graph membership state.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param active: When true, the membership is active
:type active: bool
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'active': {'key': 'active', 'type': 'bool'}
}
def __init__(self, _links=None, active=None):
super(GraphMembershipState, self).__init__()
self._links = _links
self.active = active
class GraphMembershipTraversal(Model):
"""
:param incompleteness_reason: Reason why the subject could not be traversed completely
:type incompleteness_reason: str
:param is_complete: When true, the subject is traversed completely
:type is_complete: bool
:param subject_descriptor: The traversed subject descriptor
:type subject_descriptor: :class:`str <azure.devops.v7_0.graph.models.str>`
:param traversed_subject_ids: Subject descriptor ids of the traversed members
:type traversed_subject_ids: list of str
:param traversed_subjects: Subject descriptors of the traversed members
:type traversed_subjects: list of :class:`str <azure.devops.v7_0.graph.models.str>`
"""
_attribute_map = {
'incompleteness_reason': {'key': 'incompletenessReason', 'type': 'str'},
'is_complete': {'key': 'isComplete', 'type': 'bool'},
'subject_descriptor': {'key': 'subjectDescriptor', 'type': 'str'},
'traversed_subject_ids': {'key': 'traversedSubjectIds', 'type': '[str]'},
'traversed_subjects': {'key': 'traversedSubjects', 'type': '[str]'}
}
def __init__(self, incompleteness_reason=None, is_complete=None, subject_descriptor=None, traversed_subject_ids=None, traversed_subjects=None):
super(GraphMembershipTraversal, self).__init__()
self.incompleteness_reason = incompleteness_reason
self.is_complete = is_complete
self.subject_descriptor = subject_descriptor
self.traversed_subject_ids = traversed_subject_ids
self.traversed_subjects = traversed_subjects
class GraphProviderInfo(Model):
"""
Who is the provider for this user and what is the identifier and domain that is used to uniquely identify the user.
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param domain: This represents the name of the container of origin for a graph member. (For MSA this is "Windows Live ID", for AAD the tenantID of the directory.)
:type domain: str
:param origin: The type of source provider for the origin identifier (ex: "aad", "msa")
:type origin: str
:param origin_id: The unique identifier from the system of origin. (For MSA this is the PUID in hex notation, for AAD this is the object id.)
:type origin_id: str
"""
_attribute_map = {
'descriptor': {'key': 'descriptor', 'type': 'str'},
'domain': {'key': 'domain', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'origin_id': {'key': 'originId', 'type': 'str'}
}
def __init__(self, descriptor=None, domain=None, origin=None, origin_id=None):
super(GraphProviderInfo, self).__init__()
self.descriptor = descriptor
self.domain = domain
self.origin = origin
self.origin_id = origin_id
class GraphScopeCreationContext(Model):
"""
This type is the subset of fields that can be provided by the user to create a Vsts scope. Scope creation is currently limited to internal back-compat scenarios. End users that attempt to create a scope with this API will fail.
:param admin_group_description: Set this field to override the default description of this scope's admin group.
:type admin_group_description: str
:param admin_group_name: All scopes have an Administrator Group that controls access to the contents of the scope. Set this field to use a non-default group name for that administrators group.
:type admin_group_name: str
:param creator_id: Set this optional field if this scope is created on behalf of a user other than the user making the request. This should be the Id of the user that is not the requester.
:type creator_id: str
:param name: The scope must be provided with a unique name within the parent scope. This means the created scope can have a parent or child with the same name, but no siblings with the same name.
:type name: str
:param scope_type: The type of scope being created.
:type scope_type: object
:param storage_key: An optional ID that uniquely represents the scope within it's parent scope. If this parameter is not provided, Vsts will generate on automatically.
:type storage_key: str
"""
_attribute_map = {
'admin_group_description': {'key': 'adminGroupDescription', 'type': 'str'},
'admin_group_name': {'key': 'adminGroupName', 'type': 'str'},
'creator_id': {'key': 'creatorId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'scope_type': {'key': 'scopeType', 'type': 'object'},
'storage_key': {'key': 'storageKey', 'type': 'str'}
}
def __init__(self, admin_group_description=None, admin_group_name=None, creator_id=None, name=None, scope_type=None, storage_key=None):
super(GraphScopeCreationContext, self).__init__()
self.admin_group_description = admin_group_description
self.admin_group_name = admin_group_name
self.creator_id = creator_id
self.name = name
self.scope_type = scope_type
self.storage_key = storage_key
class GraphStorageKeyResult(Model):
"""
Storage key of a Graph entity
:param _links: This field contains zero or more interesting links about the graph storage key. These links may be invoked to obtain additional relationships or more detailed information about this graph storage key.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param value:
:type value: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, _links=None, value=None):
super(GraphStorageKeyResult, self).__init__()
self._links = _links
self.value = value
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class GraphSubjectLookup(Model):
"""
Batching of subjects to lookup using the Graph API
:param lookup_keys:
:type lookup_keys: list of :class:`GraphSubjectLookupKey <azure.devops.v7_0.graph.models.GraphSubjectLookupKey>`
"""
_attribute_map = {
'lookup_keys': {'key': 'lookupKeys', 'type': '[GraphSubjectLookupKey]'}
}
def __init__(self, lookup_keys=None):
super(GraphSubjectLookup, self).__init__()
self.lookup_keys = lookup_keys
class GraphSubjectLookupKey(Model):
"""
:param descriptor:
:type descriptor: :class:`str <azure.devops.v7_0.graph.models.str>`
"""
_attribute_map = {
'descriptor': {'key': 'descriptor', 'type': 'str'}
}
def __init__(self, descriptor=None):
super(GraphSubjectLookupKey, self).__init__()
self.descriptor = descriptor
class GraphSubjectQuery(Model):
"""
Subject to search using the Graph API
:param query: Search term to search for Azure Devops users or/and groups
:type query: str
:param scope_descriptor: Optional parameter. Specify a non-default scope (collection, project) to search for users or groups within the scope.
:type scope_descriptor: :class:`str <azure.devops.v7_0.graph.models.str>`
:param subject_kind: "User" or "Group" can be specified, both or either
:type subject_kind: list of str
"""
_attribute_map = {
'query': {'key': 'query', 'type': 'str'},
'scope_descriptor': {'key': 'scopeDescriptor', 'type': 'str'},
'subject_kind': {'key': 'subjectKind', 'type': '[str]'}
}
def __init__(self, query=None, scope_descriptor=None, subject_kind=None):
super(GraphSubjectQuery, self).__init__()
self.query = query
self.scope_descriptor = scope_descriptor
self.subject_kind = subject_kind
class GraphUserCreationContext(Model):
"""
Do not attempt to use this type to create a new user. Use one of the subclasses instead. This type does not contain sufficient fields to create a new user.
:param storage_key: Optional: If provided, we will use this identifier for the storage key of the created user
:type storage_key: str
"""
_attribute_map = {
'storage_key': {'key': 'storageKey', 'type': 'str'}
}
def __init__(self, storage_key=None):
super(GraphUserCreationContext, self).__init__()
self.storage_key = storage_key
class GraphUserUpdateContext(Model):
"""
Do not attempt to use this type to update user. Use one of the subclasses instead. This type does not contain sufficient fields to create a new user.
:param storage_key: Storage key should not be specified in case of updating user
:type storage_key: str
"""
_attribute_map = {
'storage_key': {'key': 'storageKey', 'type': 'str'}
}
def __init__(self, storage_key=None):
super(GraphUserUpdateContext, self).__init__()
self.storage_key = storage_key
class JsonPatchOperation(Model):
"""
The JSON model for a JSON Patch operation
:param from_: The path to copy from for the Move/Copy operation.
:type from_: str
:param op: The patch operation
:type op: object
:param path: The path for the operation. In the case of an array, a zero based index can be used to specify the position in the array (e.g. /biscuits/0/name). The "-" character can be used instead of an index to insert at the end of the array (e.g. /biscuits/-).
:type path: str
:param value: The value for the operation. This is either a primitive or a JToken.
:type value: object
"""
_attribute_map = {
'from_': {'key': 'from', 'type': 'str'},
'op': {'key': 'op', 'type': 'object'},
'path': {'key': 'path', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'}
}
def __init__(self, from_=None, op=None, path=None, value=None):
super(JsonPatchOperation, self).__init__()
self.from_ = from_
self.op = op
self.path = path
self.value = value
class PagedGraphGroups(Model):
"""
:param continuation_token: This will be non-null if there is another page of data. There will never be more than one continuation token returned by a request.
:type continuation_token: list of str
:param graph_groups: The enumerable list of groups found within a page.
:type graph_groups: list of :class:`GraphGroup <azure.devops.v7_0.graph.models.GraphGroup>`
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': '[str]'},
'graph_groups': {'key': 'graphGroups', 'type': '[GraphGroup]'}
}
def __init__(self, continuation_token=None, graph_groups=None):
super(PagedGraphGroups, self).__init__()
self.continuation_token = continuation_token
self.graph_groups = graph_groups
class PagedGraphUsers(Model):
"""
:param continuation_token: This will be non-null if there is another page of data. There will never be more than one continuation token returned by a request.
:type continuation_token: list of str
:param graph_users: The enumerable set of users found within a page.
:type graph_users: list of :class:`GraphUser <azure.devops.v7_0.graph.models.GraphUser>`
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': '[str]'},
'graph_users': {'key': 'graphUsers', 'type': '[GraphUser]'}
}
def __init__(self, continuation_token=None, graph_users=None):
super(PagedGraphUsers, self).__init__()
self.continuation_token = continuation_token
self.graph_users = graph_users
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class GraphSubject(GraphSubjectBase):
"""
Top-level graph entity
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param legacy_descriptor: [Internal Use Only] The legacy descriptor is here in case you need to access old version IMS using identity descriptor.
:type legacy_descriptor: str
:param origin: The type of source provider for the origin identifier (ex:AD, AAD, MSA)
:type origin: str
:param origin_id: The unique identifier from the system of origin. Typically a sid, object id or Guid. Linking and unlinking operations can cause this value to change for a user because the user is not backed by a different provider and has a different unique id in the new provider.
:type origin_id: str
:param subject_kind: This field identifies the type of the graph subject (ex: Group, Scope, User).
:type subject_kind: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'legacy_descriptor': {'key': 'legacyDescriptor', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'origin_id': {'key': 'originId', 'type': 'str'},
'subject_kind': {'key': 'subjectKind', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, legacy_descriptor=None, origin=None, origin_id=None, subject_kind=None):
super(GraphSubject, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.legacy_descriptor = legacy_descriptor
self.origin = origin
self.origin_id = origin_id
self.subject_kind = subject_kind
class GraphMember(GraphSubject):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param legacy_descriptor: [Internal Use Only] The legacy descriptor is here in case you need to access old version IMS using identity descriptor.
:type legacy_descriptor: str
:param origin: The type of source provider for the origin identifier (ex:AD, AAD, MSA)
:type origin: str
:param origin_id: The unique identifier from the system of origin. Typically a sid, object id or Guid. Linking and unlinking operations can cause this value to change for a user because the user is not backed by a different provider and has a different unique id in the new provider.
:type origin_id: str
:param subject_kind: This field identifies the type of the graph subject (ex: Group, Scope, User).
:type subject_kind: str
:param domain: This represents the name of the container of origin for a graph member. (For MSA this is "Windows Live ID", for AD the name of the domain, for AAD the tenantID of the directory, for VSTS groups the ScopeId, etc)
:type domain: str
:param mail_address: The email address of record for a given graph member. This may be different than the principal name.
:type mail_address: str
:param principal_name: This is the PrincipalName of this graph member from the source provider. The source provider may change this field over time and it is not guaranteed to be immutable for the life of the graph member by VSTS.
:type principal_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'legacy_descriptor': {'key': 'legacyDescriptor', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'origin_id': {'key': 'originId', 'type': 'str'},
'subject_kind': {'key': 'subjectKind', 'type': 'str'},
'domain': {'key': 'domain', 'type': 'str'},
'mail_address': {'key': 'mailAddress', 'type': 'str'},
'principal_name': {'key': 'principalName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, legacy_descriptor=None, origin=None, origin_id=None, subject_kind=None, domain=None, mail_address=None, principal_name=None):
super(GraphMember, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url, legacy_descriptor=legacy_descriptor, origin=origin, origin_id=origin_id, subject_kind=subject_kind)
self.domain = domain
self.mail_address = mail_address
self.principal_name = principal_name
class GraphScope(GraphSubject):
"""
Container where a graph entity is defined (organization, project, team)
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param legacy_descriptor: [Internal Use Only] The legacy descriptor is here in case you need to access old version IMS using identity descriptor.
:type legacy_descriptor: str
:param origin: The type of source provider for the origin identifier (ex:AD, AAD, MSA)
:type origin: str
:param origin_id: The unique identifier from the system of origin. Typically a sid, object id or Guid. Linking and unlinking operations can cause this value to change for a user because the user is not backed by a different provider and has a different unique id in the new provider.
:type origin_id: str
:param subject_kind: This field identifies the type of the graph subject (ex: Group, Scope, User).
:type subject_kind: str
:param administrator_descriptor: The subject descriptor that references the administrators group for this scope. Only members of this group can change the contents of this scope or assign other users permissions to access this scope.
:type administrator_descriptor: str
:param is_global: When true, this scope is also a securing host for one or more scopes.
:type is_global: bool
:param parent_descriptor: The subject descriptor for the closest account or organization in the ancestor tree of this scope.
:type parent_descriptor: str
:param scope_type: The type of this scope. Typically ServiceHost or TeamProject.
:type scope_type: object
:param securing_host_descriptor: The subject descriptor for the containing organization in the ancestor tree of this scope.
:type securing_host_descriptor: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'legacy_descriptor': {'key': 'legacyDescriptor', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'origin_id': {'key': 'originId', 'type': 'str'},
'subject_kind': {'key': 'subjectKind', 'type': 'str'},
'administrator_descriptor': {'key': 'administratorDescriptor', 'type': 'str'},
'is_global': {'key': 'isGlobal', 'type': 'bool'},
'parent_descriptor': {'key': 'parentDescriptor', 'type': 'str'},
'scope_type': {'key': 'scopeType', 'type': 'object'},
'securing_host_descriptor': {'key': 'securingHostDescriptor', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, legacy_descriptor=None, origin=None, origin_id=None, subject_kind=None, administrator_descriptor=None, is_global=None, parent_descriptor=None, scope_type=None, securing_host_descriptor=None):
super(GraphScope, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url, legacy_descriptor=legacy_descriptor, origin=origin, origin_id=origin_id, subject_kind=subject_kind)
self.administrator_descriptor = administrator_descriptor
self.is_global = is_global
self.parent_descriptor = parent_descriptor
self.scope_type = scope_type
self.securing_host_descriptor = securing_host_descriptor
class GraphUser(GraphMember):
"""
Graph user entity
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param legacy_descriptor: [Internal Use Only] The legacy descriptor is here in case you need to access old version IMS using identity descriptor.
:type legacy_descriptor: str
:param origin: The type of source provider for the origin identifier (ex:AD, AAD, MSA)
:type origin: str
:param origin_id: The unique identifier from the system of origin. Typically a sid, object id or Guid. Linking and unlinking operations can cause this value to change for a user because the user is not backed by a different provider and has a different unique id in the new provider.
:type origin_id: str
:param subject_kind: This field identifies the type of the graph subject (ex: Group, Scope, User).
:type subject_kind: str
:param domain: This represents the name of the container of origin for a graph member. (For MSA this is "Windows Live ID", for AD the name of the domain, for AAD the tenantID of the directory, for VSTS groups the ScopeId, etc)
:type domain: str
:param mail_address: The email address of record for a given graph member. This may be different than the principal name.
:type mail_address: str
:param principal_name: This is the PrincipalName of this graph member from the source provider. The source provider may change this field over time and it is not guaranteed to be immutable for the life of the graph member by VSTS.
:type principal_name: str
:param directory_alias: The short, generally unique name for the user in the backing directory. For AAD users, this corresponds to the mail nickname, which is often but not necessarily similar to the part of the user's mail address before the @ sign. For GitHub users, this corresponds to the GitHub user handle.
:type directory_alias: str
:param is_deleted_in_origin: When true, the group has been deleted in the identity provider
:type is_deleted_in_origin: bool
:param metadata_update_date:
:type metadata_update_date: datetime
:param meta_type: The meta type of the user in the origin, such as "member", "guest", etc. See UserMetaType for the set of possible values.
:type meta_type: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'legacy_descriptor': {'key': 'legacyDescriptor', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'origin_id': {'key': 'originId', 'type': 'str'},
'subject_kind': {'key': 'subjectKind', 'type': 'str'},
'domain': {'key': 'domain', 'type': 'str'},
'mail_address': {'key': 'mailAddress', 'type': 'str'},
'principal_name': {'key': 'principalName', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'metadata_update_date': {'key': 'metadataUpdateDate', 'type': 'iso-8601'},
'meta_type': {'key': 'metaType', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, legacy_descriptor=None, origin=None, origin_id=None, subject_kind=None, domain=None, mail_address=None, principal_name=None, directory_alias=None, is_deleted_in_origin=None, metadata_update_date=None, meta_type=None):
super(GraphUser, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url, legacy_descriptor=legacy_descriptor, origin=origin, origin_id=origin_id, subject_kind=subject_kind, domain=domain, mail_address=mail_address, principal_name=principal_name)
self.directory_alias = directory_alias
self.is_deleted_in_origin = is_deleted_in_origin
self.metadata_update_date = metadata_update_date
self.meta_type = meta_type
class GraphGroup(GraphMember):
"""
Graph group entity
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.graph.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param legacy_descriptor: [Internal Use Only] The legacy descriptor is here in case you need to access old version IMS using identity descriptor.
:type legacy_descriptor: str
:param origin: The type of source provider for the origin identifier (ex:AD, AAD, MSA)
:type origin: str
:param origin_id: The unique identifier from the system of origin. Typically a sid, object id or Guid. Linking and unlinking operations can cause this value to change for a user because the user is not backed by a different provider and has a different unique id in the new provider.
:type origin_id: str
:param subject_kind: This field identifies the type of the graph subject (ex: Group, Scope, User).
:type subject_kind: str
:param domain: This represents the name of the container of origin for a graph member. (For MSA this is "Windows Live ID", for AD the name of the domain, for AAD the tenantID of the directory, for VSTS groups the ScopeId, etc)
:type domain: str
:param mail_address: The email address of record for a given graph member. This may be different than the principal name.
:type mail_address: str
:param principal_name: This is the PrincipalName of this graph member from the source provider. The source provider may change this field over time and it is not guaranteed to be immutable for the life of the graph member by VSTS.
:type principal_name: str
:param description: A short phrase to help human readers disambiguate groups with similar names
:type description: str
:param is_cross_project:
:type is_cross_project: bool
:param is_deleted:
:type is_deleted: bool
:param is_global_scope:
:type is_global_scope: bool
:param is_restricted_visible:
:type is_restricted_visible: bool
:param local_scope_id:
:type local_scope_id: str
:param scope_id:
:type scope_id: str
:param scope_name:
:type scope_name: str
:param scope_type:
:type scope_type: str
:param securing_host_id:
:type securing_host_id: str
:param special_type:
:type special_type: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'legacy_descriptor': {'key': 'legacyDescriptor', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'origin_id': {'key': 'originId', 'type': 'str'},
'subject_kind': {'key': 'subjectKind', 'type': 'str'},
'domain': {'key': 'domain', 'type': 'str'},
'mail_address': {'key': 'mailAddress', 'type': 'str'},
'principal_name': {'key': 'principalName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'is_cross_project': {'key': 'isCrossProject', 'type': 'bool'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'is_global_scope': {'key': 'isGlobalScope', 'type': 'bool'},
'is_restricted_visible': {'key': 'isRestrictedVisible', 'type': 'bool'},
'local_scope_id': {'key': 'localScopeId', 'type': 'str'},
'scope_id': {'key': 'scopeId', 'type': 'str'},
'scope_name': {'key': 'scopeName', 'type': 'str'},
'scope_type': {'key': 'scopeType', 'type': 'str'},
'securing_host_id': {'key': 'securingHostId', 'type': 'str'},
'special_type': {'key': 'specialType', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, legacy_descriptor=None, origin=None, origin_id=None, subject_kind=None, domain=None, mail_address=None, principal_name=None, description=None, is_cross_project=None, is_deleted=None, is_global_scope=None, is_restricted_visible=None, local_scope_id=None, scope_id=None, scope_name=None, scope_type=None, securing_host_id=None, special_type=None):
super(GraphGroup, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url, legacy_descriptor=legacy_descriptor, origin=origin, origin_id=origin_id, subject_kind=subject_kind, domain=domain, mail_address=mail_address, principal_name=principal_name)
self.description = description
self.is_cross_project = is_cross_project
self.is_deleted = is_deleted
self.is_global_scope = is_global_scope
self.is_restricted_visible = is_restricted_visible
self.local_scope_id = local_scope_id
self.scope_id = scope_id
self.scope_name = scope_name
self.scope_type = scope_type
self.securing_host_id = securing_host_id
self.special_type = special_type
__all__ = [
'Avatar',
'GraphCachePolicies',
'GraphDescriptorResult',
'GraphFederatedProviderData',
'GraphGroupCreationContext',
'GraphMembership',
'GraphMembershipState',
'GraphMembershipTraversal',
'GraphProviderInfo',
'GraphScopeCreationContext',
'GraphStorageKeyResult',
'GraphSubjectBase',
'GraphSubjectLookup',
'GraphSubjectLookupKey',
'GraphSubjectQuery',
'GraphUserCreationContext',
'GraphUserUpdateContext',
'JsonPatchOperation',
'PagedGraphGroups',
'PagedGraphUsers',
'ReferenceLinks',
'GraphSubject',
'GraphMember',
'GraphScope',
'GraphUser',
'GraphGroup',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/graph/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/graph/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 14047
}
| 386 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ApprovalConfig(Model):
"""
Config to create a new approval.
:param approvers: Ordered list of approvers.
:type approvers: list of :class:`IdentityRef <azure.devops.v7_0.microsoft._azure._pipelines._approval._web_api.models.IdentityRef>`
:param blocked_approvers: Identities which are not allowed to approve.
:type blocked_approvers: list of :class:`IdentityRef <azure.devops.v7_0.microsoft._azure._pipelines._approval._web_api.models.IdentityRef>`
:param execution_order: Order in which approvers will be actionable.
:type execution_order: object
:param instructions: Instructions for the approver.
:type instructions: str
:param min_required_approvers: Minimum number of approvers that should approve for the entire approval to be considered approved. Defaults to all.
:type min_required_approvers: int
"""
_attribute_map = {
'approvers': {'key': 'approvers', 'type': '[IdentityRef]'},
'blocked_approvers': {'key': 'blockedApprovers', 'type': '[IdentityRef]'},
'execution_order': {'key': 'executionOrder', 'type': 'object'},
'instructions': {'key': 'instructions', 'type': 'str'},
'min_required_approvers': {'key': 'minRequiredApprovers', 'type': 'int'}
}
def __init__(self, approvers=None, blocked_approvers=None, execution_order=None, instructions=None, min_required_approvers=None):
super(ApprovalConfig, self).__init__()
self.approvers = approvers
self.blocked_approvers = blocked_approvers
self.execution_order = execution_order
self.instructions = instructions
self.min_required_approvers = min_required_approvers
class ApprovalConfigSettings(ApprovalConfig):
"""
Config to create a new approval.
:param approvers: Ordered list of approvers.
:type approvers: list of :class:`IdentityRef <azure.devops.v7_0.microsoft._azure._pipelines._approval._web_api.models.IdentityRef>`
:param blocked_approvers: Identities which are not allowed to approve.
:type blocked_approvers: list of :class:`IdentityRef <azure.devops.v7_0.microsoft._azure._pipelines._approval._web_api.models.IdentityRef>`
:param execution_order: Order in which approvers will be actionable.
:type execution_order: object
:param instructions: Instructions for the approver.
:type instructions: str
:param min_required_approvers: Minimum number of approvers that should approve for the entire approval to be considered approved. Defaults to all.
:type min_required_approvers: int
:param requester_cannot_be_approver: Determines whether check requester can approve the check.
:type requester_cannot_be_approver: bool
"""
_attribute_map = {
'approvers': {'key': 'approvers', 'type': '[IdentityRef]'},
'blocked_approvers': {'key': 'blockedApprovers', 'type': '[IdentityRef]'},
'execution_order': {'key': 'executionOrder', 'type': 'object'},
'instructions': {'key': 'instructions', 'type': 'str'},
'min_required_approvers': {'key': 'minRequiredApprovers', 'type': 'int'},
'requester_cannot_be_approver': {'key': 'requesterCannotBeApprover', 'type': 'bool'}
}
def __init__(self, approvers=None, blocked_approvers=None, execution_order=None, instructions=None, min_required_approvers=None, requester_cannot_be_approver=None):
super(ApprovalConfigSettings, self).__init__(approvers=approvers, blocked_approvers=blocked_approvers, execution_order=execution_order, instructions=instructions, min_required_approvers=min_required_approvers)
self.requester_cannot_be_approver = requester_cannot_be_approver
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class CheckConfigurationRef(Model):
"""
:param id: Check configuration id.
:type id: int
:param resource: Resource on which check get configured.
:type resource: :class:`Resource <azure.devops.v7_0.pipelines_checks.models.Resource>`
:param type: Check configuration type
:type type: :class:`CheckType <azure.devops.v7_0.pipelines_checks.models.CheckType>`
:param url: The URL from which one can fetch the configured check.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'resource': {'key': 'resource', 'type': 'Resource'},
'type': {'key': 'type', 'type': 'CheckType'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, resource=None, type=None, url=None):
super(CheckConfigurationRef, self).__init__()
self.id = id
self.resource = resource
self.type = type
self.url = url
class CheckRunResult(Model):
"""
:param result_message:
:type result_message: str
:param status:
:type status: object
"""
_attribute_map = {
'result_message': {'key': 'resultMessage', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'}
}
def __init__(self, result_message=None, status=None):
super(CheckRunResult, self).__init__()
self.result_message = result_message
self.status = status
class CheckSuiteRef(Model):
"""
:param context: Evaluation context for the check suite request
:type context: :class:`object <azure.devops.v7_0.pipelines_checks.models.object>`
:param id: Unique suite id generated by the pipeline orchestrator for the pipeline check runs request on the list of resources Pipeline orchestrator will used this identifier to map the check requests on a stage
:type id: str
"""
_attribute_map = {
'context': {'key': 'context', 'type': 'object'},
'id': {'key': 'id', 'type': 'str'}
}
def __init__(self, context=None, id=None):
super(CheckSuiteRef, self).__init__()
self.context = context
self.id = id
class CheckSuiteRequest(Model):
"""
:param context:
:type context: :class:`object <azure.devops.v7_0.pipelines_checks.models.object>`
:param id:
:type id: str
:param resources:
:type resources: list of :class:`Resource <azure.devops.v7_0.pipelines_checks.models.Resource>`
"""
_attribute_map = {
'context': {'key': 'context', 'type': 'object'},
'id': {'key': 'id', 'type': 'str'},
'resources': {'key': 'resources', 'type': '[Resource]'}
}
def __init__(self, context=None, id=None, resources=None):
super(CheckSuiteRequest, self).__init__()
self.context = context
self.id = id
self.resources = resources
class CheckType(Model):
"""
:param id: Gets or sets check type id.
:type id: str
:param name: Name of the check type.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(CheckType, self).__init__()
self.id = id
self.name = name
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType)
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType)
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class Resource(Model):
"""
:param id: Id of the resource.
:type id: str
:param name: Name of the resource.
:type name: str
:param type: Type of the resource.
:type type: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, id=None, name=None, type=None):
super(Resource, self).__init__()
self.id = id
self.name = name
self.type = type
class TaskCheckConfig(Model):
"""
Config to facilitate task check
:param definition_ref:
:type definition_ref: :class:`TaskCheckDefinitionReference <azure.devops.v7_0.microsoft._azure._pipelines._task_check._web_api.models.TaskCheckDefinitionReference>`
:param display_name:
:type display_name: str
:param inputs:
:type inputs: dict
:param linked_variable_group:
:type linked_variable_group: str
:param retry_interval:
:type retry_interval: int
"""
_attribute_map = {
'definition_ref': {'key': 'definitionRef', 'type': 'TaskCheckDefinitionReference'},
'display_name': {'key': 'displayName', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{str}'},
'linked_variable_group': {'key': 'linkedVariableGroup', 'type': 'str'},
'retry_interval': {'key': 'retryInterval', 'type': 'int'}
}
def __init__(self, definition_ref=None, display_name=None, inputs=None, linked_variable_group=None, retry_interval=None):
super(TaskCheckConfig, self).__init__()
self.definition_ref = definition_ref
self.display_name = display_name
self.inputs = inputs
self.linked_variable_group = linked_variable_group
self.retry_interval = retry_interval
class TaskCheckDefinitionReference(Model):
"""
:param id:
:type id: str
:param name:
:type name: str
:param version:
:type version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, id=None, name=None, version=None):
super(TaskCheckDefinitionReference, self).__init__()
self.id = id
self.name = name
self.version = version
class CheckConfiguration(CheckConfigurationRef):
"""
:param id: Check configuration id.
:type id: int
:param resource: Resource on which check get configured.
:type resource: :class:`Resource <azure.devops.v7_0.pipelines_checks.models.Resource>`
:param type: Check configuration type
:type type: :class:`CheckType <azure.devops.v7_0.pipelines_checks.models.CheckType>`
:param url: The URL from which one can fetch the configured check.
:type url: str
:param _links: Reference links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.pipelines_checks.models.ReferenceLinks>`
:param created_by: Identity of person who configured check.
:type created_by: :class:`IdentityRef <azure.devops.v7_0.pipelines_checks.models.IdentityRef>`
:param created_on: Time when check got configured.
:type created_on: datetime
:param modified_by: Identity of person who modified the configured check.
:type modified_by: :class:`IdentityRef <azure.devops.v7_0.pipelines_checks.models.IdentityRef>`
:param modified_on: Time when configured check was modified.
:type modified_on: datetime
:param timeout: Timeout in minutes for the check.
:type timeout: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'resource': {'key': 'resource', 'type': 'Resource'},
'type': {'key': 'type', 'type': 'CheckType'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_by': {'key': 'modifiedBy', 'type': 'IdentityRef'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'timeout': {'key': 'timeout', 'type': 'int'}
}
def __init__(self, id=None, resource=None, type=None, url=None, _links=None, created_by=None, created_on=None, modified_by=None, modified_on=None, timeout=None):
super(CheckConfiguration, self).__init__(id=id, resource=resource, type=type, url=url)
self._links = _links
self.created_by = created_by
self.created_on = created_on
self.modified_by = modified_by
self.modified_on = modified_on
self.timeout = timeout
class CheckRun(CheckRunResult):
"""
:param result_message:
:type result_message: str
:param status:
:type status: object
:param completed_date:
:type completed_date: datetime
:param created_date:
:type created_date: datetime
:param check_configuration_ref:
:type check_configuration_ref: :class:`CheckConfigurationRef <azure.devops.v7_0.pipelines_checks.models.CheckConfigurationRef>`
:param id:
:type id: str
"""
_attribute_map = {
'result_message': {'key': 'resultMessage', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'},
'completed_date': {'key': 'completedDate', 'type': 'iso-8601'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'check_configuration_ref': {'key': 'checkConfigurationRef', 'type': 'CheckConfigurationRef'},
'id': {'key': 'id', 'type': 'str'}
}
def __init__(self, result_message=None, status=None, completed_date=None, created_date=None, check_configuration_ref=None, id=None):
super(CheckRun, self).__init__(result_message=result_message, status=status)
self.completed_date = completed_date
self.created_date = created_date
self.check_configuration_ref = check_configuration_ref
self.id = id
class CheckSuite(CheckSuiteRef):
"""
:param context: Evaluation context for the check suite request
:type context: :class:`object <azure.devops.v7_0.pipelines_checks.models.object>`
:param id: Unique suite id generated by the pipeline orchestrator for the pipeline check runs request on the list of resources Pipeline orchestrator will used this identifier to map the check requests on a stage
:type id: str
:param _links: Reference links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_0.pipelines_checks.models.ReferenceLinks>`
:param completed_date: Completed date of the given check suite request
:type completed_date: datetime
:param check_runs: List of check runs associated with the given check suite request.
:type check_runs: list of :class:`CheckRun <azure.devops.v7_0.pipelines_checks.models.CheckRun>`
:param message: Optional message for the given check suite request
:type message: str
:param status: Overall check runs status for the given suite request. This is check suite status
:type status: object
"""
_attribute_map = {
'context': {'key': 'context', 'type': 'object'},
'id': {'key': 'id', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'completed_date': {'key': 'completedDate', 'type': 'iso-8601'},
'check_runs': {'key': 'checkRuns', 'type': '[CheckRun]'},
'message': {'key': 'message', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'}
}
def __init__(self, context=None, id=None, _links=None, completed_date=None, check_runs=None, message=None, status=None):
super(CheckSuite, self).__init__(context=context, id=id)
self._links = _links
self.completed_date = completed_date
self.check_runs = check_runs
self.message = message
self.status = status
__all__ = [
'ApprovalConfig',
'ApprovalConfigSettings',
'GraphSubjectBase',
'CheckConfigurationRef',
'CheckRunResult',
'CheckSuiteRef',
'CheckSuiteRequest',
'CheckType',
'IdentityRef',
'ReferenceLinks',
'Resource',
'TaskCheckConfig',
'TaskCheckDefinitionReference',
'CheckConfiguration',
'CheckRun',
'CheckSuite',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/pipelines_checks/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/pipelines_checks/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 7870
}
| 387 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class ProvenanceClient(Client):
"""Provenance
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(ProvenanceClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = 'b40c1171-807a-493a-8f3f-5c26d5e2f5aa'
def create_session(self, session_request, protocol, project=None):
"""CreateSession.
[Preview API] Creates a session, a wrapper around a feed that can store additional metadata on the packages published to it.
:param :class:`<SessionRequest> <azure.devops.v7_0.provenance.models.SessionRequest>` session_request: The feed and metadata for the session
:param str protocol: The protocol that the session will target
:param str project: Project ID or project name
:rtype: :class:`<SessionResponse> <azure.devops.v7_0.provenance.models.SessionResponse>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if protocol is not None:
route_values['protocol'] = self._serialize.url('protocol', protocol, 'str')
content = self._serialize.body(session_request, 'SessionRequest')
response = self._send(http_method='POST',
location_id='503b4e54-ebf4-4d04-8eee-21c00823c2ac',
version='7.0-preview.1',
route_values=route_values,
content=content)
return self._deserialize('SessionResponse', response)
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/provenance/provenance_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/provenance/provenance_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 872
}
| 388 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .service_endpoint_client import ServiceEndpointClient
__all__ = [
'AadOauthTokenRequest',
'AadOauthTokenResult',
'AuthenticationSchemeReference',
'AuthorizationHeader',
'AzureManagementGroup',
'AzureManagementGroupQueryResult',
'AzureSubscription',
'AzureSubscriptionQueryResult',
'ClientCertificate',
'DataSource',
'DataSourceBinding',
'DataSourceBindingBase',
'DataSourceDetails',
'DependencyBinding',
'DependencyData',
'DependsOn',
'EndpointAuthorization',
'EndpointUrl',
'GraphSubjectBase',
'HelpLink',
'IdentityRef',
'InputDescriptor',
'InputValidation',
'InputValue',
'InputValues',
'InputValuesError',
'OAuthConfiguration',
'OAuthConfigurationParams',
'ProjectReference',
'ReferenceLinks',
'RefreshAuthenticationParameters',
'ResultTransformationDetails',
'ServiceEndpoint',
'ServiceEndpointAuthenticationScheme',
'ServiceEndpointDetails',
'ServiceEndpointExecutionData',
'ServiceEndpointExecutionOwner',
'ServiceEndpointExecutionRecord',
'ServiceEndpointExecutionRecordsInput',
'ServiceEndpointProjectReference',
'ServiceEndpointRequest',
'ServiceEndpointRequestResult',
'ServiceEndpointType',
'ServiceEndpointClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/service_endpoint/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/service_endpoint/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 553
}
| 389 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class TaskAgentClient(Client):
"""TaskAgent
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(TaskAgentClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = 'a85b8835-c1a1-4aac-ae97-1c3d0ba72dbd'
def add_agent_cloud(self, agent_cloud):
"""AddAgentCloud.
:param :class:`<TaskAgentCloud> <azure.devops.v7_0.task_agent.models.TaskAgentCloud>` agent_cloud:
:rtype: :class:`<TaskAgentCloud> <azure.devops.v7_0.task_agent.models.TaskAgentCloud>`
"""
content = self._serialize.body(agent_cloud, 'TaskAgentCloud')
response = self._send(http_method='POST',
location_id='bfa72b3d-0fc6-43fb-932b-a7f6559f93b9',
version='7.0',
content=content)
return self._deserialize('TaskAgentCloud', response)
def delete_agent_cloud(self, agent_cloud_id):
"""DeleteAgentCloud.
:param int agent_cloud_id:
:rtype: :class:`<TaskAgentCloud> <azure.devops.v7_0.task_agent.models.TaskAgentCloud>`
"""
route_values = {}
if agent_cloud_id is not None:
route_values['agentCloudId'] = self._serialize.url('agent_cloud_id', agent_cloud_id, 'int')
response = self._send(http_method='DELETE',
location_id='bfa72b3d-0fc6-43fb-932b-a7f6559f93b9',
version='7.0',
route_values=route_values)
return self._deserialize('TaskAgentCloud', response)
def get_agent_cloud(self, agent_cloud_id):
"""GetAgentCloud.
:param int agent_cloud_id:
:rtype: :class:`<TaskAgentCloud> <azure.devops.v7_0.task_agent.models.TaskAgentCloud>`
"""
route_values = {}
if agent_cloud_id is not None:
route_values['agentCloudId'] = self._serialize.url('agent_cloud_id', agent_cloud_id, 'int')
response = self._send(http_method='GET',
location_id='bfa72b3d-0fc6-43fb-932b-a7f6559f93b9',
version='7.0',
route_values=route_values)
return self._deserialize('TaskAgentCloud', response)
def get_agent_clouds(self):
"""GetAgentClouds.
:rtype: [TaskAgentCloud]
"""
response = self._send(http_method='GET',
location_id='bfa72b3d-0fc6-43fb-932b-a7f6559f93b9',
version='7.0')
return self._deserialize('[TaskAgentCloud]', self._unwrap_collection(response))
def update_agent_cloud(self, updated_cloud, agent_cloud_id):
"""UpdateAgentCloud.
:param :class:`<TaskAgentCloud> <azure.devops.v7_0.task_agent.models.TaskAgentCloud>` updated_cloud:
:param int agent_cloud_id:
:rtype: :class:`<TaskAgentCloud> <azure.devops.v7_0.task_agent.models.TaskAgentCloud>`
"""
route_values = {}
if agent_cloud_id is not None:
route_values['agentCloudId'] = self._serialize.url('agent_cloud_id', agent_cloud_id, 'int')
content = self._serialize.body(updated_cloud, 'TaskAgentCloud')
response = self._send(http_method='PATCH',
location_id='bfa72b3d-0fc6-43fb-932b-a7f6559f93b9',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('TaskAgentCloud', response)
def get_agent_cloud_types(self):
"""GetAgentCloudTypes.
[Preview API] Get agent cloud types.
:rtype: [TaskAgentCloudType]
"""
response = self._send(http_method='GET',
location_id='5932e193-f376-469d-9c3e-e5588ce12cb5',
version='7.0-preview.1')
return self._deserialize('[TaskAgentCloudType]', self._unwrap_collection(response))
def add_agent(self, agent, pool_id):
"""AddAgent.
Adds an agent to a pool. You probably don't want to call this endpoint directly. Instead, [configure an agent](https://docs.microsoft.com/azure/devops/pipelines/agents/agents) using the agent download package.
:param :class:`<TaskAgent> <azure.devops.v7_0.task_agent.models.TaskAgent>` agent: Details about the agent being added
:param int pool_id: The agent pool in which to add the agent
:rtype: :class:`<TaskAgent> <azure.devops.v7_0.task_agent.models.TaskAgent>`
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
content = self._serialize.body(agent, 'TaskAgent')
response = self._send(http_method='POST',
location_id='e298ef32-5878-4cab-993c-043836571f42',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('TaskAgent', response)
def delete_agent(self, pool_id, agent_id):
"""DeleteAgent.
Delete an agent. You probably don't want to call this endpoint directly. Instead, [use the agent configuration script](https://docs.microsoft.com/azure/devops/pipelines/agents/agents) to remove an agent from your organization.
:param int pool_id: The pool ID to remove the agent from
:param int agent_id: The agent ID to remove
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
if agent_id is not None:
route_values['agentId'] = self._serialize.url('agent_id', agent_id, 'int')
self._send(http_method='DELETE',
location_id='e298ef32-5878-4cab-993c-043836571f42',
version='7.0',
route_values=route_values)
def get_agent(self, pool_id, agent_id, include_capabilities=None, include_assigned_request=None, include_last_completed_request=None, property_filters=None):
"""GetAgent.
Get information about an agent.
:param int pool_id: The agent pool containing the agent
:param int agent_id: The agent ID to get information about
:param bool include_capabilities: Whether to include the agent's capabilities in the response
:param bool include_assigned_request: Whether to include details about the agent's current work
:param bool include_last_completed_request: Whether to include details about the agents' most recent completed work
:param [str] property_filters: Filter which custom properties will be returned
:rtype: :class:`<TaskAgent> <azure.devops.v7_0.task_agent.models.TaskAgent>`
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
if agent_id is not None:
route_values['agentId'] = self._serialize.url('agent_id', agent_id, 'int')
query_parameters = {}
if include_capabilities is not None:
query_parameters['includeCapabilities'] = self._serialize.query('include_capabilities', include_capabilities, 'bool')
if include_assigned_request is not None:
query_parameters['includeAssignedRequest'] = self._serialize.query('include_assigned_request', include_assigned_request, 'bool')
if include_last_completed_request is not None:
query_parameters['includeLastCompletedRequest'] = self._serialize.query('include_last_completed_request', include_last_completed_request, 'bool')
if property_filters is not None:
property_filters = ",".join(property_filters)
query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
response = self._send(http_method='GET',
location_id='e298ef32-5878-4cab-993c-043836571f42',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TaskAgent', response)
def get_agents(self, pool_id, agent_name=None, include_capabilities=None, include_assigned_request=None, include_last_completed_request=None, property_filters=None, demands=None):
"""GetAgents.
Get a list of agents.
:param int pool_id: The agent pool containing the agents
:param str agent_name: Filter on agent name
:param bool include_capabilities: Whether to include the agents' capabilities in the response
:param bool include_assigned_request: Whether to include details about the agents' current work
:param bool include_last_completed_request: Whether to include details about the agents' most recent completed work
:param [str] property_filters: Filter which custom properties will be returned
:param [str] demands: Filter by demands the agents can satisfy
:rtype: [TaskAgent]
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
query_parameters = {}
if agent_name is not None:
query_parameters['agentName'] = self._serialize.query('agent_name', agent_name, 'str')
if include_capabilities is not None:
query_parameters['includeCapabilities'] = self._serialize.query('include_capabilities', include_capabilities, 'bool')
if include_assigned_request is not None:
query_parameters['includeAssignedRequest'] = self._serialize.query('include_assigned_request', include_assigned_request, 'bool')
if include_last_completed_request is not None:
query_parameters['includeLastCompletedRequest'] = self._serialize.query('include_last_completed_request', include_last_completed_request, 'bool')
if property_filters is not None:
property_filters = ",".join(property_filters)
query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
if demands is not None:
demands = ",".join(demands)
query_parameters['demands'] = self._serialize.query('demands', demands, 'str')
response = self._send(http_method='GET',
location_id='e298ef32-5878-4cab-993c-043836571f42',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TaskAgent]', self._unwrap_collection(response))
def replace_agent(self, agent, pool_id, agent_id):
"""ReplaceAgent.
Replace an agent. You probably don't want to call this endpoint directly. Instead, [use the agent configuration script](https://docs.microsoft.com/azure/devops/pipelines/agents/agents) to remove and reconfigure an agent from your organization.
:param :class:`<TaskAgent> <azure.devops.v7_0.task_agent.models.TaskAgent>` agent: Updated details about the replacing agent
:param int pool_id: The agent pool to use
:param int agent_id: The agent to replace
:rtype: :class:`<TaskAgent> <azure.devops.v7_0.task_agent.models.TaskAgent>`
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
if agent_id is not None:
route_values['agentId'] = self._serialize.url('agent_id', agent_id, 'int')
content = self._serialize.body(agent, 'TaskAgent')
response = self._send(http_method='PUT',
location_id='e298ef32-5878-4cab-993c-043836571f42',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('TaskAgent', response)
def update_agent(self, agent, pool_id, agent_id):
"""UpdateAgent.
Update agent details.
:param :class:`<TaskAgent> <azure.devops.v7_0.task_agent.models.TaskAgent>` agent: Updated details about the agent
:param int pool_id: The agent pool to use
:param int agent_id: The agent to update
:rtype: :class:`<TaskAgent> <azure.devops.v7_0.task_agent.models.TaskAgent>`
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
if agent_id is not None:
route_values['agentId'] = self._serialize.url('agent_id', agent_id, 'int')
content = self._serialize.body(agent, 'TaskAgent')
response = self._send(http_method='PATCH',
location_id='e298ef32-5878-4cab-993c-043836571f42',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('TaskAgent', response)
def add_deployment_group(self, deployment_group, project):
"""AddDeploymentGroup.
Create a deployment group.
:param :class:`<DeploymentGroupCreateParameter> <azure.devops.v7_0.task_agent.models.DeploymentGroupCreateParameter>` deployment_group: Deployment group to create.
:param str project: Project ID or project name
:rtype: :class:`<DeploymentGroup> <azure.devops.v7_0.task_agent.models.DeploymentGroup>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(deployment_group, 'DeploymentGroupCreateParameter')
response = self._send(http_method='POST',
location_id='083c4d89-ab35-45af-aa11-7cf66895c53e',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('DeploymentGroup', response)
def delete_deployment_group(self, project, deployment_group_id):
"""DeleteDeploymentGroup.
Delete a deployment group.
:param str project: Project ID or project name
:param int deployment_group_id: ID of the deployment group to be deleted.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if deployment_group_id is not None:
route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
self._send(http_method='DELETE',
location_id='083c4d89-ab35-45af-aa11-7cf66895c53e',
version='7.0',
route_values=route_values)
def get_deployment_group(self, project, deployment_group_id, action_filter=None, expand=None):
"""GetDeploymentGroup.
Get a deployment group by its ID.
:param str project: Project ID or project name
:param int deployment_group_id: ID of the deployment group.
:param str action_filter: Get the deployment group only if this action can be performed on it.
:param str expand: Include these additional details in the returned object.
:rtype: :class:`<DeploymentGroup> <azure.devops.v7_0.task_agent.models.DeploymentGroup>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if deployment_group_id is not None:
route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
query_parameters = {}
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='083c4d89-ab35-45af-aa11-7cf66895c53e',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('DeploymentGroup', response)
def get_deployment_groups(self, project, name=None, action_filter=None, expand=None, continuation_token=None, top=None, ids=None):
"""GetDeploymentGroups.
Get a list of deployment groups by name or IDs.
:param str project: Project ID or project name
:param str name: Name of the deployment group.
:param str action_filter: Get only deployment groups on which this action can be performed.
:param str expand: Include these additional details in the returned objects.
:param str continuation_token: Get deployment groups with names greater than this continuationToken lexicographically.
:param int top: Maximum number of deployment groups to return. Default is **1000**.
:param [int] ids: Comma separated list of IDs of the deployment groups.
:rtype: :class:`<[DeploymentGroup]> <azure.devops.v7_0.task_agent.models.[DeploymentGroup]>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if name is not None:
query_parameters['name'] = self._serialize.query('name', name, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if ids is not None:
ids = ",".join(map(str, ids))
query_parameters['ids'] = self._serialize.query('ids', ids, 'str')
response = self._send(http_method='GET',
location_id='083c4d89-ab35-45af-aa11-7cf66895c53e',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[DeploymentGroup]', self._unwrap_collection(response))
def update_deployment_group(self, deployment_group, project, deployment_group_id):
"""UpdateDeploymentGroup.
Update a deployment group.
:param :class:`<DeploymentGroupUpdateParameter> <azure.devops.v7_0.task_agent.models.DeploymentGroupUpdateParameter>` deployment_group: Deployment group to update.
:param str project: Project ID or project name
:param int deployment_group_id: ID of the deployment group.
:rtype: :class:`<DeploymentGroup> <azure.devops.v7_0.task_agent.models.DeploymentGroup>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if deployment_group_id is not None:
route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
content = self._serialize.body(deployment_group, 'DeploymentGroupUpdateParameter')
response = self._send(http_method='PATCH',
location_id='083c4d89-ab35-45af-aa11-7cf66895c53e',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('DeploymentGroup', response)
def get_environment_deployment_execution_records(self, project, environment_id, continuation_token=None, top=None):
"""GetEnvironmentDeploymentExecutionRecords.
Get environment deployment execution history
:param str project: Project ID or project name
:param int environment_id:
:param str continuation_token:
:param int top:
:rtype: :class:`<[EnvironmentDeploymentExecutionRecord]> <azure.devops.v7_0.task_agent.models.[EnvironmentDeploymentExecutionRecord]>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if environment_id is not None:
route_values['environmentId'] = self._serialize.url('environment_id', environment_id, 'int')
query_parameters = {}
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if top is not None:
query_parameters['top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='51bb5d21-4305-4ea6-9dbb-b7488af73334',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[EnvironmentDeploymentExecutionRecord]', self._unwrap_collection(response))
def add_environment(self, environment_create_parameter, project):
"""AddEnvironment.
Create an environment.
:param :class:`<EnvironmentCreateParameter> <azure.devops.v7_0.task_agent.models.EnvironmentCreateParameter>` environment_create_parameter: Environment to create.
:param str project: Project ID or project name
:rtype: :class:`<EnvironmentInstance> <azure.devops.v7_0.task_agent.models.EnvironmentInstance>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(environment_create_parameter, 'EnvironmentCreateParameter')
response = self._send(http_method='POST',
location_id='8572b1fc-2482-47fa-8f74-7e3ed53ee54b',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('EnvironmentInstance', response)
def delete_environment(self, project, environment_id):
"""DeleteEnvironment.
Delete the specified environment.
:param str project: Project ID or project name
:param int environment_id: ID of the environment.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if environment_id is not None:
route_values['environmentId'] = self._serialize.url('environment_id', environment_id, 'int')
self._send(http_method='DELETE',
location_id='8572b1fc-2482-47fa-8f74-7e3ed53ee54b',
version='7.0',
route_values=route_values)
def get_environment_by_id(self, project, environment_id, expands=None):
"""GetEnvironmentById.
Get an environment by its ID.
:param str project: Project ID or project name
:param int environment_id: ID of the environment.
:param str expands: Include these additional details in the returned objects.
:rtype: :class:`<EnvironmentInstance> <azure.devops.v7_0.task_agent.models.EnvironmentInstance>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if environment_id is not None:
route_values['environmentId'] = self._serialize.url('environment_id', environment_id, 'int')
query_parameters = {}
if expands is not None:
query_parameters['expands'] = self._serialize.query('expands', expands, 'str')
response = self._send(http_method='GET',
location_id='8572b1fc-2482-47fa-8f74-7e3ed53ee54b',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('EnvironmentInstance', response)
def get_environments(self, project, name=None, continuation_token=None, top=None):
"""GetEnvironments.
Get all environments.
:param str project: Project ID or project name
:param str name:
:param str continuation_token:
:param int top:
:rtype: :class:`<[EnvironmentInstance]> <azure.devops.v7_0.task_agent.models.[EnvironmentInstance]>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if name is not None:
query_parameters['name'] = self._serialize.query('name', name, 'str')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='8572b1fc-2482-47fa-8f74-7e3ed53ee54b',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[EnvironmentInstance]', self._unwrap_collection(response))
def update_environment(self, environment_update_parameter, project, environment_id):
"""UpdateEnvironment.
Update the specified environment.
:param :class:`<EnvironmentUpdateParameter> <azure.devops.v7_0.task_agent.models.EnvironmentUpdateParameter>` environment_update_parameter: Environment data to update.
:param str project: Project ID or project name
:param int environment_id: ID of the environment.
:rtype: :class:`<EnvironmentInstance> <azure.devops.v7_0.task_agent.models.EnvironmentInstance>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if environment_id is not None:
route_values['environmentId'] = self._serialize.url('environment_id', environment_id, 'int')
content = self._serialize.body(environment_update_parameter, 'EnvironmentUpdateParameter')
response = self._send(http_method='PATCH',
location_id='8572b1fc-2482-47fa-8f74-7e3ed53ee54b',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('EnvironmentInstance', response)
def add_kubernetes_resource(self, create_parameters, project, environment_id):
"""AddKubernetesResource.
:param :class:`<KubernetesResourceCreateParameters> <azure.devops.v7_0.task_agent.models.KubernetesResourceCreateParameters>` create_parameters:
:param str project: Project ID or project name
:param int environment_id:
:rtype: :class:`<KubernetesResource> <azure.devops.v7_0.task_agent.models.KubernetesResource>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if environment_id is not None:
route_values['environmentId'] = self._serialize.url('environment_id', environment_id, 'int')
content = self._serialize.body(create_parameters, 'KubernetesResourceCreateParameters')
response = self._send(http_method='POST',
location_id='73fba52f-15ab-42b3-a538-ce67a9223a04',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('KubernetesResource', response)
def delete_kubernetes_resource(self, project, environment_id, resource_id):
"""DeleteKubernetesResource.
:param str project: Project ID or project name
:param int environment_id:
:param int resource_id:
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if environment_id is not None:
route_values['environmentId'] = self._serialize.url('environment_id', environment_id, 'int')
if resource_id is not None:
route_values['resourceId'] = self._serialize.url('resource_id', resource_id, 'int')
self._send(http_method='DELETE',
location_id='73fba52f-15ab-42b3-a538-ce67a9223a04',
version='7.0',
route_values=route_values)
def get_kubernetes_resource(self, project, environment_id, resource_id):
"""GetKubernetesResource.
:param str project: Project ID or project name
:param int environment_id:
:param int resource_id:
:rtype: :class:`<KubernetesResource> <azure.devops.v7_0.task_agent.models.KubernetesResource>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if environment_id is not None:
route_values['environmentId'] = self._serialize.url('environment_id', environment_id, 'int')
if resource_id is not None:
route_values['resourceId'] = self._serialize.url('resource_id', resource_id, 'int')
response = self._send(http_method='GET',
location_id='73fba52f-15ab-42b3-a538-ce67a9223a04',
version='7.0',
route_values=route_values)
return self._deserialize('KubernetesResource', response)
def add_agent_pool(self, pool):
"""AddAgentPool.
Create an agent pool.
:param :class:`<TaskAgentPool> <azure.devops.v7_0.task_agent.models.TaskAgentPool>` pool: Details about the new agent pool
:rtype: :class:`<TaskAgentPool> <azure.devops.v7_0.task_agent.models.TaskAgentPool>`
"""
content = self._serialize.body(pool, 'TaskAgentPool')
response = self._send(http_method='POST',
location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
version='7.0',
content=content)
return self._deserialize('TaskAgentPool', response)
def delete_agent_pool(self, pool_id):
"""DeleteAgentPool.
Delete an agent pool.
:param int pool_id: ID of the agent pool to delete
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
self._send(http_method='DELETE',
location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
version='7.0',
route_values=route_values)
def get_agent_pool(self, pool_id, properties=None, action_filter=None):
"""GetAgentPool.
Get information about an agent pool.
:param int pool_id: An agent pool ID
:param [str] properties: Agent pool properties (comma-separated)
:param str action_filter: Filter by whether the calling user has use or manage permissions
:rtype: :class:`<TaskAgentPool> <azure.devops.v7_0.task_agent.models.TaskAgentPool>`
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
query_parameters = {}
if properties is not None:
properties = ",".join(properties)
query_parameters['properties'] = self._serialize.query('properties', properties, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
response = self._send(http_method='GET',
location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TaskAgentPool', response)
def get_agent_pools(self, pool_name=None, properties=None, pool_type=None, action_filter=None):
"""GetAgentPools.
Get a list of agent pools.
:param str pool_name: Filter by name
:param [str] properties: Filter by agent pool properties (comma-separated)
:param str pool_type: Filter by pool type
:param str action_filter: Filter by whether the calling user has use or manage permissions
:rtype: [TaskAgentPool]
"""
query_parameters = {}
if pool_name is not None:
query_parameters['poolName'] = self._serialize.query('pool_name', pool_name, 'str')
if properties is not None:
properties = ",".join(properties)
query_parameters['properties'] = self._serialize.query('properties', properties, 'str')
if pool_type is not None:
query_parameters['poolType'] = self._serialize.query('pool_type', pool_type, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
response = self._send(http_method='GET',
location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
version='7.0',
query_parameters=query_parameters)
return self._deserialize('[TaskAgentPool]', self._unwrap_collection(response))
def get_agent_pools_by_ids(self, pool_ids, action_filter=None):
"""GetAgentPoolsByIds.
Get a list of agent pools.
:param [int] pool_ids: pool Ids to fetch
:param str action_filter: Filter by whether the calling user has use or manage permissions
:rtype: [TaskAgentPool]
"""
query_parameters = {}
if pool_ids is not None:
pool_ids = ",".join(map(str, pool_ids))
query_parameters['poolIds'] = self._serialize.query('pool_ids', pool_ids, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
response = self._send(http_method='GET',
location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
version='7.0',
query_parameters=query_parameters)
return self._deserialize('[TaskAgentPool]', self._unwrap_collection(response))
def update_agent_pool(self, pool, pool_id):
"""UpdateAgentPool.
Update properties on an agent pool
:param :class:`<TaskAgentPool> <azure.devops.v7_0.task_agent.models.TaskAgentPool>` pool: Updated agent pool details
:param int pool_id: The agent pool to update
:rtype: :class:`<TaskAgentPool> <azure.devops.v7_0.task_agent.models.TaskAgentPool>`
"""
route_values = {}
if pool_id is not None:
route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')
content = self._serialize.body(pool, 'TaskAgentPool')
response = self._send(http_method='PATCH',
location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('TaskAgentPool', response)
def add_agent_queue(self, queue, project=None, authorize_pipelines=None):
"""AddAgentQueue.
Create a new agent queue to connect a project to an agent pool.
:param :class:`<TaskAgentQueue> <azure.devops.v7_0.task_agent.models.TaskAgentQueue>` queue: Details about the queue to create
:param str project: Project ID or project name
:param bool authorize_pipelines: Automatically authorize this queue when using YAML
:rtype: :class:`<TaskAgentQueue> <azure.devops.v7_0.task_agent.models.TaskAgentQueue>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if authorize_pipelines is not None:
query_parameters['authorizePipelines'] = self._serialize.query('authorize_pipelines', authorize_pipelines, 'bool')
content = self._serialize.body(queue, 'TaskAgentQueue')
response = self._send(http_method='POST',
location_id='900fa995-c559-4923-aae7-f8424fe4fbea',
version='7.0',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('TaskAgentQueue', response)
def delete_agent_queue(self, queue_id, project=None):
"""DeleteAgentQueue.
Removes an agent queue from a project.
:param int queue_id: The agent queue to remove
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if queue_id is not None:
route_values['queueId'] = self._serialize.url('queue_id', queue_id, 'int')
self._send(http_method='DELETE',
location_id='900fa995-c559-4923-aae7-f8424fe4fbea',
version='7.0',
route_values=route_values)
def get_agent_queue(self, queue_id, project=None, action_filter=None):
"""GetAgentQueue.
Get information about an agent queue.
:param int queue_id: The agent queue to get information about
:param str project: Project ID or project name
:param str action_filter: Filter by whether the calling user has use or manage permissions
:rtype: :class:`<TaskAgentQueue> <azure.devops.v7_0.task_agent.models.TaskAgentQueue>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if queue_id is not None:
route_values['queueId'] = self._serialize.url('queue_id', queue_id, 'int')
query_parameters = {}
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
response = self._send(http_method='GET',
location_id='900fa995-c559-4923-aae7-f8424fe4fbea',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TaskAgentQueue', response)
def get_agent_queues(self, project=None, queue_name=None, action_filter=None):
"""GetAgentQueues.
Get a list of agent queues.
:param str project: Project ID or project name
:param str queue_name: Filter on the agent queue name
:param str action_filter: Filter by whether the calling user has use or manage permissions
:rtype: [TaskAgentQueue]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if queue_name is not None:
query_parameters['queueName'] = self._serialize.query('queue_name', queue_name, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
response = self._send(http_method='GET',
location_id='900fa995-c559-4923-aae7-f8424fe4fbea',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TaskAgentQueue]', self._unwrap_collection(response))
def get_agent_queues_by_ids(self, queue_ids, project=None, action_filter=None):
"""GetAgentQueuesByIds.
Get a list of agent queues by their IDs
:param [int] queue_ids: A comma-separated list of agent queue IDs to retrieve
:param str project: Project ID or project name
:param str action_filter: Filter by whether the calling user has use or manage permissions
:rtype: [TaskAgentQueue]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if queue_ids is not None:
queue_ids = ",".join(map(str, queue_ids))
query_parameters['queueIds'] = self._serialize.query('queue_ids', queue_ids, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
response = self._send(http_method='GET',
location_id='900fa995-c559-4923-aae7-f8424fe4fbea',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TaskAgentQueue]', self._unwrap_collection(response))
def get_agent_queues_by_names(self, queue_names, project=None, action_filter=None):
"""GetAgentQueuesByNames.
Get a list of agent queues by their names
:param [str] queue_names: A comma-separated list of agent names to retrieve
:param str project: Project ID or project name
:param str action_filter: Filter by whether the calling user has use or manage permissions
:rtype: [TaskAgentQueue]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if queue_names is not None:
queue_names = ",".join(queue_names)
query_parameters['queueNames'] = self._serialize.query('queue_names', queue_names, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
response = self._send(http_method='GET',
location_id='900fa995-c559-4923-aae7-f8424fe4fbea',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TaskAgentQueue]', self._unwrap_collection(response))
def get_agent_queues_for_pools(self, pool_ids, project=None, action_filter=None):
"""GetAgentQueuesForPools.
Get a list of agent queues by pool ids
:param [int] pool_ids: A comma-separated list of pool ids to get the corresponding queues for
:param str project: Project ID or project name
:param str action_filter: Filter by whether the calling user has use or manage permissions
:rtype: [TaskAgentQueue]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if pool_ids is not None:
pool_ids = ",".join(map(str, pool_ids))
query_parameters['poolIds'] = self._serialize.query('pool_ids', pool_ids, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
response = self._send(http_method='GET',
location_id='900fa995-c559-4923-aae7-f8424fe4fbea',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TaskAgentQueue]', self._unwrap_collection(response))
def get_agent_cloud_requests(self, agent_cloud_id):
"""GetAgentCloudRequests.
:param int agent_cloud_id:
:rtype: [TaskAgentCloudRequest]
"""
route_values = {}
if agent_cloud_id is not None:
route_values['agentCloudId'] = self._serialize.url('agent_cloud_id', agent_cloud_id, 'int')
response = self._send(http_method='GET',
location_id='20189bd7-5134-49c2-b8e9-f9e856eea2b2',
version='7.0',
route_values=route_values)
return self._deserialize('[TaskAgentCloudRequest]', self._unwrap_collection(response))
def delete_deployment_target(self, project, deployment_group_id, target_id):
"""DeleteDeploymentTarget.
Delete a deployment target in a deployment group. This deletes the agent from associated deployment pool too.
:param str project: Project ID or project name
:param int deployment_group_id: ID of the deployment group in which deployment target is deleted.
:param int target_id: ID of the deployment target to delete.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if deployment_group_id is not None:
route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
if target_id is not None:
route_values['targetId'] = self._serialize.url('target_id', target_id, 'int')
self._send(http_method='DELETE',
location_id='2f0aa599-c121-4256-a5fd-ba370e0ae7b6',
version='7.0',
route_values=route_values)
def get_deployment_target(self, project, deployment_group_id, target_id, expand=None):
"""GetDeploymentTarget.
Get a deployment target by its ID in a deployment group
:param str project: Project ID or project name
:param int deployment_group_id: ID of the deployment group to which deployment target belongs.
:param int target_id: ID of the deployment target to return.
:param str expand: Include these additional details in the returned objects.
:rtype: :class:`<DeploymentMachine> <azure.devops.v7_0.task_agent.models.DeploymentMachine>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if deployment_group_id is not None:
route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
if target_id is not None:
route_values['targetId'] = self._serialize.url('target_id', target_id, 'int')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='2f0aa599-c121-4256-a5fd-ba370e0ae7b6',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('DeploymentMachine', response)
def get_deployment_targets(self, project, deployment_group_id, tags=None, name=None, partial_name_match=None, expand=None, agent_status=None, agent_job_result=None, continuation_token=None, top=None, enabled=None, property_filters=None):
"""GetDeploymentTargets.
Get a list of deployment targets in a deployment group.
:param str project: Project ID or project name
:param int deployment_group_id: ID of the deployment group.
:param [str] tags: Get only the deployment targets that contain all these comma separted list of tags.
:param str name: Name pattern of the deployment targets to return.
:param bool partial_name_match: When set to true, treats **name** as pattern. Else treats it as absolute match. Default is **false**.
:param str expand: Include these additional details in the returned objects.
:param str agent_status: Get only deployment targets that have this status.
:param str agent_job_result: Get only deployment targets that have this last job result.
:param str continuation_token: Get deployment targets with names greater than this continuationToken lexicographically.
:param int top: Maximum number of deployment targets to return. Default is **1000**.
:param bool enabled: Get only deployment targets that are enabled or disabled. Default is 'null' which returns all the targets.
:param [str] property_filters:
:rtype: :class:`<[DeploymentMachine]> <azure.devops.v7_0.task_agent.models.[DeploymentMachine]>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if deployment_group_id is not None:
route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
query_parameters = {}
if tags is not None:
tags = ",".join(tags)
query_parameters['tags'] = self._serialize.query('tags', tags, 'str')
if name is not None:
query_parameters['name'] = self._serialize.query('name', name, 'str')
if partial_name_match is not None:
query_parameters['partialNameMatch'] = self._serialize.query('partial_name_match', partial_name_match, 'bool')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if agent_status is not None:
query_parameters['agentStatus'] = self._serialize.query('agent_status', agent_status, 'str')
if agent_job_result is not None:
query_parameters['agentJobResult'] = self._serialize.query('agent_job_result', agent_job_result, 'str')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if enabled is not None:
query_parameters['enabled'] = self._serialize.query('enabled', enabled, 'bool')
if property_filters is not None:
property_filters = ",".join(property_filters)
query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
response = self._send(http_method='GET',
location_id='2f0aa599-c121-4256-a5fd-ba370e0ae7b6',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[DeploymentMachine]', self._unwrap_collection(response))
def update_deployment_targets(self, machines, project, deployment_group_id):
"""UpdateDeploymentTargets.
Update tags of a list of deployment targets in a deployment group.
:param [DeploymentTargetUpdateParameter] machines: Deployment targets with tags to udpdate.
:param str project: Project ID or project name
:param int deployment_group_id: ID of the deployment group in which deployment targets are updated.
:rtype: [DeploymentMachine]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if deployment_group_id is not None:
route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
content = self._serialize.body(machines, '[DeploymentTargetUpdateParameter]')
response = self._send(http_method='PATCH',
location_id='2f0aa599-c121-4256-a5fd-ba370e0ae7b6',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('[DeploymentMachine]', self._unwrap_collection(response))
def add_task_group(self, task_group, project):
"""AddTaskGroup.
Create a task group.
:param :class:`<TaskGroupCreateParameter> <azure.devops.v7_0.task_agent.models.TaskGroupCreateParameter>` task_group: Task group object to create.
:param str project: Project ID or project name
:rtype: :class:`<TaskGroup> <azure.devops.v7_0.task_agent.models.TaskGroup>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(task_group, 'TaskGroupCreateParameter')
response = self._send(http_method='POST',
location_id='6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('TaskGroup', response)
def delete_task_group(self, project, task_group_id, comment=None):
"""DeleteTaskGroup.
Delete a task group.
:param str project: Project ID or project name
:param str task_group_id: Id of the task group to be deleted.
:param str comment: Comments to delete.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if task_group_id is not None:
route_values['taskGroupId'] = self._serialize.url('task_group_id', task_group_id, 'str')
query_parameters = {}
if comment is not None:
query_parameters['comment'] = self._serialize.query('comment', comment, 'str')
self._send(http_method='DELETE',
location_id='6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
def get_task_groups(self, project, task_group_id=None, expanded=None, task_id_filter=None, deleted=None, top=None, continuation_token=None, query_order=None):
"""GetTaskGroups.
List task groups.
:param str project: Project ID or project name
:param str task_group_id: Id of the task group.
:param bool expanded: 'true' to recursively expand task groups. Default is 'false'.
:param str task_id_filter: Guid of the taskId to filter.
:param bool deleted: 'true'to include deleted task groups. Default is 'false'.
:param int top: Number of task groups to get.
:param datetime continuation_token: Gets the task groups after the continuation token provided.
:param str query_order: Gets the results in the defined order. Default is 'CreatedOnDescending'.
:rtype: [TaskGroup]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if task_group_id is not None:
route_values['taskGroupId'] = self._serialize.url('task_group_id', task_group_id, 'str')
query_parameters = {}
if expanded is not None:
query_parameters['expanded'] = self._serialize.query('expanded', expanded, 'bool')
if task_id_filter is not None:
query_parameters['taskIdFilter'] = self._serialize.query('task_id_filter', task_id_filter, 'str')
if deleted is not None:
query_parameters['deleted'] = self._serialize.query('deleted', deleted, 'bool')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'iso-8601')
if query_order is not None:
query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str')
response = self._send(http_method='GET',
location_id='6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TaskGroup]', self._unwrap_collection(response))
def update_task_group(self, task_group, project, task_group_id=None):
"""UpdateTaskGroup.
Update a task group.
:param :class:`<TaskGroupUpdateParameter> <azure.devops.v7_0.task_agent.models.TaskGroupUpdateParameter>` task_group: Task group to update.
:param str project: Project ID or project name
:param str task_group_id: Id of the task group to update.
:rtype: :class:`<TaskGroup> <azure.devops.v7_0.task_agent.models.TaskGroup>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if task_group_id is not None:
route_values['taskGroupId'] = self._serialize.url('task_group_id', task_group_id, 'str')
content = self._serialize.body(task_group, 'TaskGroupUpdateParameter')
response = self._send(http_method='PUT',
location_id='6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('TaskGroup', response)
def add_variable_group(self, variable_group_parameters):
"""AddVariableGroup.
Add a variable group.
:param :class:`<VariableGroupParameters> <azure.devops.v7_0.task_agent.models.VariableGroupParameters>` variable_group_parameters:
:rtype: :class:`<VariableGroup> <azure.devops.v7_0.task_agent.models.VariableGroup>`
"""
content = self._serialize.body(variable_group_parameters, 'VariableGroupParameters')
response = self._send(http_method='POST',
location_id='ef5b7057-ffc3-4c77-bbad-c10b4a4abcc7',
version='7.0',
content=content)
return self._deserialize('VariableGroup', response)
def delete_variable_group(self, group_id, project_ids):
"""DeleteVariableGroup.
Delete a variable group
:param int group_id: Id of the variable group.
:param [str] project_ids:
"""
route_values = {}
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'int')
query_parameters = {}
if project_ids is not None:
project_ids = ",".join(project_ids)
query_parameters['projectIds'] = self._serialize.query('project_ids', project_ids, 'str')
self._send(http_method='DELETE',
location_id='ef5b7057-ffc3-4c77-bbad-c10b4a4abcc7',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
def share_variable_group(self, variable_group_project_references, variable_group_id):
"""ShareVariableGroup.
Add a variable group.
:param [VariableGroupProjectReference] variable_group_project_references:
:param int variable_group_id:
"""
query_parameters = {}
if variable_group_id is not None:
query_parameters['variableGroupId'] = self._serialize.query('variable_group_id', variable_group_id, 'int')
content = self._serialize.body(variable_group_project_references, '[VariableGroupProjectReference]')
self._send(http_method='PATCH',
location_id='ef5b7057-ffc3-4c77-bbad-c10b4a4abcc7',
version='7.0',
query_parameters=query_parameters,
content=content)
def update_variable_group(self, variable_group_parameters, group_id):
"""UpdateVariableGroup.
Update a variable group.
:param :class:`<VariableGroupParameters> <azure.devops.v7_0.task_agent.models.VariableGroupParameters>` variable_group_parameters:
:param int group_id: Id of the variable group to update.
:rtype: :class:`<VariableGroup> <azure.devops.v7_0.task_agent.models.VariableGroup>`
"""
route_values = {}
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'int')
content = self._serialize.body(variable_group_parameters, 'VariableGroupParameters')
response = self._send(http_method='PUT',
location_id='ef5b7057-ffc3-4c77-bbad-c10b4a4abcc7',
version='7.0',
route_values=route_values,
content=content)
return self._deserialize('VariableGroup', response)
def get_variable_group(self, project, group_id):
"""GetVariableGroup.
Get a variable group.
:param str project: Project ID or project name
:param int group_id: Id of the variable group.
:rtype: :class:`<VariableGroup> <azure.devops.v7_0.task_agent.models.VariableGroup>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if group_id is not None:
route_values['groupId'] = self._serialize.url('group_id', group_id, 'int')
response = self._send(http_method='GET',
location_id='f5b09dd5-9d54-45a1-8b5a-1c8287d634cc',
version='7.0',
route_values=route_values)
return self._deserialize('VariableGroup', response)
def get_variable_groups(self, project, group_name=None, action_filter=None, top=None, continuation_token=None, query_order=None):
"""GetVariableGroups.
Get variable groups.
:param str project: Project ID or project name
:param str group_name: Name of variable group.
:param str action_filter: Action filter for the variable group. It specifies the action which can be performed on the variable groups.
:param int top: Number of variable groups to get.
:param int continuation_token: Gets the variable groups after the continuation token provided.
:param str query_order: Gets the results in the defined order. Default is 'IdDescending'.
:rtype: [VariableGroup]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if group_name is not None:
query_parameters['groupName'] = self._serialize.query('group_name', group_name, 'str')
if action_filter is not None:
query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'int')
if query_order is not None:
query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str')
response = self._send(http_method='GET',
location_id='f5b09dd5-9d54-45a1-8b5a-1c8287d634cc',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[VariableGroup]', self._unwrap_collection(response))
def get_variable_groups_by_id(self, project, group_ids):
"""GetVariableGroupsById.
Get variable groups by ids.
:param str project: Project ID or project name
:param [int] group_ids: Comma separated list of Ids of variable groups.
:rtype: [VariableGroup]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if group_ids is not None:
group_ids = ",".join(map(str, group_ids))
query_parameters['groupIds'] = self._serialize.query('group_ids', group_ids, 'str')
response = self._send(http_method='GET',
location_id='f5b09dd5-9d54-45a1-8b5a-1c8287d634cc',
version='7.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[VariableGroup]', self._unwrap_collection(response))
def get_yaml_schema(self, validate_task_names=None):
"""GetYamlSchema.
GET the Yaml schema used for Yaml file validation.
:param bool validate_task_names: Whether the schema should validate that tasks are actually installed (useful for offline tools where you don't want validation).
:rtype: object
"""
query_parameters = {}
if validate_task_names is not None:
query_parameters['validateTaskNames'] = self._serialize.query('validate_task_names', validate_task_names, 'bool')
response = self._send(http_method='GET',
location_id='1f9990b9-1dba-441f-9c2e-6485888c42b6',
version='7.0',
query_parameters=query_parameters)
return self._deserialize('object', response)
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/task_agent/task_agent_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/task_agent/task_agent_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 29949
}
| 390 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .upack_api_client import UPackApiClient
__all__ = [
'BatchOperationData',
'JsonPatchOperation',
'MinimalPackageDetails',
'Package',
'PackageVersionDetails',
'ReferenceLinks',
'UPackPackagesBatchRequest',
'UPackPackageVersionDeletionState',
'UPackRecycleBinPackageVersionDetails',
'UPackApiClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/upack_api/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/upack_api/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 216
}
| 391 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AddProcessWorkItemTypeFieldRequest(Model):
"""
Class that describes a request to add a field in a work item type.
:param allowed_values: The list of field allowed values.
:type allowed_values: list of str
:param allow_groups: Allow setting field value to a group identity. Only applies to identity fields.
:type allow_groups: bool
:param default_value: The default value of the field.
:type default_value: object
:param read_only: If true the field cannot be edited.
:type read_only: bool
:param reference_name: Reference name of the field.
:type reference_name: str
:param required: If true the field cannot be empty.
:type required: bool
"""
_attribute_map = {
'allowed_values': {'key': 'allowedValues', 'type': '[str]'},
'allow_groups': {'key': 'allowGroups', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'object'},
'read_only': {'key': 'readOnly', 'type': 'bool'},
'reference_name': {'key': 'referenceName', 'type': 'str'},
'required': {'key': 'required', 'type': 'bool'}
}
def __init__(self, allowed_values=None, allow_groups=None, default_value=None, read_only=None, reference_name=None, required=None):
super(AddProcessWorkItemTypeFieldRequest, self).__init__()
self.allowed_values = allowed_values
self.allow_groups = allow_groups
self.default_value = default_value
self.read_only = read_only
self.reference_name = reference_name
self.required = required
class Control(Model):
"""
Represent a control in the form.
:param contribution: Contribution for the control.
:type contribution: :class:`WitContribution <azure.devops.v7_0.work_item_tracking.models.WitContribution>`
:param control_type: Type of the control.
:type control_type: str
:param height: Height of the control, for html controls.
:type height: int
:param id: The id for the layout node.
:type id: str
:param inherited: A value indicating whether this layout node has been inherited. from a parent layout. This is expected to only be only set by the combiner.
:type inherited: bool
:param is_contribution: A value indicating if the layout node is contribution or not.
:type is_contribution: bool
:param label: Label for the field.
:type label: str
:param metadata: Inner text of the control.
:type metadata: str
:param order: Order in which the control should appear in its group.
:type order: int
:param overridden: A value indicating whether this layout node has been overridden . by a child layout.
:type overridden: bool
:param read_only: A value indicating if the control is readonly.
:type read_only: bool
:param visible: A value indicating if the control should be hidden or not.
:type visible: bool
:param watermark: Watermark text for the textbox.
:type watermark: str
"""
_attribute_map = {
'contribution': {'key': 'contribution', 'type': 'WitContribution'},
'control_type': {'key': 'controlType', 'type': 'str'},
'height': {'key': 'height', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'inherited': {'key': 'inherited', 'type': 'bool'},
'is_contribution': {'key': 'isContribution', 'type': 'bool'},
'label': {'key': 'label', 'type': 'str'},
'metadata': {'key': 'metadata', 'type': 'str'},
'order': {'key': 'order', 'type': 'int'},
'overridden': {'key': 'overridden', 'type': 'bool'},
'read_only': {'key': 'readOnly', 'type': 'bool'},
'visible': {'key': 'visible', 'type': 'bool'},
'watermark': {'key': 'watermark', 'type': 'str'}
}
def __init__(self, contribution=None, control_type=None, height=None, id=None, inherited=None, is_contribution=None, label=None, metadata=None, order=None, overridden=None, read_only=None, visible=None, watermark=None):
super(Control, self).__init__()
self.contribution = contribution
self.control_type = control_type
self.height = height
self.id = id
self.inherited = inherited
self.is_contribution = is_contribution
self.label = label
self.metadata = metadata
self.order = order
self.overridden = overridden
self.read_only = read_only
self.visible = visible
self.watermark = watermark
class CreateProcessModel(Model):
"""
Describes a process being created.
:param description: Description of the process
:type description: str
:param name: Name of the process
:type name: str
:param parent_process_type_id: The ID of the parent process
:type parent_process_type_id: str
:param reference_name: Reference name of process being created. If not specified, server will assign a unique reference name
:type reference_name: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'parent_process_type_id': {'key': 'parentProcessTypeId', 'type': 'str'},
'reference_name': {'key': 'referenceName', 'type': 'str'}
}
def __init__(self, description=None, name=None, parent_process_type_id=None, reference_name=None):
super(CreateProcessModel, self).__init__()
self.description = description
self.name = name
self.parent_process_type_id = parent_process_type_id
self.reference_name = reference_name
class CreateProcessRuleRequest(Model):
"""
Request object/class for creating a rule on a work item type.
:param actions: List of actions to take when the rule is triggered.
:type actions: list of :class:`RuleAction <azure.devops.v7_0.work_item_tracking.models.RuleAction>`
:param conditions: List of conditions when the rule should be triggered.
:type conditions: list of :class:`RuleCondition <azure.devops.v7_0.work_item_tracking.models.RuleCondition>`
:param is_disabled: Indicates if the rule is disabled.
:type is_disabled: bool
:param name: Name for the rule.
:type name: str
"""
_attribute_map = {
'actions': {'key': 'actions', 'type': '[RuleAction]'},
'conditions': {'key': 'conditions', 'type': '[RuleCondition]'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, actions=None, conditions=None, is_disabled=None, name=None):
super(CreateProcessRuleRequest, self).__init__()
self.actions = actions
self.conditions = conditions
self.is_disabled = is_disabled
self.name = name
class CreateProcessWorkItemTypeRequest(Model):
"""
Class for create work item type request
:param color: Color hexadecimal code to represent the work item type
:type color: str
:param description: Description of the work item type
:type description: str
:param icon: Icon to represent the work item type
:type icon: str
:param inherits_from: Parent work item type for work item type
:type inherits_from: str
:param is_disabled: True if the work item type need to be disabled
:type is_disabled: bool
:param name: Name of work item type
:type name: str
"""
_attribute_map = {
'color': {'key': 'color', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'icon': {'key': 'icon', 'type': 'str'},
'inherits_from': {'key': 'inheritsFrom', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, color=None, description=None, icon=None, inherits_from=None, is_disabled=None, name=None):
super(CreateProcessWorkItemTypeRequest, self).__init__()
self.color = color
self.description = description
self.icon = icon
self.inherits_from = inherits_from
self.is_disabled = is_disabled
self.name = name
class Extension(Model):
"""
Represents the extensions part of the layout
:param id: Id of the extension
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'}
}
def __init__(self, id=None):
super(Extension, self).__init__()
self.id = id
class FieldModel(Model):
"""
:param description:
:type description: str
:param id:
:type id: str
:param is_identity:
:type is_identity: bool
:param name:
:type name: str
:param type:
:type type: object
:param url:
:type url: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_identity': {'key': 'isIdentity', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, description=None, id=None, is_identity=None, name=None, type=None, url=None):
super(FieldModel, self).__init__()
self.description = description
self.id = id
self.is_identity = is_identity
self.name = name
self.type = type
self.url = url
class FieldRuleModel(Model):
"""
:param actions:
:type actions: list of :class:`RuleActionModel <azure.devops.v7_0.work_item_tracking.models.RuleActionModel>`
:param conditions:
:type conditions: list of :class:`RuleConditionModel <azure.devops.v7_0.work_item_tracking.models.RuleConditionModel>`
:param friendly_name:
:type friendly_name: str
:param id:
:type id: str
:param is_disabled:
:type is_disabled: bool
:param is_system:
:type is_system: bool
"""
_attribute_map = {
'actions': {'key': 'actions', 'type': '[RuleActionModel]'},
'conditions': {'key': 'conditions', 'type': '[RuleConditionModel]'},
'friendly_name': {'key': 'friendlyName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'is_system': {'key': 'isSystem', 'type': 'bool'}
}
def __init__(self, actions=None, conditions=None, friendly_name=None, id=None, is_disabled=None, is_system=None):
super(FieldRuleModel, self).__init__()
self.actions = actions
self.conditions = conditions
self.friendly_name = friendly_name
self.id = id
self.is_disabled = is_disabled
self.is_system = is_system
class FormLayout(Model):
"""
Describes the layout of a work item type
:param extensions: Gets and sets extensions list.
:type extensions: list of :class:`Extension <azure.devops.v7_0.work_item_tracking.models.Extension>`
:param pages: Top level tabs of the layout.
:type pages: list of :class:`Page <azure.devops.v7_0.work_item_tracking.models.Page>`
:param system_controls: Headers controls of the layout.
:type system_controls: list of :class:`Control <azure.devops.v7_0.work_item_tracking.models.Control>`
"""
_attribute_map = {
'extensions': {'key': 'extensions', 'type': '[Extension]'},
'pages': {'key': 'pages', 'type': '[Page]'},
'system_controls': {'key': 'systemControls', 'type': '[Control]'}
}
def __init__(self, extensions=None, pages=None, system_controls=None):
super(FormLayout, self).__init__()
self.extensions = extensions
self.pages = pages
self.system_controls = system_controls
class Group(Model):
"""
Represent a group in the form that holds controls in it.
:param contribution: Contribution for the group.
:type contribution: :class:`WitContribution <azure.devops.v7_0.work_item_tracking.models.WitContribution>`
:param controls: Controls to be put in the group.
:type controls: list of :class:`Control <azure.devops.v7_0.work_item_tracking.models.Control>`
:param height: The height for the contribution.
:type height: int
:param id: The id for the layout node.
:type id: str
:param inherited: A value indicating whether this layout node has been inherited from a parent layout. This is expected to only be only set by the combiner.
:type inherited: bool
:param is_contribution: A value indicating if the layout node is contribution are not.
:type is_contribution: bool
:param label: Label for the group.
:type label: str
:param order: Order in which the group should appear in the section.
:type order: int
:param overridden: A value indicating whether this layout node has been overridden by a child layout.
:type overridden: bool
:param visible: A value indicating if the group should be hidden or not.
:type visible: bool
"""
_attribute_map = {
'contribution': {'key': 'contribution', 'type': 'WitContribution'},
'controls': {'key': 'controls', 'type': '[Control]'},
'height': {'key': 'height', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'inherited': {'key': 'inherited', 'type': 'bool'},
'is_contribution': {'key': 'isContribution', 'type': 'bool'},
'label': {'key': 'label', 'type': 'str'},
'order': {'key': 'order', 'type': 'int'},
'overridden': {'key': 'overridden', 'type': 'bool'},
'visible': {'key': 'visible', 'type': 'bool'}
}
def __init__(self, contribution=None, controls=None, height=None, id=None, inherited=None, is_contribution=None, label=None, order=None, overridden=None, visible=None):
super(Group, self).__init__()
self.contribution = contribution
self.controls = controls
self.height = height
self.id = id
self.inherited = inherited
self.is_contribution = is_contribution
self.label = label
self.order = order
self.overridden = overridden
self.visible = visible
class HideStateModel(Model):
"""
Class that describes the work item state is hidden.
:param hidden: Returns 'true', if workitem state is hidden, 'false' otherwise.
:type hidden: bool
"""
_attribute_map = {
'hidden': {'key': 'hidden', 'type': 'bool'}
}
def __init__(self, hidden=None):
super(HideStateModel, self).__init__()
self.hidden = hidden
class Page(Model):
"""
Describes a page in the work item form layout
:param contribution: Contribution for the page.
:type contribution: :class:`WitContribution <azure.devops.v7_0.work_item_tracking.models.WitContribution>`
:param id: The id for the layout node.
:type id: str
:param inherited: A value indicating whether this layout node has been inherited from a parent layout. This is expected to only be only set by the combiner.
:type inherited: bool
:param is_contribution: A value indicating if the layout node is contribution are not.
:type is_contribution: bool
:param label: The label for the page.
:type label: str
:param locked: A value indicating whether any user operations are permitted on this page and the contents of this page
:type locked: bool
:param order: Order in which the page should appear in the layout.
:type order: int
:param overridden: A value indicating whether this layout node has been overridden by a child layout.
:type overridden: bool
:param page_type: The icon for the page.
:type page_type: object
:param sections: The sections of the page.
:type sections: list of :class:`Section <azure.devops.v7_0.work_item_tracking.models.Section>`
:param visible: A value indicating if the page should be hidden or not.
:type visible: bool
"""
_attribute_map = {
'contribution': {'key': 'contribution', 'type': 'WitContribution'},
'id': {'key': 'id', 'type': 'str'},
'inherited': {'key': 'inherited', 'type': 'bool'},
'is_contribution': {'key': 'isContribution', 'type': 'bool'},
'label': {'key': 'label', 'type': 'str'},
'locked': {'key': 'locked', 'type': 'bool'},
'order': {'key': 'order', 'type': 'int'},
'overridden': {'key': 'overridden', 'type': 'bool'},
'page_type': {'key': 'pageType', 'type': 'object'},
'sections': {'key': 'sections', 'type': '[Section]'},
'visible': {'key': 'visible', 'type': 'bool'}
}
def __init__(self, contribution=None, id=None, inherited=None, is_contribution=None, label=None, locked=None, order=None, overridden=None, page_type=None, sections=None, visible=None):
super(Page, self).__init__()
self.contribution = contribution
self.id = id
self.inherited = inherited
self.is_contribution = is_contribution
self.label = label
self.locked = locked
self.order = order
self.overridden = overridden
self.page_type = page_type
self.sections = sections
self.visible = visible
class PickListMetadata(Model):
"""
Metadata for picklist.
:param id: ID of the picklist
:type id: str
:param is_suggested: Indicates whether items outside of suggested list are allowed
:type is_suggested: bool
:param name: Name of the picklist
:type name: str
:param type: DataType of picklist
:type type: str
:param url: Url of the picklist
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'is_suggested': {'key': 'isSuggested', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, is_suggested=None, name=None, type=None, url=None):
super(PickListMetadata, self).__init__()
self.id = id
self.is_suggested = is_suggested
self.name = name
self.type = type
self.url = url
class ProcessBehavior(Model):
"""
Process Behavior Model.
:param color: Color.
:type color: str
:param customization: Indicates the type of customization on this work item. System behaviors are inherited from parent process but not modified. Inherited behaviors are modified modified behaviors that were inherited from parent process. Custom behaviors are behaviors created by user in current process.
:type customization: object
:param description: . Description
:type description: str
:param fields: Process Behavior Fields.
:type fields: list of :class:`ProcessBehaviorField <azure.devops.v7_0.work_item_tracking.models.ProcessBehaviorField>`
:param inherits: Parent behavior reference.
:type inherits: :class:`ProcessBehaviorReference <azure.devops.v7_0.work_item_tracking.models.ProcessBehaviorReference>`
:param name: Behavior Name.
:type name: str
:param rank: Rank of the behavior
:type rank: int
:param reference_name: Behavior Id
:type reference_name: str
:param url: Url of the behavior.
:type url: str
"""
_attribute_map = {
'color': {'key': 'color', 'type': 'str'},
'customization': {'key': 'customization', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'fields': {'key': 'fields', 'type': '[ProcessBehaviorField]'},
'inherits': {'key': 'inherits', 'type': 'ProcessBehaviorReference'},
'name': {'key': 'name', 'type': 'str'},
'rank': {'key': 'rank', 'type': 'int'},
'reference_name': {'key': 'referenceName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, color=None, customization=None, description=None, fields=None, inherits=None, name=None, rank=None, reference_name=None, url=None):
super(ProcessBehavior, self).__init__()
self.color = color
self.customization = customization
self.description = description
self.fields = fields
self.inherits = inherits
self.name = name
self.rank = rank
self.reference_name = reference_name
self.url = url
class ProcessBehaviorCreateRequest(Model):
"""
Process Behavior Create Payload.
:param color: Color.
:type color: str
:param inherits: Parent behavior id.
:type inherits: str
:param name: Name of the behavior.
:type name: str
:param reference_name: ReferenceName is optional, if not specified will be auto-generated.
:type reference_name: str
"""
_attribute_map = {
'color': {'key': 'color', 'type': 'str'},
'inherits': {'key': 'inherits', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'reference_name': {'key': 'referenceName', 'type': 'str'}
}
def __init__(self, color=None, inherits=None, name=None, reference_name=None):
super(ProcessBehaviorCreateRequest, self).__init__()
self.color = color
self.inherits = inherits
self.name = name
self.reference_name = reference_name
class ProcessBehaviorField(Model):
"""
Process Behavior Field.
:param name: Name of the field.
:type name: str
:param reference_name: Reference name of the field.
:type reference_name: str
:param url: Url to field.
:type url: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'reference_name': {'key': 'referenceName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, name=None, reference_name=None, url=None):
super(ProcessBehaviorField, self).__init__()
self.name = name
self.reference_name = reference_name
self.url = url
class ProcessBehaviorReference(Model):
"""
Process behavior Reference.
:param behavior_ref_name: Id of a Behavior.
:type behavior_ref_name: str
:param url: Url to behavior.
:type url: str
"""
_attribute_map = {
'behavior_ref_name': {'key': 'behaviorRefName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, behavior_ref_name=None, url=None):
super(ProcessBehaviorReference, self).__init__()
self.behavior_ref_name = behavior_ref_name
self.url = url
class ProcessBehaviorUpdateRequest(Model):
"""
Process Behavior Replace Payload.
:param color: Color.
:type color: str
:param name: Behavior Name.
:type name: str
"""
_attribute_map = {
'color': {'key': 'color', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, color=None, name=None):
super(ProcessBehaviorUpdateRequest, self).__init__()
self.color = color
self.name = name
class ProcessInfo(Model):
"""
Process.
:param customization_type: Indicates the type of customization on this process. System Process is default process. Inherited Process is modified process that was System process before.
:type customization_type: object
:param description: Description of the process.
:type description: str
:param is_default: Is the process default.
:type is_default: bool
:param is_enabled: Is the process enabled.
:type is_enabled: bool
:param name: Name of the process.
:type name: str
:param parent_process_type_id: ID of the parent process.
:type parent_process_type_id: str
:param projects: Projects in this process to which the user is subscribed to.
:type projects: list of :class:`ProjectReference <azure.devops.v7_0.work_item_tracking.models.ProjectReference>`
:param reference_name: Reference name of the process.
:type reference_name: str
:param type_id: The ID of the process.
:type type_id: str
"""
_attribute_map = {
'customization_type': {'key': 'customizationType', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'parent_process_type_id': {'key': 'parentProcessTypeId', 'type': 'str'},
'projects': {'key': 'projects', 'type': '[ProjectReference]'},
'reference_name': {'key': 'referenceName', 'type': 'str'},
'type_id': {'key': 'typeId', 'type': 'str'}
}
def __init__(self, customization_type=None, description=None, is_default=None, is_enabled=None, name=None, parent_process_type_id=None, projects=None, reference_name=None, type_id=None):
super(ProcessInfo, self).__init__()
self.customization_type = customization_type
self.description = description
self.is_default = is_default
self.is_enabled = is_enabled
self.name = name
self.parent_process_type_id = parent_process_type_id
self.projects = projects
self.reference_name = reference_name
self.type_id = type_id
class ProcessModel(Model):
"""
:param description: Description of the process
:type description: str
:param name: Name of the process
:type name: str
:param projects: Projects in this process
:type projects: list of :class:`ProjectReference <azure.devops.v7_0.work_item_tracking.models.ProjectReference>`
:param properties: Properties of the process
:type properties: :class:`ProcessProperties <azure.devops.v7_0.work_item_tracking.models.ProcessProperties>`
:param reference_name: Reference name of the process
:type reference_name: str
:param type_id: The ID of the process
:type type_id: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'projects': {'key': 'projects', 'type': '[ProjectReference]'},
'properties': {'key': 'properties', 'type': 'ProcessProperties'},
'reference_name': {'key': 'referenceName', 'type': 'str'},
'type_id': {'key': 'typeId', 'type': 'str'}
}
def __init__(self, description=None, name=None, projects=None, properties=None, reference_name=None, type_id=None):
super(ProcessModel, self).__init__()
self.description = description
self.name = name
self.projects = projects
self.properties = properties
self.reference_name = reference_name
self.type_id = type_id
class ProcessProperties(Model):
"""
Properties of the process.
:param class_: Class of the process.
:type class_: object
:param is_default: Is the process default process.
:type is_default: bool
:param is_enabled: Is the process enabled.
:type is_enabled: bool
:param parent_process_type_id: ID of the parent process.
:type parent_process_type_id: str
:param version: Version of the process.
:type version: str
"""
_attribute_map = {
'class_': {'key': 'class', 'type': 'object'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
'parent_process_type_id': {'key': 'parentProcessTypeId', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, class_=None, is_default=None, is_enabled=None, parent_process_type_id=None, version=None):
super(ProcessProperties, self).__init__()
self.class_ = class_
self.is_default = is_default
self.is_enabled = is_enabled
self.parent_process_type_id = parent_process_type_id
self.version = version
class ProcessRule(CreateProcessRuleRequest):
"""
Process Rule Response.
:param actions: List of actions to take when the rule is triggered.
:type actions: list of :class:`RuleAction <azure.devops.v7_0.work_item_tracking.models.RuleAction>`
:param conditions: List of conditions when the rule should be triggered.
:type conditions: list of :class:`RuleCondition <azure.devops.v7_0.work_item_tracking.models.RuleCondition>`
:param is_disabled: Indicates if the rule is disabled.
:type is_disabled: bool
:param name: Name for the rule.
:type name: str
:param customization_type: Indicates if the rule is system generated or created by user.
:type customization_type: object
:param id: Id to uniquely identify the rule.
:type id: str
:param url: Resource Url.
:type url: str
"""
_attribute_map = {
'actions': {'key': 'actions', 'type': '[RuleAction]'},
'conditions': {'key': 'conditions', 'type': '[RuleCondition]'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'customization_type': {'key': 'customizationType', 'type': 'object'},
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, actions=None, conditions=None, is_disabled=None, name=None, customization_type=None, id=None, url=None):
super(ProcessRule, self).__init__(actions=actions, conditions=conditions, is_disabled=is_disabled, name=name)
self.customization_type = customization_type
self.id = id
self.url = url
class ProcessWorkItemType(Model):
"""
Class that describes a work item type object
:param behaviors:
:type behaviors: list of :class:`WorkItemTypeBehavior <azure.devops.v7_0.work_item_tracking.models.WorkItemTypeBehavior>`
:param color: Color hexadecimal code to represent the work item type
:type color: str
:param customization: Indicates the type of customization on this work item System work item types are inherited from parent process but not modified Inherited work item types are modified work item that were inherited from parent process Custom work item types are work item types that were created in the current process
:type customization: object
:param description: Description of the work item type
:type description: str
:param icon: Icon to represent the work item typ
:type icon: str
:param inherits: Reference name of the parent work item type
:type inherits: str
:param is_disabled: Indicates if a work item type is disabled
:type is_disabled: bool
:param layout:
:type layout: :class:`FormLayout <azure.devops.v7_0.work_item_tracking.models.FormLayout>`
:param name: Name of the work item type
:type name: str
:param reference_name: Reference name of work item type
:type reference_name: str
:param states:
:type states: list of :class:`WorkItemStateResultModel <azure.devops.v7_0.work_item_tracking.models.WorkItemStateResultModel>`
:param url: Url of the work item type
:type url: str
"""
_attribute_map = {
'behaviors': {'key': 'behaviors', 'type': '[WorkItemTypeBehavior]'},
'color': {'key': 'color', 'type': 'str'},
'customization': {'key': 'customization', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'icon': {'key': 'icon', 'type': 'str'},
'inherits': {'key': 'inherits', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'layout': {'key': 'layout', 'type': 'FormLayout'},
'name': {'key': 'name', 'type': 'str'},
'reference_name': {'key': 'referenceName', 'type': 'str'},
'states': {'key': 'states', 'type': '[WorkItemStateResultModel]'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, behaviors=None, color=None, customization=None, description=None, icon=None, inherits=None, is_disabled=None, layout=None, name=None, reference_name=None, states=None, url=None):
super(ProcessWorkItemType, self).__init__()
self.behaviors = behaviors
self.color = color
self.customization = customization
self.description = description
self.icon = icon
self.inherits = inherits
self.is_disabled = is_disabled
self.layout = layout
self.name = name
self.reference_name = reference_name
self.states = states
self.url = url
class ProcessWorkItemTypeField(Model):
"""
Class that describes a field in a work item type and its properties.
:param allowed_values: The list of field allowed values.
:type allowed_values: list of object
:param allow_groups: Allow setting field value to a group identity. Only applies to identity fields.
:type allow_groups: bool
:param customization: Indicates the type of customization on this work item.
:type customization: object
:param default_value: The default value of the field.
:type default_value: object
:param description: Description of the field.
:type description: str
:param name: Name of the field.
:type name: str
:param read_only: If true the field cannot be edited.
:type read_only: bool
:param reference_name: Reference name of the field.
:type reference_name: str
:param required: If true the field cannot be empty.
:type required: bool
:param type: Type of the field.
:type type: object
:param url: Resource URL of the field.
:type url: str
"""
_attribute_map = {
'allowed_values': {'key': 'allowedValues', 'type': '[object]'},
'allow_groups': {'key': 'allowGroups', 'type': 'bool'},
'customization': {'key': 'customization', 'type': 'object'},
'default_value': {'key': 'defaultValue', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'read_only': {'key': 'readOnly', 'type': 'bool'},
'reference_name': {'key': 'referenceName', 'type': 'str'},
'required': {'key': 'required', 'type': 'bool'},
'type': {'key': 'type', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, allowed_values=None, allow_groups=None, customization=None, default_value=None, description=None, name=None, read_only=None, reference_name=None, required=None, type=None, url=None):
super(ProcessWorkItemTypeField, self).__init__()
self.allowed_values = allowed_values
self.allow_groups = allow_groups
self.customization = customization
self.default_value = default_value
self.description = description
self.name = name
self.read_only = read_only
self.reference_name = reference_name
self.required = required
self.type = type
self.url = url
class ProjectReference(Model):
"""
Defines the project reference class.
:param description: Description of the project
:type description: str
:param id: The ID of the project
:type id: str
:param name: Name of the project
:type name: str
:param url: Url of the project
:type url: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, description=None, id=None, name=None, url=None):
super(ProjectReference, self).__init__()
self.description = description
self.id = id
self.name = name
self.url = url
class RuleAction(Model):
"""
Action to take when the rule is triggered.
:param action_type: Type of action to take when the rule is triggered.
:type action_type: object
:param target_field: Field on which the action should be taken.
:type target_field: str
:param value: Value to apply on target field, once the action is taken.
:type value: str
"""
_attribute_map = {
'action_type': {'key': 'actionType', 'type': 'object'},
'target_field': {'key': 'targetField', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, action_type=None, target_field=None, value=None):
super(RuleAction, self).__init__()
self.action_type = action_type
self.target_field = target_field
self.value = value
class RuleActionModel(Model):
"""
Action to take when the rule is triggered.
:param action_type:
:type action_type: str
:param target_field:
:type target_field: str
:param value:
:type value: str
"""
_attribute_map = {
'action_type': {'key': 'actionType', 'type': 'str'},
'target_field': {'key': 'targetField', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, action_type=None, target_field=None, value=None):
super(RuleActionModel, self).__init__()
self.action_type = action_type
self.target_field = target_field
self.value = value
class RuleCondition(Model):
"""
Defines a condition on a field when the rule should be triggered.
:param condition_type: Type of condition. $When. This condition limits the execution of its children to cases when another field has a particular value, i.e. when the Is value of the referenced field is equal to the given literal value. $WhenNot.This condition limits the execution of its children to cases when another field does not have a particular value, i.e.when the Is value of the referenced field is not equal to the given literal value. $WhenChanged.This condition limits the execution of its children to cases when another field has changed, i.e.when the Is value of the referenced field is not equal to the Was value of that field. $WhenNotChanged.This condition limits the execution of its children to cases when another field has not changed, i.e.when the Is value of the referenced field is equal to the Was value of that field.
:type condition_type: object
:param field: Field that defines condition.
:type field: str
:param value: Value of field to define the condition for rule.
:type value: str
"""
_attribute_map = {
'condition_type': {'key': 'conditionType', 'type': 'object'},
'field': {'key': 'field', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, condition_type=None, field=None, value=None):
super(RuleCondition, self).__init__()
self.condition_type = condition_type
self.field = field
self.value = value
class RuleConditionModel(Model):
"""
:param condition_type:
:type condition_type: str
:param field:
:type field: str
:param value:
:type value: str
"""
_attribute_map = {
'condition_type': {'key': 'conditionType', 'type': 'str'},
'field': {'key': 'field', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, condition_type=None, field=None, value=None):
super(RuleConditionModel, self).__init__()
self.condition_type = condition_type
self.field = field
self.value = value
class Section(Model):
"""
Defines a section of the work item form layout
:param groups: List of child groups in this section
:type groups: list of :class:`Group <azure.devops.v7_0.work_item_tracking.models.Group>`
:param id: The id for the layout node.
:type id: str
:param overridden: A value indicating whether this layout node has been overridden by a child layout.
:type overridden: bool
"""
_attribute_map = {
'groups': {'key': 'groups', 'type': '[Group]'},
'id': {'key': 'id', 'type': 'str'},
'overridden': {'key': 'overridden', 'type': 'bool'}
}
def __init__(self, groups=None, id=None, overridden=None):
super(Section, self).__init__()
self.groups = groups
self.id = id
self.overridden = overridden
class UpdateProcessModel(Model):
"""
Describes a request to update a process
:param description: New description of the process
:type description: str
:param is_default: If true new projects will use this process by default
:type is_default: bool
:param is_enabled: If false the process will be disabled and cannot be used to create projects
:type is_enabled: bool
:param name: New name of the process
:type name: str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, description=None, is_default=None, is_enabled=None, name=None):
super(UpdateProcessModel, self).__init__()
self.description = description
self.is_default = is_default
self.is_enabled = is_enabled
self.name = name
class UpdateProcessRuleRequest(CreateProcessRuleRequest):
"""
Request class/object to update the rule.
:param actions: List of actions to take when the rule is triggered.
:type actions: list of :class:`RuleAction <azure.devops.v7_0.work_item_tracking.models.RuleAction>`
:param conditions: List of conditions when the rule should be triggered.
:type conditions: list of :class:`RuleCondition <azure.devops.v7_0.work_item_tracking.models.RuleCondition>`
:param is_disabled: Indicates if the rule is disabled.
:type is_disabled: bool
:param name: Name for the rule.
:type name: str
:param id: Id to uniquely identify the rule.
:type id: str
"""
_attribute_map = {
'actions': {'key': 'actions', 'type': '[RuleAction]'},
'conditions': {'key': 'conditions', 'type': '[RuleCondition]'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'}
}
def __init__(self, actions=None, conditions=None, is_disabled=None, name=None, id=None):
super(UpdateProcessRuleRequest, self).__init__(actions=actions, conditions=conditions, is_disabled=is_disabled, name=name)
self.id = id
class UpdateProcessWorkItemTypeFieldRequest(Model):
"""
Class to describe a request that updates a field's properties in a work item type.
:param allowed_values: The list of field allowed values.
:type allowed_values: list of str
:param allow_groups: Allow setting field value to a group identity. Only applies to identity fields.
:type allow_groups: bool
:param default_value: The default value of the field.
:type default_value: object
:param read_only: If true the field cannot be edited.
:type read_only: bool
:param required: The default value of the field.
:type required: bool
"""
_attribute_map = {
'allowed_values': {'key': 'allowedValues', 'type': '[str]'},
'allow_groups': {'key': 'allowGroups', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'object'},
'read_only': {'key': 'readOnly', 'type': 'bool'},
'required': {'key': 'required', 'type': 'bool'}
}
def __init__(self, allowed_values=None, allow_groups=None, default_value=None, read_only=None, required=None):
super(UpdateProcessWorkItemTypeFieldRequest, self).__init__()
self.allowed_values = allowed_values
self.allow_groups = allow_groups
self.default_value = default_value
self.read_only = read_only
self.required = required
class UpdateProcessWorkItemTypeRequest(Model):
"""
Class for update request on a work item type
:param color: Color of the work item type
:type color: str
:param description: Description of the work item type
:type description: str
:param icon: Icon of the work item type
:type icon: str
:param is_disabled: If set will disable the work item type
:type is_disabled: bool
"""
_attribute_map = {
'color': {'key': 'color', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'icon': {'key': 'icon', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'}
}
def __init__(self, color=None, description=None, icon=None, is_disabled=None):
super(UpdateProcessWorkItemTypeRequest, self).__init__()
self.color = color
self.description = description
self.icon = icon
self.is_disabled = is_disabled
class WitContribution(Model):
"""
Properties of a work item form contribution
:param contribution_id: The id for the contribution.
:type contribution_id: str
:param height: The height for the contribution.
:type height: int
:param inputs: A dictionary holding key value pairs for contribution inputs.
:type inputs: dict
:param show_on_deleted_work_item: A value indicating if the contribution should be show on deleted workItem.
:type show_on_deleted_work_item: bool
"""
_attribute_map = {
'contribution_id': {'key': 'contributionId', 'type': 'str'},
'height': {'key': 'height', 'type': 'int'},
'inputs': {'key': 'inputs', 'type': '{object}'},
'show_on_deleted_work_item': {'key': 'showOnDeletedWorkItem', 'type': 'bool'}
}
def __init__(self, contribution_id=None, height=None, inputs=None, show_on_deleted_work_item=None):
super(WitContribution, self).__init__()
self.contribution_id = contribution_id
self.height = height
self.inputs = inputs
self.show_on_deleted_work_item = show_on_deleted_work_item
class WorkItemBehavior(Model):
"""
:param abstract:
:type abstract: bool
:param color:
:type color: str
:param description:
:type description: str
:param fields:
:type fields: list of :class:`WorkItemBehaviorField <azure.devops.v7_0.work_item_tracking.models.WorkItemBehaviorField>`
:param id:
:type id: str
:param inherits:
:type inherits: :class:`WorkItemBehaviorReference <azure.devops.v7_0.work_item_tracking.models.WorkItemBehaviorReference>`
:param name:
:type name: str
:param overriden:
:type overriden: bool
:param rank:
:type rank: int
:param url:
:type url: str
"""
_attribute_map = {
'abstract': {'key': 'abstract', 'type': 'bool'},
'color': {'key': 'color', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'fields': {'key': 'fields', 'type': '[WorkItemBehaviorField]'},
'id': {'key': 'id', 'type': 'str'},
'inherits': {'key': 'inherits', 'type': 'WorkItemBehaviorReference'},
'name': {'key': 'name', 'type': 'str'},
'overriden': {'key': 'overriden', 'type': 'bool'},
'rank': {'key': 'rank', 'type': 'int'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, abstract=None, color=None, description=None, fields=None, id=None, inherits=None, name=None, overriden=None, rank=None, url=None):
super(WorkItemBehavior, self).__init__()
self.abstract = abstract
self.color = color
self.description = description
self.fields = fields
self.id = id
self.inherits = inherits
self.name = name
self.overriden = overriden
self.rank = rank
self.url = url
class WorkItemBehaviorField(Model):
"""
:param behavior_field_id:
:type behavior_field_id: str
:param id:
:type id: str
:param url:
:type url: str
"""
_attribute_map = {
'behavior_field_id': {'key': 'behaviorFieldId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, behavior_field_id=None, id=None, url=None):
super(WorkItemBehaviorField, self).__init__()
self.behavior_field_id = behavior_field_id
self.id = id
self.url = url
class WorkItemBehaviorReference(Model):
"""
Reference to the behavior of a work item type.
:param id: The ID of the reference behavior.
:type id: str
:param url: The url of the reference behavior.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, url=None):
super(WorkItemBehaviorReference, self).__init__()
self.id = id
self.url = url
class WorkItemStateInputModel(Model):
"""
Class That represents a work item state input.
:param color: Color of the state
:type color: str
:param name: Name of the state
:type name: str
:param order: Order in which state should appear
:type order: int
:param state_category: Category of the state
:type state_category: str
"""
_attribute_map = {
'color': {'key': 'color', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'order': {'key': 'order', 'type': 'int'},
'state_category': {'key': 'stateCategory', 'type': 'str'}
}
def __init__(self, color=None, name=None, order=None, state_category=None):
super(WorkItemStateInputModel, self).__init__()
self.color = color
self.name = name
self.order = order
self.state_category = state_category
class WorkItemStateResultModel(Model):
"""
Class that represents a work item state result.
:param color: Work item state color.
:type color: str
:param customization_type: Work item state customization type.
:type customization_type: object
:param hidden: If the Work item state is hidden.
:type hidden: bool
:param id: Id of the Workitemstate.
:type id: str
:param name: Work item state name.
:type name: str
:param order: Work item state order.
:type order: int
:param state_category: Work item state statecategory.
:type state_category: str
:param url: Work item state url.
:type url: str
"""
_attribute_map = {
'color': {'key': 'color', 'type': 'str'},
'customization_type': {'key': 'customizationType', 'type': 'object'},
'hidden': {'key': 'hidden', 'type': 'bool'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'order': {'key': 'order', 'type': 'int'},
'state_category': {'key': 'stateCategory', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, color=None, customization_type=None, hidden=None, id=None, name=None, order=None, state_category=None, url=None):
super(WorkItemStateResultModel, self).__init__()
self.color = color
self.customization_type = customization_type
self.hidden = hidden
self.id = id
self.name = name
self.order = order
self.state_category = state_category
self.url = url
class WorkItemTypeBehavior(Model):
"""
Association between a work item type and it's behavior
:param behavior: Reference to the behavior of a work item type
:type behavior: :class:`WorkItemBehaviorReference <azure.devops.v7_0.work_item_tracking.models.WorkItemBehaviorReference>`
:param is_default: If true the work item type is the default work item type in the behavior
:type is_default: bool
:param is_legacy_default: If true the work item type is the default work item type in the parent behavior
:type is_legacy_default: bool
:param url: URL of the work item type behavior
:type url: str
"""
_attribute_map = {
'behavior': {'key': 'behavior', 'type': 'WorkItemBehaviorReference'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'is_legacy_default': {'key': 'isLegacyDefault', 'type': 'bool'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, behavior=None, is_default=None, is_legacy_default=None, url=None):
super(WorkItemTypeBehavior, self).__init__()
self.behavior = behavior
self.is_default = is_default
self.is_legacy_default = is_legacy_default
self.url = url
class WorkItemTypeModel(Model):
"""
:param behaviors:
:type behaviors: list of :class:`WorkItemTypeBehavior <azure.devops.v7_0.work_item_tracking.models.WorkItemTypeBehavior>`
:param class_:
:type class_: object
:param color:
:type color: str
:param description:
:type description: str
:param icon:
:type icon: str
:param id:
:type id: str
:param inherits: Parent WIT Id/Internal ReferenceName that it inherits from
:type inherits: str
:param is_disabled:
:type is_disabled: bool
:param layout:
:type layout: :class:`FormLayout <azure.devops.v7_0.work_item_tracking.models.FormLayout>`
:param name:
:type name: str
:param states:
:type states: list of :class:`WorkItemStateResultModel <azure.devops.v7_0.work_item_tracking.models.WorkItemStateResultModel>`
:param url:
:type url: str
"""
_attribute_map = {
'behaviors': {'key': 'behaviors', 'type': '[WorkItemTypeBehavior]'},
'class_': {'key': 'class', 'type': 'object'},
'color': {'key': 'color', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'icon': {'key': 'icon', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'inherits': {'key': 'inherits', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'layout': {'key': 'layout', 'type': 'FormLayout'},
'name': {'key': 'name', 'type': 'str'},
'states': {'key': 'states', 'type': '[WorkItemStateResultModel]'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, behaviors=None, class_=None, color=None, description=None, icon=None, id=None, inherits=None, is_disabled=None, layout=None, name=None, states=None, url=None):
super(WorkItemTypeModel, self).__init__()
self.behaviors = behaviors
self.class_ = class_
self.color = color
self.description = description
self.icon = icon
self.id = id
self.inherits = inherits
self.is_disabled = is_disabled
self.layout = layout
self.name = name
self.states = states
self.url = url
class PickList(PickListMetadata):
"""
Picklist.
:param id: ID of the picklist
:type id: str
:param is_suggested: Indicates whether items outside of suggested list are allowed
:type is_suggested: bool
:param name: Name of the picklist
:type name: str
:param type: DataType of picklist
:type type: str
:param url: Url of the picklist
:type url: str
:param items: A list of PicklistItemModel.
:type items: list of str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'is_suggested': {'key': 'isSuggested', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'items': {'key': 'items', 'type': '[str]'}
}
def __init__(self, id=None, is_suggested=None, name=None, type=None, url=None, items=None):
super(PickList, self).__init__(id=id, is_suggested=is_suggested, name=name, type=type, url=url)
self.items = items
__all__ = [
'AddProcessWorkItemTypeFieldRequest',
'Control',
'CreateProcessModel',
'CreateProcessRuleRequest',
'CreateProcessWorkItemTypeRequest',
'Extension',
'FieldModel',
'FieldRuleModel',
'FormLayout',
'Group',
'HideStateModel',
'Page',
'PickListMetadata',
'ProcessBehavior',
'ProcessBehaviorCreateRequest',
'ProcessBehaviorField',
'ProcessBehaviorReference',
'ProcessBehaviorUpdateRequest',
'ProcessInfo',
'ProcessModel',
'ProcessProperties',
'ProcessRule',
'ProcessWorkItemType',
'ProcessWorkItemTypeField',
'ProjectReference',
'RuleAction',
'RuleActionModel',
'RuleCondition',
'RuleConditionModel',
'Section',
'UpdateProcessModel',
'UpdateProcessRuleRequest',
'UpdateProcessWorkItemTypeFieldRequest',
'UpdateProcessWorkItemTypeRequest',
'WitContribution',
'WorkItemBehavior',
'WorkItemBehaviorField',
'WorkItemBehaviorReference',
'WorkItemStateInputModel',
'WorkItemStateResultModel',
'WorkItemTypeBehavior',
'WorkItemTypeModel',
'PickList',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_0/work_item_tracking_process/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_0/work_item_tracking_process/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 21780
}
| 392 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class CixClient(Client):
"""Cix
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(CixClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = None
def get_configurations(self, project, repository_type=None, repository_id=None, branch=None, service_connection_id=None):
"""GetConfigurations.
[Preview API] Gets a list of existing configuration files for the given repository.
:param str project: Project ID or project name
:param str repository_type: The type of the repository such as GitHub, TfsGit (i.e. Azure Repos), Bitbucket, etc.
:param str repository_id: The vendor-specific identifier or the name of the repository, e.g. Microsoft/vscode (GitHub) or e9d82045-ddba-4e01-a63d-2ab9f040af62 (Azure Repos)
:param str branch: The repository branch where to look for the configuration file.
:param str service_connection_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TfsGit (i.e. Azure Repos).
:rtype: [ConfigurationFile]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if repository_type is not None:
query_parameters['repositoryType'] = self._serialize.query('repository_type', repository_type, 'str')
if repository_id is not None:
query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str')
if branch is not None:
query_parameters['branch'] = self._serialize.query('branch', branch, 'str')
if service_connection_id is not None:
query_parameters['serviceConnectionId'] = self._serialize.query('service_connection_id', service_connection_id, 'str')
response = self._send(http_method='GET',
location_id='8fc87684-9ebc-4c37-ab92-f4ac4a58cb3a',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[ConfigurationFile]', self._unwrap_collection(response))
def create_project_connection(self, create_connection_inputs, project):
"""CreateProjectConnection.
[Preview API] Creates a new Pipeline connection between the provider installation and the specified project. Returns the PipelineConnection object created.
:param :class:`<CreatePipelineConnectionInputs> <azure.devops.v7_1.cix.models.CreatePipelineConnectionInputs>` create_connection_inputs:
:param str project:
:rtype: :class:`<PipelineConnection> <azure.devops.v7_1.cix.models.PipelineConnection>`
"""
query_parameters = {}
if project is not None:
query_parameters['project'] = self._serialize.query('project', project, 'str')
content = self._serialize.body(create_connection_inputs, 'CreatePipelineConnectionInputs')
response = self._send(http_method='POST',
location_id='00df4879-9216-45d5-b38d-4a487b626b2c',
version='7.1-preview.1',
query_parameters=query_parameters,
content=content)
return self._deserialize('PipelineConnection', response)
def get_detected_build_frameworks(self, project, repository_type=None, repository_id=None, branch=None, detection_type=None, service_connection_id=None):
"""GetDetectedBuildFrameworks.
[Preview API] Returns a list of build frameworks that best match the given repository based on its contents.
:param str project: Project ID or project name
:param str repository_type: The type of the repository such as GitHub, TfsGit (i.e. Azure Repos), Bitbucket, etc.
:param str repository_id: The vendor-specific identifier or the name of the repository, e.g. Microsoft/vscode (GitHub) or e9d82045-ddba-4e01-a63d-2ab9f040af62 (Azure Repos)
:param str branch: The repository branch to detect build frameworks for.
:param str detection_type:
:param str service_connection_id: If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TfsGit (i.e. Azure Repos).
:rtype: [DetectedBuildFramework]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if repository_type is not None:
query_parameters['repositoryType'] = self._serialize.query('repository_type', repository_type, 'str')
if repository_id is not None:
query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str')
if branch is not None:
query_parameters['branch'] = self._serialize.query('branch', branch, 'str')
if detection_type is not None:
query_parameters['detectionType'] = self._serialize.query('detection_type', detection_type, 'str')
if service_connection_id is not None:
query_parameters['serviceConnectionId'] = self._serialize.query('service_connection_id', service_connection_id, 'str')
response = self._send(http_method='GET',
location_id='29a30bab-9efb-4652-bf1b-9269baca0980',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[DetectedBuildFramework]', self._unwrap_collection(response))
def create_resources(self, creation_parameters, project):
"""CreateResources.
[Preview API]
:param {ResourceCreationParameter} creation_parameters:
:param str project: Project ID or project name
:rtype: :class:`<CreatedResources> <azure.devops.v7_1.cix.models.CreatedResources>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(creation_parameters, '{ResourceCreationParameter}')
response = self._send(http_method='POST',
location_id='43201899-7690-4870-9c79-ab69605f21ed',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('CreatedResources', response)
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/cix/cix_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/cix/cix_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 3011
}
| 393 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class DashboardClient(Client):
"""Dashboard
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(DashboardClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = '31c84e0a-3ece-48fd-a29d-100849af99ba'
def create_dashboard(self, dashboard, team_context):
"""CreateDashboard.
[Preview API] Create the supplied dashboard.
:param :class:`<Dashboard> <azure.devops.v7_1.dashboard.models.Dashboard>` dashboard: The initial state of the dashboard
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:rtype: :class:`<Dashboard> <azure.devops.v7_1.dashboard.models.Dashboard>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
content = self._serialize.body(dashboard, 'Dashboard')
response = self._send(http_method='POST',
location_id='454b3e51-2e6e-48d4-ad81-978154089351',
version='7.1-preview.3',
route_values=route_values,
content=content)
return self._deserialize('Dashboard', response)
def delete_dashboard(self, team_context, dashboard_id):
"""DeleteDashboard.
[Preview API] Delete a dashboard given its ID. This also deletes the widgets associated with this dashboard.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the dashboard to delete.
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
self._send(http_method='DELETE',
location_id='454b3e51-2e6e-48d4-ad81-978154089351',
version='7.1-preview.3',
route_values=route_values)
def get_dashboard(self, team_context, dashboard_id):
"""GetDashboard.
[Preview API] Get a dashboard by its ID.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id:
:rtype: :class:`<Dashboard> <azure.devops.v7_1.dashboard.models.Dashboard>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
response = self._send(http_method='GET',
location_id='454b3e51-2e6e-48d4-ad81-978154089351',
version='7.1-preview.3',
route_values=route_values)
return self._deserialize('Dashboard', response)
def get_dashboards_by_project(self, team_context):
"""GetDashboardsByProject.
[Preview API] Get a list of dashboards under a project.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:rtype: [Dashboard]
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
response = self._send(http_method='GET',
location_id='454b3e51-2e6e-48d4-ad81-978154089351',
version='7.1-preview.3',
route_values=route_values)
return self._deserialize('[Dashboard]', self._unwrap_collection(response))
def replace_dashboard(self, dashboard, team_context, dashboard_id):
"""ReplaceDashboard.
[Preview API] Replace configuration for the specified dashboard. Replaces Widget list on Dashboard, only if property is supplied.
:param :class:`<Dashboard> <azure.devops.v7_1.dashboard.models.Dashboard>` dashboard: The Configuration of the dashboard to replace.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the dashboard to replace.
:rtype: :class:`<Dashboard> <azure.devops.v7_1.dashboard.models.Dashboard>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
content = self._serialize.body(dashboard, 'Dashboard')
response = self._send(http_method='PUT',
location_id='454b3e51-2e6e-48d4-ad81-978154089351',
version='7.1-preview.3',
route_values=route_values,
content=content)
return self._deserialize('Dashboard', response)
def replace_dashboards(self, group, team_context):
"""ReplaceDashboards.
[Preview API] Update the name and position of dashboards in the supplied group, and remove omitted dashboards. Does not modify dashboard content.
:param :class:`<DashboardGroup> <azure.devops.v7_1.dashboard.models.DashboardGroup>` group:
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:rtype: :class:`<DashboardGroup> <azure.devops.v7_1.dashboard.models.DashboardGroup>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
content = self._serialize.body(group, 'DashboardGroup')
response = self._send(http_method='PUT',
location_id='454b3e51-2e6e-48d4-ad81-978154089351',
version='7.1-preview.3',
route_values=route_values,
content=content)
return self._deserialize('DashboardGroup', response)
def create_widget(self, widget, team_context, dashboard_id):
"""CreateWidget.
[Preview API] Create a widget on the specified dashboard.
:param :class:`<Widget> <azure.devops.v7_1.dashboard.models.Widget>` widget: State of the widget to add
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of dashboard the widget will be added to.
:rtype: :class:`<Widget> <azure.devops.v7_1.dashboard.models.Widget>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
content = self._serialize.body(widget, 'Widget')
response = self._send(http_method='POST',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('Widget', response)
def delete_widget(self, team_context, dashboard_id, widget_id):
"""DeleteWidget.
[Preview API] Delete the specified widget.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the dashboard containing the widget.
:param str widget_id: ID of the widget to update.
:rtype: :class:`<Dashboard> <azure.devops.v7_1.dashboard.models.Dashboard>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
if widget_id is not None:
route_values['widgetId'] = self._serialize.url('widget_id', widget_id, 'str')
response = self._send(http_method='DELETE',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='7.1-preview.2',
route_values=route_values)
return self._deserialize('Dashboard', response)
def get_widget(self, team_context, dashboard_id, widget_id):
"""GetWidget.
[Preview API] Get the current state of the specified widget.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the dashboard containing the widget.
:param str widget_id: ID of the widget to read.
:rtype: :class:`<Widget> <azure.devops.v7_1.dashboard.models.Widget>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
if widget_id is not None:
route_values['widgetId'] = self._serialize.url('widget_id', widget_id, 'str')
response = self._send(http_method='GET',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='7.1-preview.2',
route_values=route_values)
return self._deserialize('Widget', response)
def get_widgets(self, team_context, dashboard_id, eTag=None):
"""GetWidgets.
[Preview API] Get widgets contained on the specified dashboard.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the dashboard to read.
:param String eTag: Dashboard Widgets Version
:rtype: :class:`<WidgetsVersionedList> <azure.devops.v7_1.dashboard.models.WidgetsVersionedList>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
additional_headers = {}
if eTag is not None:
additional_headers['ETag'] = eTag
response = self._send(http_method='GET',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='7.1-preview.2',
route_values=route_values,
additional_headers=additional_headers)
response_object = models.WidgetsVersionedList()
response_object.widgets = self._deserialize('[Widget]', self._unwrap_collection(response))
response_object.eTag = response.headers.get('ETag')
return response_object
def replace_widget(self, widget, team_context, dashboard_id, widget_id):
"""ReplaceWidget.
[Preview API] Override the state of the specified widget.
:param :class:`<Widget> <azure.devops.v7_1.dashboard.models.Widget>` widget: State to be written for the widget.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the dashboard containing the widget.
:param str widget_id: ID of the widget to update.
:rtype: :class:`<Widget> <azure.devops.v7_1.dashboard.models.Widget>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
if widget_id is not None:
route_values['widgetId'] = self._serialize.url('widget_id', widget_id, 'str')
content = self._serialize.body(widget, 'Widget')
response = self._send(http_method='PUT',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('Widget', response)
def replace_widgets(self, widgets, team_context, dashboard_id, eTag=None):
"""ReplaceWidgets.
[Preview API] Replace the widgets on specified dashboard with the supplied widgets.
:param [Widget] widgets: Revised state of widgets to store for the dashboard.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the Dashboard to modify.
:param String eTag: Dashboard Widgets Version
:rtype: :class:`<WidgetsVersionedList> <azure.devops.v7_1.dashboard.models.WidgetsVersionedList>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
additional_headers = {}
if eTag is not None:
additional_headers['ETag'] = eTag
content = self._serialize.body(widgets, '[Widget]')
response = self._send(http_method='PUT',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='7.1-preview.2',
route_values=route_values,
additional_headers=additional_headers,
content=content)
response_object = models.WidgetsVersionedList()
response_object.widgets = self._deserialize('[Widget]', self._unwrap_collection(response))
response_object.eTag = response.headers.get('ETag')
return response_object
def update_widget(self, widget, team_context, dashboard_id, widget_id):
"""UpdateWidget.
[Preview API] Perform a partial update of the specified widget.
:param :class:`<Widget> <azure.devops.v7_1.dashboard.models.Widget>` widget: Description of the widget changes to apply. All non-null fields will be replaced.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the dashboard containing the widget.
:param str widget_id: ID of the widget to update.
:rtype: :class:`<Widget> <azure.devops.v7_1.dashboard.models.Widget>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
if widget_id is not None:
route_values['widgetId'] = self._serialize.url('widget_id', widget_id, 'str')
content = self._serialize.body(widget, 'Widget')
response = self._send(http_method='PATCH',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('Widget', response)
def update_widgets(self, widgets, team_context, dashboard_id, eTag=None):
"""UpdateWidgets.
[Preview API] Update the supplied widgets on the dashboard using supplied state. State of existing Widgets not passed in the widget list is preserved.
:param [Widget] widgets: The set of widget states to update on the dashboard.
:param :class:`<TeamContext> <azure.devops.v7_1.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the Dashboard to modify.
:param String eTag: Dashboard Widgets Version
:rtype: :class:`<WidgetsVersionedList> <azure.devops.v7_1.dashboard.models.WidgetsVersionedList>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
additional_headers = {}
if eTag is not None:
additional_headers['ETag'] = eTag
content = self._serialize.body(widgets, '[Widget]')
response = self._send(http_method='PATCH',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='7.1-preview.2',
route_values=route_values,
additional_headers=additional_headers,
content=content)
response_object = models.WidgetsVersionedList()
response_object.widgets = self._deserialize('[Widget]', self._unwrap_collection(response))
response_object.eTag = response.headers.get('ETag')
return response_object
def get_widget_metadata(self, contribution_id, project=None):
"""GetWidgetMetadata.
[Preview API] Get the widget metadata satisfying the specified contribution ID.
:param str contribution_id: The ID of Contribution for the Widget
:param str project: Project ID or project name
:rtype: :class:`<WidgetMetadataResponse> <azure.devops.v7_1.dashboard.models.WidgetMetadataResponse>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if contribution_id is not None:
route_values['contributionId'] = self._serialize.url('contribution_id', contribution_id, 'str')
response = self._send(http_method='GET',
location_id='6b3628d3-e96f-4fc7-b176-50240b03b515',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('WidgetMetadataResponse', response)
def get_widget_types(self, scope, project=None):
"""GetWidgetTypes.
[Preview API] Get all available widget metadata in alphabetical order, including widgets marked with isVisibleFromCatalog == false.
:param str scope:
:param str project: Project ID or project name
:rtype: :class:`<WidgetTypesResponse> <azure.devops.v7_1.dashboard.models.WidgetTypesResponse>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if scope is not None:
query_parameters['$scope'] = self._serialize.query('scope', scope, 'str')
response = self._send(http_method='GET',
location_id='6b3628d3-e96f-4fc7-b176-50240b03b515',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WidgetTypesResponse', response)
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/dashboard/dashboard_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/dashboard/dashboard_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 12710
}
| 394 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class FeedBatchData(Model):
"""
:param data:
:type data: :class:`FeedBatchOperationData <azure.devops.v7_1.packaging.models.FeedBatchOperationData>`
:param operation:
:type operation: object
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'FeedBatchOperationData'},
'operation': {'key': 'operation', 'type': 'object'}
}
def __init__(self, data=None, operation=None):
super(FeedBatchData, self).__init__()
self.data = data
self.operation = operation
class FeedBatchOperationData(Model):
"""
"""
_attribute_map = {
}
def __init__(self):
super(FeedBatchOperationData, self).__init__()
class FeedCore(Model):
"""
An object that contains all of the settings for a specific feed.
:param allow_upstream_name_conflict: OBSOLETE: If set, the feed will allow upload of packages that exist on the upstream
:type allow_upstream_name_conflict: bool
:param capabilities: Supported capabilities of a feed.
:type capabilities: object
:param fully_qualified_id: This will either be the feed GUID or the feed GUID and view GUID depending on how the feed was accessed.
:type fully_qualified_id: str
:param fully_qualified_name: Full name of the view, in feed@view format.
:type fully_qualified_name: str
:param id: A GUID that uniquely identifies this feed.
:type id: str
:param is_read_only: If set, all packages in the feed are immutable. It is important to note that feed views are immutable; therefore, this flag will always be set for views.
:type is_read_only: bool
:param name: A name for the feed. feed names must follow these rules: <list type="bullet"><item><description> Must not exceed 64 characters </description></item><item><description> Must not contain whitespaces </description></item><item><description> Must not start with an underscore or a period </description></item><item><description> Must not end with a period </description></item><item><description> Must not contain any of the following illegal characters: <![CDATA[ @, ~, ;, {, }, \, +, =, <, >, |, /, \\, ?, :, &, $, *, \", #, [, ] ]]></description></item></list>
:type name: str
:param project: The project that this feed is associated with.
:type project: :class:`ProjectReference <azure.devops.v7_1.packaging.models.ProjectReference>`
:param upstream_enabled: This should always be true. Setting to false will override all sources in UpstreamSources.
:type upstream_enabled: bool
:param upstream_sources: A list of sources that this feed will fetch packages from. An empty list indicates that this feed will not search any additional sources for packages.
:type upstream_sources: list of :class:`UpstreamSource <azure.devops.v7_1.packaging.models.UpstreamSource>`
:param view: Definition of the view.
:type view: :class:`FeedView <azure.devops.v7_1.packaging.models.FeedView>`
:param view_id: View Id.
:type view_id: str
:param view_name: View name.
:type view_name: str
"""
_attribute_map = {
'allow_upstream_name_conflict': {'key': 'allowUpstreamNameConflict', 'type': 'bool'},
'capabilities': {'key': 'capabilities', 'type': 'object'},
'fully_qualified_id': {'key': 'fullyQualifiedId', 'type': 'str'},
'fully_qualified_name': {'key': 'fullyQualifiedName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_read_only': {'key': 'isReadOnly', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'project': {'key': 'project', 'type': 'ProjectReference'},
'upstream_enabled': {'key': 'upstreamEnabled', 'type': 'bool'},
'upstream_sources': {'key': 'upstreamSources', 'type': '[UpstreamSource]'},
'view': {'key': 'view', 'type': 'FeedView'},
'view_id': {'key': 'viewId', 'type': 'str'},
'view_name': {'key': 'viewName', 'type': 'str'}
}
def __init__(self, allow_upstream_name_conflict=None, capabilities=None, fully_qualified_id=None, fully_qualified_name=None, id=None, is_read_only=None, name=None, project=None, upstream_enabled=None, upstream_sources=None, view=None, view_id=None, view_name=None):
super(FeedCore, self).__init__()
self.allow_upstream_name_conflict = allow_upstream_name_conflict
self.capabilities = capabilities
self.fully_qualified_id = fully_qualified_id
self.fully_qualified_name = fully_qualified_name
self.id = id
self.is_read_only = is_read_only
self.name = name
self.project = project
self.upstream_enabled = upstream_enabled
self.upstream_sources = upstream_sources
self.view = view
self.view_id = view_id
self.view_name = view_name
class FeedChange(Model):
"""
A container that encapsulates the state of the feed after a create, update, or delete.
:param feed: The state of the feed after a after a create, update, or delete operation completed.
:type feed: :class:`Feed <azure.devops.v7_1.packaging.models.Feed>`
:param feed_continuation_token: A token that identifies the next change in the log of changes.
:type feed_continuation_token: long
:param change_type: The type of operation.
:type change_type: object
:param latest_package_continuation_token: A token that identifies the latest package change for this feed. This can be used to quickly determine if there have been any changes to packages in a specific feed.
:type latest_package_continuation_token: long
"""
_attribute_map = {
'feed': {'key': 'feed', 'type': 'Feed'},
'feed_continuation_token': {'key': 'feedContinuationToken', 'type': 'long'},
'change_type': {'key': 'changeType', 'type': 'object'},
'latest_package_continuation_token': {'key': 'latestPackageContinuationToken', 'type': 'long'}
}
def __init__(self, feed=None, feed_continuation_token=None, change_type=None, latest_package_continuation_token=None):
super(FeedChange, self).__init__()
self.feed = feed
self.feed_continuation_token = feed_continuation_token
self.change_type = change_type
self.latest_package_continuation_token = latest_package_continuation_token
class FeedChangesResponse(Model):
"""
A result set containing the feed changes for the range that was requested.
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.packaging.models.ReferenceLinks>`
:param count: The number of changes in this set.
:type count: int
:param feed_changes: A container that encapsulates the state of the feed after a create, update, or delete.
:type feed_changes: list of :class:`FeedChange <azure.devops.v7_1.packaging.models.FeedChange>`
:param next_feed_continuation_token: When iterating through the log of changes this value indicates the value that should be used for the next continuation token.
:type next_feed_continuation_token: long
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'count': {'key': 'count', 'type': 'int'},
'feed_changes': {'key': 'feedChanges', 'type': '[FeedChange]'},
'next_feed_continuation_token': {'key': 'nextFeedContinuationToken', 'type': 'long'}
}
def __init__(self, _links=None, count=None, feed_changes=None, next_feed_continuation_token=None):
super(FeedChangesResponse, self).__init__()
self._links = _links
self.count = count
self.feed_changes = feed_changes
self.next_feed_continuation_token = next_feed_continuation_token
class FeedIdsResult(Model):
"""
:param id:
:type id: str
:param name:
:type name: str
:param project_id:
:type project_id: str
:param project_name:
:type project_name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'project_id': {'key': 'projectId', 'type': 'str'},
'project_name': {'key': 'projectName', 'type': 'str'}
}
def __init__(self, id=None, name=None, project_id=None, project_name=None):
super(FeedIdsResult, self).__init__()
self.id = id
self.name = name
self.project_id = project_id
self.project_name = project_name
class FeedPermission(Model):
"""
Permissions for a feed.
:param display_name: Display name for the identity.
:type display_name: str
:param identity_descriptor: Identity associated with this role.
:type identity_descriptor: :class:`str <azure.devops.v7_1.packaging.models.str>`
:param identity_id: Id of the identity associated with this role.
:type identity_id: str
:param is_inherited_role: Boolean indicating whether the role is inherited or set directly.
:type is_inherited_role: bool
:param role: The role for this identity on a feed.
:type role: object
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'identity_descriptor': {'key': 'identityDescriptor', 'type': 'str'},
'identity_id': {'key': 'identityId', 'type': 'str'},
'is_inherited_role': {'key': 'isInheritedRole', 'type': 'bool'},
'role': {'key': 'role', 'type': 'object'}
}
def __init__(self, display_name=None, identity_descriptor=None, identity_id=None, is_inherited_role=None, role=None):
super(FeedPermission, self).__init__()
self.display_name = display_name
self.identity_descriptor = identity_descriptor
self.identity_id = identity_id
self.is_inherited_role = is_inherited_role
self.role = role
class FeedRetentionPolicy(Model):
"""
Retention policy settings.
:param age_limit_in_days: This attribute is deprecated and is not honoured by retention
:type age_limit_in_days: int
:param count_limit: Maximum versions to preserve per package and package type.
:type count_limit: int
:param days_to_keep_recently_downloaded_packages: Number of days to preserve a package version after its latest download.
:type days_to_keep_recently_downloaded_packages: int
"""
_attribute_map = {
'age_limit_in_days': {'key': 'ageLimitInDays', 'type': 'int'},
'count_limit': {'key': 'countLimit', 'type': 'int'},
'days_to_keep_recently_downloaded_packages': {'key': 'daysToKeepRecentlyDownloadedPackages', 'type': 'int'}
}
def __init__(self, age_limit_in_days=None, count_limit=None, days_to_keep_recently_downloaded_packages=None):
super(FeedRetentionPolicy, self).__init__()
self.age_limit_in_days = age_limit_in_days
self.count_limit = count_limit
self.days_to_keep_recently_downloaded_packages = days_to_keep_recently_downloaded_packages
class FeedUpdate(Model):
"""
Update a feed definition with these new values.
:param allow_upstream_name_conflict: If set, the feed will allow upload of packages that exist on the upstream
:type allow_upstream_name_conflict: bool
:param badges_enabled: If set, this feed supports generation of package badges.
:type badges_enabled: bool
:param default_view_id: The view that the feed administrator has indicated is the default experience for readers.
:type default_view_id: str
:param description: A description for the feed. Descriptions must not exceed 255 characters.
:type description: str
:param hide_deleted_package_versions: If set, feed will hide all deleted/unpublished versions
:type hide_deleted_package_versions: bool
:param id: A GUID that uniquely identifies this feed.
:type id: str
:param name: A name for the feed. feed names must follow these rules: <list type="bullet"><item><description> Must not exceed 64 characters </description></item><item><description> Must not contain whitespaces </description></item><item><description> Must not start with an underscore or a period </description></item><item><description> Must not end with a period </description></item><item><description> Must not contain any of the following illegal characters: <![CDATA[ @, ~, ;, {, }, \, +, =, <, >, |, /, \\, ?, :, &, $, *, \", #, [, ] ]]></description></item></list>
:type name: str
:param upstream_enabled: If set, the feed can proxy packages from an upstream feed
:type upstream_enabled: bool
:param upstream_sources: A list of sources that this feed will fetch packages from. An empty list indicates that this feed will not search any additional sources for packages.
:type upstream_sources: list of :class:`UpstreamSource <azure.devops.v7_1.packaging.models.UpstreamSource>`
"""
_attribute_map = {
'allow_upstream_name_conflict': {'key': 'allowUpstreamNameConflict', 'type': 'bool'},
'badges_enabled': {'key': 'badgesEnabled', 'type': 'bool'},
'default_view_id': {'key': 'defaultViewId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'hide_deleted_package_versions': {'key': 'hideDeletedPackageVersions', 'type': 'bool'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'upstream_enabled': {'key': 'upstreamEnabled', 'type': 'bool'},
'upstream_sources': {'key': 'upstreamSources', 'type': '[UpstreamSource]'}
}
def __init__(self, allow_upstream_name_conflict=None, badges_enabled=None, default_view_id=None, description=None, hide_deleted_package_versions=None, id=None, name=None, upstream_enabled=None, upstream_sources=None):
super(FeedUpdate, self).__init__()
self.allow_upstream_name_conflict = allow_upstream_name_conflict
self.badges_enabled = badges_enabled
self.default_view_id = default_view_id
self.description = description
self.hide_deleted_package_versions = hide_deleted_package_versions
self.id = id
self.name = name
self.upstream_enabled = upstream_enabled
self.upstream_sources = upstream_sources
class FeedView(Model):
"""
A view on top of a feed.
:param _links: Related REST links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.packaging.models.ReferenceLinks>`
:param id: Id of the view.
:type id: str
:param name: Name of the view.
:type name: str
:param type: Type of view.
:type type: object
:param url: Url of the view.
:type url: str
:param visibility: Visibility status of the view.
:type visibility: object
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'object'}
}
def __init__(self, _links=None, id=None, name=None, type=None, url=None, visibility=None):
super(FeedView, self).__init__()
self._links = _links
self.id = id
self.name = name
self.type = type
self.url = url
self.visibility = visibility
class GlobalPermission(Model):
"""
Permissions for feed service-wide operations such as the creation of new feeds.
:param identity_descriptor: Identity of the user with the provided Role.
:type identity_descriptor: :class:`str <azure.devops.v7_1.packaging.models.str>`
:param identity_id: IdentityId corresponding to the IdentityDescriptor
:type identity_id: str
:param role: Role associated with the Identity.
:type role: object
"""
_attribute_map = {
'identity_descriptor': {'key': 'identityDescriptor', 'type': 'str'},
'identity_id': {'key': 'identityId', 'type': 'str'},
'role': {'key': 'role', 'type': 'object'}
}
def __init__(self, identity_descriptor=None, identity_id=None, role=None):
super(GlobalPermission, self).__init__()
self.identity_descriptor = identity_descriptor
self.identity_id = identity_id
self.role = role
class JsonPatchOperation(Model):
"""
The JSON model for a JSON Patch operation
:param from_: The path to copy from for the Move/Copy operation.
:type from_: str
:param op: The patch operation
:type op: object
:param path: The path for the operation. In the case of an array, a zero based index can be used to specify the position in the array (e.g. /biscuits/0/name). The "-" character can be used instead of an index to insert at the end of the array (e.g. /biscuits/-).
:type path: str
:param value: The value for the operation. This is either a primitive or a JToken.
:type value: object
"""
_attribute_map = {
'from_': {'key': 'from', 'type': 'str'},
'op': {'key': 'op', 'type': 'object'},
'path': {'key': 'path', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'}
}
def __init__(self, from_=None, op=None, path=None, value=None):
super(JsonPatchOperation, self).__init__()
self.from_ = from_
self.op = op
self.path = path
self.value = value
class MinimalPackageVersion(Model):
"""
Core data about any package, including its id and version information and basic state.
:param direct_upstream_source_id: Upstream source this package was ingested from.
:type direct_upstream_source_id: str
:param id: Id for the package.
:type id: str
:param is_cached_version: [Obsolete] Used for legacy scenarios and may be removed in future versions.
:type is_cached_version: bool
:param is_deleted: True if this package has been deleted.
:type is_deleted: bool
:param is_latest: True if this is the latest version of the package by package type sort order.
:type is_latest: bool
:param is_listed: (NuGet and Cargo Only) True if this package is listed.
:type is_listed: bool
:param normalized_version: Normalized version using normalization rules specific to a package type.
:type normalized_version: str
:param package_description: Package description.
:type package_description: str
:param publish_date: UTC Date the package was published to the service.
:type publish_date: datetime
:param storage_id: Internal storage id.
:type storage_id: str
:param version: Display version.
:type version: str
:param views: List of views containing this package version.
:type views: list of :class:`FeedView <azure.devops.v7_1.packaging.models.FeedView>`
"""
_attribute_map = {
'direct_upstream_source_id': {'key': 'directUpstreamSourceId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_cached_version': {'key': 'isCachedVersion', 'type': 'bool'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'is_latest': {'key': 'isLatest', 'type': 'bool'},
'is_listed': {'key': 'isListed', 'type': 'bool'},
'normalized_version': {'key': 'normalizedVersion', 'type': 'str'},
'package_description': {'key': 'packageDescription', 'type': 'str'},
'publish_date': {'key': 'publishDate', 'type': 'iso-8601'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'views': {'key': 'views', 'type': '[FeedView]'}
}
def __init__(self, direct_upstream_source_id=None, id=None, is_cached_version=None, is_deleted=None, is_latest=None, is_listed=None, normalized_version=None, package_description=None, publish_date=None, storage_id=None, version=None, views=None):
super(MinimalPackageVersion, self).__init__()
self.direct_upstream_source_id = direct_upstream_source_id
self.id = id
self.is_cached_version = is_cached_version
self.is_deleted = is_deleted
self.is_latest = is_latest
self.is_listed = is_listed
self.normalized_version = normalized_version
self.package_description = package_description
self.publish_date = publish_date
self.storage_id = storage_id
self.version = version
self.views = views
class OperationReference(Model):
"""
Reference for an async operation.
:param id: Unique identifier for the operation.
:type id: str
:param plugin_id: Unique identifier for the plugin.
:type plugin_id: str
:param status: The current status of the operation.
:type status: object
:param url: URL to get the full operation object.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'plugin_id': {'key': 'pluginId', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, plugin_id=None, status=None, url=None):
super(OperationReference, self).__init__()
self.id = id
self.plugin_id = plugin_id
self.status = status
self.url = url
class Package(Model):
"""
A package, which is a container for one or more package versions.
:param _links: Related REST links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.packaging.models.ReferenceLinks>`
:param id: Id of the package.
:type id: str
:param is_cached: Used for legacy scenarios and may be removed in future versions.
:type is_cached: bool
:param name: The display name of the package.
:type name: str
:param normalized_name: The normalized name representing the identity of this package within its package type.
:type normalized_name: str
:param protocol_type: Type of the package.
:type protocol_type: str
:param star_count: [Obsolete] - this field is unused and will be removed in a future release.
:type star_count: int
:param url: Url for this package.
:type url: str
:param versions: All versions for this package within its feed.
:type versions: list of :class:`MinimalPackageVersion <azure.devops.v7_1.packaging.models.MinimalPackageVersion>`
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'str'},
'is_cached': {'key': 'isCached', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'normalized_name': {'key': 'normalizedName', 'type': 'str'},
'protocol_type': {'key': 'protocolType', 'type': 'str'},
'star_count': {'key': 'starCount', 'type': 'int'},
'url': {'key': 'url', 'type': 'str'},
'versions': {'key': 'versions', 'type': '[MinimalPackageVersion]'}
}
def __init__(self, _links=None, id=None, is_cached=None, name=None, normalized_name=None, protocol_type=None, star_count=None, url=None, versions=None):
super(Package, self).__init__()
self._links = _links
self.id = id
self.is_cached = is_cached
self.name = name
self.normalized_name = normalized_name
self.protocol_type = protocol_type
self.star_count = star_count
self.url = url
self.versions = versions
class PackageDependency(Model):
"""
A dependency on another package version.
:param group: Dependency package group (an optional classification within some package types).
:type group: str
:param package_name: Dependency package name.
:type package_name: str
:param version_range: Dependency package version range.
:type version_range: str
"""
_attribute_map = {
'group': {'key': 'group', 'type': 'str'},
'package_name': {'key': 'packageName', 'type': 'str'},
'version_range': {'key': 'versionRange', 'type': 'str'}
}
def __init__(self, group=None, package_name=None, version_range=None):
super(PackageDependency, self).__init__()
self.group = group
self.package_name = package_name
self.version_range = version_range
class PackageFile(Model):
"""
A package file for a specific package version, only relevant to package types that contain multiple files per version.
:param children: Hierarchical representation of files.
:type children: list of :class:`PackageFile <azure.devops.v7_1.packaging.models.PackageFile>`
:param name: File name.
:type name: str
:param protocol_metadata: Extended data unique to a specific package type.
:type protocol_metadata: :class:`ProtocolMetadata <azure.devops.v7_1.packaging.models.ProtocolMetadata>`
"""
_attribute_map = {
'children': {'key': 'children', 'type': '[PackageFile]'},
'name': {'key': 'name', 'type': 'str'},
'protocol_metadata': {'key': 'protocolMetadata', 'type': 'ProtocolMetadata'}
}
def __init__(self, children=None, name=None, protocol_metadata=None):
super(PackageFile, self).__init__()
self.children = children
self.name = name
self.protocol_metadata = protocol_metadata
class PackageChange(Model):
"""
A single change to a feed's packages.
:param package: Package that was changed.
:type package: :class:`Package <azure.devops.v7_1.packaging.models.Package>`
:param package_version_change: Change that was performed on a package version.
:type package_version_change: :class:`PackageVersionChange <azure.devops.v7_1.packaging.models.PackageVersionChange>`
"""
_attribute_map = {
'package': {'key': 'package', 'type': 'Package'},
'package_version_change': {'key': 'packageVersionChange', 'type': 'PackageVersionChange'}
}
def __init__(self, package=None, package_version_change=None):
super(PackageChange, self).__init__()
self.package = package
self.package_version_change = package_version_change
class PackageChangesResponse(Model):
"""
A set of change operations to a feed's packages.
:param _links: Related REST links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.packaging.models.ReferenceLinks>`
:param count: Number of changes in this batch.
:type count: int
:param next_package_continuation_token: Token that should be used in future calls for this feed to retrieve new changes.
:type next_package_continuation_token: long
:param package_changes: List of changes.
:type package_changes: list of :class:`PackageChange <azure.devops.v7_1.packaging.models.PackageChange>`
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'count': {'key': 'count', 'type': 'int'},
'next_package_continuation_token': {'key': 'nextPackageContinuationToken', 'type': 'long'},
'package_changes': {'key': 'packageChanges', 'type': '[PackageChange]'}
}
def __init__(self, _links=None, count=None, next_package_continuation_token=None, package_changes=None):
super(PackageChangesResponse, self).__init__()
self._links = _links
self.count = count
self.next_package_continuation_token = next_package_continuation_token
self.package_changes = package_changes
class PackageMetrics(Model):
"""
All metrics for a certain package id
:param download_count: Total count of downloads per package id.
:type download_count: float
:param download_unique_users: Number of downloads per unique user per package id.
:type download_unique_users: float
:param last_downloaded: UTC date and time when package was last downloaded.
:type last_downloaded: datetime
:param package_id: Package id.
:type package_id: str
"""
_attribute_map = {
'download_count': {'key': 'downloadCount', 'type': 'float'},
'download_unique_users': {'key': 'downloadUniqueUsers', 'type': 'float'},
'last_downloaded': {'key': 'lastDownloaded', 'type': 'iso-8601'},
'package_id': {'key': 'packageId', 'type': 'str'}
}
def __init__(self, download_count=None, download_unique_users=None, last_downloaded=None, package_id=None):
super(PackageMetrics, self).__init__()
self.download_count = download_count
self.download_unique_users = download_unique_users
self.last_downloaded = last_downloaded
self.package_id = package_id
class PackageMetricsQuery(Model):
"""
Query to get package metrics
:param package_ids: List of package ids
:type package_ids: list of str
"""
_attribute_map = {
'package_ids': {'key': 'packageIds', 'type': '[str]'}
}
def __init__(self, package_ids=None):
super(PackageMetricsQuery, self).__init__()
self.package_ids = package_ids
class PackageVersion(MinimalPackageVersion):
"""
A specific version of a package.
:param direct_upstream_source_id: Upstream source this package was ingested from.
:type direct_upstream_source_id: str
:param id: Id for the package.
:type id: str
:param is_cached_version: [Obsolete] Used for legacy scenarios and may be removed in future versions.
:type is_cached_version: bool
:param is_deleted: True if this package has been deleted.
:type is_deleted: bool
:param is_latest: True if this is the latest version of the package by package type sort order.
:type is_latest: bool
:param is_listed: (NuGet and Cargo Only) True if this package is listed.
:type is_listed: bool
:param normalized_version: Normalized version using normalization rules specific to a package type.
:type normalized_version: str
:param package_description: Package description.
:type package_description: str
:param publish_date: UTC Date the package was published to the service.
:type publish_date: datetime
:param storage_id: Internal storage id.
:type storage_id: str
:param version: Display version.
:type version: str
:param views: List of views containing this package version.
:type views: list of :class:`FeedView <azure.devops.v7_1.packaging.models.FeedView>`
:param _links: Related links
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.packaging.models.ReferenceLinks>`
:param author: Package version author.
:type author: str
:param deleted_date: UTC date that this package version was deleted.
:type deleted_date: datetime
:param dependencies: List of dependencies for this package version.
:type dependencies: list of :class:`PackageDependency <azure.devops.v7_1.packaging.models.PackageDependency>`
:param description: Package version description.
:type description: str
:param files: Files associated with this package version, only relevant for multi-file package types.
:type files: list of :class:`PackageFile <azure.devops.v7_1.packaging.models.PackageFile>`
:param other_versions: Other versions of this package.
:type other_versions: list of :class:`MinimalPackageVersion <azure.devops.v7_1.packaging.models.MinimalPackageVersion>`
:param protocol_metadata: Extended data specific to a package type.
:type protocol_metadata: :class:`ProtocolMetadata <azure.devops.v7_1.packaging.models.ProtocolMetadata>`
:param source_chain: List of upstream sources through which a package version moved to land in this feed.
:type source_chain: list of :class:`UpstreamSource <azure.devops.v7_1.packaging.models.UpstreamSource>`
:param summary: Package version summary.
:type summary: str
:param tags: Package version tags.
:type tags: list of str
:param url: Package version url.
:type url: str
"""
_attribute_map = {
'direct_upstream_source_id': {'key': 'directUpstreamSourceId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_cached_version': {'key': 'isCachedVersion', 'type': 'bool'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'is_latest': {'key': 'isLatest', 'type': 'bool'},
'is_listed': {'key': 'isListed', 'type': 'bool'},
'normalized_version': {'key': 'normalizedVersion', 'type': 'str'},
'package_description': {'key': 'packageDescription', 'type': 'str'},
'publish_date': {'key': 'publishDate', 'type': 'iso-8601'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'views': {'key': 'views', 'type': '[FeedView]'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'author': {'key': 'author', 'type': 'str'},
'deleted_date': {'key': 'deletedDate', 'type': 'iso-8601'},
'dependencies': {'key': 'dependencies', 'type': '[PackageDependency]'},
'description': {'key': 'description', 'type': 'str'},
'files': {'key': 'files', 'type': '[PackageFile]'},
'other_versions': {'key': 'otherVersions', 'type': '[MinimalPackageVersion]'},
'protocol_metadata': {'key': 'protocolMetadata', 'type': 'ProtocolMetadata'},
'source_chain': {'key': 'sourceChain', 'type': '[UpstreamSource]'},
'summary': {'key': 'summary', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, direct_upstream_source_id=None, id=None, is_cached_version=None, is_deleted=None, is_latest=None, is_listed=None, normalized_version=None, package_description=None, publish_date=None, storage_id=None, version=None, views=None, _links=None, author=None, deleted_date=None, dependencies=None, description=None, files=None, other_versions=None, protocol_metadata=None, source_chain=None, summary=None, tags=None, url=None):
super(PackageVersion, self).__init__(direct_upstream_source_id=direct_upstream_source_id, id=id, is_cached_version=is_cached_version, is_deleted=is_deleted, is_latest=is_latest, is_listed=is_listed, normalized_version=normalized_version, package_description=package_description, publish_date=publish_date, storage_id=storage_id, version=version, views=views)
self._links = _links
self.author = author
self.deleted_date = deleted_date
self.dependencies = dependencies
self.description = description
self.files = files
self.other_versions = other_versions
self.protocol_metadata = protocol_metadata
self.source_chain = source_chain
self.summary = summary
self.tags = tags
self.url = url
class PackageVersionChange(Model):
"""
A change to a single package version.
:param continuation_token: Token marker for this change, allowing the caller to send this value back to the service and receive changes beyond this one.
:type continuation_token: long
:param change_type: The type of change that was performed.
:type change_type: object
:param package_version: Package version that was changed.
:type package_version: :class:`PackageVersion <azure.devops.v7_1.packaging.models.PackageVersion>`
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': 'long'},
'change_type': {'key': 'changeType', 'type': 'object'},
'package_version': {'key': 'packageVersion', 'type': 'PackageVersion'}
}
def __init__(self, continuation_token=None, change_type=None, package_version=None):
super(PackageVersionChange, self).__init__()
self.continuation_token = continuation_token
self.change_type = change_type
self.package_version = package_version
class PackageVersionMetrics(Model):
"""
All metrics for a certain package version id
:param download_count: Total count of downloads per package version id.
:type download_count: float
:param download_unique_users: Number of downloads per unique user per package version id.
:type download_unique_users: float
:param last_downloaded: UTC date and time when package version was last downloaded.
:type last_downloaded: datetime
:param package_id: Package id.
:type package_id: str
:param package_version_id: Package version id.
:type package_version_id: str
"""
_attribute_map = {
'download_count': {'key': 'downloadCount', 'type': 'float'},
'download_unique_users': {'key': 'downloadUniqueUsers', 'type': 'float'},
'last_downloaded': {'key': 'lastDownloaded', 'type': 'iso-8601'},
'package_id': {'key': 'packageId', 'type': 'str'},
'package_version_id': {'key': 'packageVersionId', 'type': 'str'}
}
def __init__(self, download_count=None, download_unique_users=None, last_downloaded=None, package_id=None, package_version_id=None):
super(PackageVersionMetrics, self).__init__()
self.download_count = download_count
self.download_unique_users = download_unique_users
self.last_downloaded = last_downloaded
self.package_id = package_id
self.package_version_id = package_version_id
class PackageVersionMetricsQuery(Model):
"""
Query to get package version metrics
:param package_version_ids: List of package version ids
:type package_version_ids: list of str
"""
_attribute_map = {
'package_version_ids': {'key': 'packageVersionIds', 'type': '[str]'}
}
def __init__(self, package_version_ids=None):
super(PackageVersionMetricsQuery, self).__init__()
self.package_version_ids = package_version_ids
class PackageVersionProvenance(Model):
"""
Provenance for a published package version
:param feed_id: Name or Id of the feed.
:type feed_id: str
:param package_id: Id of the package (GUID Id, not name).
:type package_id: str
:param package_version_id: Id of the package version (GUID Id, not name).
:type package_version_id: str
:param provenance: Provenance information for this package version.
:type provenance: :class:`Provenance <azure.devops.v7_1.packaging.models.Provenance>`
"""
_attribute_map = {
'feed_id': {'key': 'feedId', 'type': 'str'},
'package_id': {'key': 'packageId', 'type': 'str'},
'package_version_id': {'key': 'packageVersionId', 'type': 'str'},
'provenance': {'key': 'provenance', 'type': 'Provenance'}
}
def __init__(self, feed_id=None, package_id=None, package_version_id=None, provenance=None):
super(PackageVersionProvenance, self).__init__()
self.feed_id = feed_id
self.package_id = package_id
self.package_version_id = package_version_id
self.provenance = provenance
class ProjectReference(Model):
"""
:param id: Gets or sets id of the project.
:type id: str
:param name: Gets or sets name of the project.
:type name: str
:param visibility: Gets or sets visibility of the project.
:type visibility: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'str'}
}
def __init__(self, id=None, name=None, visibility=None):
super(ProjectReference, self).__init__()
self.id = id
self.name = name
self.visibility = visibility
class ProtocolMetadata(Model):
"""
Extended metadata for a specific package type.
:param data: Extended metadata for a specific package type, formatted to the associated schema version definition.
:type data: object
:param schema_version: Schema version.
:type schema_version: int
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'object'},
'schema_version': {'key': 'schemaVersion', 'type': 'int'}
}
def __init__(self, data=None, schema_version=None):
super(ProtocolMetadata, self).__init__()
self.data = data
self.schema_version = schema_version
class Provenance(Model):
"""
Data about the origin of a published package
:param data: Other provenance data.
:type data: dict
:param provenance_source: Type of provenance source, for example "InternalBuild", "InternalRelease"
:type provenance_source: str
:param publisher_user_identity: Identity of user that published the package
:type publisher_user_identity: str
:param user_agent: HTTP User-Agent used when pushing the package.
:type user_agent: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': '{str}'},
'provenance_source': {'key': 'provenanceSource', 'type': 'str'},
'publisher_user_identity': {'key': 'publisherUserIdentity', 'type': 'str'},
'user_agent': {'key': 'userAgent', 'type': 'str'}
}
def __init__(self, data=None, provenance_source=None, publisher_user_identity=None, user_agent=None):
super(Provenance, self).__init__()
self.data = data
self.provenance_source = provenance_source
self.publisher_user_identity = publisher_user_identity
self.user_agent = user_agent
class RecycleBinPackageVersion(PackageVersion):
"""
A single package version within the recycle bin.
:param direct_upstream_source_id: Upstream source this package was ingested from.
:type direct_upstream_source_id: str
:param id: Id for the package.
:type id: str
:param is_cached_version: [Obsolete] Used for legacy scenarios and may be removed in future versions.
:type is_cached_version: bool
:param is_deleted: True if this package has been deleted.
:type is_deleted: bool
:param is_latest: True if this is the latest version of the package by package type sort order.
:type is_latest: bool
:param is_listed: (NuGet and Cargo Only) True if this package is listed.
:type is_listed: bool
:param normalized_version: Normalized version using normalization rules specific to a package type.
:type normalized_version: str
:param package_description: Package description.
:type package_description: str
:param publish_date: UTC Date the package was published to the service.
:type publish_date: datetime
:param storage_id: Internal storage id.
:type storage_id: str
:param version: Display version.
:type version: str
:param views: List of views containing this package version.
:type views: list of :class:`FeedView <azure.devops.v7_1.packaging.models.FeedView>`
:param _links: Related links
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.packaging.models.ReferenceLinks>`
:param author: Package version author.
:type author: str
:param deleted_date: UTC date that this package version was deleted.
:type deleted_date: datetime
:param dependencies: List of dependencies for this package version.
:type dependencies: list of :class:`PackageDependency <azure.devops.v7_1.packaging.models.PackageDependency>`
:param description: Package version description.
:type description: str
:param files: Files associated with this package version, only relevant for multi-file package types.
:type files: list of :class:`PackageFile <azure.devops.v7_1.packaging.models.PackageFile>`
:param other_versions: Other versions of this package.
:type other_versions: list of :class:`MinimalPackageVersion <azure.devops.v7_1.packaging.models.MinimalPackageVersion>`
:param protocol_metadata: Extended data specific to a package type.
:type protocol_metadata: :class:`ProtocolMetadata <azure.devops.v7_1.packaging.models.ProtocolMetadata>`
:param source_chain: List of upstream sources through which a package version moved to land in this feed.
:type source_chain: list of :class:`UpstreamSource <azure.devops.v7_1.packaging.models.UpstreamSource>`
:param summary: Package version summary.
:type summary: str
:param tags: Package version tags.
:type tags: list of str
:param url: Package version url.
:type url: str
:param scheduled_permanent_delete_date: UTC date on which the package will automatically be removed from the recycle bin and permanently deleted.
:type scheduled_permanent_delete_date: datetime
"""
_attribute_map = {
'direct_upstream_source_id': {'key': 'directUpstreamSourceId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_cached_version': {'key': 'isCachedVersion', 'type': 'bool'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'is_latest': {'key': 'isLatest', 'type': 'bool'},
'is_listed': {'key': 'isListed', 'type': 'bool'},
'normalized_version': {'key': 'normalizedVersion', 'type': 'str'},
'package_description': {'key': 'packageDescription', 'type': 'str'},
'publish_date': {'key': 'publishDate', 'type': 'iso-8601'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'views': {'key': 'views', 'type': '[FeedView]'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'author': {'key': 'author', 'type': 'str'},
'deleted_date': {'key': 'deletedDate', 'type': 'iso-8601'},
'dependencies': {'key': 'dependencies', 'type': '[PackageDependency]'},
'description': {'key': 'description', 'type': 'str'},
'files': {'key': 'files', 'type': '[PackageFile]'},
'other_versions': {'key': 'otherVersions', 'type': '[MinimalPackageVersion]'},
'protocol_metadata': {'key': 'protocolMetadata', 'type': 'ProtocolMetadata'},
'source_chain': {'key': 'sourceChain', 'type': '[UpstreamSource]'},
'summary': {'key': 'summary', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'url': {'key': 'url', 'type': 'str'},
'scheduled_permanent_delete_date': {'key': 'scheduledPermanentDeleteDate', 'type': 'iso-8601'}
}
def __init__(self, direct_upstream_source_id=None, id=None, is_cached_version=None, is_deleted=None, is_latest=None, is_listed=None, normalized_version=None, package_description=None, publish_date=None, storage_id=None, version=None, views=None, _links=None, author=None, deleted_date=None, dependencies=None, description=None, files=None, other_versions=None, protocol_metadata=None, source_chain=None, summary=None, tags=None, url=None, scheduled_permanent_delete_date=None):
super(RecycleBinPackageVersion, self).__init__(direct_upstream_source_id=direct_upstream_source_id, id=id, is_cached_version=is_cached_version, is_deleted=is_deleted, is_latest=is_latest, is_listed=is_listed, normalized_version=normalized_version, package_description=package_description, publish_date=publish_date, storage_id=storage_id, version=version, views=views, _links=_links, author=author, deleted_date=deleted_date, dependencies=dependencies, description=description, files=files, other_versions=other_versions, protocol_metadata=protocol_metadata, source_chain=source_chain, summary=summary, tags=tags, url=url)
self.scheduled_permanent_delete_date = scheduled_permanent_delete_date
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class UpstreamSource(Model):
"""
Upstream source definition, including its Identity, package type, and other associated information.
:param deleted_date: UTC date that this upstream was deleted.
:type deleted_date: datetime
:param display_location: Locator for connecting to the upstream source in a user friendly format, that may potentially change over time
:type display_location: str
:param id: Identity of the upstream source.
:type id: str
:param internal_upstream_collection_id: For an internal upstream type, track the Azure DevOps organization that contains it.
:type internal_upstream_collection_id: str
:param internal_upstream_feed_id: For an internal upstream type, track the feed id being referenced.
:type internal_upstream_feed_id: str
:param internal_upstream_project_id: For an internal upstream type, track the project of the feed being referenced.
:type internal_upstream_project_id: str
:param internal_upstream_view_id: For an internal upstream type, track the view of the feed being referenced.
:type internal_upstream_view_id: str
:param location: Consistent locator for connecting to the upstream source.
:type location: str
:param name: Display name.
:type name: str
:param protocol: Package type associated with the upstream source.
:type protocol: str
:param service_endpoint_id: The identity of the service endpoint that holds credentials to use when accessing the upstream.
:type service_endpoint_id: str
:param service_endpoint_project_id: Specifies the projectId of the Service Endpoint.
:type service_endpoint_project_id: str
:param status: Specifies the status of the upstream.
:type status: object
:param status_details: Provides a human-readable reason for the status of the upstream.
:type status_details: list of :class:`UpstreamStatusDetail <azure.devops.v7_1.packaging.models.UpstreamStatusDetail>`
:param upstream_source_type: Source type, such as Public or Internal.
:type upstream_source_type: object
"""
_attribute_map = {
'deleted_date': {'key': 'deletedDate', 'type': 'iso-8601'},
'display_location': {'key': 'displayLocation', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'internal_upstream_collection_id': {'key': 'internalUpstreamCollectionId', 'type': 'str'},
'internal_upstream_feed_id': {'key': 'internalUpstreamFeedId', 'type': 'str'},
'internal_upstream_project_id': {'key': 'internalUpstreamProjectId', 'type': 'str'},
'internal_upstream_view_id': {'key': 'internalUpstreamViewId', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'service_endpoint_id': {'key': 'serviceEndpointId', 'type': 'str'},
'service_endpoint_project_id': {'key': 'serviceEndpointProjectId', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'},
'status_details': {'key': 'statusDetails', 'type': '[UpstreamStatusDetail]'},
'upstream_source_type': {'key': 'upstreamSourceType', 'type': 'object'}
}
def __init__(self, deleted_date=None, display_location=None, id=None, internal_upstream_collection_id=None, internal_upstream_feed_id=None, internal_upstream_project_id=None, internal_upstream_view_id=None, location=None, name=None, protocol=None, service_endpoint_id=None, service_endpoint_project_id=None, status=None, status_details=None, upstream_source_type=None):
super(UpstreamSource, self).__init__()
self.deleted_date = deleted_date
self.display_location = display_location
self.id = id
self.internal_upstream_collection_id = internal_upstream_collection_id
self.internal_upstream_feed_id = internal_upstream_feed_id
self.internal_upstream_project_id = internal_upstream_project_id
self.internal_upstream_view_id = internal_upstream_view_id
self.location = location
self.name = name
self.protocol = protocol
self.service_endpoint_id = service_endpoint_id
self.service_endpoint_project_id = service_endpoint_project_id
self.status = status
self.status_details = status_details
self.upstream_source_type = upstream_source_type
class UpstreamStatusDetail(Model):
"""
:param reason: Provides a human-readable reason for the status of the upstream.
:type reason: str
"""
_attribute_map = {
'reason': {'key': 'reason', 'type': 'str'}
}
def __init__(self, reason=None):
super(UpstreamStatusDetail, self).__init__()
self.reason = reason
class Feed(FeedCore):
"""
A container for artifacts.
:param allow_upstream_name_conflict: OBSOLETE: If set, the feed will allow upload of packages that exist on the upstream
:type allow_upstream_name_conflict: bool
:param capabilities: Supported capabilities of a feed.
:type capabilities: object
:param fully_qualified_id: This will either be the feed GUID or the feed GUID and view GUID depending on how the feed was accessed.
:type fully_qualified_id: str
:param fully_qualified_name: Full name of the view, in feed@view format.
:type fully_qualified_name: str
:param id: A GUID that uniquely identifies this feed.
:type id: str
:param is_read_only: If set, all packages in the feed are immutable. It is important to note that feed views are immutable; therefore, this flag will always be set for views.
:type is_read_only: bool
:param name: A name for the feed. feed names must follow these rules: <list type="bullet"><item><description> Must not exceed 64 characters </description></item><item><description> Must not contain whitespaces </description></item><item><description> Must not start with an underscore or a period </description></item><item><description> Must not end with a period </description></item><item><description> Must not contain any of the following illegal characters: <![CDATA[ @, ~, ;, {, }, \, +, =, <, >, |, /, \\, ?, :, &, $, *, \", #, [, ] ]]></description></item></list>
:type name: str
:param project: The project that this feed is associated with.
:type project: :class:`ProjectReference <azure.devops.v7_1.packaging.models.ProjectReference>`
:param upstream_enabled: This should always be true. Setting to false will override all sources in UpstreamSources.
:type upstream_enabled: bool
:param upstream_sources: A list of sources that this feed will fetch packages from. An empty list indicates that this feed will not search any additional sources for packages.
:type upstream_sources: list of :class:`UpstreamSource <azure.devops.v7_1.packaging.models.UpstreamSource>`
:param view: Definition of the view.
:type view: :class:`FeedView <azure.devops.v7_1.packaging.models.FeedView>`
:param view_id: View Id.
:type view_id: str
:param view_name: View name.
:type view_name: str
:param _links: Related REST links.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.packaging.models.ReferenceLinks>`
:param badges_enabled: If set, this feed supports generation of package badges.
:type badges_enabled: bool
:param default_view_id: The view that the feed administrator has indicated is the default experience for readers.
:type default_view_id: str
:param deleted_date: The date that this feed was deleted.
:type deleted_date: datetime
:param description: A description for the feed. Descriptions must not exceed 255 characters.
:type description: str
:param hide_deleted_package_versions: If set, the feed will hide all deleted/unpublished versions
:type hide_deleted_package_versions: bool
:param permanent_deleted_date: The date that this feed was permanently deleted.
:type permanent_deleted_date: datetime
:param permissions: Explicit permissions for the feed.
:type permissions: list of :class:`FeedPermission <azure.devops.v7_1.packaging.models.FeedPermission>`
:param scheduled_permanent_delete_date: The date that this feed is scheduled to be permanently deleted.
:type scheduled_permanent_delete_date: datetime
:param upstream_enabled_changed_date: If set, time that the UpstreamEnabled property was changed. Will be null if UpstreamEnabled was never changed after Feed creation.
:type upstream_enabled_changed_date: datetime
:param url: The URL of the base feed in GUID form.
:type url: str
"""
_attribute_map = {
'allow_upstream_name_conflict': {'key': 'allowUpstreamNameConflict', 'type': 'bool'},
'capabilities': {'key': 'capabilities', 'type': 'object'},
'fully_qualified_id': {'key': 'fullyQualifiedId', 'type': 'str'},
'fully_qualified_name': {'key': 'fullyQualifiedName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_read_only': {'key': 'isReadOnly', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'project': {'key': 'project', 'type': 'ProjectReference'},
'upstream_enabled': {'key': 'upstreamEnabled', 'type': 'bool'},
'upstream_sources': {'key': 'upstreamSources', 'type': '[UpstreamSource]'},
'view': {'key': 'view', 'type': 'FeedView'},
'view_id': {'key': 'viewId', 'type': 'str'},
'view_name': {'key': 'viewName', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'badges_enabled': {'key': 'badgesEnabled', 'type': 'bool'},
'default_view_id': {'key': 'defaultViewId', 'type': 'str'},
'deleted_date': {'key': 'deletedDate', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'hide_deleted_package_versions': {'key': 'hideDeletedPackageVersions', 'type': 'bool'},
'permanent_deleted_date': {'key': 'permanentDeletedDate', 'type': 'iso-8601'},
'permissions': {'key': 'permissions', 'type': '[FeedPermission]'},
'scheduled_permanent_delete_date': {'key': 'scheduledPermanentDeleteDate', 'type': 'iso-8601'},
'upstream_enabled_changed_date': {'key': 'upstreamEnabledChangedDate', 'type': 'iso-8601'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, allow_upstream_name_conflict=None, capabilities=None, fully_qualified_id=None, fully_qualified_name=None, id=None, is_read_only=None, name=None, project=None, upstream_enabled=None, upstream_sources=None, view=None, view_id=None, view_name=None, _links=None, badges_enabled=None, default_view_id=None, deleted_date=None, description=None, hide_deleted_package_versions=None, permanent_deleted_date=None, permissions=None, scheduled_permanent_delete_date=None, upstream_enabled_changed_date=None, url=None):
super(Feed, self).__init__(allow_upstream_name_conflict=allow_upstream_name_conflict, capabilities=capabilities, fully_qualified_id=fully_qualified_id, fully_qualified_name=fully_qualified_name, id=id, is_read_only=is_read_only, name=name, project=project, upstream_enabled=upstream_enabled, upstream_sources=upstream_sources, view=view, view_id=view_id, view_name=view_name)
self._links = _links
self.badges_enabled = badges_enabled
self.default_view_id = default_view_id
self.deleted_date = deleted_date
self.description = description
self.hide_deleted_package_versions = hide_deleted_package_versions
self.permanent_deleted_date = permanent_deleted_date
self.permissions = permissions
self.scheduled_permanent_delete_date = scheduled_permanent_delete_date
self.upstream_enabled_changed_date = upstream_enabled_changed_date
self.url = url
__all__ = [
'FeedBatchData',
'FeedBatchOperationData',
'FeedCore',
'FeedChange',
'FeedChangesResponse',
'FeedIdsResult',
'FeedPermission',
'FeedRetentionPolicy',
'FeedUpdate',
'FeedView',
'GlobalPermission',
'JsonPatchOperation',
'MinimalPackageVersion',
'OperationReference',
'Package',
'PackageDependency',
'PackageFile',
'PackageChange',
'PackageChangesResponse',
'PackageMetrics',
'PackageMetricsQuery',
'PackageVersion',
'PackageVersionChange',
'PackageVersionMetrics',
'PackageVersionMetricsQuery',
'PackageVersionProvenance',
'ProjectReference',
'ProtocolMetadata',
'Provenance',
'RecycleBinPackageVersion',
'ReferenceLinks',
'UpstreamSource',
'UpstreamStatusDetail',
'Feed',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/feed/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/feed/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 21924
}
| 395 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AccessTokenResult(Model):
"""
:param access_token:
:type access_token: :class:`JsonWebToken <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.JsonWebToken>`
:param access_token_error:
:type access_token_error: object
:param authorization_id:
:type authorization_id: str
:param error_description:
:type error_description: str
:param has_error:
:type has_error: bool
:param is_first_party_client:
:type is_first_party_client: bool
:param refresh_token:
:type refresh_token: :class:`RefreshTokenGrant <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.RefreshTokenGrant>`
:param scope:
:type scope: str
:param token_type:
:type token_type: str
:param valid_to:
:type valid_to: datetime
"""
_attribute_map = {
'access_token': {'key': 'accessToken', 'type': 'JsonWebToken'},
'access_token_error': {'key': 'accessTokenError', 'type': 'object'},
'authorization_id': {'key': 'authorizationId', 'type': 'str'},
'error_description': {'key': 'errorDescription', 'type': 'str'},
'has_error': {'key': 'hasError', 'type': 'bool'},
'is_first_party_client': {'key': 'isFirstPartyClient', 'type': 'bool'},
'refresh_token': {'key': 'refreshToken', 'type': 'RefreshTokenGrant'},
'scope': {'key': 'scope', 'type': 'str'},
'token_type': {'key': 'tokenType', 'type': 'str'},
'valid_to': {'key': 'validTo', 'type': 'iso-8601'}
}
def __init__(self, access_token=None, access_token_error=None, authorization_id=None, error_description=None, has_error=None, is_first_party_client=None, refresh_token=None, scope=None, token_type=None, valid_to=None):
super(AccessTokenResult, self).__init__()
self.access_token = access_token
self.access_token_error = access_token_error
self.authorization_id = authorization_id
self.error_description = error_description
self.has_error = has_error
self.is_first_party_client = is_first_party_client
self.refresh_token = refresh_token
self.scope = scope
self.token_type = token_type
self.valid_to = valid_to
class AuthorizationGrant(Model):
"""
:param grant_type:
:type grant_type: object
"""
_attribute_map = {
'grant_type': {'key': 'grantType', 'type': 'object'}
}
def __init__(self, grant_type=None):
super(AuthorizationGrant, self).__init__()
self.grant_type = grant_type
class CreateScopeInfo(Model):
"""
:param admin_group_description:
:type admin_group_description: str
:param admin_group_name:
:type admin_group_name: str
:param creator_id:
:type creator_id: str
:param parent_scope_id:
:type parent_scope_id: str
:param scope_name:
:type scope_name: str
:param scope_type:
:type scope_type: object
"""
_attribute_map = {
'admin_group_description': {'key': 'adminGroupDescription', 'type': 'str'},
'admin_group_name': {'key': 'adminGroupName', 'type': 'str'},
'creator_id': {'key': 'creatorId', 'type': 'str'},
'parent_scope_id': {'key': 'parentScopeId', 'type': 'str'},
'scope_name': {'key': 'scopeName', 'type': 'str'},
'scope_type': {'key': 'scopeType', 'type': 'object'}
}
def __init__(self, admin_group_description=None, admin_group_name=None, creator_id=None, parent_scope_id=None, scope_name=None, scope_type=None):
super(CreateScopeInfo, self).__init__()
self.admin_group_description = admin_group_description
self.admin_group_name = admin_group_name
self.creator_id = creator_id
self.parent_scope_id = parent_scope_id
self.scope_name = scope_name
self.scope_type = scope_type
class FrameworkIdentityInfo(Model):
"""
:param display_name:
:type display_name: str
:param identifier:
:type identifier: str
:param identity_type:
:type identity_type: object
:param role:
:type role: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'identifier': {'key': 'identifier', 'type': 'str'},
'identity_type': {'key': 'identityType', 'type': 'object'},
'role': {'key': 'role', 'type': 'str'}
}
def __init__(self, display_name=None, identifier=None, identity_type=None, role=None):
super(FrameworkIdentityInfo, self).__init__()
self.display_name = display_name
self.identifier = identifier
self.identity_type = identity_type
self.role = role
class GroupMembership(Model):
"""
:param active:
:type active: bool
:param descriptor:
:type descriptor: :class:`str <azure.devops.v7_1.identities.models.str>`
:param id:
:type id: str
:param queried_id:
:type queried_id: str
"""
_attribute_map = {
'active': {'key': 'active', 'type': 'bool'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'queried_id': {'key': 'queriedId', 'type': 'str'}
}
def __init__(self, active=None, descriptor=None, id=None, queried_id=None):
super(GroupMembership, self).__init__()
self.active = active
self.descriptor = descriptor
self.id = id
self.queried_id = queried_id
class ChangedIdentities(Model):
"""
Container class for changed identities
:param identities: Changed Identities
:type identities: list of :class:`Identity <azure.devops.v7_1.identities.models.Identity>`
:param more_data: More data available, set to true if pagesize is specified.
:type more_data: bool
:param sequence_context: Last Identity SequenceId
:type sequence_context: :class:`ChangedIdentitiesContext <azure.devops.v7_1.identities.models.ChangedIdentitiesContext>`
"""
_attribute_map = {
'identities': {'key': 'identities', 'type': '[Identity]'},
'more_data': {'key': 'moreData', 'type': 'bool'},
'sequence_context': {'key': 'sequenceContext', 'type': 'ChangedIdentitiesContext'}
}
def __init__(self, identities=None, more_data=None, sequence_context=None):
super(ChangedIdentities, self).__init__()
self.identities = identities
self.more_data = more_data
self.sequence_context = sequence_context
class ChangedIdentitiesContext(Model):
"""
Context class for changed identities
:param group_sequence_id: Last Group SequenceId
:type group_sequence_id: int
:param identity_sequence_id: Last Identity SequenceId
:type identity_sequence_id: int
:param organization_identity_sequence_id: Last Group OrganizationIdentitySequenceId
:type organization_identity_sequence_id: int
:param page_size: Page size
:type page_size: int
"""
_attribute_map = {
'group_sequence_id': {'key': 'groupSequenceId', 'type': 'int'},
'identity_sequence_id': {'key': 'identitySequenceId', 'type': 'int'},
'organization_identity_sequence_id': {'key': 'organizationIdentitySequenceId', 'type': 'int'},
'page_size': {'key': 'pageSize', 'type': 'int'}
}
def __init__(self, group_sequence_id=None, identity_sequence_id=None, organization_identity_sequence_id=None, page_size=None):
super(ChangedIdentitiesContext, self).__init__()
self.group_sequence_id = group_sequence_id
self.identity_sequence_id = identity_sequence_id
self.organization_identity_sequence_id = organization_identity_sequence_id
self.page_size = page_size
class IdentityBase(Model):
"""
Base Identity class to allow "trimmed" identity class in the GetConnectionData API Makes sure that on-the-wire representations of the derived classes are compatible with each other (e.g. Server responds with PublicIdentity object while client deserializes it as Identity object) Derived classes should not have additional [DataMember] properties
:param custom_display_name: The custom display name for the identity (if any). Setting this property to an empty string will clear the existing custom display name. Setting this property to null will not affect the existing persisted value (since null values do not get sent over the wire or to the database)
:type custom_display_name: str
:param descriptor:
:type descriptor: :class:`str <azure.devops.v7_1.identities.models.str>`
:param id: Identity Identifier. Also called Storage Key, or VSID
:type id: str
:param is_active: True if the identity has a membership in any Azure Devops group in the organization.
:type is_active: bool
:param is_container: True if the identity is a group.
:type is_container: bool
:param master_id:
:type master_id: str
:param member_ids: Id of the members of the identity (groups only).
:type member_ids: list of str
:param member_of:
:type member_of: list of :class:`str <azure.devops.v7_1.identities.models.str>`
:param members:
:type members: list of :class:`str <azure.devops.v7_1.identities.models.str>`
:param meta_type_id:
:type meta_type_id: int
:param properties:
:type properties: :class:`object <azure.devops.v7_1.identities.models.object>`
:param provider_display_name: The display name for the identity as specified by the source identity provider.
:type provider_display_name: str
:param resource_version:
:type resource_version: int
:param social_descriptor:
:type social_descriptor: :class:`str <azure.devops.v7_1.identities.models.str>`
:param subject_descriptor: Subject descriptor of a Graph entity.
:type subject_descriptor: :class:`str <azure.devops.v7_1.identities.models.str>`
:param unique_user_id:
:type unique_user_id: int
"""
_attribute_map = {
'custom_display_name': {'key': 'customDisplayName', 'type': 'str'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_active': {'key': 'isActive', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'master_id': {'key': 'masterId', 'type': 'str'},
'member_ids': {'key': 'memberIds', 'type': '[str]'},
'member_of': {'key': 'memberOf', 'type': '[str]'},
'members': {'key': 'members', 'type': '[str]'},
'meta_type_id': {'key': 'metaTypeId', 'type': 'int'},
'properties': {'key': 'properties', 'type': 'object'},
'provider_display_name': {'key': 'providerDisplayName', 'type': 'str'},
'resource_version': {'key': 'resourceVersion', 'type': 'int'},
'social_descriptor': {'key': 'socialDescriptor', 'type': 'str'},
'subject_descriptor': {'key': 'subjectDescriptor', 'type': 'str'},
'unique_user_id': {'key': 'uniqueUserId', 'type': 'int'}
}
def __init__(self, custom_display_name=None, descriptor=None, id=None, is_active=None, is_container=None, master_id=None, member_ids=None, member_of=None, members=None, meta_type_id=None, properties=None, provider_display_name=None, resource_version=None, social_descriptor=None, subject_descriptor=None, unique_user_id=None):
super(IdentityBase, self).__init__()
self.custom_display_name = custom_display_name
self.descriptor = descriptor
self.id = id
self.is_active = is_active
self.is_container = is_container
self.master_id = master_id
self.member_ids = member_ids
self.member_of = member_of
self.members = members
self.meta_type_id = meta_type_id
self.properties = properties
self.provider_display_name = provider_display_name
self.resource_version = resource_version
self.social_descriptor = social_descriptor
self.subject_descriptor = subject_descriptor
self.unique_user_id = unique_user_id
class IdentityBatchInfo(Model):
"""
:param descriptors:
:type descriptors: list of :class:`str <azure.devops.v7_1.identities.models.str>`
:param identity_ids:
:type identity_ids: list of str
:param include_restricted_visibility:
:type include_restricted_visibility: bool
:param property_names:
:type property_names: list of str
:param query_membership:
:type query_membership: object
:param social_descriptors:
:type social_descriptors: list of :class:`str <azure.devops.v7_1.identities.models.str>`
:param subject_descriptors:
:type subject_descriptors: list of :class:`str <azure.devops.v7_1.identities.models.str>`
"""
_attribute_map = {
'descriptors': {'key': 'descriptors', 'type': '[str]'},
'identity_ids': {'key': 'identityIds', 'type': '[str]'},
'include_restricted_visibility': {'key': 'includeRestrictedVisibility', 'type': 'bool'},
'property_names': {'key': 'propertyNames', 'type': '[str]'},
'query_membership': {'key': 'queryMembership', 'type': 'object'},
'social_descriptors': {'key': 'socialDescriptors', 'type': '[str]'},
'subject_descriptors': {'key': 'subjectDescriptors', 'type': '[str]'}
}
def __init__(self, descriptors=None, identity_ids=None, include_restricted_visibility=None, property_names=None, query_membership=None, social_descriptors=None, subject_descriptors=None):
super(IdentityBatchInfo, self).__init__()
self.descriptors = descriptors
self.identity_ids = identity_ids
self.include_restricted_visibility = include_restricted_visibility
self.property_names = property_names
self.query_membership = query_membership
self.social_descriptors = social_descriptors
self.subject_descriptors = subject_descriptors
class IdentityRightsTransferData(Model):
"""
:param user_principal_name_mappings:
:type user_principal_name_mappings: dict
"""
_attribute_map = {
'user_principal_name_mappings': {'key': 'userPrincipalNameMappings', 'type': '{str}'}
}
def __init__(self, user_principal_name_mappings=None):
super(IdentityRightsTransferData, self).__init__()
self.user_principal_name_mappings = user_principal_name_mappings
class IdentityScope(Model):
"""
:param administrators:
:type administrators: :class:`str <azure.devops.v7_1.identities.models.str>`
:param id:
:type id: str
:param is_active:
:type is_active: bool
:param is_global:
:type is_global: bool
:param local_scope_id:
:type local_scope_id: str
:param name:
:type name: str
:param parent_id:
:type parent_id: str
:param scope_type:
:type scope_type: object
:param securing_host_id:
:type securing_host_id: str
:param subject_descriptor:
:type subject_descriptor: :class:`str <azure.devops.v7_1.identities.models.str>`
"""
_attribute_map = {
'administrators': {'key': 'administrators', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_active': {'key': 'isActive', 'type': 'bool'},
'is_global': {'key': 'isGlobal', 'type': 'bool'},
'local_scope_id': {'key': 'localScopeId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'parent_id': {'key': 'parentId', 'type': 'str'},
'scope_type': {'key': 'scopeType', 'type': 'object'},
'securing_host_id': {'key': 'securingHostId', 'type': 'str'},
'subject_descriptor': {'key': 'subjectDescriptor', 'type': 'str'}
}
def __init__(self, administrators=None, id=None, is_active=None, is_global=None, local_scope_id=None, name=None, parent_id=None, scope_type=None, securing_host_id=None, subject_descriptor=None):
super(IdentityScope, self).__init__()
self.administrators = administrators
self.id = id
self.is_active = is_active
self.is_global = is_global
self.local_scope_id = local_scope_id
self.name = name
self.parent_id = parent_id
self.scope_type = scope_type
self.securing_host_id = securing_host_id
self.subject_descriptor = subject_descriptor
class IdentitySelf(Model):
"""
Identity information.
:param account_name: The UserPrincipalName (UPN) of the account. This value comes from the source provider.
:type account_name: str
:param display_name: The display name. For AAD accounts with multiple tenants this is the display name of the profile in the home tenant.
:type display_name: str
:param domain: This represents the name of the container of origin. For AAD accounts this is the tenantID of the home tenant. For MSA accounts this is the string "Windows Live ID".
:type domain: str
:param id: This is the VSID of the home tenant profile. If the profile is signed into the home tenant or if the profile has no tenants then this Id is the same as the Id returned by the profile/profiles/me endpoint. Going forward it is recommended that you use the combined values of Origin, OriginId and Domain to uniquely identify a user rather than this Id.
:type id: str
:param origin: The type of source provider for the origin identifier. For MSA accounts this is "msa". For AAD accounts this is "aad".
:type origin: str
:param origin_id: The unique identifier from the system of origin. If there are multiple tenants this is the unique identifier of the account in the home tenant. (For MSA this is the PUID in hex notation, for AAD this is the object id.)
:type origin_id: str
:param tenants: For AAD accounts this is all of the tenants that this account is a member of.
:type tenants: list of :class:`TenantInfo <azure.devops.v7_1.identities.models.TenantInfo>`
"""
_attribute_map = {
'account_name': {'key': 'accountName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'domain': {'key': 'domain', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'origin_id': {'key': 'originId', 'type': 'str'},
'tenants': {'key': 'tenants', 'type': '[TenantInfo]'}
}
def __init__(self, account_name=None, display_name=None, domain=None, id=None, origin=None, origin_id=None, tenants=None):
super(IdentitySelf, self).__init__()
self.account_name = account_name
self.display_name = display_name
self.domain = domain
self.id = id
self.origin = origin
self.origin_id = origin_id
self.tenants = tenants
class IdentitySnapshot(Model):
"""
:param groups:
:type groups: list of :class:`Identity <azure.devops.v7_1.identities.models.Identity>`
:param identity_ids:
:type identity_ids: list of str
:param memberships:
:type memberships: list of :class:`GroupMembership <azure.devops.v7_1.identities.models.GroupMembership>`
:param scope_id:
:type scope_id: str
:param scopes:
:type scopes: list of :class:`IdentityScope <azure.devops.v7_1.identities.models.IdentityScope>`
"""
_attribute_map = {
'groups': {'key': 'groups', 'type': '[Identity]'},
'identity_ids': {'key': 'identityIds', 'type': '[str]'},
'memberships': {'key': 'memberships', 'type': '[GroupMembership]'},
'scope_id': {'key': 'scopeId', 'type': 'str'},
'scopes': {'key': 'scopes', 'type': '[IdentityScope]'}
}
def __init__(self, groups=None, identity_ids=None, memberships=None, scope_id=None, scopes=None):
super(IdentitySnapshot, self).__init__()
self.groups = groups
self.identity_ids = identity_ids
self.memberships = memberships
self.scope_id = scope_id
self.scopes = scopes
class IdentityUpdateData(Model):
"""
:param id:
:type id: str
:param index:
:type index: int
:param updated:
:type updated: bool
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'index': {'key': 'index', 'type': 'int'},
'updated': {'key': 'updated', 'type': 'bool'}
}
def __init__(self, id=None, index=None, updated=None):
super(IdentityUpdateData, self).__init__()
self.id = id
self.index = index
self.updated = updated
class JsonPatchOperation(Model):
"""
The JSON model for a JSON Patch operation
:param from_: The path to copy from for the Move/Copy operation.
:type from_: str
:param op: The patch operation
:type op: object
:param path: The path for the operation. In the case of an array, a zero based index can be used to specify the position in the array (e.g. /biscuits/0/name). The "-" character can be used instead of an index to insert at the end of the array (e.g. /biscuits/-).
:type path: str
:param value: The value for the operation. This is either a primitive or a JToken.
:type value: object
"""
_attribute_map = {
'from_': {'key': 'from', 'type': 'str'},
'op': {'key': 'op', 'type': 'object'},
'path': {'key': 'path', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'}
}
def __init__(self, from_=None, op=None, path=None, value=None):
super(JsonPatchOperation, self).__init__()
self.from_ = from_
self.op = op
self.path = path
self.value = value
class JsonWebToken(Model):
"""
"""
_attribute_map = {
}
def __init__(self):
super(JsonWebToken, self).__init__()
class PagedIdentities(Model):
"""
:param continuation_token:
:type continuation_token: list of str
:param identities:
:type identities: list of :class:`Identity <azure.devops.v7_1.identities.models.Identity>`
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': '[str]'},
'identities': {'key': 'identities', 'type': '[Identity]'}
}
def __init__(self, continuation_token=None, identities=None):
super(PagedIdentities, self).__init__()
self.continuation_token = continuation_token
self.identities = identities
class RefreshTokenGrant(AuthorizationGrant):
"""
:param grant_type:
:type grant_type: object
:param jwt:
:type jwt: :class:`JsonWebToken <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.JsonWebToken>`
"""
_attribute_map = {
'grant_type': {'key': 'grantType', 'type': 'object'},
'jwt': {'key': 'jwt', 'type': 'JsonWebToken'}
}
def __init__(self, grant_type=None, jwt=None):
super(RefreshTokenGrant, self).__init__(grant_type=grant_type)
self.jwt = jwt
class SwapIdentityInfo(Model):
"""
:param id1:
:type id1: str
:param id2:
:type id2: str
"""
_attribute_map = {
'id1': {'key': 'id1', 'type': 'str'},
'id2': {'key': 'id2', 'type': 'str'}
}
def __init__(self, id1=None, id2=None):
super(SwapIdentityInfo, self).__init__()
self.id1 = id1
self.id2 = id2
class TenantInfo(Model):
"""
:param home_tenant:
:type home_tenant: bool
:param tenant_id:
:type tenant_id: str
:param tenant_name:
:type tenant_name: str
:param verified_domains:
:type verified_domains: list of str
"""
_attribute_map = {
'home_tenant': {'key': 'homeTenant', 'type': 'bool'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'tenant_name': {'key': 'tenantName', 'type': 'str'},
'verified_domains': {'key': 'verifiedDomains', 'type': '[str]'}
}
def __init__(self, home_tenant=None, tenant_id=None, tenant_name=None, verified_domains=None):
super(TenantInfo, self).__init__()
self.home_tenant = home_tenant
self.tenant_id = tenant_id
self.tenant_name = tenant_name
self.verified_domains = verified_domains
class Identity(IdentityBase):
"""
:param custom_display_name: The custom display name for the identity (if any). Setting this property to an empty string will clear the existing custom display name. Setting this property to null will not affect the existing persisted value (since null values do not get sent over the wire or to the database)
:type custom_display_name: str
:param descriptor:
:type descriptor: :class:`str <azure.devops.v7_1.identities.models.str>`
:param id: Identity Identifier. Also called Storage Key, or VSID
:type id: str
:param is_active: True if the identity has a membership in any Azure Devops group in the organization.
:type is_active: bool
:param is_container: True if the identity is a group.
:type is_container: bool
:param master_id:
:type master_id: str
:param member_ids: Id of the members of the identity (groups only).
:type member_ids: list of str
:param member_of:
:type member_of: list of :class:`str <azure.devops.v7_1.identities.models.str>`
:param members:
:type members: list of :class:`str <azure.devops.v7_1.identities.models.str>`
:param meta_type_id:
:type meta_type_id: int
:param properties:
:type properties: :class:`object <azure.devops.v7_1.identities.models.object>`
:param provider_display_name: The display name for the identity as specified by the source identity provider.
:type provider_display_name: str
:param resource_version:
:type resource_version: int
:param social_descriptor:
:type social_descriptor: :class:`str <azure.devops.v7_1.identities.models.str>`
:param subject_descriptor: Subject descriptor of a Graph entity.
:type subject_descriptor: :class:`str <azure.devops.v7_1.identities.models.str>`
:param unique_user_id:
:type unique_user_id: int
"""
_attribute_map = {
'custom_display_name': {'key': 'customDisplayName', 'type': 'str'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_active': {'key': 'isActive', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'master_id': {'key': 'masterId', 'type': 'str'},
'member_ids': {'key': 'memberIds', 'type': '[str]'},
'member_of': {'key': 'memberOf', 'type': '[str]'},
'members': {'key': 'members', 'type': '[str]'},
'meta_type_id': {'key': 'metaTypeId', 'type': 'int'},
'properties': {'key': 'properties', 'type': 'object'},
'provider_display_name': {'key': 'providerDisplayName', 'type': 'str'},
'resource_version': {'key': 'resourceVersion', 'type': 'int'},
'social_descriptor': {'key': 'socialDescriptor', 'type': 'str'},
'subject_descriptor': {'key': 'subjectDescriptor', 'type': 'str'},
'unique_user_id': {'key': 'uniqueUserId', 'type': 'int'},
}
def __init__(self, custom_display_name=None, descriptor=None, id=None, is_active=None, is_container=None, master_id=None, member_ids=None, member_of=None, members=None, meta_type_id=None, properties=None, provider_display_name=None, resource_version=None, social_descriptor=None, subject_descriptor=None, unique_user_id=None):
super(Identity, self).__init__(custom_display_name=custom_display_name, descriptor=descriptor, id=id, is_active=is_active, is_container=is_container, master_id=master_id, member_ids=member_ids, member_of=member_of, members=members, meta_type_id=meta_type_id, properties=properties, provider_display_name=provider_display_name, resource_version=resource_version, social_descriptor=social_descriptor, subject_descriptor=subject_descriptor, unique_user_id=unique_user_id)
__all__ = [
'AccessTokenResult',
'AuthorizationGrant',
'CreateScopeInfo',
'FrameworkIdentityInfo',
'GroupMembership',
'ChangedIdentities',
'ChangedIdentitiesContext',
'IdentityBase',
'IdentityBatchInfo',
'IdentityRightsTransferData',
'IdentityScope',
'IdentitySelf',
'IdentitySnapshot',
'IdentityUpdateData',
'JsonPatchOperation',
'JsonWebToken',
'PagedIdentities',
'RefreshTokenGrant',
'SwapIdentityInfo',
'TenantInfo',
'Identity',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/identity/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/identity/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 11205
}
| 396 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .nuget_client import NuGetClient
__all__ = [
'BatchListData',
'BatchOperationData',
'JsonPatchOperation',
'MinimalPackageDetails',
'NuGetPackagesBatchRequest',
'NuGetPackageVersionDeletionState',
'NuGetRecycleBinPackageVersionDetails',
'Package',
'PackageVersionDetails',
'ReferenceLinks',
'UpstreamingBehavior',
'UpstreamSourceInfo',
'NuGetClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/nuget/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/nuget/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 246
}
| 397 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType)
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType)
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class PolicyConfigurationRef(Model):
"""
Policy configuration reference.
:param id: The policy configuration ID.
:type id: int
:param type: The policy configuration type.
:type type: :class:`PolicyTypeRef <azure.devops.v7_1.policy.models.PolicyTypeRef>`
:param url: The URL where the policy configuration can be retrieved.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'type': {'key': 'type', 'type': 'PolicyTypeRef'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, type=None, url=None):
super(PolicyConfigurationRef, self).__init__()
self.id = id
self.type = type
self.url = url
class PolicyEvaluationRecord(Model):
"""
This record encapsulates the current state of a policy as it applies to one specific pull request. Each pull request has a unique PolicyEvaluationRecord for each pull request which the policy applies to.
:param _links: Links to other related objects
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.policy.models.ReferenceLinks>`
:param artifact_id: A string which uniquely identifies the target of a policy evaluation.
:type artifact_id: str
:param completed_date: Time when this policy finished evaluating on this pull request.
:type completed_date: datetime
:param configuration: Contains all configuration data for the policy which is being evaluated.
:type configuration: :class:`PolicyConfiguration <azure.devops.v7_1.policy.models.PolicyConfiguration>`
:param context: Internal context data of this policy evaluation.
:type context: :class:`object <azure.devops.v7_1.policy.models.object>`
:param evaluation_id: Guid which uniquely identifies this evaluation record (one policy running on one pull request).
:type evaluation_id: str
:param started_date: Time when this policy was first evaluated on this pull request.
:type started_date: datetime
:param status: Status of the policy (Running, Approved, Failed, etc.)
:type status: object
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'artifact_id': {'key': 'artifactId', 'type': 'str'},
'completed_date': {'key': 'completedDate', 'type': 'iso-8601'},
'configuration': {'key': 'configuration', 'type': 'PolicyConfiguration'},
'context': {'key': 'context', 'type': 'object'},
'evaluation_id': {'key': 'evaluationId', 'type': 'str'},
'started_date': {'key': 'startedDate', 'type': 'iso-8601'},
'status': {'key': 'status', 'type': 'object'}
}
def __init__(self, _links=None, artifact_id=None, completed_date=None, configuration=None, context=None, evaluation_id=None, started_date=None, status=None):
super(PolicyEvaluationRecord, self).__init__()
self._links = _links
self.artifact_id = artifact_id
self.completed_date = completed_date
self.configuration = configuration
self.context = context
self.evaluation_id = evaluation_id
self.started_date = started_date
self.status = status
class PolicyTypeRef(Model):
"""
Policy type reference.
:param display_name: Display name of the policy type.
:type display_name: str
:param id: The policy type ID.
:type id: str
:param url: The URL where the policy type can be retrieved.
:type url: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, display_name=None, id=None, url=None):
super(PolicyTypeRef, self).__init__()
self.display_name = display_name
self.id = id
self.url = url
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class VersionedPolicyConfigurationRef(PolicyConfigurationRef):
"""
A particular revision for a policy configuration.
:param id: The policy configuration ID.
:type id: int
:param type: The policy configuration type.
:type type: :class:`PolicyTypeRef <azure.devops.v7_1.policy.models.PolicyTypeRef>`
:param url: The URL where the policy configuration can be retrieved.
:type url: str
:param revision: The policy configuration revision ID.
:type revision: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'type': {'key': 'type', 'type': 'PolicyTypeRef'},
'url': {'key': 'url', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'int'}
}
def __init__(self, id=None, type=None, url=None, revision=None):
super(VersionedPolicyConfigurationRef, self).__init__(id=id, type=type, url=url)
self.revision = revision
class PolicyConfiguration(VersionedPolicyConfigurationRef):
"""
The full policy configuration with settings.
:param id: The policy configuration ID.
:type id: int
:param type: The policy configuration type.
:type type: :class:`PolicyTypeRef <azure.devops.v7_1.policy.models.PolicyTypeRef>`
:param url: The URL where the policy configuration can be retrieved.
:type url: str
:param revision: The policy configuration revision ID.
:type revision: int
:param _links: The links to other objects related to this object.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.policy.models.ReferenceLinks>`
:param created_by: A reference to the identity that created the policy.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.policy.models.IdentityRef>`
:param created_date: The date and time when the policy was created.
:type created_date: datetime
:param is_blocking: Indicates whether the policy is blocking.
:type is_blocking: bool
:param is_deleted: Indicates whether the policy has been (soft) deleted.
:type is_deleted: bool
:param is_enabled: Indicates whether the policy is enabled.
:type is_enabled: bool
:param is_enterprise_managed: If set, this policy requires "Manage Enterprise Policies" permission to create, edit, or delete.
:type is_enterprise_managed: bool
:param settings: The policy configuration settings.
:type settings: :class:`object <azure.devops.v7_1.policy.models.object>`
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'type': {'key': 'type', 'type': 'PolicyTypeRef'},
'url': {'key': 'url', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'int'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'is_blocking': {'key': 'isBlocking', 'type': 'bool'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
'is_enterprise_managed': {'key': 'isEnterpriseManaged', 'type': 'bool'},
'settings': {'key': 'settings', 'type': 'object'}
}
def __init__(self, id=None, type=None, url=None, revision=None, _links=None, created_by=None, created_date=None, is_blocking=None, is_deleted=None, is_enabled=None, is_enterprise_managed=None, settings=None):
super(PolicyConfiguration, self).__init__(id=id, type=type, url=url, revision=revision)
self._links = _links
self.created_by = created_by
self.created_date = created_date
self.is_blocking = is_blocking
self.is_deleted = is_deleted
self.is_enabled = is_enabled
self.is_enterprise_managed = is_enterprise_managed
self.settings = settings
class PolicyType(PolicyTypeRef):
"""
User-friendly policy type with description (used for querying policy types).
:param display_name: Display name of the policy type.
:type display_name: str
:param id: The policy type ID.
:type id: str
:param url: The URL where the policy type can be retrieved.
:type url: str
:param _links: The links to other objects related to this object.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.policy.models.ReferenceLinks>`
:param description: Detailed description of the policy type.
:type description: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'description': {'key': 'description', 'type': 'str'}
}
def __init__(self, display_name=None, id=None, url=None, _links=None, description=None):
super(PolicyType, self).__init__(display_name=display_name, id=id, url=url)
self._links = _links
self.description = description
__all__ = [
'GraphSubjectBase',
'IdentityRef',
'PolicyConfigurationRef',
'PolicyEvaluationRecord',
'PolicyTypeRef',
'ReferenceLinks',
'VersionedPolicyConfigurationRef',
'PolicyConfiguration',
'PolicyType',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/policy/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/policy/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 5377
}
| 398 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class PyPiApiClient(Client):
"""PyPiApi
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(PyPiApiClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = '92f0314b-06c5-46e0-abe7-15fd9d13276a'
def download_package(self, feed_id, package_name, package_version, file_name, project=None, **kwargs):
"""DownloadPackage.
[Preview API] Download a python package file directly. This API is intended for manual UI download options, not for programmatic access and scripting.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str file_name: Name of the file in the package
:param str project: Project ID or project name
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
if file_name is not None:
route_values['fileName'] = self._serialize.url('file_name', file_name, 'str')
response = self._send(http_method='GET',
location_id='97218bae-a64d-4381-9257-b5b7951f0b98',
version='7.1-preview.1',
route_values=route_values,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def update_package_versions(self, batch_request, feed_id, project=None):
"""UpdatePackageVersions.
[Preview API] Update several packages from a single feed in a single request. The updates to the packages do not happen atomically.
:param :class:`<PyPiPackagesBatchRequest> <azure.devops.v7_1.py_pi_api.models.PyPiPackagesBatchRequest>` batch_request: Information about the packages to update, the operation to perform, and its associated data.
:param str feed_id: Name or ID of the feed.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
content = self._serialize.body(batch_request, 'PyPiPackagesBatchRequest')
self._send(http_method='POST',
location_id='4e53d561-70c1-4c98-b937-0f22acb27b0b',
version='7.1-preview.1',
route_values=route_values,
content=content)
def update_recycle_bin_package_versions(self, batch_request, feed_id, project=None):
"""UpdateRecycleBinPackageVersions.
[Preview API] Delete or restore several package versions from the recycle bin.
:param :class:`<PyPiPackagesBatchRequest> <azure.devops.v7_1.py_pi_api.models.PyPiPackagesBatchRequest>` batch_request: Information about the packages to update, the operation to perform, and its associated data. <c>Operation</c> must be <c>PermanentDelete</c> or <c>RestoreToFeed</c>
:param str feed_id: Feed which contains the packages to update.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
content = self._serialize.body(batch_request, 'PyPiPackagesBatchRequest')
self._send(http_method='POST',
location_id='d2d89918-c69e-4ef4-b357-1c3ccb4d28d2',
version='7.1-preview.1',
route_values=route_values,
content=content)
def delete_package_version_from_recycle_bin(self, feed_id, package_name, package_version, project=None):
"""DeletePackageVersionFromRecycleBin.
[Preview API] Delete a package version from the feed, moving it to the recycle bin.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
self._send(http_method='DELETE',
location_id='07143752-3d94-45fd-86c2-0c77ed87847b',
version='7.1-preview.1',
route_values=route_values)
def get_package_version_metadata_from_recycle_bin(self, feed_id, package_name, package_version, project=None):
"""GetPackageVersionMetadataFromRecycleBin.
[Preview API] Get information about a package version in the recycle bin.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
:rtype: :class:`<PyPiPackageVersionDeletionState> <azure.devops.v7_1.py_pi_api.models.PyPiPackageVersionDeletionState>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
response = self._send(http_method='GET',
location_id='07143752-3d94-45fd-86c2-0c77ed87847b',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('PyPiPackageVersionDeletionState', response)
def restore_package_version_from_recycle_bin(self, package_version_details, feed_id, package_name, package_version, project=None):
"""RestorePackageVersionFromRecycleBin.
[Preview API] Restore a package version from the recycle bin to its associated feed.
:param :class:`<PyPiRecycleBinPackageVersionDetails> <azure.devops.v7_1.py_pi_api.models.PyPiRecycleBinPackageVersionDetails>` package_version_details: Set the 'Deleted' state to 'false' to restore the package to its feed.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
content = self._serialize.body(package_version_details, 'PyPiRecycleBinPackageVersionDetails')
self._send(http_method='PATCH',
location_id='07143752-3d94-45fd-86c2-0c77ed87847b',
version='7.1-preview.1',
route_values=route_values,
content=content)
def get_upstreaming_behavior(self, feed_id, package_name, project=None):
"""GetUpstreamingBehavior.
[Preview API] Get the upstreaming behavior of a package within the context of a feed
:param str feed_id: The name or id of the feed
:param str package_name: The name of the package
:param str project: Project ID or project name
:rtype: :class:`<UpstreamingBehavior> <azure.devops.v7_1.py_pi_api.models.UpstreamingBehavior>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
response = self._send(http_method='GET',
location_id='21b8c9a7-7080-45be-a5ba-e50bb4c18130',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('UpstreamingBehavior', response)
def set_upstreaming_behavior(self, feed_id, package_name, behavior, project=None):
"""SetUpstreamingBehavior.
[Preview API] Set the upstreaming behavior of a package within the context of a feed
:param str feed_id: The name or id of the feed
:param str package_name: The name of the package
:param :class:`<UpstreamingBehavior> <azure.devops.v7_1.py_pi_api.models.UpstreamingBehavior>` behavior: The behavior to apply to the package within the scope of the feed
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
content = self._serialize.body(behavior, 'UpstreamingBehavior')
self._send(http_method='PATCH',
location_id='21b8c9a7-7080-45be-a5ba-e50bb4c18130',
version='7.1-preview.1',
route_values=route_values,
content=content)
def delete_package_version(self, feed_id, package_name, package_version, project=None):
"""DeletePackageVersion.
[Preview API] Delete a package version, moving it to the recycle bin.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
:rtype: :class:`<Package> <azure.devops.v7_1.py_pi_api.models.Package>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
response = self._send(http_method='DELETE',
location_id='d146ac7e-9e3f-4448-b956-f9bb3bdf9b2e',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('Package', response)
def get_package_version(self, feed_id, package_name, package_version, project=None, show_deleted=None):
"""GetPackageVersion.
[Preview API] Get information about a package version.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
:param bool show_deleted: True to show information for deleted package versions.
:rtype: :class:`<Package> <azure.devops.v7_1.py_pi_api.models.Package>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
query_parameters = {}
if show_deleted is not None:
query_parameters['showDeleted'] = self._serialize.query('show_deleted', show_deleted, 'bool')
response = self._send(http_method='GET',
location_id='d146ac7e-9e3f-4448-b956-f9bb3bdf9b2e',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Package', response)
def update_package_version(self, package_version_details, feed_id, package_name, package_version, project=None):
"""UpdatePackageVersion.
[Preview API] Update state for a package version.
:param :class:`<PackageVersionDetails> <azure.devops.v7_1.py_pi_api.models.PackageVersionDetails>` package_version_details: Details to be updated.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
content = self._serialize.body(package_version_details, 'PackageVersionDetails')
self._send(http_method='PATCH',
location_id='d146ac7e-9e3f-4448-b956-f9bb3bdf9b2e',
version='7.1-preview.1',
route_values=route_values,
content=content)
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/py_pi_api/py_pi_api_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/py_pi_api/py_pi_api_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 6933
}
| 399 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .service_hooks_client import ServiceHooksClient
__all__ = [
'Consumer',
'ConsumerAction',
'Event',
'EventTypeDescriptor',
'ExternalConfigurationDescriptor',
'FormattedEventMessage',
'GraphSubjectBase',
'IdentityRef',
'InputDescriptor',
'InputFilter',
'InputFilterCondition',
'InputValidation',
'InputValue',
'InputValues',
'InputValuesError',
'InputValuesQuery',
'Notification',
'NotificationDetails',
'NotificationResultsSummaryDetail',
'NotificationsQuery',
'NotificationSummary',
'Publisher',
'PublisherEvent',
'PublishersQuery',
'ReferenceLinks',
'ResourceContainer',
'SessionToken',
'Subscription',
'SubscriptionDiagnostics',
'SubscriptionInputValuesQuery',
'SubscriptionsQuery',
'SubscriptionTracing',
'UpdateSubscripitonDiagnosticsParameters',
'UpdateSubscripitonTracingParameters',
'VersionedResource',
'ServiceHooksClient'
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/service_hooks/__init__.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/service_hooks/__init__.py",
"repo_id": "azure-devops-python-api",
"token_count": 443
}
| 400 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class TestClient(Client):
"""Test
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(TestClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = 'c2aa639c-3ccc-4740-b3b6-ce2a1e1d984e'
def create_test_result_attachment(self, attachment_request_model, project, run_id, test_case_result_id):
"""CreateTestResultAttachment.
[Preview API] Attach a file to a test result.
:param :class:`<TestAttachmentRequestModel> <azure.devops.v7_1.test.models.TestAttachmentRequestModel>` attachment_request_model: Attachment details TestAttachmentRequestModel
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the result.
:param int test_case_result_id: ID of the test result against which attachment has to be uploaded.
:rtype: :class:`<TestAttachmentReference> <azure.devops.v7_1.test.models.TestAttachmentReference>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
content = self._serialize.body(attachment_request_model, 'TestAttachmentRequestModel')
response = self._send(http_method='POST',
location_id='2bffebe9-2f0f-4639-9af8-56129e9fed2d',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('TestAttachmentReference', response)
def create_test_sub_result_attachment(self, attachment_request_model, project, run_id, test_case_result_id, test_sub_result_id):
"""CreateTestSubResultAttachment.
[Preview API] Attach a file to a test result
:param :class:`<TestAttachmentRequestModel> <azure.devops.v7_1.test.models.TestAttachmentRequestModel>` attachment_request_model: Attachment Request Model.
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the result.
:param int test_case_result_id: ID of the test results that contains sub result.
:param int test_sub_result_id: ID of the test sub results against which attachment has to be uploaded.
:rtype: :class:`<TestAttachmentReference> <azure.devops.v7_1.test.models.TestAttachmentReference>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
query_parameters = {}
if test_sub_result_id is not None:
query_parameters['testSubResultId'] = self._serialize.query('test_sub_result_id', test_sub_result_id, 'int')
content = self._serialize.body(attachment_request_model, 'TestAttachmentRequestModel')
response = self._send(http_method='POST',
location_id='2bffebe9-2f0f-4639-9af8-56129e9fed2d',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('TestAttachmentReference', response)
def get_test_result_attachment_content(self, project, run_id, test_case_result_id, attachment_id, **kwargs):
"""GetTestResultAttachmentContent.
[Preview API] Download a test result attachment by its ID.
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the testCaseResultId.
:param int test_case_result_id: ID of the test result whose attachment has to be downloaded.
:param int attachment_id: ID of the test result attachment to be downloaded.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
if attachment_id is not None:
route_values['attachmentId'] = self._serialize.url('attachment_id', attachment_id, 'int')
response = self._send(http_method='GET',
location_id='2bffebe9-2f0f-4639-9af8-56129e9fed2d',
version='7.1-preview.1',
route_values=route_values,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_test_result_attachments(self, project, run_id, test_case_result_id):
"""GetTestResultAttachments.
[Preview API] Get list of test result attachments reference.
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the result.
:param int test_case_result_id: ID of the test result.
:rtype: [TestAttachment]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
response = self._send(http_method='GET',
location_id='2bffebe9-2f0f-4639-9af8-56129e9fed2d',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('[TestAttachment]', self._unwrap_collection(response))
def get_test_result_attachment_zip(self, project, run_id, test_case_result_id, attachment_id, **kwargs):
"""GetTestResultAttachmentZip.
[Preview API] Download a test result attachment by its ID.
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the testCaseResultId.
:param int test_case_result_id: ID of the test result whose attachment has to be downloaded.
:param int attachment_id: ID of the test result attachment to be downloaded.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
if attachment_id is not None:
route_values['attachmentId'] = self._serialize.url('attachment_id', attachment_id, 'int')
response = self._send(http_method='GET',
location_id='2bffebe9-2f0f-4639-9af8-56129e9fed2d',
version='7.1-preview.1',
route_values=route_values,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_test_sub_result_attachment_content(self, project, run_id, test_case_result_id, attachment_id, test_sub_result_id, **kwargs):
"""GetTestSubResultAttachmentContent.
[Preview API] Download a test sub result attachment
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the result.
:param int test_case_result_id: ID of the test results that contains sub result.
:param int attachment_id: ID of the test result attachment to be downloaded
:param int test_sub_result_id: ID of the test sub result whose attachment has to be downloaded
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
if attachment_id is not None:
route_values['attachmentId'] = self._serialize.url('attachment_id', attachment_id, 'int')
query_parameters = {}
if test_sub_result_id is not None:
query_parameters['testSubResultId'] = self._serialize.query('test_sub_result_id', test_sub_result_id, 'int')
response = self._send(http_method='GET',
location_id='2bffebe9-2f0f-4639-9af8-56129e9fed2d',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_test_sub_result_attachments(self, project, run_id, test_case_result_id, test_sub_result_id):
"""GetTestSubResultAttachments.
[Preview API] Get list of test sub result attachments
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the result.
:param int test_case_result_id: ID of the test results that contains sub result.
:param int test_sub_result_id: ID of the test sub result whose attachment has to be downloaded
:rtype: [TestAttachment]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
query_parameters = {}
if test_sub_result_id is not None:
query_parameters['testSubResultId'] = self._serialize.query('test_sub_result_id', test_sub_result_id, 'int')
response = self._send(http_method='GET',
location_id='2bffebe9-2f0f-4639-9af8-56129e9fed2d',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TestAttachment]', self._unwrap_collection(response))
def get_test_sub_result_attachment_zip(self, project, run_id, test_case_result_id, attachment_id, test_sub_result_id, **kwargs):
"""GetTestSubResultAttachmentZip.
[Preview API] Download a test sub result attachment
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the result.
:param int test_case_result_id: ID of the test results that contains sub result.
:param int attachment_id: ID of the test result attachment to be downloaded
:param int test_sub_result_id: ID of the test sub result whose attachment has to be downloaded
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
if attachment_id is not None:
route_values['attachmentId'] = self._serialize.url('attachment_id', attachment_id, 'int')
query_parameters = {}
if test_sub_result_id is not None:
query_parameters['testSubResultId'] = self._serialize.query('test_sub_result_id', test_sub_result_id, 'int')
response = self._send(http_method='GET',
location_id='2bffebe9-2f0f-4639-9af8-56129e9fed2d',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def create_test_run_attachment(self, attachment_request_model, project, run_id):
"""CreateTestRunAttachment.
[Preview API] Attach a file to a test run.
:param :class:`<TestAttachmentRequestModel> <azure.devops.v7_1.test.models.TestAttachmentRequestModel>` attachment_request_model: Attachment details TestAttachmentRequestModel
:param str project: Project ID or project name
:param int run_id: ID of the test run against which attachment has to be uploaded.
:rtype: :class:`<TestAttachmentReference> <azure.devops.v7_1.test.models.TestAttachmentReference>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
content = self._serialize.body(attachment_request_model, 'TestAttachmentRequestModel')
response = self._send(http_method='POST',
location_id='4f004af4-a507-489c-9b13-cb62060beb11',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('TestAttachmentReference', response)
def get_test_run_attachment_content(self, project, run_id, attachment_id, **kwargs):
"""GetTestRunAttachmentContent.
[Preview API] Download a test run attachment by its ID.
:param str project: Project ID or project name
:param int run_id: ID of the test run whose attachment has to be downloaded.
:param int attachment_id: ID of the test run attachment to be downloaded.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if attachment_id is not None:
route_values['attachmentId'] = self._serialize.url('attachment_id', attachment_id, 'int')
response = self._send(http_method='GET',
location_id='4f004af4-a507-489c-9b13-cb62060beb11',
version='7.1-preview.1',
route_values=route_values,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_test_run_attachments(self, project, run_id):
"""GetTestRunAttachments.
[Preview API] Get list of test run attachments reference.
:param str project: Project ID or project name
:param int run_id: ID of the test run.
:rtype: [TestAttachment]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
response = self._send(http_method='GET',
location_id='4f004af4-a507-489c-9b13-cb62060beb11',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('[TestAttachment]', self._unwrap_collection(response))
def get_test_run_attachment_zip(self, project, run_id, attachment_id, **kwargs):
"""GetTestRunAttachmentZip.
[Preview API] Download a test run attachment by its ID.
:param str project: Project ID or project name
:param int run_id: ID of the test run whose attachment has to be downloaded.
:param int attachment_id: ID of the test run attachment to be downloaded.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if attachment_id is not None:
route_values['attachmentId'] = self._serialize.url('attachment_id', attachment_id, 'int')
response = self._send(http_method='GET',
location_id='4f004af4-a507-489c-9b13-cb62060beb11',
version='7.1-preview.1',
route_values=route_values,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_build_code_coverage(self, project, build_id, flags):
"""GetBuildCodeCoverage.
[Preview API] Get code coverage data for a build.
:param str project: Project ID or project name
:param int build_id: ID of the build for which code coverage data needs to be fetched.
:param int flags: Value of flags determine the level of code coverage details to be fetched. Flags are additive. Expected Values are 1 for Modules, 2 for Functions, 4 for BlockData.
:rtype: [BuildCoverage]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if build_id is not None:
query_parameters['buildId'] = self._serialize.query('build_id', build_id, 'int')
if flags is not None:
query_parameters['flags'] = self._serialize.query('flags', flags, 'int')
response = self._send(http_method='GET',
location_id='77560e8a-4e8c-4d59-894e-a5f264c24444',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[BuildCoverage]', self._unwrap_collection(response))
def get_test_run_code_coverage(self, project, run_id, flags):
"""GetTestRunCodeCoverage.
[Preview API] Get code coverage data for a test run
:param str project: Project ID or project name
:param int run_id: ID of the test run for which code coverage data needs to be fetched.
:param int flags: Value of flags determine the level of code coverage details to be fetched. Flags are additive. Expected Values are 1 for Modules, 2 for Functions, 4 for BlockData.
:rtype: [TestRunCoverage]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
query_parameters = {}
if flags is not None:
query_parameters['flags'] = self._serialize.query('flags', flags, 'int')
response = self._send(http_method='GET',
location_id='9629116f-3b89-4ed8-b358-d4694efda160',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TestRunCoverage]', self._unwrap_collection(response))
def get_test_iteration(self, project, run_id, test_case_result_id, iteration_id, include_action_results=None):
"""GetTestIteration.
[Preview API] Get iteration for a result
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the result.
:param int test_case_result_id: ID of the test result that contains the iterations.
:param int iteration_id: Id of the test results Iteration.
:param bool include_action_results: Include result details for each action performed in the test iteration. ActionResults refer to outcome (pass/fail) of test steps that are executed as part of a running a manual test. Including the ActionResults flag gets the outcome of test steps in the actionResults section and test parameters in the parameters section for each test iteration.
:rtype: :class:`<TestIterationDetailsModel> <azure.devops.v7_1.test.models.TestIterationDetailsModel>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
if iteration_id is not None:
route_values['iterationId'] = self._serialize.url('iteration_id', iteration_id, 'int')
query_parameters = {}
if include_action_results is not None:
query_parameters['includeActionResults'] = self._serialize.query('include_action_results', include_action_results, 'bool')
response = self._send(http_method='GET',
location_id='73eb9074-3446-4c44-8296-2f811950ff8d',
version='7.1-preview.3',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TestIterationDetailsModel', response)
def get_test_iterations(self, project, run_id, test_case_result_id, include_action_results=None):
"""GetTestIterations.
[Preview API] Get iterations for a result
:param str project: Project ID or project name
:param int run_id: ID of the test run that contains the result.
:param int test_case_result_id: ID of the test result that contains the iterations.
:param bool include_action_results: Include result details for each action performed in the test iteration. ActionResults refer to outcome (pass/fail) of test steps that are executed as part of a running a manual test. Including the ActionResults flag gets the outcome of test steps in the actionResults section and test parameters in the parameters section for each test iteration.
:rtype: [TestIterationDetailsModel]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
query_parameters = {}
if include_action_results is not None:
query_parameters['includeActionResults'] = self._serialize.query('include_action_results', include_action_results, 'bool')
response = self._send(http_method='GET',
location_id='73eb9074-3446-4c44-8296-2f811950ff8d',
version='7.1-preview.3',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TestIterationDetailsModel]', self._unwrap_collection(response))
def get_point(self, project, plan_id, suite_id, point_ids, wit_fields=None):
"""GetPoint.
[Preview API] Get a test point.
:param str project: Project ID or project name
:param int plan_id: ID of the test plan.
:param int suite_id: ID of the suite that contains the point.
:param int point_ids: ID of the test point to get.
:param str wit_fields: Comma-separated list of work item field names.
:rtype: :class:`<TestPoint> <azure.devops.v7_1.test.models.TestPoint>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'int')
if suite_id is not None:
route_values['suiteId'] = self._serialize.url('suite_id', suite_id, 'int')
if point_ids is not None:
route_values['pointIds'] = self._serialize.url('point_ids', point_ids, 'int')
query_parameters = {}
if wit_fields is not None:
query_parameters['witFields'] = self._serialize.query('wit_fields', wit_fields, 'str')
response = self._send(http_method='GET',
location_id='3bcfd5c8-be62-488e-b1da-b8289ce9299c',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TestPoint', response)
def get_points(self, project, plan_id, suite_id, wit_fields=None, configuration_id=None, test_case_id=None, test_point_ids=None, include_point_details=None, skip=None, top=None):
"""GetPoints.
[Preview API] Get a list of test points.
:param str project: Project ID or project name
:param int plan_id: ID of the test plan.
:param int suite_id: ID of the suite that contains the points.
:param str wit_fields: Comma-separated list of work item field names.
:param str configuration_id: Get test points for specific configuration.
:param str test_case_id: Get test points for a specific test case, valid when configurationId is not set.
:param str test_point_ids: Get test points for comma-separated list of test point IDs, valid only when configurationId and testCaseId are not set.
:param bool include_point_details: Include all properties for the test point.
:param int skip: Number of test points to skip..
:param int top: Number of test points to return.
:rtype: [TestPoint]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'int')
if suite_id is not None:
route_values['suiteId'] = self._serialize.url('suite_id', suite_id, 'int')
query_parameters = {}
if wit_fields is not None:
query_parameters['witFields'] = self._serialize.query('wit_fields', wit_fields, 'str')
if configuration_id is not None:
query_parameters['configurationId'] = self._serialize.query('configuration_id', configuration_id, 'str')
if test_case_id is not None:
query_parameters['testCaseId'] = self._serialize.query('test_case_id', test_case_id, 'str')
if test_point_ids is not None:
query_parameters['testPointIds'] = self._serialize.query('test_point_ids', test_point_ids, 'str')
if include_point_details is not None:
query_parameters['includePointDetails'] = self._serialize.query('include_point_details', include_point_details, 'bool')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='3bcfd5c8-be62-488e-b1da-b8289ce9299c',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TestPoint]', self._unwrap_collection(response))
def update_test_points(self, point_update_model, project, plan_id, suite_id, point_ids):
"""UpdateTestPoints.
[Preview API] Update test points.
:param :class:`<PointUpdateModel> <azure.devops.v7_1.test.models.PointUpdateModel>` point_update_model: Data to update.
:param str project: Project ID or project name
:param int plan_id: ID of the test plan.
:param int suite_id: ID of the suite that contains the points.
:param str point_ids: ID of the test point to get. Use a comma-separated list of IDs to update multiple test points.
:rtype: [TestPoint]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'int')
if suite_id is not None:
route_values['suiteId'] = self._serialize.url('suite_id', suite_id, 'int')
if point_ids is not None:
route_values['pointIds'] = self._serialize.url('point_ids', point_ids, 'str')
content = self._serialize.body(point_update_model, 'PointUpdateModel')
response = self._send(http_method='PATCH',
location_id='3bcfd5c8-be62-488e-b1da-b8289ce9299c',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('[TestPoint]', self._unwrap_collection(response))
def get_points_by_query(self, query, project, skip=None, top=None):
"""GetPointsByQuery.
[Preview API] Get test points using query.
:param :class:`<TestPointsQuery> <azure.devops.v7_1.test.models.TestPointsQuery>` query: TestPointsQuery to get test points.
:param str project: Project ID or project name
:param int skip: Number of test points to skip..
:param int top: Number of test points to return.
:rtype: :class:`<TestPointsQuery> <azure.devops.v7_1.test.models.TestPointsQuery>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
content = self._serialize.body(query, 'TestPointsQuery')
response = self._send(http_method='POST',
location_id='b4264fd0-a5d1-43e2-82a5-b9c46b7da9ce',
version='7.1-preview.2',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('TestPointsQuery', response)
def get_result_retention_settings(self, project):
"""GetResultRetentionSettings.
[Preview API] Get test result retention settings
:param str project: Project ID or project name
:rtype: :class:`<ResultRetentionSettings> <azure.devops.v7_1.test.models.ResultRetentionSettings>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='a3206d9e-fa8d-42d3-88cb-f75c51e69cde',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('ResultRetentionSettings', response)
def update_result_retention_settings(self, retention_settings, project):
"""UpdateResultRetentionSettings.
[Preview API] Update test result retention settings
:param :class:`<ResultRetentionSettings> <azure.devops.v7_1.test.models.ResultRetentionSettings>` retention_settings: Test result retention settings details to be updated
:param str project: Project ID or project name
:rtype: :class:`<ResultRetentionSettings> <azure.devops.v7_1.test.models.ResultRetentionSettings>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(retention_settings, 'ResultRetentionSettings')
response = self._send(http_method='PATCH',
location_id='a3206d9e-fa8d-42d3-88cb-f75c51e69cde',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('ResultRetentionSettings', response)
def add_test_results_to_test_run(self, results, project, run_id):
"""AddTestResultsToTestRun.
[Preview API] Add test results to a test run.
:param [TestCaseResult] results: List of test results to add.
:param str project: Project ID or project name
:param int run_id: Test run ID into which test results to add.
:rtype: [TestCaseResult]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
content = self._serialize.body(results, '[TestCaseResult]')
response = self._send(http_method='POST',
location_id='4637d869-3a76-4468-8057-0bb02aa385cf',
version='7.1-preview.6',
route_values=route_values,
content=content)
return self._deserialize('[TestCaseResult]', self._unwrap_collection(response))
def get_test_result_by_id(self, project, run_id, test_case_result_id, details_to_include=None):
"""GetTestResultById.
[Preview API] Get a test result for a test run.
:param str project: Project ID or project name
:param int run_id: Test run ID of a test result to fetch.
:param int test_case_result_id: Test result ID.
:param str details_to_include: Details to include with test results. Default is None. Other values are Iterations, WorkItems and SubResults.
:rtype: :class:`<TestCaseResult> <azure.devops.v7_1.test.models.TestCaseResult>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
if test_case_result_id is not None:
route_values['testCaseResultId'] = self._serialize.url('test_case_result_id', test_case_result_id, 'int')
query_parameters = {}
if details_to_include is not None:
query_parameters['detailsToInclude'] = self._serialize.query('details_to_include', details_to_include, 'str')
response = self._send(http_method='GET',
location_id='4637d869-3a76-4468-8057-0bb02aa385cf',
version='7.1-preview.6',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TestCaseResult', response)
def get_test_results(self, project, run_id, details_to_include=None, skip=None, top=None, outcomes=None):
"""GetTestResults.
[Preview API] Get test results for a test run.
:param str project: Project ID or project name
:param int run_id: Test run ID of test results to fetch.
:param str details_to_include: Details to include with test results. Default is None. Other values are Iterations and WorkItems.
:param int skip: Number of test results to skip from beginning.
:param int top: Number of test results to return. Maximum is 1000 when detailsToInclude is None and 200 otherwise.
:param [TestOutcome] outcomes: Comma separated list of test outcomes to filter test results.
:rtype: [TestCaseResult]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
query_parameters = {}
if details_to_include is not None:
query_parameters['detailsToInclude'] = self._serialize.query('details_to_include', details_to_include, 'str')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if outcomes is not None:
outcomes = ",".join(map(str, outcomes))
query_parameters['outcomes'] = self._serialize.query('outcomes', outcomes, 'str')
response = self._send(http_method='GET',
location_id='4637d869-3a76-4468-8057-0bb02aa385cf',
version='7.1-preview.6',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TestCaseResult]', self._unwrap_collection(response))
def update_test_results(self, results, project, run_id):
"""UpdateTestResults.
[Preview API] Update test results in a test run.
:param [TestCaseResult] results: List of test results to update.
:param str project: Project ID or project name
:param int run_id: Test run ID whose test results to update.
:rtype: [TestCaseResult]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
content = self._serialize.body(results, '[TestCaseResult]')
response = self._send(http_method='PATCH',
location_id='4637d869-3a76-4468-8057-0bb02aa385cf',
version='7.1-preview.6',
route_values=route_values,
content=content)
return self._deserialize('[TestCaseResult]', self._unwrap_collection(response))
def get_test_run_statistics(self, project, run_id):
"""GetTestRunStatistics.
[Preview API] Get test run statistics , used when we want to get summary of a run by outcome.
:param str project: Project ID or project name
:param int run_id: ID of the run to get.
:rtype: :class:`<TestRunStatistic> <azure.devops.v7_1.test.models.TestRunStatistic>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
response = self._send(http_method='GET',
location_id='0a42c424-d764-4a16-a2d5-5c85f87d0ae8',
version='7.1-preview.3',
route_values=route_values)
return self._deserialize('TestRunStatistic', response)
def create_test_run(self, test_run, project):
"""CreateTestRun.
[Preview API] Create new test run.
:param :class:`<RunCreateModel> <azure.devops.v7_1.test.models.RunCreateModel>` test_run: Run details RunCreateModel
:param str project: Project ID or project name
:rtype: :class:`<TestRun> <azure.devops.v7_1.test.models.TestRun>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(test_run, 'RunCreateModel')
response = self._send(http_method='POST',
location_id='cadb3810-d47d-4a3c-a234-fe5f3be50138',
version='7.1-preview.3',
route_values=route_values,
content=content)
return self._deserialize('TestRun', response)
def delete_test_run(self, project, run_id):
"""DeleteTestRun.
[Preview API] Delete a test run by its ID.
:param str project: Project ID or project name
:param int run_id: ID of the run to delete.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
self._send(http_method='DELETE',
location_id='cadb3810-d47d-4a3c-a234-fe5f3be50138',
version='7.1-preview.3',
route_values=route_values)
def get_test_run_by_id(self, project, run_id, include_details=None):
"""GetTestRunById.
[Preview API] Get a test run by its ID.
:param str project: Project ID or project name
:param int run_id: ID of the run to get.
:param bool include_details: Default value is true. It includes details like run statistics, release, build, test environment, post process state, and more.
:rtype: :class:`<TestRun> <azure.devops.v7_1.test.models.TestRun>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
query_parameters = {}
if include_details is not None:
query_parameters['includeDetails'] = self._serialize.query('include_details', include_details, 'bool')
response = self._send(http_method='GET',
location_id='cadb3810-d47d-4a3c-a234-fe5f3be50138',
version='7.1-preview.3',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TestRun', response)
def get_test_runs(self, project, build_uri=None, owner=None, tmi_run_id=None, plan_id=None, include_run_details=None, automated=None, skip=None, top=None):
"""GetTestRuns.
[Preview API] Get a list of test runs.
:param str project: Project ID or project name
:param str build_uri: URI of the build that the runs used.
:param str owner: Team foundation ID of the owner of the runs.
:param str tmi_run_id:
:param int plan_id: ID of the test plan that the runs are a part of.
:param bool include_run_details: If true, include all the properties of the runs.
:param bool automated: If true, only returns automated runs.
:param int skip: Number of test runs to skip.
:param int top: Number of test runs to return.
:rtype: [TestRun]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if build_uri is not None:
query_parameters['buildUri'] = self._serialize.query('build_uri', build_uri, 'str')
if owner is not None:
query_parameters['owner'] = self._serialize.query('owner', owner, 'str')
if tmi_run_id is not None:
query_parameters['tmiRunId'] = self._serialize.query('tmi_run_id', tmi_run_id, 'str')
if plan_id is not None:
query_parameters['planId'] = self._serialize.query('plan_id', plan_id, 'int')
if include_run_details is not None:
query_parameters['includeRunDetails'] = self._serialize.query('include_run_details', include_run_details, 'bool')
if automated is not None:
query_parameters['automated'] = self._serialize.query('automated', automated, 'bool')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='cadb3810-d47d-4a3c-a234-fe5f3be50138',
version='7.1-preview.3',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TestRun]', self._unwrap_collection(response))
def query_test_runs(self, project, min_last_updated_date, max_last_updated_date, state=None, plan_ids=None, is_automated=None, publish_context=None, build_ids=None, build_def_ids=None, branch_name=None, release_ids=None, release_def_ids=None, release_env_ids=None, release_env_def_ids=None, run_title=None, top=None, continuation_token=None):
"""QueryTestRuns.
[Preview API] Query Test Runs based on filters. Mandatory fields are minLastUpdatedDate and maxLastUpdatedDate.
:param str project: Project ID or project name
:param datetime min_last_updated_date: Minimum Last Modified Date of run to be queried (Mandatory).
:param datetime max_last_updated_date: Maximum Last Modified Date of run to be queried (Mandatory, difference between min and max date can be atmost 7 days).
:param str state: Current state of the Runs to be queried.
:param [int] plan_ids: Plan Ids of the Runs to be queried, comma separated list of valid ids (limit no. of ids 10).
:param bool is_automated: Automation type of the Runs to be queried.
:param str publish_context: PublishContext of the Runs to be queried.
:param [int] build_ids: Build Ids of the Runs to be queried, comma separated list of valid ids (limit no. of ids 10).
:param [int] build_def_ids: Build Definition Ids of the Runs to be queried, comma separated list of valid ids (limit no. of ids 10).
:param str branch_name: Source Branch name of the Runs to be queried.
:param [int] release_ids: Release Ids of the Runs to be queried, comma separated list of valid ids (limit no. of ids 10).
:param [int] release_def_ids: Release Definition Ids of the Runs to be queried, comma separated list of valid ids (limit no. of ids 10).
:param [int] release_env_ids: Release Environment Ids of the Runs to be queried, comma separated list of valid ids (limit no. of ids 10).
:param [int] release_env_def_ids: Release Environment Definition Ids of the Runs to be queried, comma separated list of valid ids (limit no. of ids 10).
:param str run_title: Run Title of the Runs to be queried.
:param int top: Number of runs to be queried. Limit is 100
:param str continuation_token: continuationToken received from previous batch or null for first batch. It is not supposed to be created (or altered, if received from last batch) by user.
:rtype: :class:`<[TestRun]> <azure.devops.v7_1.test.models.[TestRun]>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if min_last_updated_date is not None:
query_parameters['minLastUpdatedDate'] = self._serialize.query('min_last_updated_date', min_last_updated_date, 'iso-8601')
if max_last_updated_date is not None:
query_parameters['maxLastUpdatedDate'] = self._serialize.query('max_last_updated_date', max_last_updated_date, 'iso-8601')
if state is not None:
query_parameters['state'] = self._serialize.query('state', state, 'str')
if plan_ids is not None:
plan_ids = ",".join(map(str, plan_ids))
query_parameters['planIds'] = self._serialize.query('plan_ids', plan_ids, 'str')
if is_automated is not None:
query_parameters['isAutomated'] = self._serialize.query('is_automated', is_automated, 'bool')
if publish_context is not None:
query_parameters['publishContext'] = self._serialize.query('publish_context', publish_context, 'str')
if build_ids is not None:
build_ids = ",".join(map(str, build_ids))
query_parameters['buildIds'] = self._serialize.query('build_ids', build_ids, 'str')
if build_def_ids is not None:
build_def_ids = ",".join(map(str, build_def_ids))
query_parameters['buildDefIds'] = self._serialize.query('build_def_ids', build_def_ids, 'str')
if branch_name is not None:
query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
if release_ids is not None:
release_ids = ",".join(map(str, release_ids))
query_parameters['releaseIds'] = self._serialize.query('release_ids', release_ids, 'str')
if release_def_ids is not None:
release_def_ids = ",".join(map(str, release_def_ids))
query_parameters['releaseDefIds'] = self._serialize.query('release_def_ids', release_def_ids, 'str')
if release_env_ids is not None:
release_env_ids = ",".join(map(str, release_env_ids))
query_parameters['releaseEnvIds'] = self._serialize.query('release_env_ids', release_env_ids, 'str')
if release_env_def_ids is not None:
release_env_def_ids = ",".join(map(str, release_env_def_ids))
query_parameters['releaseEnvDefIds'] = self._serialize.query('release_env_def_ids', release_env_def_ids, 'str')
if run_title is not None:
query_parameters['runTitle'] = self._serialize.query('run_title', run_title, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
response = self._send(http_method='GET',
location_id='cadb3810-d47d-4a3c-a234-fe5f3be50138',
version='7.1-preview.3',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TestRun]', self._unwrap_collection(response))
def update_test_run(self, run_update_model, project, run_id):
"""UpdateTestRun.
[Preview API] Update test run by its ID.
:param :class:`<RunUpdateModel> <azure.devops.v7_1.test.models.RunUpdateModel>` run_update_model: Run details RunUpdateModel
:param str project: Project ID or project name
:param int run_id: ID of the run to update.
:rtype: :class:`<TestRun> <azure.devops.v7_1.test.models.TestRun>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if run_id is not None:
route_values['runId'] = self._serialize.url('run_id', run_id, 'int')
content = self._serialize.body(run_update_model, 'RunUpdateModel')
response = self._send(http_method='PATCH',
location_id='cadb3810-d47d-4a3c-a234-fe5f3be50138',
version='7.1-preview.3',
route_values=route_values,
content=content)
return self._deserialize('TestRun', response)
def create_test_session(self, test_session, team_context):
"""CreateTestSession.
[Preview API] Create a test session
:param :class:`<TestSession> <azure.devops.v7_1.test.models.TestSession>` test_session: Test session details for creation
:param :class:`<TeamContext> <azure.devops.v7_1.test.models.TeamContext>` team_context: The team context for the operation
:rtype: :class:`<TestSession> <azure.devops.v7_1.test.models.TestSession>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
content = self._serialize.body(test_session, 'TestSession')
response = self._send(http_method='POST',
location_id='1500b4b4-6c69-4ca6-9b18-35e9e97fe2ac',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('TestSession', response)
def get_test_sessions(self, team_context, period=None, all_sessions=None, include_all_properties=None, source=None, include_only_completed_sessions=None):
"""GetTestSessions.
[Preview API] Get a list of test sessions
:param :class:`<TeamContext> <azure.devops.v7_1.test.models.TeamContext>` team_context: The team context for the operation
:param int period: Period in days from now, for which test sessions are fetched.
:param bool all_sessions: If false, returns test sessions for current user. Otherwise, it returns test sessions for all users
:param bool include_all_properties: If true, it returns all properties of the test sessions. Otherwise, it returns the skinny version.
:param str source: Source of the test session.
:param bool include_only_completed_sessions: If true, it returns test sessions in completed state. Otherwise, it returns test sessions for all states
:rtype: [TestSession]
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
query_parameters = {}
if period is not None:
query_parameters['period'] = self._serialize.query('period', period, 'int')
if all_sessions is not None:
query_parameters['allSessions'] = self._serialize.query('all_sessions', all_sessions, 'bool')
if include_all_properties is not None:
query_parameters['includeAllProperties'] = self._serialize.query('include_all_properties', include_all_properties, 'bool')
if source is not None:
query_parameters['source'] = self._serialize.query('source', source, 'str')
if include_only_completed_sessions is not None:
query_parameters['includeOnlyCompletedSessions'] = self._serialize.query('include_only_completed_sessions', include_only_completed_sessions, 'bool')
response = self._send(http_method='GET',
location_id='1500b4b4-6c69-4ca6-9b18-35e9e97fe2ac',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TestSession]', self._unwrap_collection(response))
def update_test_session(self, test_session, team_context):
"""UpdateTestSession.
[Preview API] Update a test session
:param :class:`<TestSession> <azure.devops.v7_1.test.models.TestSession>` test_session: Test session details for update
:param :class:`<TeamContext> <azure.devops.v7_1.test.models.TeamContext>` team_context: The team context for the operation
:rtype: :class:`<TestSession> <azure.devops.v7_1.test.models.TestSession>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
content = self._serialize.body(test_session, 'TestSession')
response = self._send(http_method='PATCH',
location_id='1500b4b4-6c69-4ca6-9b18-35e9e97fe2ac',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('TestSession', response)
def add_test_cases_to_suite(self, project, plan_id, suite_id, test_case_ids):
"""AddTestCasesToSuite.
[Preview API] Add test cases to suite.
:param str project: Project ID or project name
:param int plan_id: ID of the test plan that contains the suite.
:param int suite_id: ID of the test suite to which the test cases must be added.
:param str test_case_ids: IDs of the test cases to add to the suite. Ids are specified in comma separated format.
:rtype: [SuiteTestCase]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'int')
if suite_id is not None:
route_values['suiteId'] = self._serialize.url('suite_id', suite_id, 'int')
if test_case_ids is not None:
route_values['testCaseIds'] = self._serialize.url('test_case_ids', test_case_ids, 'str')
route_values['action'] = 'TestCases'
response = self._send(http_method='POST',
location_id='a4a1ec1c-b03f-41ca-8857-704594ecf58e',
version='7.1-preview.3',
route_values=route_values)
return self._deserialize('[SuiteTestCase]', self._unwrap_collection(response))
def get_test_case_by_id(self, project, plan_id, suite_id, test_case_ids):
"""GetTestCaseById.
[Preview API] Get a specific test case in a test suite with test case id.
:param str project: Project ID or project name
:param int plan_id: ID of the test plan that contains the suites.
:param int suite_id: ID of the suite that contains the test case.
:param int test_case_ids: ID of the test case to get.
:rtype: :class:`<SuiteTestCase> <azure.devops.v7_1.test.models.SuiteTestCase>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'int')
if suite_id is not None:
route_values['suiteId'] = self._serialize.url('suite_id', suite_id, 'int')
if test_case_ids is not None:
route_values['testCaseIds'] = self._serialize.url('test_case_ids', test_case_ids, 'int')
route_values['action'] = 'TestCases'
response = self._send(http_method='GET',
location_id='a4a1ec1c-b03f-41ca-8857-704594ecf58e',
version='7.1-preview.3',
route_values=route_values)
return self._deserialize('SuiteTestCase', response)
def get_test_cases(self, project, plan_id, suite_id):
"""GetTestCases.
[Preview API] Get all test cases in a suite.
:param str project: Project ID or project name
:param int plan_id: ID of the test plan that contains the suites.
:param int suite_id: ID of the suite to get.
:rtype: [SuiteTestCase]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'int')
if suite_id is not None:
route_values['suiteId'] = self._serialize.url('suite_id', suite_id, 'int')
route_values['action'] = 'TestCases'
response = self._send(http_method='GET',
location_id='a4a1ec1c-b03f-41ca-8857-704594ecf58e',
version='7.1-preview.3',
route_values=route_values)
return self._deserialize('[SuiteTestCase]', self._unwrap_collection(response))
def remove_test_cases_from_suite_url(self, project, plan_id, suite_id, test_case_ids):
"""RemoveTestCasesFromSuiteUrl.
[Preview API] The test points associated with the test cases are removed from the test suite. The test case work item is not deleted from the system. See test cases resource to delete a test case permanently.
:param str project: Project ID or project name
:param int plan_id: ID of the test plan that contains the suite.
:param int suite_id: ID of the suite to get.
:param str test_case_ids: IDs of the test cases to remove from the suite.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'int')
if suite_id is not None:
route_values['suiteId'] = self._serialize.url('suite_id', suite_id, 'int')
if test_case_ids is not None:
route_values['testCaseIds'] = self._serialize.url('test_case_ids', test_case_ids, 'str')
route_values['action'] = 'TestCases'
self._send(http_method='DELETE',
location_id='a4a1ec1c-b03f-41ca-8857-704594ecf58e',
version='7.1-preview.3',
route_values=route_values)
def update_suite_test_cases(self, suite_test_case_update_model, project, plan_id, suite_id, test_case_ids):
"""UpdateSuiteTestCases.
[Preview API] Updates the properties of the test case association in a suite.
:param :class:`<SuiteTestCaseUpdateModel> <azure.devops.v7_1.test.models.SuiteTestCaseUpdateModel>` suite_test_case_update_model: Model for updation of the properties of test case suite association.
:param str project: Project ID or project name
:param int plan_id: ID of the test plan that contains the suite.
:param int suite_id: ID of the test suite to which the test cases must be added.
:param str test_case_ids: IDs of the test cases to add to the suite. Ids are specified in comma separated format.
:rtype: [SuiteTestCase]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'int')
if suite_id is not None:
route_values['suiteId'] = self._serialize.url('suite_id', suite_id, 'int')
if test_case_ids is not None:
route_values['testCaseIds'] = self._serialize.url('test_case_ids', test_case_ids, 'str')
route_values['action'] = 'TestCases'
content = self._serialize.body(suite_test_case_update_model, 'SuiteTestCaseUpdateModel')
response = self._send(http_method='PATCH',
location_id='a4a1ec1c-b03f-41ca-8857-704594ecf58e',
version='7.1-preview.3',
route_values=route_values,
content=content)
return self._deserialize('[SuiteTestCase]', self._unwrap_collection(response))
def delete_test_case(self, project, test_case_id):
"""DeleteTestCase.
[Preview API] Delete a test case.
:param str project: Project ID or project name
:param int test_case_id: Id of test case to delete.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if test_case_id is not None:
route_values['testCaseId'] = self._serialize.url('test_case_id', test_case_id, 'int')
self._send(http_method='DELETE',
location_id='4d472e0f-e32c-4ef8-adf4-a4078772889c',
version='7.1-preview.1',
route_values=route_values)
def query_test_history(self, filter, project):
"""QueryTestHistory.
[Preview API] Get history of a test method using TestHistoryQuery
:param :class:`<TestHistoryQuery> <azure.devops.v7_1.test.models.TestHistoryQuery>` filter: TestHistoryQuery to get history
:param str project: Project ID or project name
:rtype: :class:`<TestHistoryQuery> <azure.devops.v7_1.test.models.TestHistoryQuery>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(filter, 'TestHistoryQuery')
response = self._send(http_method='POST',
location_id='929fd86c-3e38-4d8c-b4b6-90df256e5971',
version='7.1-preview.2',
route_values=route_values,
content=content)
return self._deserialize('TestHistoryQuery', response)
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/test/test_client.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/test/test_client.py",
"repo_id": "azure-devops-python-api",
"token_count": 30477
}
| 401 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AdminBehavior(Model):
"""
Describes an admin behavior for a process.
:param abstract: Is the behavior abstract (i.e. can not be associated with any work item type).
:type abstract: bool
:param color: The color associated with the behavior.
:type color: str
:param custom: Indicates if the behavior is custom.
:type custom: bool
:param description: The description of the behavior.
:type description: str
:param fields: List of behavior fields.
:type fields: list of :class:`AdminBehaviorField <azure.devops.v7_1.work_item_tracking_process_template.models.AdminBehaviorField>`
:param id: Behavior ID.
:type id: str
:param inherits: Parent behavior reference.
:type inherits: str
:param name: The behavior name.
:type name: str
:param overriden: Is the behavior overrides a behavior from system process.
:type overriden: bool
:param rank: The rank.
:type rank: int
"""
_attribute_map = {
'abstract': {'key': 'abstract', 'type': 'bool'},
'color': {'key': 'color', 'type': 'str'},
'custom': {'key': 'custom', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'fields': {'key': 'fields', 'type': '[AdminBehaviorField]'},
'id': {'key': 'id', 'type': 'str'},
'inherits': {'key': 'inherits', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'overriden': {'key': 'overriden', 'type': 'bool'},
'rank': {'key': 'rank', 'type': 'int'}
}
def __init__(self, abstract=None, color=None, custom=None, description=None, fields=None, id=None, inherits=None, name=None, overriden=None, rank=None):
super(AdminBehavior, self).__init__()
self.abstract = abstract
self.color = color
self.custom = custom
self.description = description
self.fields = fields
self.id = id
self.inherits = inherits
self.name = name
self.overriden = overriden
self.rank = rank
class AdminBehaviorField(Model):
"""
Describes an admin behavior field.
:param behavior_field_id: The behavior field identifier.
:type behavior_field_id: str
:param id: The behavior ID.
:type id: str
:param name: The behavior name.
:type name: str
"""
_attribute_map = {
'behavior_field_id': {'key': 'behaviorFieldId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, behavior_field_id=None, id=None, name=None):
super(AdminBehaviorField, self).__init__()
self.behavior_field_id = behavior_field_id
self.id = id
self.name = name
class CheckTemplateExistenceResult(Model):
"""
Describes result of a check template existence request.
:param does_template_exist: Indicates whether a template exists.
:type does_template_exist: bool
:param existing_template_name: The name of the existing template.
:type existing_template_name: str
:param existing_template_type_id: The existing template type identifier.
:type existing_template_type_id: str
:param requested_template_name: The name of the requested template.
:type requested_template_name: str
"""
_attribute_map = {
'does_template_exist': {'key': 'doesTemplateExist', 'type': 'bool'},
'existing_template_name': {'key': 'existingTemplateName', 'type': 'str'},
'existing_template_type_id': {'key': 'existingTemplateTypeId', 'type': 'str'},
'requested_template_name': {'key': 'requestedTemplateName', 'type': 'str'}
}
def __init__(self, does_template_exist=None, existing_template_name=None, existing_template_type_id=None, requested_template_name=None):
super(CheckTemplateExistenceResult, self).__init__()
self.does_template_exist = does_template_exist
self.existing_template_name = existing_template_name
self.existing_template_type_id = existing_template_type_id
self.requested_template_name = requested_template_name
class ProcessImportResult(Model):
"""
Describes the result of a Process Import request.
:param help_url: Help URL.
:type help_url: str
:param check_existence_result: Check template existence result.
:type check_existence_result: :class:`CheckTemplateExistenceResult <azure.devops.v7_1.work_item_tracking_process_template.models.CheckTemplateExistenceResult>`
:param id: ID of the import operation.
:type id: str
:param is_new: Whether this imported process is new.
:type is_new: bool
:param promote_job_id: The promote job identifier.
:type promote_job_id: str
:param validation_results: The list of validation results.
:type validation_results: list of :class:`ValidationIssue <azure.devops.v7_1.work_item_tracking_process_template.models.ValidationIssue>`
"""
_attribute_map = {
'help_url': {'key': 'helpUrl', 'type': 'str'},
'check_existence_result': {'key': 'checkExistenceResult', 'type': 'CheckTemplateExistenceResult'},
'id': {'key': 'id', 'type': 'str'},
'is_new': {'key': 'isNew', 'type': 'bool'},
'promote_job_id': {'key': 'promoteJobId', 'type': 'str'},
'validation_results': {'key': 'validationResults', 'type': '[ValidationIssue]'}
}
def __init__(self, help_url=None, check_existence_result=None, id=None, is_new=None, promote_job_id=None, validation_results=None):
super(ProcessImportResult, self).__init__()
self.help_url = help_url
self.check_existence_result = check_existence_result
self.id = id
self.is_new = is_new
self.promote_job_id = promote_job_id
self.validation_results = validation_results
class ProcessPromoteStatus(Model):
"""
Describes result of process operation promote.
:param complete: Number of projects for which promote is complete.
:type complete: int
:param id: ID of the promote operation.
:type id: str
:param message: The error message associated with the promote operation. The string will be empty if there are no errors.
:type message: str
:param pending: Number of projects for which promote is pending.
:type pending: int
:param remaining_retries: The remaining retries.
:type remaining_retries: int
:param successful: True if promote finished all the projects successfully. False if still in progress or any project promote failed.
:type successful: bool
"""
_attribute_map = {
'complete': {'key': 'complete', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'pending': {'key': 'pending', 'type': 'int'},
'remaining_retries': {'key': 'remainingRetries', 'type': 'int'},
'successful': {'key': 'successful', 'type': 'bool'}
}
def __init__(self, complete=None, id=None, message=None, pending=None, remaining_retries=None, successful=None):
super(ProcessPromoteStatus, self).__init__()
self.complete = complete
self.id = id
self.message = message
self.pending = pending
self.remaining_retries = remaining_retries
self.successful = successful
class ValidationIssue(Model):
"""
:param description:
:type description: str
:param file:
:type file: str
:param help_link:
:type help_link: str
:param issue_type:
:type issue_type: object
:param line:
:type line: int
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'file': {'key': 'file', 'type': 'str'},
'help_link': {'key': 'helpLink', 'type': 'str'},
'issue_type': {'key': 'issueType', 'type': 'object'},
'line': {'key': 'line', 'type': 'int'}
}
def __init__(self, description=None, file=None, help_link=None, issue_type=None, line=None):
super(ValidationIssue, self).__init__()
self.description = description
self.file = file
self.help_link = help_link
self.issue_type = issue_type
self.line = line
__all__ = [
'AdminBehavior',
'AdminBehaviorField',
'CheckTemplateExistenceResult',
'ProcessImportResult',
'ProcessPromoteStatus',
'ValidationIssue',
]
|
azure-devops-python-api/azure-devops/azure/devops/v7_1/work_item_tracking_process_template/models.py/0
|
{
"file_path": "azure-devops-python-api/azure-devops/azure/devops/v7_1/work_item_tracking_process_template/models.py",
"repo_id": "azure-devops-python-api",
"token_count": 3282
}
| 402 |
trigger:
- main
pr:
- main
schedules:
- cron: "0 9 * * Sat"
displayName: 'Build for Component Governance'
branches:
include:
- main
always: true
jobs:
- job: "Build_Azure_Quantum_Python"
displayName: Build "azure-quantum" package
pool:
vmImage: 'windows-latest'
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.11'
displayName: Set Python version
- script: |
pip install wheel
displayName: Install wheel
- script: |
pip freeze
displayName: List installed packages
- script: |
python set_version.py
env:
RELEASE_TYPE: "patch"
BUILD_TYPE: "dev"
displayName: Set "azure-quantum" package version
- script: |
cd $(Build.SourcesDirectory)/azure-quantum
python setup.py sdist --dist-dir=target/wheels
python setup.py bdist_wheel --dist-dir=target/wheels
displayName: Build azure-quantum package
- publish: $(Build.SourcesDirectory)/azure-quantum/target/wheels/
artifact: azure-quantum-wheels
displayName: Upload azure-quantum artifacts
- job: "Test_Azure_Quantum_Python"
displayName: Test "azure-quantum" package
pool:
vmImage: 'windows-latest'
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.11'
displayName: Set Python version
- script: |
pip install pytest pytest-azurepipelines pytest-cov
displayName: Install pytest dependencies
- script: |
pip freeze
displayName: List installed packages
- script: |
cd $(Build.SourcesDirectory)/azure-quantum
pip install .[all]
pytest --cov-report term --cov=azure.quantum --junitxml test-output-azure-quantum.xml $(Build.SourcesDirectory)/azure-quantum
displayName: Run azure-quantum unit tests
- task: PublishTestResults@2
displayName: 'Publish tests results (python)'
condition: succeededOrFailed()
inputs:
testResultsFormat: 'JUnit'
testResultsFiles: '**/test-*.xml'
testRunTitle: 'Azure Quantum Python Tests'
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component detection'
|
azure-quantum-python/.ado/ci.yml/0
|
{
"file_path": "azure-quantum-python/.ado/ci.yml",
"repo_id": "azure-quantum-python",
"token_count": 907
}
| 403 |
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import logging
import sys
from azure.core.exceptions import ClientAuthenticationError
from azure.identity import CredentialUnavailableError
from azure.core.credentials import AccessToken, TokenCredential
_LOGGER = logging.getLogger(__name__)
def filter_credential_warnings(record):
"""Suppress warnings from credentials other than DefaultAzureCredential"""
if record.levelno == logging.WARNING:
message = record.getMessage()
return "DefaultAzureCredential" in message
return True
def _get_error_message(history):
attempts = []
for credential, error in history:
if error:
attempts.append(f"{credential.__class__.__name__}: {error}")
else:
attempts.append(credential.__class__.__name__)
return """
Attempted credentials:\n\t{}""".format(
"\n\t".join(attempts)
)
class _ChainedTokenCredential(object):
"""
Based on Azure.Identity.ChainedTokenCredential from:
https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/identity/azure-identity/azure/identity/_credentials/chained.py
The key difference is that we don't stop attempting all credentials
if some of then failed or raised an exception.
We also don't log a warning unless all credential attempts have failed.
"""
def __init__(self, *credentials: TokenCredential):
self._successful_credential = None
self.credentials = credentials
def get_token(self, *scopes: str, **kwargs) -> AccessToken: # pylint:disable=unused-argument
"""
Request a token from each chained credential, in order,
returning the first token received.
This method is called automatically by Azure SDK clients.
:param str scopes: desired scopes for the access token.
This method requires at least one scope.
:raises ~azure.core.exceptions.ClientAuthenticationError:
no credential in the chain provided a token
"""
history = []
# Suppress warnings from credentials in Azure.Identity
azure_identity_logger = logging.getLogger("azure.identity")
handler = logging.StreamHandler(stream=sys.stdout)
handler.addFilter(filter_credential_warnings)
azure_identity_logger.addHandler(handler)
try:
for credential in self.credentials:
try:
token = credential.get_token(*scopes, **kwargs)
_LOGGER.info(
"%s acquired a token from %s",
self.__class__.__name__,
credential.__class__.__name__,
)
self._successful_credential = credential
return token
except CredentialUnavailableError as ex:
# credential didn't attempt authentication because
# it lacks required data or state -> continue
history.append((credential, ex.message))
_LOGGER.info(
"%s - %s is unavailable",
self.__class__.__name__,
credential.__class__.__name__,
)
except Exception as ex: # pylint: disable=broad-except
# credential failed to authenticate,
# or something unexpectedly raised -> break
history.append((credential, str(ex)))
# instead of logging a warning, we just want to log an info
# since other credentials might succeed
_LOGGER.info(
'%s.get_token failed: %s raised unexpected error "%s"',
self.__class__.__name__,
credential.__class__.__name__,
ex,
exc_info=_LOGGER.isEnabledFor(logging.DEBUG),
)
# here we do NOT want break and
# will continue to try other credentials
finally:
# Re-enable warnings from credentials in Azure.Identity
azure_identity_logger.removeHandler(handler)
# if all attempts failed, only then we log a warning and raise an error
attempts = _get_error_message(history)
message = (
self.__class__.__name__
+ " failed to retrieve a token from the included credentials."
+ attempts
)
_LOGGER.warning(message)
raise ClientAuthenticationError(message=message)
|
azure-quantum-python/azure-quantum/azure/quantum/_authentication/_chained.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/_authentication/_chained.py",
"repo_id": "azure-quantum-python",
"token_count": 2064
}
| 404 |
# Marker file for PEP 561.
|
azure-quantum-python/azure-quantum/azure/quantum/_client/py.typed/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/_client/py.typed",
"repo_id": "azure-quantum-python",
"token_count": 10
}
| 405 |
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
import logging
import time
import json
from typing import TYPE_CHECKING
from azure.quantum._client.models import JobDetails
from azure.quantum.job.job_failed_with_results_error import JobFailedWithResultsError
from azure.quantum.job.base_job import BaseJob, ContentType, DEFAULT_TIMEOUT
from azure.quantum.job.filtered_job import FilteredJob
__all__ = ["Job", "JobDetails"]
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from azure.quantum.workspace import Workspace
_log = logging.getLogger(__name__)
class Job(BaseJob, FilteredJob):
"""Azure Quantum Job that is submitted to a given Workspace.
:param workspace: Workspace instance to submit job to
:type workspace: Workspace
:param job_details: Job details model,
contains Job ID, name and other details
:type job_details: JobDetails
"""
_default_poll_wait = 0.2
def __init__(self, workspace: "Workspace", job_details: JobDetails, **kwargs):
self.results = None
super().__init__(
workspace=workspace,
details=job_details,
**kwargs
)
def submit(self):
"""Submit a job to Azure Quantum."""
_log.debug(f"Submitting job with ID {self.id}")
job = self.workspace.submit_job(self)
self.details = job.details
def refresh(self):
"""Refreshes the Job's details by querying the workspace."""
self.details = self.workspace.get_job(self.id).details
def has_completed(self) -> bool:
"""Check if the job has completed."""
return (
self.details.status == "Succeeded"
or self.details.status == "Failed"
or self.details.status == "Cancelled"
)
def wait_until_completed(
self,
max_poll_wait_secs=30,
timeout_secs=None,
print_progress=True
) -> None:
"""Keeps refreshing the Job's details
until it reaches a finished status.
:param max_poll_wait_secs: Maximum poll wait time, defaults to 30
:type max_poll_wait_secs: int
:param timeout_secs: Timeout in seconds, defaults to None
:type timeout_secs: int
:param print_progress: Print "." to stdout to display progress
:type print_progress: bool
:raises: :class:`TimeoutError` If the total poll time exceeds timeout, raise.
"""
self.refresh()
poll_wait = Job._default_poll_wait
start_time = time.time()
while not self.has_completed():
if timeout_secs is not None and (time.time() - start_time) >= timeout_secs:
raise TimeoutError(f"The wait time has exceeded {timeout_secs} seconds.")
logger.debug(
f"Waiting for job {self.id},"
+ f"it is in status '{self.details.status}'"
)
if print_progress:
print(".", end="", flush=True)
time.sleep(poll_wait)
self.refresh()
poll_wait = (
max_poll_wait_secs
if poll_wait >= max_poll_wait_secs
else poll_wait * 1.5
)
def get_results(self, timeout_secs: float = DEFAULT_TIMEOUT):
"""Get job results by downloading the results blob from the
storage container linked via the workspace.
Raises :class:`RuntimeError` if job execution fails.
Raises :class:`azure.quantum.job.JobFailedWithResultsError` if job execution fails,
but failure results could still be retrieved (e.g. for jobs submitted against "microsoft.dft" target).
:param timeout_secs: Timeout in seconds, defaults to 300
:type timeout_secs: float
:return: Results dictionary with histogram shots, or raw results if not a json object.
:rtype: typing.Any
"""
if self.results is not None:
return self.results
if not self.has_completed():
self.wait_until_completed(timeout_secs=timeout_secs)
if not self.details.status == "Succeeded":
if self.details.status == "Failed" and self._allow_failure_results():
job_blob_properties = self.download_blob_properties(self.details.output_data_uri)
if job_blob_properties.size > 0:
job_failure_data = self.download_data(self.details.output_data_uri)
raise JobFailedWithResultsError("An error occurred during job execution.", job_failure_data)
raise RuntimeError(
f'{"Cannot retrieve results as job execution failed"}'
+ f"(status: {self.details.status}."
+ f"error: {self.details.error_data})"
)
payload = self.download_data(self.details.output_data_uri)
try:
payload = payload.decode("utf8")
results = json.loads(payload)
if self.details.output_data_format == "microsoft.quantum-results.v1":
if "Histogram" not in results:
raise f"\"Histogram\" array was expected to be in the Job results for \"{self.details.output_data_format}\" output format."
histogram_values = results["Histogram"]
if len(histogram_values) % 2 == 0:
# Re-mapping {'Histogram': ['[0]', 0.50, '[1]', 0.50] } to {'[0]': 0.50, '[1]': 0.50}
return {histogram_values[i]: histogram_values[i + 1] for i in range(0, len(histogram_values), 2)}
else:
raise f"\"Histogram\" array has invalid format. Even number of items is expected."
return results
except:
# If errors decoding the data, return the raw payload:
return payload
@classmethod
def _allow_failure_results(cls) -> bool:
"""
Allow to download job results even if the Job status is "Failed".
This method can be overridden in derived classes to alter the default
behaviour.
The default is False.
"""
return False
|
azure-quantum-python/azure-quantum/azure/quantum/job/job.py/0
|
{
"file_path": "azure-quantum-python/azure-quantum/azure/quantum/job/job.py",
"repo_id": "azure-quantum-python",
"token_count": 2690
}
| 406 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.